Create a key from the dashboard and configure your base URL by following the API Keys & Base URLs guide. Once the key is stored (for example, in GENERALCOMPUTE_API_KEY) you can initialize the SDKs as shown below.
The Vercel AI SDK works seamlessly with General Compute. Here’s a complete Next.js chat app with streaming. Check out the full example project in our examples/vercel-ai-chat directory.
No framework needed — here’s a complete streaming chat using just Node.js and the General Compute SDK. Check out the full example project in our examples/node-streaming directory.
index.ts
import GeneralCompute from "@generalcompute/sdk";
import * as readline from "readline";

// Minimal terminal chat REPL that streams responses from the General Compute API.
// Reads GENERALCOMPUTE_API_KEY / base URL from the environment via the SDK defaults.
const client = new GeneralCompute();

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
});

// Full conversation history, resent with every request so the model keeps context.
const messages: { role: "system" | "user" | "assistant"; content: string }[] = [
  { role: "system", content: "You are a helpful assistant." },
];

/**
 * Sends the user's message (plus prior history) to the model and streams the
 * assistant's reply to stdout token by token, then records it in history.
 *
 * On failure the pushed user turn is removed so history stays consistent,
 * the error is reported, and the REPL keeps running instead of crashing on
 * an unhandled rejection (the original code had no error handling here).
 */
async function chat(userMessage: string): Promise<void> {
  messages.push({ role: "user", content: userMessage });

  try {
    const stream = await client.chat.completions.create({
      model: "minimax-m2.7",
      messages,
      stream: true,
    });

    let assistantMessage = "";
    process.stdout.write("\nAssistant: ");

    for await (const chunk of stream) {
      // Delta chunks may omit content (e.g. role-only or final chunks).
      const content = chunk.choices[0]?.delta?.content;
      if (content) {
        process.stdout.write(content);
        assistantMessage += content;
      }
    }

    console.log("\n");
    messages.push({ role: "assistant", content: assistantMessage });
  } catch (err: unknown) {
    // Roll back the failed user turn and surface the error without exiting.
    messages.pop();
    const detail = err instanceof Error ? err.message : String(err);
    console.error(`\nError: ${detail}\n`);
  }
}

/** Prompts for one line of input; "exit" closes the REPL, anything else is sent to chat. */
function prompt(): void {
  rl.question("You: ", async (input) => {
    if (input.toLowerCase() === "exit") {
      rl.close();
      return;
    }
    await chat(input);
    prompt();
  });
}

console.log('Chat with General Compute (type "exit" to quit)\n');
prompt();