SDKs
TypeScript / JavaScript
Use the official OpenAI Node.js SDK with AssistantRouter.
Installation
npm install openai
pnpm add openai
yarn add openai
bun add openai
Configuration
import OpenAI from 'openai';
const client = new OpenAI({
baseURL: 'https://api.assistantrouter.com/v1',
apiKey: process.env.ASSISTANTROUTER_API_KEY, // sk-xxx...
});
Basic Usage
Chat Completions
Use your assistant ID to make chat completion requests:
const response = await client.chat.completions.create({
// @ts-ignore - assistant_id is our extension
assistant_id: 'your-assistant-uuid',
messages: [
{ role: 'user', content: 'What is the capital of France?' },
],
});
console.log(response.choices[0].message.content);
When using an assistant, your dashboard-configured system prompt, model, and tools are automatically applied. Alternatively, provide model instead of assistant_id for direct mode without assistant features.
Streaming
const stream = await client.chat.completions.create({
// @ts-ignore
assistant_id: 'your-assistant-uuid',
messages: [{ role: 'user', content: 'Tell me a story.' }],
stream: true,
});
for await (const chunk of stream) {
const content = chunk.choices[0]?.delta?.content || '';
process.stdout.write(content);
}
With Optional Overrides
You can override assistant settings per-request:
const response = await client.chat.completions.create({
// @ts-ignore
assistant_id: 'your-assistant-uuid',
messages: [{ role: 'user', content: 'Hello!' }],
// Optional overrides
model: 'anthropic/claude-haiku-4.5', // Override the model
temperature: 0.7, // Override temperature
max_tokens: 1000, // Limit output length
// Tracking
user_id: 'user_123', // Track by user
conversation_id: 'conv_abc', // Continue a conversation
});
TypeScript Types
The SDK includes full TypeScript support:
import OpenAI from 'openai';
import type { ChatCompletionMessageParam } from 'openai/resources/chat';
const messages: ChatCompletionMessageParam[] = [
{ role: 'user', content: 'Hello!' },
];
// Custom extension for our response
interface AssistantRouterExtension {
x_assistantrouter?: {
cost_cents: number;
tools_used: string[];
was_fallback: boolean;
};
}
Error Handling
import OpenAI from 'openai';
try {
const response = await client.chat.completions.create({
// @ts-ignore
assistant_id: 'your-assistant-uuid',
messages: [{ role: 'user', content: 'Hello!' }],
});
} catch (error) {
if (error instanceof OpenAI.APIError) {
console.error('Status:', error.status);
console.error('Message:', error.message);
// Handle specific error types
const body = error.error as { type: string };
switch (body.type) {
case 'insufficient_balance':
console.log('Please add credits to your wallet');
break;
case 'rate_limit_exceeded':
console.log('Rate limited, retrying...');
break;
}
}
}
Function Calling
const response = await client.chat.completions.create({
// @ts-ignore
assistant_id: 'your-assistant-uuid',
messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
tools: [
{
type: 'function',
function: {
name: 'get_weather',
description: 'Get current weather for a location',
parameters: {
type: 'object',
properties: {
location: { type: 'string' },
},
required: ['location'],
},
},
},
],
});
// Handle tool calls
if (response.choices[0].finish_reason === 'tool_calls') {
const toolCall = response.choices[0].message.tool_calls![0];
const args = JSON.parse(toolCall.function.arguments);
// Execute your function and continue
const weather = await getWeather(args.location);
const finalResponse = await client.chat.completions.create({
// @ts-ignore
assistant_id: 'your-assistant-uuid',
messages: [
{ role: 'user', content: 'What is the weather in Paris?' },
response.choices[0].message,
{
role: 'tool',
tool_call_id: toolCall.id,
content: JSON.stringify(weather),
},
],
});
}
Next.js Integration
App Router (Server Component)
// app/api/chat/route.ts
import OpenAI from 'openai';
import { NextResponse } from 'next/server';
const client = new OpenAI({
baseURL: 'https://api.assistantrouter.com/v1',
apiKey: process.env.ASSISTANTROUTER_API_KEY,
});
export async function POST(req: Request) {
const { messages } = await req.json();
const response = await client.chat.completions.create({
// @ts-ignore
assistant_id: process.env.ASSISTANT_ID,
messages,
});
return NextResponse.json(response);
}
Streaming with Vercel AI SDK
// app/api/chat/route.ts
import { createOpenAI } from '@ai-sdk/openai';
import { streamText } from 'ai';
const openai = createOpenAI({
baseURL: 'https://api.assistantrouter.com/v1',
apiKey: process.env.ASSISTANTROUTER_API_KEY,
});
export async function POST(req: Request) {
const { messages } = await req.json();
const result = await streamText({
model: openai('your-assistant-uuid'), // Use assistant ID as model
messages,
});
return result.toDataStreamResponse();
}
Other API Endpoints
Check Wallet Balance
const response = await fetch('https://api.assistantrouter.com/v1/wallet', {
headers: {
'Authorization': `Bearer ${process.env.ASSISTANTROUTER_API_KEY}`,
},
});
const { data } = await response.json();
console.log(`Balance: $${(data.available_cents / 100).toFixed(2)}`);
Get Usage Statistics
const response = await fetch('https://api.assistantrouter.com/v1/usage', {
headers: {
'Authorization': `Bearer ${process.env.ASSISTANTROUTER_API_KEY}`,
},
});
const { data } = await response.json();
console.log(`Total cost: $${(data.summary.total_cost_cents / 100).toFixed(2)}`);
List Available Models
const response = await fetch('https://api.assistantrouter.com/v1/models', {
headers: {
'Authorization': `Bearer ${process.env.ASSISTANTROUTER_API_KEY}`,
},
});
const { data, recommended } = await response.json();
console.log('Recommended models:', recommended.map(m => m.name));