Setup
import { AIStats } from "@ai-stats/sdk";
const client = new AIStats({ apiKey: process.env.AI_STATS_API_KEY! });
Chat completions
const completion = await client.generateText({
  model: "openai/gpt-4o-mini",
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "What is AI Stats?" },
  ],
  temperature: 0.7,
});
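Reading the reply text: the snippet below assumes an OpenAI-compatible completion shape (choices[0].message.content), which may differ in this SDK; check the typings for the exact return type.
// Assumed OpenAI-compatible shape; adjust to the SDK's actual return type.
const text = (completion as any).choices?.[0]?.message?.content ?? "";
console.log(text);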
for await (const line of client.streamText({
  model: "openai/gpt-4o-mini",
  messages: [{ role: "user", content: "Tell me a story" }],
})) {
  console.log(line);
}
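To build the full message instead of logging chunks as they arrive, you can accumulate them (this sketch assumes each yielded value is a plain text fragment):
let story = "";
for await (const chunk of client.streamText({
  model: "openai/gpt-4o-mini",
  messages: [{ role: "user", content: "Tell me a story" }],
})) {
  story += chunk; // assumes chunks are text fragments; adjust if the SDK yields objects
}
console.log(story);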
Responses
const response = await client.generateResponse({
  model: "openai/gpt-4.1",
  input: [{ role: "user", content: [{ type: "input_text", text: "Summarise this" }] }],
});
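Reading the result: the line below assumes an OpenAI-style Responses payload with an output_text convenience field; check the SDK typings if the shape differs.
// Assumed OpenAI-style convenience field; not confirmed for this SDK.
console.log((response as any).output_text);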
for await (const line of client.streamResponse({
  model: "openai/gpt-4.1",
  input: [{ role: "user", content: [{ type: "input_text", text: "Stream this" }] }],
  stream: true,
})) {
  console.log(line);
}
Images
await client.generateImage({
  model: "openai/gpt-image-1",
  prompt: "A lighthouse at golden hour",
});
await client.generateImageEdit({
  model: "openai/gpt-image-1",
  prompt: "Make it sunset",
  image: "data:image/png;base64,...",
});
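To save a generated image in Node, the sketch below assumes the response carries base64 data under an OpenAI-style data[0].b64_json field; the field name may differ in this SDK.
import { writeFileSync } from "node:fs";

const image = await client.generateImage({
  model: "openai/gpt-image-1",
  prompt: "A lighthouse at golden hour",
});
// Assumed field: data[0].b64_json (OpenAI-style); check the SDK's return type.
const b64 = (image as any).data?.[0]?.b64_json;
if (b64) writeFileSync("lighthouse.png", Buffer.from(b64, "base64"));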
Audio
await client.generateSpeech({
  model: "openai/gpt-4o-mini-tts",
  input: "Hello world",
});
await client.generateTranscription({
  model: "openai/gpt-4o-transcribe",
  file: "data:audio/mp3;base64,...",
});
await client.generateTranslation({
  model: "openai/gpt-4o-translate",
  file: "data:audio/mp3;base64,...",
});
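The audio endpoints above take base64 data URLs; a small Node helper (not part of the SDK) to build one from a local file:
import { readFileSync } from "node:fs";

// Hypothetical helper: encode a local file as a data URL.
function toDataUrl(path: string, mimeType: string): string {
  return `data:${mimeType};base64,${readFileSync(path).toString("base64")}`;
}

await client.generateTranscription({
  model: "openai/gpt-4o-transcribe",
  file: toDataUrl("./meeting.mp3", "audio/mp3"),
});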
Video
await client.generateVideo({
  model: "openai/gpt-video-1",
  prompt: "A serene mountain lake at sunrise",
});
Embeddings
const embedding = await client.generateEmbedding({
  model: "openai/text-embedding-3-large",
  input: "Sample text",
});
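A common follow-up is comparing embeddings with cosine similarity. The sketch below assumes the vector is exposed as data[0].embedding (OpenAI-style); the field may differ in this SDK.
const other = await client.generateEmbedding({
  model: "openai/text-embedding-3-large",
  input: "Another text",
});

// Cosine similarity between two vectors.
function cosine(x: number[], y: number[]): number {
  let dot = 0, nx = 0, ny = 0;
  for (let i = 0; i < x.length; i++) {
    dot += x[i] * y[i];
    nx += x[i] * x[i];
    ny += y[i] * y[i];
  }
  return dot / (Math.sqrt(nx) * Math.sqrt(ny));
}

// Assumed OpenAI-style field: data[0].embedding.
console.log(cosine((embedding as any).data[0].embedding, (other as any).data[0].embedding));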
Moderations
await client.generateModeration({
  model: "openai/omni-moderation-latest",
  input: "Some text to check",
});
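If the result follows the OpenAI moderation shape, the verdict lives under results[0].flagged; treat the field names below as assumptions for this SDK.
const moderation = await client.generateModeration({
  model: "openai/omni-moderation-latest",
  input: "Some text to check",
});
// Assumed OpenAI-style field: results[0].flagged.
if ((moderation as any).results?.[0]?.flagged) {
  console.warn("Content flagged by moderation");
}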
Batch & files
const file = await client.uploadFile({
  purpose: "batch",
  file: new Blob(["{}"], { type: "application/json" }),
});
const batch = await client.createBatch({
  input_file_id: file.id,
  endpoint: "responses",
  completion_window: "24h",
});
const batchStatus = await client.getBatch(batch.id);
const files = await client.listFiles();
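Batches complete asynchronously, so you typically poll getBatch until a terminal state. The status values below follow the OpenAI convention and are an assumption for this SDK.
// Poll every 30s until the batch reaches an assumed terminal status.
let current = await client.getBatch(batch.id);
while (!["completed", "failed", "expired", "cancelled"].includes((current as any).status)) {
  await new Promise((resolve) => setTimeout(resolve, 30_000));
  current = await client.getBatch(batch.id);
}
console.log("Batch finished with status:", (current as any).status);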
Models & health
const models = await client.getModels();
const health = await client.getHealth();
Error handling
try {
  await client.generateText({ model: "invalid", messages: [] });
} catch (error) {
  console.error("API Error:", (error as Error).message);
}
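For transient failures such as rate limits or network errors, a simple retry with exponential backoff works well; the wrapper below is illustrative and not part of the SDK.
// Hypothetical retry helper: retries the call with exponential backoff (1s, 2s, 4s).
async function withRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      await new Promise((resolve) => setTimeout(resolve, 2 ** i * 1000));
    }
  }
  throw lastError;
}

const result = await withRetry(() =>
  client.generateText({
    model: "openai/gpt-4o-mini",
    messages: [{ role: "user", content: "What is AI Stats?" }],
  })
);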