Setup
# Import the SDK entry point and construct an authenticated client.
# The same `client` instance is reused by every example below.
from ai_stats import AIStats
client = AIStats(api_key="your-api-key")
Chat completions
# Non-streaming chat completion: send the full request payload and
# receive the complete response in one call.
chat_request = {
    "model": "openai/gpt-4o-mini",
    "messages": [{"role": "user", "content": "Hi!"}],
}
completion = client.generate_text(chat_request)

# Streaming chat completion: iterate over lines as the server emits them.
chat_stream_request = {
    "model": "openai/gpt-4o-mini",
    "messages": [{"role": "user", "content": "Stream a story"}],
}
for chunk in client.stream_text(chat_stream_request):
    print(chunk)
Responses
# Non-streaming Responses API call.
# Fix: user-supplied content parts must use the "input_text" type —
# "output_text" is the type of model-generated parts and is invalid in
# user input per the OpenAI Responses API.
resp = client.generate_response(
    {
        "model": "openai/gpt-4.1",
        "input": [{"role": "user", "content": [{"type": "input_text", "text": "Summarise"}]}],
    }
)

# Streaming variant: yields response lines incrementally.
for line in client.stream_response(
    {
        "model": "openai/gpt-4.1",
        "input": [{"role": "user", "content": [{"type": "input_text", "text": "Stream it"}]}],
    }
):
    print(line)
Messages (Anthropic-compatible)
# Anthropic-compatible Messages API: one-shot generation.
message_request = {
    "model": "anthropic/claude-3-5-sonnet-latest",
    "messages": [{"role": "user", "content": "Hello from messages API"}],
    "max_tokens": 128,
}
message = client.generate_message(message_request)

# Streaming variant; note this payload carries an explicit "stream": True
# flag in addition to using the streaming helper.
message_stream_request = {
    "model": "anthropic/claude-3-5-sonnet-latest",
    "messages": [{"role": "user", "content": "Stream a short hello"}],
    "max_tokens": 128,
    "stream": True,
}
for chunk in client.stream_messages(message_stream_request):
    print(chunk)
Embeddings and moderations
# Embeddings: vector representation of the input text.
embedding = client.generate_embedding(
    {
        "model": "openai/text-embedding-3-large",
        "input": "Sample text",
    }
)

# Moderations: content check on the input text.
moderation = client.generate_moderation(
    {
        "model": "openai/omni-moderation-latest",
        "input": "Text to check",
    }
)
Models and health
# Model catalogue and service-health endpoints; neither takes a payload.
models = client.get_models()
health = client.get_health()
Control-plane helpers
# Control-plane lookups: provider list, plus credit balance and recent
# activity scoped to a team ("days" sets the activity look-back window).
providers = client.list_providers()
credits = client.get_credits({"team_id": "your-team-id"})
activity = client.get_activity({"team_id": "your-team-id", "days": 30})
Endpoints currently returning coming_soon
These helpers exist in the SDK but currently return a placeholder payload:
generate_image, generate_image_edit
generate_speech, generate_transcription, generate_translation
generate_video
create_batch, get_batch
upload_file, list_files, get_file
get_generation
# Calling a not-yet-implemented endpoint does not raise: it returns a
# "coming_soon" placeholder dict that echoes back the request payload.
placeholder = client.generate_video(
    {"model": "openai/gpt-video-1", "prompt": "A mountain sunrise"}
)
print(placeholder)
# {
# "status": "coming_soon",
# "endpoint": "videos",
# "message": "This endpoint is not yet supported in the SDK.",
# "payload": {...}
# }
Error handling
# SDK failures surface as exceptions; wrap calls you expect may fail.
# NOTE(review): catching bare Exception is broad — if ai_stats exports a
# dedicated error class, prefer catching that instead (confirm in the SDK).
try:
    client.generate_text({"model": "invalid", "messages": []})
except Exception as exc:
    print(f"API error: {exc}")
Configuration options
# The client accepts optional configuration beyond the API key:
#   base_url — override the default API endpoint
#   timeout  — request timeout (presumably seconds — confirm in SDK docs)
from ai_stats import AIStats
client = AIStats(
    api_key="your-api-key",
    base_url="https://api.phaseo.app/v1",
    timeout=30.0,
)
Last modified on February 17, 2026