# Async streaming example: print chat-completion text as chunks arrive.
import asyncio


async def stream_async() -> None:
    """Stream a chat completion and print each content delta as it arrives.

    Uses the async client as a context manager so the underlying HTTP
    session is closed even if the stream raises.
    """
    async with AIStats(api_key="your-api-key") as client:
        async for chunk in client.stream_chat_completions(
            model="openai/gpt-4o-mini",
            messages=[{"role": "user", "content": "Tell me a story"}],
        ):
            delta = chunk["choices"][0]["delta"]
            # Not every chunk carries text (e.g. role-only or terminal
            # chunks), so guard before printing.
            if "content" in delta:
                print(delta["content"], end="", flush=True)


asyncio.run(stream_async())
# Sync streaming example: same flow as the async version, blocking client.
with AIStatsSync(api_key="your-api-key") as client:
    for chunk in client.stream_chat_completions(
        model="openai/gpt-4o-mini",
        messages=[{"role": "user", "content": "Tell me a story"}],
    ):
        delta = chunk["choices"][0]["delta"]
        # Skip chunks without a text payload (role-only / terminal chunks).
        if "content" in delta:
            print(delta["content"], end="", flush=True)