function chat<TAdapter, TModel>(options): AsyncIterable<StreamChunk>;
Defined in: core/chat.ts:741
Standalone chat streaming function with type inference from the adapter. Returns an async iterable of StreamChunk values for streaming responses and includes an automatic tool execution loop.
Type parameters:
- TAdapter extends AIAdapter<any, any, any, any, any, any, DefaultMessageMetadataByModality>
- TModel extends any

Parameters:
- options: ChatStreamOptionsForModel<TAdapter, TModel> (the chat options)

Returns:
- AsyncIterable<StreamChunk>
Example:

const stream = chat({
  adapter: openai(),
  model: 'gpt-4o',
  messages: [{ role: 'user', content: 'Hello!' }],
  tools: [weatherTool], // Optional: auto-executed when called
});

for await (const chunk of stream) {
  if (chunk.type === 'content') {
    console.log(chunk.delta);
  }
}
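The example passes a weatherTool that is not defined on this page. As a rough sketch of what such a tool might look like, assuming a plain object with name, description, parameters, and execute fields (these field names are illustrative assumptions, not the library's documented tool shape):

// Hypothetical tool definition: 'name', 'description', 'parameters', and
// 'execute' are assumed fields for illustration only; the real tool shape
// in this library may differ.
const weatherTool = {
  name: 'get_weather',
  description: 'Look up the current weather for a city',
  parameters: {
    type: 'object',
    properties: {
      city: { type: 'string', description: 'City name, e.g. "Berlin"' },
    },
    required: ['city'],
  },
  execute: async ({ city }: { city: string }) => {
    // Replace with a real lookup; the returned value would be fed back to
    // the model by the automatic tool execution loop described above.
    return { city, temperatureC: 21, condition: 'sunny' };
  },
};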
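Since chat returns the AsyncIterable directly, the full response text can be assembled by concatenating the content deltas. A minimal sketch using only the chunk fields shown in the example above (adapter setup and imports are assumed to match the example):

// Accumulate the streamed text; only 'content' chunks carry a text delta here.
let fullText = '';
for await (const chunk of chat({
  adapter: openai(),
  model: 'gpt-4o',
  messages: [{ role: 'user', content: 'Hello!' }],
})) {
  if (chunk.type === 'content') {
    fullText += chunk.delta;
  }
}
console.log(fullText);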
