function extendAdapter<TFactory, TDefs>(factory, _customModels): (model, ...args) => InferAdapterReturn<TFactory>

Defined in: packages/typescript/ai/src/extend-adapter.ts:166
Extends an existing adapter factory with additional custom models.
The extended adapter accepts both original models (with full original type inference) and custom models (with types from your definitions).
At runtime, this simply passes through to the original factory - no validation is performed. The original factory's signature is fully preserved, including any config parameters.
Type Parameters:
- TFactory extends (...args) => any — The original adapter factory function (e.g., openaiText, anthropicText)
- TDefs extends readonly ExtendedModelDef<string, readonly Modality[], unknown>[] — The array of custom model definitions (created with createModel)

Returns: A new factory function that accepts both original and custom models
Call Signature: (model, ...args): InferAdapterReturn<TFactory>

Parameters:
- model: InferFactoryModels<TFactory> | ExtractCustomModelNames<TDefs>
- ...args: InferConfig<TFactory> extends undefined ? [] : [InferConfig<TFactory>]

Returns: InferAdapterReturn<TFactory>
import { extendAdapter, createModel } from '@tanstack/ai'
import { openaiText } from '@tanstack/ai-openai'
// Define custom models
const customModels = [
createModel('my-fine-tuned-gpt4', ['text', 'image']),
createModel('local-llama', ['text']),
] as const
// Create extended adapter
const myOpenai = extendAdapter(openaiText, customModels)
// Use with original models - full type inference preserved
const gpt4 = myOpenai('gpt-4o')
// Use with custom models
const custom = myOpenai('my-fine-tuned-gpt4')
// Type error: 'invalid-model' is not a valid model
// myOpenai('invalid-model')
// Works with chat()
chat({
adapter: myOpenai('my-fine-tuned-gpt4'),
messages: [...]
})