OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:49
Extends

ToolCallLLM<OpenAIResponsesChatOptions>
Constructors
Constructor
new OpenAIResponses(init?): OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:76
Parameters
init?

Omit<Partial<OpenAIResponses>, "session"> & object
Returns
OpenAIResponses
Overrides
ToolCallLLM<OpenAIResponsesChatOptions>.constructor
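Example

All init fields are optional. A minimal construction sketch, assuming the provider is published as @llamaindex/openai and that apiKey falls back to the OPENAI_API_KEY environment variable when omitted:

```ts
import { OpenAIResponses } from "@llamaindex/openai";

// Model name and numeric values are illustrative, not documented defaults.
const llm = new OpenAIResponses({
  model: "gpt-4o-mini",
  temperature: 0.1,
  maxOutputTokens: 1024,
  // apiKey: "sk-...", // assumed to fall back to OPENAI_API_KEY when omitted
});
```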
Properties
model
model: string
Defined in: packages/providers/openai/src/responses.ts:50
temperature
temperature: number
Defined in: packages/providers/openai/src/responses.ts:51
topP
topP: number
Defined in: packages/providers/openai/src/responses.ts:52
maxOutputTokens?
optional maxOutputTokens: number
Defined in: packages/providers/openai/src/responses.ts:53
additionalChatOptions?
optional additionalChatOptions: OpenAIResponsesChatOptions
Defined in: packages/providers/openai/src/responses.ts:54
reasoningEffort?
optional reasoningEffort: "high" | "low" | "medium"
Defined in: packages/providers/openai/src/responses.ts:55
apiKey?
optional apiKey: string
Defined in: packages/providers/openai/src/responses.ts:56
baseURL?
optional baseURL: string
Defined in: packages/providers/openai/src/responses.ts:57
maxRetries
maxRetries: number
Defined in: packages/providers/openai/src/responses.ts:58
timeout?
optional timeout: number
Defined in: packages/providers/openai/src/responses.ts:59
additionalSessionOptions?
optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in: packages/providers/openai/src/responses.ts:60
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:64
Returns

Promise<LLMInstance>
trackPreviousResponses
trackPreviousResponses: boolean
Defined in: packages/providers/openai/src/responses.ts:65
store
store: boolean
Defined in: packages/providers/openai/src/responses.ts:66
user
user: string
Defined in: packages/providers/openai/src/responses.ts:67
callMetadata
callMetadata: StoredValue
Defined in: packages/providers/openai/src/responses.ts:68
builtInTools
builtInTools: null | Tool[]
Defined in: packages/providers/openai/src/responses.ts:69
strict
strict: boolean
Defined in: packages/providers/openai/src/responses.ts:70
include
include: null | ResponseIncludable[]
Defined in: packages/providers/openai/src/responses.ts:71
instructions
instructions: string
Defined in: packages/providers/openai/src/responses.ts:72
previousResponseId
previousResponseId: null | string
Defined in: packages/providers/openai/src/responses.ts:73
truncation
truncation: null | "auto" | "disabled"
Defined in: packages/providers/openai/src/responses.ts:74
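Example

Most of the properties above can be supplied through the constructor's init object. A hedged configuration sketch for the Responses-specific fields (values are illustrative; field names mirror the properties listed above):

```ts
import { OpenAIResponses } from "@llamaindex/openai";

const llm = new OpenAIResponses({
  model: "o3-mini",             // illustrative reasoning-capable model
  reasoningEffort: "medium",    // "high" | "low" | "medium"
  trackPreviousResponses: true, // chain requests via previousResponseId
  store: true,                  // ask the API to store responses server-side
  instructions: "Answer concisely.",
  truncation: "auto",           // null | "auto" | "disabled"
});
```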
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:127
Returns

Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/openai/src/responses.ts:134
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: packages/providers/openai/src/responses.ts:138
Returns

LLMMetadata & object
Overrides
ToolCallLLM.metadata
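Example

A small usage sketch for the accessors (construction options are illustrative):

```ts
import { OpenAIResponses } from "@llamaindex/openai";

const llm = new OpenAIResponses({ model: "gpt-4o-mini" });

// supportToolCall is a cheap capability check; metadata merges the base
// LLMMetadata with Responses-specific fields.
if (llm.supportToolCall) {
  console.log("Tool calling is available for", llm.metadata.model);
}
```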
Methods
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>
Defined in: packages/providers/openai/src/responses.ts:541
Parameters
params

LLMChatParamsStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<OpenAIResponsesMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:547
Parameters
params

LLMChatParamsNonStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>
Returns
Promise<ChatResponse<OpenAIResponsesMessageOptions>>
Overrides
ToolCallLLM.chat
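Example

The two overloads are selected by the stream flag on the params object. A minimal sketch of both calls, assuming the { role, content } ChatMessage shape and the delta field on ChatResponseChunk used elsewhere in LlamaIndex.TS:

```ts
import { OpenAIResponses } from "@llamaindex/openai";

const llm = new OpenAIResponses({ model: "gpt-4o-mini" });

// Non-streaming overload: resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "Say hello in one word." }],
});
console.log(response.message.content);

// Streaming overload: resolves to an AsyncIterable of ChatResponseChunk.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Count to three." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```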
streamChat()
protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:630
Parameters
baseRequestParams
ResponseCreateParams
Returns
AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>
toOpenAIResponsesRole()
toOpenAIResponsesRole(messageType): OpenAIResponsesRole
Defined in: packages/providers/openai/src/responses.ts:649
Parameters
messageType
MessageType
Returns
OpenAIResponsesRole
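Example

A brief sketch of the role mapping; the exact return value is an assumption based on the method name:

```ts
import { OpenAIResponses } from "@llamaindex/openai";

const llm = new OpenAIResponses();

// Maps a LlamaIndex MessageType to the role string used by the Responses API.
const role = llm.toOpenAIResponsesRole("user");
console.log(role); // expected to be the corresponding OpenAIResponsesRole, e.g. "user"
```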
toOpenAIResponseMessage()
toOpenAIResponseMessage(message): ResponseInputItem | ResponseInputItem[]
Defined in: packages/providers/openai/src/responses.ts:772
Parameters
message

ChatMessage<OpenAIResponsesMessageOptions>
Returns
ResponseInputItem | ResponseInputItem[]
toOpenAIResponseMessages()
toOpenAIResponseMessages(messages): ResponseInput
Defined in: packages/providers/openai/src/responses.ts:790
Parameters
messages

ChatMessage<OpenAIResponsesMessageOptions>[]
Returns
ResponseInput
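Example

A sketch of converting LlamaIndex chat messages into the ResponseInput payload expected by the Responses API, useful for inspecting the raw request that would be sent (message content is illustrative):

```ts
import { OpenAIResponses } from "@llamaindex/openai";

const llm = new OpenAIResponses();

// Plain chat messages; `as const` keeps each role as a literal MessageType.
const messages = [
  { role: "system" as const, content: "You are a terse assistant." },
  { role: "user" as const, content: "What is 2 + 2?" },
];

const input = llm.toOpenAIResponseMessages(messages);
console.log(JSON.stringify(input, null, 2));
```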
toResponsesTool()
toResponsesTool(tool): Tool
Defined in: packages/providers/openai/src/responses.ts:805
Parameters
tool
BaseTool
Returns
Tool
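Example

A sketch of converting a LlamaIndex BaseTool into the Responses API tool format. The weather tool below is hypothetical and built with FunctionTool.from from the llamaindex package (an assumption about how the BaseTool is obtained):

```ts
import { OpenAIResponses } from "@llamaindex/openai";
import { FunctionTool } from "llamaindex";

const llm = new OpenAIResponses();

// Hypothetical tool: name, description, and JSON-schema parameters are illustrative.
const weatherTool = FunctionTool.from(
  ({ city }: { city: string }) => `Sunny in ${city}`,
  {
    name: "get_weather",
    description: "Return the current weather for a city.",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
);

// Produces the tool definition shape used by the OpenAI Responses API.
const responsesTool = llm.toResponsesTool(weatherTool);
console.log(responsesTool);
```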