OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:48
Extends
ToolCallLLM<OpenAIResponsesChatOptions>
Constructors
Constructor
new OpenAIResponses(init?): OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:76
Parameters
init?
Omit<Partial<OpenAIResponses>, "session"> & object
Returns
OpenAIResponses
Overrides
ToolCallLLM<OpenAIResponsesChatOptions>.constructor
Properties
model
model: string
Defined in: packages/providers/openai/src/responses.ts:49
temperature
temperature: number
Defined in: packages/providers/openai/src/responses.ts:50
topP
topP: number
Defined in: packages/providers/openai/src/responses.ts:51
maxOutputTokens?
optional
maxOutputTokens: number
Defined in: packages/providers/openai/src/responses.ts:52
additionalChatOptions?
optional
additionalChatOptions: OpenAIResponsesChatOptions
Defined in: packages/providers/openai/src/responses.ts:53
reasoningEffort?
optional
reasoningEffort: "high" | "low" | "medium"
Defined in: packages/providers/openai/src/responses.ts:54
apiKey?
optional
apiKey: string
Defined in: packages/providers/openai/src/responses.ts:55
baseURL?
optional
baseURL: string
Defined in: packages/providers/openai/src/responses.ts:56
maxRetries
maxRetries: number
Defined in: packages/providers/openai/src/responses.ts:57
timeout?
optional
timeout: number
Defined in: packages/providers/openai/src/responses.ts:58
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">
Defined in: packages/providers/openai/src/responses.ts:59
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:62
Returns
Promise<LLMInstance>
trackPreviousResponses
trackPreviousResponses: boolean
Defined in: packages/providers/openai/src/responses.ts:65
store
store: boolean
Defined in: packages/providers/openai/src/responses.ts:66
user
user: string
Defined in: packages/providers/openai/src/responses.ts:67
callMetadata
callMetadata: StoredValue
Defined in: packages/providers/openai/src/responses.ts:68
builtInTools
builtInTools: null | Tool[]
Defined in: packages/providers/openai/src/responses.ts:69
strict
strict: boolean
Defined in: packages/providers/openai/src/responses.ts:70
include
include: null | ResponseIncludable[]
Defined in: packages/providers/openai/src/responses.ts:71
instructions
instructions: string
Defined in: packages/providers/openai/src/responses.ts:72
previousResponseId
previousResponseId: null | string
Defined in: packages/providers/openai/src/responses.ts:73
truncation
truncation: null | "auto" | "disabled"
Defined in: packages/providers/openai/src/responses.ts:74
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:150
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/openai/src/responses.ts:157
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: packages/providers/openai/src/responses.ts:161
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
Methods
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: packages/providers/openai/src/responses.ts:506
Parameters
params
LLMChatParamsStreaming<OpenAIResponsesChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:512
Parameters
params
LLMChatParamsNonStreaming<OpenAIResponsesChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
streamChat()
protected
streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:595
Parameters
baseRequestParams
ResponseCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toOpenAIResponsesRole()
toOpenAIResponsesRole(messageType): OpenAIResponsesRole
Defined in: packages/providers/openai/src/responses.ts:614
Parameters
messageType
MessageType
Returns
OpenAIResponsesRole
toOpenAIResponseMessage()
toOpenAIResponseMessage(message): ResponseInputItem | ResponseInputItem[]
Defined in: packages/providers/openai/src/responses.ts:723
Parameters
message
ChatMessage<ToolCallLLMMessageOptions>
Returns
ResponseInputItem | ResponseInputItem[]
toOpenAIResponseMessages()
toOpenAIResponseMessages(messages): ResponseInput
Defined in: packages/providers/openai/src/responses.ts:741
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ResponseInput
toResponsesTool()
toResponsesTool(tool): Tool
Defined in: packages/providers/openai/src/responses.ts:756
Parameters
tool
BaseTool
Returns
Tool