OpenAIResponsesParams

@ApiStatus.Experimental
class OpenAIResponsesParams(temperature: Double? = null, maxTokens: Int? = null, numberOfChoices: Int? = null, speculation: String? = null, schema: LLMParams.Schema? = null, toolChoice: LLMParams.ToolChoice? = null, user: String? = null, includeThoughts: Boolean? = null, thinkingBudget: Int? = null, additionalProperties: Map<String, JsonElement>? = null, val background: Boolean? = null, val include: List<String>? = null, val maxToolCalls: Int? = null, val parallelToolCalls: Boolean? = null, val reasoning: ReasoningConfig? = null, val truncation: Truncation? = null, val promptCacheKey: String? = null, val safetyIdentifier: String? = null, val serviceTier: ServiceTier? = null, val store: Boolean? = null, val logprobs: Boolean? = null, val topLogprobs: Int? = null, val topP: Double? = null) : LLMParams, OpenAIParams

OpenAI Responses API parameters layered on top of LLMParams.

Use these options to generate text or JSON, call built-in tools (e.g., web/file search) or your own functions, enable background processing, include auxiliary outputs, and tune sampling, reasoning, and truncation behavior. All parameters are optional; when unset, provider/model defaults apply.
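
For illustration, a minimal construction might look like the following sketch (names are local to this example; imports depend on how the library is set up in your project):

// Sketch only: tune sampling and limit output size; everything left unset
// falls back to provider/model defaults.
val params = OpenAIResponsesParams(
    temperature = 0.2,   // low randomness for mostly deterministic output
    maxTokens = 1024,    // cap the generated output length
    store = false        // do not let the provider persist the response
)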

Constructors

constructor(temperature: Double? = null, maxTokens: Int? = null, numberOfChoices: Int? = null, speculation: String? = null, schema: LLMParams.Schema? = null, toolChoice: LLMParams.ToolChoice? = null, user: String? = null, includeThoughts: Boolean? = null, thinkingBudget: Int? = null, additionalProperties: Map<String, JsonElement>? = null, background: Boolean? = null, include: List<String>? = null, maxToolCalls: Int? = null, parallelToolCalls: Boolean? = null, reasoning: ReasoningConfig? = null, truncation: Truncation? = null, promptCacheKey: String? = null, safetyIdentifier: String? = null, serviceTier: ServiceTier? = null, store: Boolean? = null, logprobs: Boolean? = null, topLogprobs: Int? = null, topP: Double? = null)

Properties

val background: Boolean? = null

Run the response in the background (non-blocking).
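
As a sketch, a long-running request could be configured like this (whether store = true is also required to retrieve the result later depends on the provider; check its documentation):

// Sketch only: run the response asynchronously on the provider side.
val backgroundParams = OpenAIResponsesParams(
    background = true,  // non-blocking; the provider processes the request in the background
    store = true        // often paired with background runs so the output can be fetched later
)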

val include: List<String>? = null

Additional output sections to include in the response.

val logprobs: Boolean? = null

Whether to include log-probabilities for output tokens.

val maxToolCalls: Int? = null

Maximum total number of built-in tool calls allowed in this response (≥ 0).

val parallelToolCalls: Boolean? = null

Whether tool calls may run in parallel.
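
A sketch combining the tool-call controls (both parameters come from the constructor above):

// Sketch only: cap built-in tool calls and run them sequentially.
val toolParams = OpenAIResponsesParams(
    maxToolCalls = 3,          // allow at most three built-in tool calls
    parallelToolCalls = false  // execute tool calls one at a time
)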

val promptCacheKey: String? = null

Stable cache key for prompt caching (non-blank when provided).

val reasoning: ReasoningConfig? = null

Reasoning configuration for reasoning-capable models.

val safetyIdentifier: String? = null

Stable app-scoped user ID for policy enforcement (non-blank when provided).

val serviceTier: ServiceTier? = null

Processing tier selection for cost/latency trade-offs.

val store: Boolean? = null

Whether the provider may store outputs for later retrieval/evals.

val topLogprobs: Int? = null

Number of top alternatives per position (0–20). Requires logprobs = true.
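
A sketch of requesting token log-probabilities (topLogprobs has no effect unless logprobs = true):

// Sketch only: return log-probabilities with up to 5 alternatives per token position.
val logprobParams = OpenAIResponsesParams(
    logprobs = true,
    topLogprobs = 5
)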

val topP: Double? = null

Nucleus sampling in (0.0, 1.0]; use instead of temperature.
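
A sketch using nucleus sampling (topP is conventionally set instead of temperature, not alongside it):

// Sketch only: sample from the smallest token set whose cumulative probability reaches 0.9.
val nucleusParams = OpenAIResponsesParams(topP = 0.9)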

val truncation: Truncation? = null

Truncation strategy when nearing the context window.

val user: String?

Functions

operator fun component1(): Double?
operator fun component10(): Map<String, JsonElement>?
operator fun component2(): Int?
operator fun component3(): Int?
operator fun component4(): String?
operator fun component5(): LLMParams.Schema?
operator fun component6(): LLMParams.ToolChoice?
operator fun component7(): String?
operator fun component8(): Boolean?
operator fun component9(): Int?
fun copy(temperature: Double? = this.temperature, maxTokens: Int? = this.maxTokens, numberOfChoices: Int? = this.numberOfChoices, speculation: String? = this.speculation, schema: LLMParams.Schema? = this.schema, toolChoice: LLMParams.ToolChoice? = this.toolChoice, user: String? = this.user, includeThoughts: Boolean? = this.includeThoughts, thinkingBudget: Int? = this.thinkingBudget, additionalProperties: Map<String, JsonElement>? = this.additionalProperties, background: Boolean? = this.background, include: List<String>? = this.include, maxToolCalls: Int? = this.maxToolCalls, parallelToolCalls: Boolean? = this.parallelToolCalls, reasoning: ReasoningConfig? = this.reasoning, truncation: Truncation? = this.truncation, promptCacheKey: String? = this.promptCacheKey, safetyIdentifier: String? = this.safetyIdentifier, serviceTier: ServiceTier? = this.serviceTier, store: Boolean? = this.store, logprobs: Boolean? = this.logprobs, topLogprobs: Int? = this.topLogprobs, topP: Double? = this.topP): OpenAIResponsesParams

Creates a copy of this instance with the ability to modify any of its properties.
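
A sketch of deriving a variant from an existing instance (params here is a hypothetical value, e.g. the one built in the sketch near the top of this page):

// Sketch only: copy keeps all other settings and overrides just the cache key.
val cachedParams = params.copy(promptCacheKey = "my-stable-prompt-key")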

open fun copy(temperature: Double?, maxTokens: Int?, numberOfChoices: Int?, speculation: String?, schema: LLMParams.Schema?, toolChoice: LLMParams.ToolChoice?, user: String?, includeThoughts: Boolean?, thinkingBudget: Int?, additionalProperties: Map<String, JsonElement>?): LLMParams
fun default(default: LLMParams): LLMParams
open operator override fun equals(other: Any?): Boolean
open override fun hashCode(): Int
open override fun toString(): String