LettaAPI.Model.CompletionCreateParamsNonStreaming (letta_api v1.0.0)

Summary

Types

t()

@type t() :: %LettaAPI.Model.CompletionCreateParamsNonStreaming{
  audio: LettaAPI.Model.ChatCompletionAudioParam.t() | nil,
  frequency_penalty: number() | nil,
  function_call: LettaAPI.Model.FunctionCall.t() | nil,
  functions:
    [LettaAPI.Model.OpenaiTypesChatCompletionCreateParamsFunction.t()] | nil,
  logit_bias: %{optional(String.t()) => integer()} | nil,
  logprobs: boolean() | nil,
  max_completion_tokens: integer() | nil,
  max_tokens: integer() | nil,
  messages: [LettaAPI.Model.CompletionCreateParamsNonStreamingMessagesInner.t()],
  metadata: %{optional(String.t()) => String.t()} | nil,
  modalities: [String.t()] | nil,
  model: LettaAPI.Model.Model.t(),
  n: integer() | nil,
  parallel_tool_calls: boolean() | nil,
  prediction: LettaAPI.Model.ChatCompletionPredictionContentParam.t() | nil,
  presence_penalty: number() | nil,
  reasoning_effort: String.t() | nil,
  response_format: LettaAPI.Model.ResponseFormat.t() | nil,
  seed: integer() | nil,
  service_tier: String.t() | nil,
  stop: LettaAPI.Model.Stop.t() | nil,
  store: boolean() | nil,
  stream: boolean() | nil,
  stream_options: LettaAPI.Model.ChatCompletionStreamOptionsParam.t() | nil,
  temperature: number() | nil,
  tool_choice: LettaAPI.Model.ToolChoice.t() | nil,
  tools: [LettaAPI.Model.ChatCompletionToolParam.t()] | nil,
  top_logprobs: integer() | nil,
  top_p: number() | nil,
  user: String.t() | nil,
  web_search_options: LettaAPI.Model.WebSearchOptions.t() | nil
}
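
For orientation, a minimal sketch of building this struct directly. The field names are taken from t() above; model and messages are the only fields whose types do not allow nil, so treat them as required. The nested values shown for model and the messages entry are illustrative placeholders, since the construction of those nested models is not documented on this page.

params = %LettaAPI.Model.CompletionCreateParamsNonStreaming{
  # :model and :messages have no nil alternative in t(), so they are assumed
  # required. The empty struct literals below are placeholders, not the
  # documented shapes of LettaAPI.Model.Model or the messages inner model.
  model: %LettaAPI.Model.Model{},
  messages: [%LettaAPI.Model.CompletionCreateParamsNonStreamingMessagesInner{}],
  temperature: 0.7,
  max_completion_tokens: 512,
  parallel_tool_calls: false
}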

Functions

decode(value)
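
No documentation is given for decode/1 on this page. A hedged usage sketch follows, assuming it takes a map obtained by JSON-decoding a response or request body and returns the corresponding %LettaAPI.Model.CompletionCreateParamsNonStreaming{} struct; the expected key format (string vs. atom) and the handling of nested models are assumptions.

# Hypothetical input: a plain map produced by JSON-decoding a body.
# Whether decode/1 expects string keys, atom keys, or an already-built
# struct is not stated here; the values below are placeholders.
raw = %{
  "model" => "gpt-4o",
  "messages" => [%{"role" => "user", "content" => "Hello"}],
  "temperature" => 0.2
}

params = LettaAPI.Model.CompletionCreateParamsNonStreaming.decode(raw)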