gleamstral/chat
Types
Represents a chat conversation with configuration options
pub type Chat {
Chat(client: client.Client, config: Config)
}
pub type ChatCompletionChoice {
ChatCompletionChoice(
index: Int,
message: message.Message,
finish_reason: FinishReason,
)
}
pub type Config {
Config(
temperature: Float,
max_tokens: Int,
top_p: Float,
stream: Bool,
stop: List(String),
random_seed: Int,
response_format: ResponseFormat,
tools: List(tool.Tool),
tool_choice: tool.ToolChoice,
presence_penalty: Float,
frequency_penalty: Float,
n: Int,
prediction: Prediction,
safe_prompt: Bool,
)
}
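A Config is normally not constructed directly: chat.new provides the default configuration and the setter functions listed under Functions adjust individual fields. A minimal sketch, with arbitrary values and an illustrative variable name:
let client = client.new("your-api-key")
let my_chat =
  chat.new(client)
  |> chat.set_temperature(0.7)
  |> chat.set_max_tokens(1024)
  |> chat.set_random_seed(42)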
pub type FinishReason {
Stop
Length
ModelLength
Err
ToolCalls
}
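For illustration, a choice's finish_reason can be matched to decide what to do with the completion; the handle_* functions below are hypothetical placeholders for your own logic:
case choice.finish_reason {
  // The model stopped naturally at the end of its answer
  chat.Stop -> handle_message(choice.message)
  // The output was cut off by max_tokens or the model's context limit
  chat.Length | chat.ModelLength -> handle_truncated(choice.message)
  // The model asked to call one of the configured tools
  chat.ToolCalls -> handle_tool_calls(choice.message)
  // The generation ended with an error
  chat.Err -> handle_error()
}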
pub type Prediction {
Content(String)
}
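A sketch of supplying a predicted output via set_prediction; the predicted text and variable names are purely illustrative:
let my_chat =
  chat.new(client)
  |> chat.set_prediction(chat.Content("Expected text the model is likely to repeat"))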
pub type Response {
Response(
id: String,
object: String,
created: Int,
model: String,
choices: List(ChatCompletionChoice),
usage: Usage,
)
}
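For illustration, pulling the first choice out of a decoded response, assuming gleam/list and gleam/io are imported and response is already a chat.Response:
// Take the first (and, unless n > 1, only) completion choice
let assert Ok(choice) = list.first(response.choices)
io.debug(choice.message)
// Token accounting for the request
io.debug(response.usage)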
pub type ResponseFormat {
JsonObject
Text
}
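A sketch of requesting a JSON object instead of plain text (Mistral's API additionally expects the prompt itself to ask for JSON output):
let my_chat =
  chat.new(client)
  |> chat.set_response_format(chat.JsonObject)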
Functions
pub fn complete_request(
chat: Chat,
model: Model,
messages: List(Message),
) -> Request(String)
Creates an HTTP request for the Chat API endpoint
This function prepares a request to be sent to the Mistral AI Chat API. It needs to be paired with an HTTP client to actually send the request, and the response should be handled with client.handle_response using the appropriate decoder.
Example
// Create the request
let req = chat
|> chat.set_temperature(0.7)
|> chat.complete_request(model.MistralSmall, messages)
// Send the request with your HTTP client
use response <- result.try(http_client.send(req))
// Handle the response with the appropriate decoder
client.handle_response(response, using: chat.response_decoder())
pub fn handle_response(
response: Response(String),
) -> Result(gleamstral/chat.Response, Error)
Handles the HTTP response from a chat completion request
Example
let assert Ok(http_response) =
chat.complete_request(chat, model.MistralSmall, messages)
|> httpc.send
let assert Ok(response) = chat.handle_response(http_response)
pub fn new(client: Client) -> Chat
Creates a new Chat with default configuration using the provided client
Example
let client = client.new("your-api-key")
let chat = chat.new(client)
pub fn response_decoder() -> Decoder(Response)
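The decoder to pass to client.handle_response when you send the request yourself, as shown in the complete_request example above:
client.handle_response(http_response, using: chat.response_decoder())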
pub fn set_frequency_penalty(
chat: Chat,
frequency_penalty: Float,
) -> Chat
pub fn set_max_tokens(chat: Chat, max_tokens: Int) -> Chat
pub fn set_prediction(chat: Chat, prediction: Prediction) -> Chat
pub fn set_presence_penalty(
chat: Chat,
presence_penalty: Float,
) -> Chat
pub fn set_random_seed(chat: Chat, random_seed: Int) -> Chat
pub fn set_response_format(
chat: Chat,
response_format: ResponseFormat,
) -> Chat
pub fn set_safe_prompt(chat: Chat, safe_prompt: Bool) -> Chat
pub fn set_stream(chat: Chat, stream: Bool) -> Chat
pub fn set_temperature(chat: Chat, temperature: Float) -> Chat
pub fn set_tool_choice(
chat: Chat,
tool_choice: ToolChoice,
) -> Chat