MistralClient (mistralex_ai v0.1.0)

View Source

Elixir client for the Mistral AI API.

This module provides a comprehensive interface to the Mistral AI API with complete feature parity to the Python SDK. It supports all major API endpoints including chat completions, embeddings, model management, file operations, and advanced features like streaming responses and structured outputs.

Configuration

Configure your API key and other settings:

config :mistralex_ai,
  api_key: "your-api-key",
  base_url: "https://api.mistral.ai",
  timeout: 30_000

Or set environment variables:

export MISTRAL_API_KEY="your-api-key"

Basic Usage

# Chat completion
{:ok, response} = MistralClient.chat([
  %{role: "user", content: "Hello, how are you?"}
])

# Generate embeddings
{:ok, embeddings} = MistralClient.embeddings("Hello world")

# List available models
{:ok, models} = MistralClient.models()

Streaming

# Stream chat responses
MistralClient.chat_stream([
  %{role: "user", content: "Tell me a story"}
], fn chunk ->
  content = get_in(chunk, ["choices", Access.at(0), "delta", "content"])
  if content, do: IO.write(content)
end)

Error Handling

All functions return {:ok, result} on success or {:error, reason} on failure. The client handles retries, rate limiting, and network errors automatically.

Summary

Functions

Append new messages to an existing conversation.

Archive a fine-tuned model.

Check if Beta APIs are available for the current API key.

Get Beta API status and available features.

Cancel a running batch job.

Cancel a fine-tuning job.

Cancel a fine-tuning job.

Create a chat completion.

Create a streaming chat completion.

Classify text into categories.

Classify chat conversations into categories.

Get the current configuration.

Create a new AI agent with specific instructions and tools.

Create a new batch job for processing multiple requests.

Create a fine-tuning job.

Create a fine-tuning job.

Delete a file.

Generate embeddings for the given input.

List uploaded files.

Perform FIM (Fill-in-the-Middle) completion for code.

Stream FIM completion with real-time results.

Retrieve a specific agent by ID.

Get details of a specific batch job by ID.

Get the history of a conversation.

Get details of a specific fine-tuning job.

Get details of a specific fine-tuning job.

List all agents with optional pagination.

List batch jobs with optional filtering and pagination.

List conversations with optional pagination.

List fine-tuning jobs with optional filtering.

List fine-tuning jobs with optional filtering.

Retrieve a specific model.

List available models.

Moderate text content for safety and policy violations.

Moderate chat conversations for safety and policy violations.

Create a new client with custom configuration.

Process a document or image using OCR (Optical Character Recognition).

Process a document or image using OCR with a structured request.

Start a new conversation with an agent or model.

Start a new conversation with streaming responses.

Start a validated fine-tuning job.

Start a validated fine-tuning job.

Unarchive a fine-tuned model.

Update an agent's configuration.

Update a fine-tuned model.

Upload a file.

Types

batch_options()

@type batch_options() :: %{
  page: integer() | nil,
  page_size: integer() | nil,
  model: String.t() | nil,
  metadata: map() | nil,
  created_after: DateTime.t() | nil,
  created_by_me: boolean() | nil,
  status: [atom()] | nil
}

chat_options()

@type chat_options() :: %{
  model: String.t(),
  temperature: float() | nil,
  max_tokens: integer() | nil,
  top_p: float() | nil,
  stream: boolean() | nil,
  tools: list() | nil,
  tool_choice: String.t() | map() | nil,
  response_format: map() | nil
}

embedding_options()

@type embedding_options() :: %{
  model: String.t(),
  encoding_format: String.t() | nil,
  dimensions: integer() | nil
}

fim_options()

@type fim_options() :: %{
  model: String.t(),
  suffix: String.t() | nil,
  temperature: float() | nil,
  top_p: float() | nil,
  max_tokens: integer() | nil,
  min_tokens: integer() | nil,
  stop: String.t() | [String.t()] | nil,
  random_seed: integer() | nil
}

fine_tuning_options()

@type fine_tuning_options() :: %{
  page: integer() | nil,
  page_size: integer() | nil,
  model: String.t() | nil,
  created_after: DateTime.t() | nil,
  created_before: DateTime.t() | nil,
  created_by_me: boolean() | nil,
  status: atom() | nil,
  wandb_project: String.t() | nil,
  wandb_name: String.t() | nil,
  suffix: String.t() | nil
}

message()

@type message() :: %{
  role: String.t(),
  content: String.t(),
  name: String.t() | nil,
  tool_calls: list() | nil,
  tool_call_id: String.t() | nil
}

ocr_options()

@type ocr_options() :: %{
  id: String.t() | nil,
  pages: [integer()] | nil,
  include_image_base64: boolean() | nil,
  image_limit: integer() | nil,
  image_min_size: integer() | nil,
  bbox_annotation_format: map() | nil,
  document_annotation_format: map() | nil
}

Functions

append_to_conversation(conversation_id, request)

@spec append_to_conversation(String.t(), map()) ::
  {:ok, MistralClient.Models.Beta.ConversationResponse.t()} | {:error, term()}

Append new messages to an existing conversation.

Examples

{:ok, response} = MistralClient.append_to_conversation(conversation_id, %{
  inputs: "What's the weather like today?"
})

archive_fine_tuned_model(model_id)

@spec archive_fine_tuned_model(String.t()) :: {:ok, map()} | {:error, term()}

Archive a fine-tuned model.

beta_available?()

@spec beta_available?() :: boolean()

Check if Beta APIs are available for the current API key.

Examples

if MistralClient.beta_available?() do
  IO.puts("Beta features are available!")
end

beta_status()

@spec beta_status() :: {:ok, map()} | {:error, term()}

Get Beta API status and available features.

Examples

{:ok, status} = MistralClient.beta_status()
IO.inspect(status.features)

cancel_batch_job(job_id)

@spec cancel_batch_job(String.t()) ::
  {:ok, MistralClient.Models.BatchJobOut.t()} | {:error, term()}

Cancel a running batch job.

Parameters

  • job_id - The batch job ID to cancel

Examples

{:ok, job} = MistralClient.cancel_batch_job("batch_abc123")
# job.status will be :cancellation_requested or :cancelled

cancel_fine_tuning_job(job_id)

@spec cancel_fine_tuning_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Cancel a fine-tuning job.

cancel_job(job_id)

@spec cancel_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Cancel a fine-tuning job.

Examples

{:ok, job} = MistralClient.cancel_job("job-123")

chat(messages, options \\ %{})

@spec chat([message()], chat_options()) :: {:ok, map()} | {:error, term()}

Create a chat completion.

Parameters

  • messages - List of message maps with role and content
  • options - Optional parameters (model, temperature, etc.)

Examples

{:ok, response} = MistralClient.chat([
  %{role: "user", content: "Hello!"}
])

{:ok, response} = MistralClient.chat(
  [%{role: "user", content: "Hello!"}],
  %{model: "mistral-large-latest", temperature: 0.7}
)

chat_stream(messages, callback, options \\ %{})

@spec chat_stream([message()], function(), chat_options()) :: :ok | {:error, term()}

Create a streaming chat completion.

Parameters

  • messages - List of message maps with role and content
  • callback - Function to handle each chunk
  • options - Optional parameters

Examples

MistralClient.chat_stream([
  %{role: "user", content: "Tell me a story"}
], fn chunk ->
  content = get_in(chunk, ["choices", Access.at(0), "delta", "content"])
  if content, do: IO.write(content)
end)

classify(model, inputs)

@spec classify(String.t(), String.t() | [String.t()]) ::
  {:ok, MistralClient.Models.ClassificationResponse.t()} | {:error, term()}

Classify text into categories.

Parameters

  • model - Model to use for classification
  • inputs - Text to classify (string or list of strings)

Examples

# Classify a single text
{:ok, response} = MistralClient.classify(
  "mistral-classifier-latest",
  "This is some text to classify"
)

# Classify multiple texts
{:ok, response} = MistralClient.classify(
  "mistral-classifier-latest",
  ["Text 1", "Text 2", "Text 3"]
)

classify_chat(model, inputs)

@spec classify_chat(String.t(), [map()]) ::
  {:ok, MistralClient.Models.ClassificationResponse.t()} | {:error, term()}

Classify chat conversations into categories.

Parameters

  • model - Model to use for classification
  • inputs - Chat conversations to classify

Examples

{:ok, response} = MistralClient.classify_chat(
  "mistral-classifier-latest",
  [%{messages: [%{role: "user", content: "Hello"}]}]
)

config()

@spec config() :: map()

Get the current configuration.

Examples

config = MistralClient.config()

create_agent(request)

@spec create_agent(map()) ::
  {:ok, MistralClient.Models.Beta.Agent.t()} | {:error, term()}

Create a new AI agent with specific instructions and tools.

Parameters

  • request - Agent creation request with:
    • :name - Agent name (required)
    • :model - Model to use (required)
    • :instructions - Instructions for the agent (optional)
    • :tools - List of tools available to the agent (optional)
    • :description - Agent description (optional)

Examples

{:ok, agent} = MistralClient.create_agent(%{
  name: "Customer Support Agent",
  model: "mistral-large-latest",
  instructions: "You are a helpful customer support agent.",
  tools: [
    %{
      type: "function",
      function: %{
        name: "get_order_status",
        description: "Get the status of a customer order"
      }
    }
  ]
})

create_batch_job(request)

@spec create_batch_job(map() | MistralClient.Models.BatchJobIn.t()) ::
  {:ok, MistralClient.Models.BatchJobOut.t()} | {:error, term()}

Create a new batch job for processing multiple requests.

Parameters

  • request - Batch job request with:
    • :input_files - List of file IDs to process (required)
    • :endpoint - API endpoint to use (required)
    • :model - Model to use for processing (required)
    • :metadata - Optional metadata map
    • :timeout_hours - Timeout in hours (default: 24)

Examples

{:ok, job} = MistralClient.create_batch_job(%{
  input_files: ["file-abc123", "file-def456"],
  endpoint: "/v1/chat/completions",
  model: "mistral-large-latest",
  metadata: %{"project" => "customer-support"},
  timeout_hours: 48
})

create_fine_tuning_job(request)

@spec create_fine_tuning_job(MistralClient.Models.FineTuningJobRequest.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Create a fine-tuning job.

create_job(request)

Create a fine-tuning job.

Examples

request = %MistralClient.Models.FineTuningJobRequest{
  model: "open-mistral-7b",
  hyperparameters: %MistralClient.Models.CompletionTrainingParameters{
    learning_rate: 0.0001
  }
}
{:ok, job} = MistralClient.create_job(request)

delete_file(file_id)

@spec delete_file(String.t()) :: {:ok, map()} | {:error, term()}

Delete a file.

Parameters

  • file_id - The ID of the file to delete

Examples

{:ok, _} = MistralClient.delete_file("file-123")

embeddings(input, options \\ %{})

@spec embeddings(String.t() | [String.t()], embedding_options()) ::
  {:ok, map()} | {:error, term()}

Generate embeddings for the given input.

Parameters

  • input - Text string or list of strings to embed
  • options - Optional parameters (model, dimensions, etc.)

Examples

{:ok, embeddings} = MistralClient.embeddings("Hello world")

{:ok, embeddings} = MistralClient.embeddings(
  ["Hello", "World"],
  %{model: "mistral-embed", dimensions: 1024}
)

files()

@spec files() :: {:ok, [map()]} | {:error, term()}

List uploaded files.

Examples

{:ok, files} = MistralClient.files()

fim_complete(model, prompt, options \\ %{})

@spec fim_complete(String.t(), String.t(), fim_options()) ::
  {:ok, map()} | {:error, term()}

Perform FIM (Fill-in-the-Middle) completion for code.

Parameters

  • model - Codestral model ID ("codestral-2405" or "codestral-latest")
  • prompt - The code prefix to complete
  • options - Optional parameters (suffix, temperature, etc.)

Examples

{:ok, completion} = MistralClient.fim_complete(
  "codestral-2405",
  "def fibonacci(n):",
  %{suffix: "return result"}
)

fim_stream(model, prompt, callback, options \\ %{})

@spec fim_stream(String.t(), String.t(), function(), fim_options()) ::
  {:ok, term()} | {:error, term()}

Stream FIM completion with real-time results.

Parameters

  • model - Codestral model ID
  • prompt - The code prefix to complete
  • callback - Function to handle each chunk
  • options - Optional parameters

Examples

MistralClient.fim_stream(
  "codestral-2405",
  "def fibonacci(n):",
  fn chunk ->
    if chunk.content, do: IO.write(chunk.content)
  end,
  %{suffix: "return result"}
)

get_agent(agent_id)

@spec get_agent(String.t()) ::
  {:ok, MistralClient.Models.Beta.Agent.t()} | {:error, term()}

Retrieve a specific agent by ID.

Examples

{:ok, agent} = MistralClient.get_agent("agent_123")

get_batch_job(job_id)

@spec get_batch_job(String.t()) ::
  {:ok, MistralClient.Models.BatchJobOut.t()} | {:error, term()}

Get details of a specific batch job by ID.

Parameters

  • job_id - The batch job ID

Examples

{:ok, job} = MistralClient.get_batch_job("batch_abc123")
IO.puts("Status: #{job.status}")
IO.puts("Progress: #{job.completed_requests}/#{job.total_requests}")

get_conversation_history(conversation_id)

@spec get_conversation_history(String.t()) ::
  {:ok, MistralClient.Models.Beta.ConversationHistory.t()} | {:error, term()}

Get the history of a conversation.

Examples

{:ok, history} = MistralClient.get_conversation_history(conversation_id)

get_fine_tuning_job(job_id)

@spec get_fine_tuning_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Get details of a specific fine-tuning job.

get_job(job_id)

@spec get_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Get details of a specific fine-tuning job.

Examples

{:ok, job} = MistralClient.get_job("job-123")

list_agents(options \\ %{})

@spec list_agents(map()) ::
  {:ok, [MistralClient.Models.Beta.Agent.t()]} | {:error, term()}

List all agents with optional pagination.

Examples

{:ok, agents} = MistralClient.list_agents()
{:ok, agents} = MistralClient.list_agents(%{page: 1, page_size: 10})

list_batch_jobs(options \\ %{})

@spec list_batch_jobs(batch_options()) ::
  {:ok, MistralClient.Models.BatchJobsOut.t()} | {:error, term()}

List batch jobs with optional filtering and pagination.

Parameters

  • options - Optional filtering parameters:
    • :page - Page number (default: 0)
    • :page_size - Number of jobs per page (default: 100)
    • :model - Filter by model name
    • :metadata - Filter by metadata
    • :created_after - Filter by creation date (DateTime)
    • :created_by_me - Filter by ownership (boolean, default: false)
    • :status - Filter by status list (e.g., [:running, :queued])

Examples

# List all batch jobs
{:ok, jobs} = MistralClient.list_batch_jobs()

# List with filtering
{:ok, jobs} = MistralClient.list_batch_jobs(%{
  page: 0,
  page_size: 50,
  status: [:running, :queued],
  model: "mistral-large-latest"
})

list_conversations(options \\ %{})

@spec list_conversations(map()) ::
  {:ok, [MistralClient.Models.Beta.Conversation.t()]} | {:error, term()}

List conversations with optional pagination.

Examples

{:ok, conversations} = MistralClient.list_conversations()

list_fine_tuning_jobs(options \\ %{})

@spec list_fine_tuning_jobs(fine_tuning_options()) ::
  {:ok, MistralClient.Models.FineTuningJobsResponse.t()} | {:error, term()}

List fine-tuning jobs with optional filtering.

list_jobs(options \\ %{})

@spec list_jobs(map()) ::
  {:ok, MistralClient.Models.FineTuningJobsResponse.t()} | {:error, term()}

List fine-tuning jobs with optional filtering.

Examples

# List all jobs
{:ok, jobs} = MistralClient.list_jobs()

# List with filtering
{:ok, jobs} = MistralClient.list_jobs(%{
  status: :running,
  model: "open-mistral-7b"
})

model(model_id)

@spec model(String.t()) :: {:ok, map()} | {:error, term()}

Retrieve a specific model.

Parameters

  • model_id - The ID of the model to retrieve

Examples

{:ok, model} = MistralClient.model("mistral-large-latest")

models()

@spec models() :: {:ok, [map()]} | {:error, term()}

List available models.

Examples

{:ok, models} = MistralClient.models()

moderate(model, inputs)

@spec moderate(String.t(), String.t() | [String.t()]) ::
  {:ok, MistralClient.Models.ModerationResponse.t()} | {:error, term()}

Moderate text content for safety and policy violations.

Parameters

  • model - Model to use for moderation (e.g., "mistral-moderation-latest")
  • inputs - Text to moderate (string or list of strings)

Examples

# Moderate a single text
{:ok, response} = MistralClient.moderate(
  "mistral-moderation-latest",
  "This is some text to moderate"
)

# Moderate multiple texts
{:ok, response} = MistralClient.moderate(
  "mistral-moderation-latest",
  ["Text 1", "Text 2", "Text 3"]
)

moderate_chat(model, inputs)

@spec moderate_chat(String.t(), [[map()]]) ::
  {:ok, MistralClient.Models.ModerationResponse.t()} | {:error, term()}

Moderate chat conversations for safety and policy violations.

Parameters

  • model - Model to use for moderation
  • inputs - Chat conversations to moderate (list of message lists)

Examples

{:ok, response} = MistralClient.moderate_chat(
  "mistral-moderation-latest",
  [
    [
      %{role: "user", content: "Hello"},
      %{role: "assistant", content: "Hi there!"}
    ]
  ]
)

new(options \\ [])

@spec new(keyword()) :: MistralClient.Client.t()

Create a new client with custom configuration.

Parameters

  • options - Configuration options

Examples

client = MistralClient.new(api_key: "custom-key", timeout: 60_000)

ocr_process(model, document, options \\ %{})

Process a document or image using OCR (Optical Character Recognition).

Parameters

  • model - Model to use for OCR processing (e.g., "pixtral-12b-2024-12-19")
  • document - Document or image to process (DocumentURLChunk or ImageURLChunk)
  • options - Optional parameters:
    • :id - Request identifier
    • :pages - List of specific page numbers to process (0-indexed)
    • :include_image_base64 - Include base64-encoded images in response
    • :image_limit - Maximum number of images to extract
    • :image_min_size - Minimum size (height and width) for image extraction
    • :bbox_annotation_format - Structured output format for bounding boxes
    • :document_annotation_format - Structured output format for the document

Examples

# Process a document URL
document = MistralClient.Models.DocumentURLChunk.new("https://example.com/doc.pdf")
{:ok, response} = MistralClient.ocr_process("pixtral-12b-2024-12-19", document)

# Process an image with options
image_url = MistralClient.Models.ImageURLChunkImageURL.new("data:image/png;base64,...")
image_chunk = MistralClient.Models.ImageURLChunk.new(image_url)
{:ok, response} = MistralClient.ocr_process(
  "pixtral-12b-2024-12-19",
  image_chunk,
  %{include_image_base64: true, image_limit: 5}
)

# Process specific pages
{:ok, response} = MistralClient.ocr_process(
  "pixtral-12b-2024-12-19",
  document,
  %{pages: [0, 1, 2]}
)

ocr_process_request(request)

@spec ocr_process_request(MistralClient.Models.OCRRequest.t()) ::
  {:ok, MistralClient.Models.OCRResponse.t()} | {:error, term()}

Process a document or image using OCR with a structured request.

Parameters

  • request - OCR request struct with all parameters

Examples

document = MistralClient.Models.DocumentURLChunk.new("https://example.com/doc.pdf")
request = MistralClient.Models.OCRRequest.new("pixtral-12b-2024-12-19", document,
  pages: [0, 1],
  include_image_base64: true
)
{:ok, response} = MistralClient.ocr_process_request(request)

start_conversation(request)

@spec start_conversation(map()) ::
  {:ok, MistralClient.Models.Beta.ConversationResponse.t()} | {:error, term()}

Start a new conversation with an agent or model.

Parameters

  • request - Conversation start request with:
    • :inputs - Initial message(s) (required)
    • :agent_id - Agent ID to use (optional, mutually exclusive with model)
    • :model - Model to use (optional, mutually exclusive with agent_id)
    • :instructions - Custom instructions (optional)

Examples

# Start with an agent
{:ok, conversation} = MistralClient.start_conversation(%{
  agent_id: "agent_123",
  inputs: "Hello, I need help with my order."
})

# Start with a model
{:ok, conversation} = MistralClient.start_conversation(%{
  model: "mistral-large-latest",
  inputs: "Explain quantum computing",
  instructions: "You are a physics teacher."
})

start_conversation_stream(request, callback)

@spec start_conversation_stream(map(), function()) :: {:ok, term()} | {:error, term()}

Start a new conversation with streaming responses.

Examples

MistralClient.start_conversation_stream(%{
  agent_id: "agent_123",
  inputs: "Tell me a story"
}, fn chunk ->
  IO.write(chunk.content || "")
end)

start_fine_tuning_job(job_id)

@spec start_fine_tuning_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Start a validated fine-tuning job.

start_job(job_id)

@spec start_job(String.t()) ::
  {:ok, MistralClient.Models.FineTuningJobResponse.t()} | {:error, term()}

Start a validated fine-tuning job.

Examples

{:ok, job} = MistralClient.start_job("job-123")

unarchive_fine_tuned_model(model_id)

@spec unarchive_fine_tuned_model(String.t()) :: {:ok, map()} | {:error, term()}

Unarchive a fine-tuned model.

update_agent(agent_id, updates)

@spec update_agent(String.t(), map()) ::
  {:ok, MistralClient.Models.Beta.Agent.t()} | {:error, term()}

Update an agent's configuration.

Examples

{:ok, updated_agent} = MistralClient.update_agent("agent_123", %{
  instructions: "Updated instructions for the agent"
})

update_fine_tuned_model(model_id, updates)

@spec update_fine_tuned_model(String.t(), map()) :: {:ok, map()} | {:error, term()}

Update a fine-tuned model.

upload_file(file_path, purpose)

@spec upload_file(String.t(), String.t()) :: {:ok, map()} | {:error, term()}

Upload a file.

Parameters

  • file_path - Path to the file to upload
  • purpose - Purpose of the file (e.g., "fine-tune")

Examples

{:ok, file} = MistralClient.upload_file("./data.jsonl", "fine-tune")