HyperLLM.Chat (hyper_llm v0.1.0)
HyperLLM.Chat provides a single interface for interacting with LLM providers. The interface follows the OpenAI chat completion API: https://platform.openai.com/docs/api-reference/chat
Example
A LiveView that sends messages to the chat and updates it with the completion response.
defmodule ChatLive do
  use Phoenix.LiveView

  def mount(_params, _session, socket) do
    # Start a new chat when the LiveView mounts.
    {:ok,
     socket
     |> assign(chat: HyperLLM.Chat.start(model: "gpt-4o-mini"))}
  end

  def handle_event("send_message", %{"message" => message}, socket) do
    # Append the user's message, then request a completion asynchronously.
    chat = HyperLLM.Chat.append(socket.assigns.chat, message)
    send(self(), :chat_completion)
    {:noreply, socket |> assign(chat: chat)}
  end

  def handle_info(:chat_completion, socket) do
    # Append the provider's response to the chat.
    with {:ok, response} <- HyperLLM.Chat.completion(socket.assigns.chat) do
      chat = HyperLLM.Chat.append(socket.assigns.chat, response)
      {:noreply, socket |> assign(chat: chat)}
    end
  end
end
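The same flow works outside of LiveView, from any process. Below is a minimal sketch, assuming provider credentials (e.g. an OpenAI API key) are already configured for the application; the {:error, reason} branch is an assumption for illustration, while the {:ok, response} shape follows the example above.

# Start a chat, append a user message, and request a completion.
chat =
  HyperLLM.Chat.start(model: "gpt-4o-mini")
  |> HyperLLM.Chat.append("What is the capital of France?")

case HyperLLM.Chat.completion(chat) do
  {:ok, response} ->
    # Fold the response back into the chat for the next turn.
    chat = HyperLLM.Chat.append(chat, response)
    IO.inspect(chat.messages)

  {:error, reason} ->
    # Error shape is an assumption; handle provider failures here.
    IO.inspect(reason, label: "completion failed")
end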
Summary
Functions
Append a message to the chat as a user.
Append a message to the chat with the given role.
Start a new chat.
Functions
append/2

@spec append(t(), HyperLLM.Chat.Message.t()) :: t()
@spec append(t(), [HyperLLM.Chat.Message.t()]) :: t()

Append a message to the chat as a user.

Examples
iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
iex> HyperLLM.Chat.append(chat, "Hello")
%HyperLLM.Chat{
messages: [
%HyperLLM.Chat.Message{role: :user, content: "Hello"}
],
provider: HyperLLM.Provider.OpenAI,
config: [model: "gpt-4o-mini"]
}
You can also append a list of messages to the chat.
iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
iex> HyperLLM.Chat.append(chat, ["Hello", "World"])
%HyperLLM.Chat{
messages: [
%HyperLLM.Chat.Message{role: :user, content: "Hello"},
%HyperLLM.Chat.Message{role: :user, content: "World"}
],
provider: HyperLLM.Provider.OpenAI,
config: [model: "gpt-4o-mini"]
}
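Per the specs above, append/2 also accepts HyperLLM.Chat.Message structs (or a list of them) directly. A minimal sketch, assuming the struct fields visible in the results above (:role and :content) are enough to build a message:

chat = HyperLLM.Chat.start(model: "gpt-4o-mini")

# Append a pre-built message struct instead of a plain string.
# Only the :role and :content fields shown in the doctests are assumed here.
message = %HyperLLM.Chat.Message{role: :user, content: "Hello"}
chat = HyperLLM.Chat.append(chat, message)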
append/3

Append a message to the chat with the given role.
Example
iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
iex> HyperLLM.Chat.append(chat, :developer, "You are a helpful assistant.")
%HyperLLM.Chat{
messages: [
%HyperLLM.Chat.Message{
role: :developer,
content: "You are a helpful assistant."
}
],
provider: HyperLLM.Provider.OpenAI,
config: [model: "gpt-4o-mini"]
}
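A common use of append/3 is to seed the chat with instructions before appending user messages. A sketch of that pattern; the completion call and its {:ok, response} shape follow the LiveView example above.

# Seed the conversation with developer instructions, then add a user message.
chat =
  HyperLLM.Chat.start(model: "gpt-4o-mini")
  |> HyperLLM.Chat.append(:developer, "You are a helpful assistant.")
  |> HyperLLM.Chat.append("Summarize Elixir in one sentence.")

# Request a completion and fold the response back into the chat.
{:ok, response} = HyperLLM.Chat.completion(chat)
chat = HyperLLM.Chat.append(chat, response)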
start/1

Start a new chat.
Example
iex> HyperLLM.Chat.start(model: "gpt-4o-mini")
%HyperLLM.Chat{
messages: [],
provider: HyperLLM.Provider.OpenAI,
config: [model: "gpt-4o-mini"]
}