
class OpenAI::Client
inherits Reference

Constructors

.new(auth_token : String)

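Examples
A minimal construction sketch, reading the API key from the OPENAI_API_KEY environment variable as the method examples below do:

client = OpenAI::Client.new ENV["OPENAI_API_KEY"]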

Methods

#chat_completion(request : ChatCompletionRequest) : ChatCompletionResponse

Given a list of messages comprising a conversation, the model will return a response.

Arguments
  • request : ChatCompletionRequest - the chat completion request to send
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

req = OpenAI::ChatCompletionRequest.new(
  model: OpenAI::GPT3DOT5_TURBO,
  messages: [
    OpenAI::ChatCompletionMessage.new(
      role: OpenAI::ChatMessageRole::User,
      content: "Hello!"
    ),
  ]
)

puts client.chat_completion(req)
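
The example above prints the whole response object. Assuming ChatCompletionResponse mirrors the OpenAI API shape, with a choices array whose entries carry a message and its content (these accessor names are an assumption, not confirmed by this reference), the reply text could be read roughly like this:

# Hypothetical accessors: assumes ChatCompletionResponse mirrors the OpenAI API shape
response = client.chat_completion(req)
puts response.choices.first.message.content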

#completion(request : CompletionRequest) : CompletionResponse

Given a prompt, the model will return one or more predicted completions, along with the probabilities of alternative tokens at each position. Most developers should use the Chat Completions API to leverage the best and newest models.

Arguments
  • request : CompletionRequest - the completion request to send
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

completion_request = OpenAI::CompletionRequest.new(
  prompt: "Say this is a test",
  model: OpenAI::GPT3DOT5_TURBO_INSTRUCT
)

puts client.completion(completion_request)
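
As above, the example prints the whole response. Assuming CompletionResponse exposes a choices array whose entries carry the generated text (accessor names are an assumption), the first completion could be read like this:

# Hypothetical accessors: assumes CompletionResponse mirrors the OpenAI API shape
response = client.completion(completion_request)
puts response.choices.first.text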

#config : ClientConfig

Returns the ClientConfig used by this client.

#create_assistant(request : AssistantRequest) : Assistant

Create an assistant with a model and instructions.

Arguments
  • request : AssistantRequest - the assistant to create
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

assistant = OpenAI::AssistantRequest.new(
  name: "Spanish tutor",
  instructions: "You are a personal Spanish tutor, teach everyone Spanish!",
  model: OpenAI::GPT4_TURBO
)

response = client.create_assistant(assistant)
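
The returned Assistant is the object whose ID the assistant_id arguments elsewhere in this reference expect. Assuming it exposes that ID through an id accessor (an assumption, not confirmed here), it can be passed straight to retrieve_assistant or modify_assistant:

# Assumes the created Assistant exposes an id accessor (not confirmed by this reference)
assistant_id = response.id
fetched = client.retrieve_assistant(assistant_id)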

#create_embeddings(request : EmbeddingRequest) : EmbeddingResponse

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

Arguments
  • request : EmbeddingRequest - the embedding request to send
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

embedding_request = OpenAI::EmbeddingRequest.new(
  input: ["Your input string goes here"],
  model: OpenAI::SMALL_EMBEDDING_3,
)

response = client.create_embeddings(embedding_request)
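
Assuming EmbeddingResponse mirrors the OpenAI embeddings API shape, with a data array whose entries carry the embedding vector (accessor names are an assumption), the vector for the first input could be read like this:

# Hypothetical accessors: assumes EmbeddingResponse mirrors the OpenAI API shape
vector = response.data.first.embedding
puts vector.size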

#list_assistants(limit : Int32 | Nil = nil, order : String | Nil = nil, after : String | Nil = nil, before : String | Nil = nil) : AssistantsList

List assistants.

Arguments
  • limit : Int32 | Nil - maximum number of assistants to return
  • order : String | Nil - sort order, "asc" or "desc"
  • after : String | Nil - assistant_id to return results after
  • before : String | Nil - assistant_id to return results before
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

response = client.list_assistants(limit: 20, order: "desc")

#modify_assistant(assistant_id : String, request : AssistantRequest) : Assistant

Modify an existing assistant.

Arguments
  • assistant_id : String - ID of the assistant to modify
  • request : AssistantRequest - the updated assistant definition
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

assistant = OpenAI::AssistantRequest.new(
  name: "Spanish tutor",
  instructions: "You are a personal Spanish tutor, teach everyone Spanish!",
  model: OpenAI::GPT4_TURBO
)

response = client.modify_assistant(assistant_id, assistant)

#retrieve_assistant(assistant_id : String) : Assistant

Retrieve an already created assistant.

Arguments
  • assistant_id : String - ID of the assistant to retrieve
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]

response = client.retrieve_assistant(assistant_id)