class OpenAI::Client
inherits Reference
Constructors
Methods
#chat_completion(request : ChatCompletionRequest) : ChatCompletionResponse
Given a list of messages comprising a conversation, the model will return a response.
Arguments
- request : OpenAI::ChatCompletionRequest - Request body to create a completion
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
req = OpenAI::ChatCompletionRequest.new(
model: OpenAI::GPT3DOT5_TURBO,
messages: [
OpenAI::ChatCompletionMessage.new(
role: OpenAI::ChatMessageRole::User,
content: "Hello!"
),
]
)
puts client.chat_completion(req)
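If you only need the reply text rather than the whole response object, the snippet below is a minimal sketch that assumes ChatCompletionResponse mirrors the OpenAI chat completions payload, i.e. a choices array whose entries carry a message with content:

# Sketch only: assumes `choices` and `message.content` exist on the response,
# mirroring the OpenAI chat completions JSON shape.
response = client.chat_completion(req)
puts response.choices.first.message.content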
#completion(request : CompletionRequest) : CompletionResponse
Given a prompt, the model will return one or more predicted completions along with the probabilities of alternative tokens at each position. Most developers should use the Chat Completions API to leverage the best and newest models.
Arguments
- request : OpenAI::CompletionRequest - Request body to create a completion
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
completion_request = OpenAI::CompletionRequest.new(
prompt: "Say this is a test",
model: OpenAI::GPT3DOT5_TURBO_INSTRUCT
)
puts client.completion(completion_request)
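To print just the generated text, here is a hedged sketch assuming CompletionResponse exposes a choices array whose entries carry a text field, as in the OpenAI completions payload:

# Sketch only: assumes `choices` and a `text` string on each choice.
response = client.completion(completion_request)
puts response.choices.first.text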
#create_assistant(request : AssistantRequest) : Assistant
Create an assistant with a model and instructions.
Arguments
- request : OpenAI::AssistantRequest - Request body to create an assistant
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
assistant = OpenAI::AssistantRequest.new(
name: "Spanish tutor",
instructions: "You are a personal spanish tutor, teach everyone spanish!",
model: OpenAI::GPT4_TURBO
)
response = client.create_assistant(assistant)
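The returned Assistant is what you typically hold on to for later calls such as #modify_assistant below; this sketch assumes the object exposes an id field, as the OpenAI assistants payload does:

# Sketch only: assumes Assistant exposes an `id` string.
puts response.id
updated = client.modify_assistant(response.id, assistant)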
#create_embeddings(request : EmbeddingRequest) : EmbeddingResponse
Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.
Arguments
- request : EmbeddingRequest - Request containing text or features to embed
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
embedding_request = OpenAI::EmbeddingRequest.new(
input: ["Your input string goes here"],
model: OpenAI::SMALL_EMBEDDING_3,
)
response = client.create_embeddings(embedding_request)
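To get at the vector itself, a sketch assuming EmbeddingResponse mirrors the OpenAI embeddings payload, i.e. a data array whose entries carry an embedding array of floats:

# Sketch only: assumes `data` on the response and an `embedding` array on each entry.
vector = response.data.first.embedding
puts vector.size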
#list_assistants(limit : Int32 | Nil = nil, order : String | Nil = nil, after : String | Nil = nil, before : String | Nil = nil) : AssistantsList
List assistants
Arguments
- limit : Int32 | Nil - How many assistants to return
- order : String | Nil - asc or desc
- after : String | Nil - assistant_id to return after
- before : String | Nil - assistant_id to return before
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
response = client.list_assistants(limit: 10)
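To walk the returned page, a sketch assuming AssistantsList wraps the assistants in a data array whose entries expose id and name, as the OpenAI list payload does:

# Sketch only: assumes `data` on AssistantsList and `id`/`name` on each Assistant.
response.data.each do |assistant|
  puts "#{assistant.id}: #{assistant.name}"
end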
#modify_assistant(assistant_id : String, request : AssistantRequest) : Assistant
Modify an existing assistant
Arguments
- assistant_id : String - Existing assistant id
- request : OpenAI::AssistantRequest - Request body to update existing assistant
Examples
client = OpenAI::Client.new ENV["OPENAI_API_KEY"]
assistant = OpenAI::AssistantRequest.new(
name: "Spanish tutor",
instructions: "You are a personal spanish tutor, teach everyone spanish!",
model: OpenAI::GPT4_TURBO
)
response = client.modify_assistant(assistant_id, assistant)