JULIA-OPENAI-API
Simon-Pierre Boucher
2024-09-14
In [1]:
using JSON
using HTTP
"""
    call_openai_api(messages; model="gpt-4", temperature=1.0, max_tokens=2048,
                    top_p=1.0, frequency_penalty=0.0, presence_penalty=0.0)

Send a chat-completion request to the OpenAI API and return the parsed JSON
response as a `Dict`.

# Arguments
- `messages`: conversation history; each element is a `Dict` with at least
  `"role"` and `"content"` keys, as required by the chat-completions endpoint.

# Keywords
- `model`, `temperature`, `max_tokens`, `top_p`, `frequency_penalty`,
  `presence_penalty`: forwarded verbatim in the request body.

# Throws
- `ErrorException` when `OPENAI_API_KEY` is unset or the API returns a
  non-200 status.
"""
function call_openai_api(
    messages::AbstractVector{<:AbstractDict}; # relaxed (backward-compatible) element type
    model::String = "gpt-4",
    temperature::Float64 = 1.0,
    max_tokens::Int = 2048,
    top_p::Float64 = 1.0,
    frequency_penalty::Float64 = 0.0,
    presence_penalty::Float64 = 0.0
)
    # Read the API key from the environment; never hard-code credentials.
    api_key = get(ENV, "OPENAI_API_KEY", "")
    if isempty(api_key)
        error("API key not found in environment variables. Please set OPENAI_API_KEY.")
    end
    url = "https://api.openai.com/v1/chat/completions"
    headers = [
        "Content-Type" => "application/json",
        "Authorization" => "Bearer $api_key"
    ]
    # Build the request body expected by the chat-completions endpoint.
    data = Dict(
        "model" => model,
        "messages" => messages,
        "temperature" => temperature,
        "max_tokens" => max_tokens,
        "top_p" => top_p,
        "frequency_penalty" => frequency_penalty,
        "presence_penalty" => presence_penalty
    )
    json_data = JSON.json(data)
    # HTTP.post throws HTTP.StatusError on non-2xx responses by default, which
    # made the status check below unreachable. Disable that so we can surface
    # the API's error body in our own message instead.
    response = HTTP.post(url, headers; body = json_data, status_exception = false)
    # Fail loudly with the server's status and body on any non-200 reply.
    if response.status != 200
        error("API request failed: $(response.status) - $(String(response.body))")
    end
    # Parse the JSON payload into a Dict and hand it back to the caller.
    result = JSON.parse(String(response.body))
    return result
end
Out[1]:
In [2]:
# Example conversation: a system prompt followed by a single user request.
messages = Dict{String, Any}[
    Dict("role" => "system", "content" => "You are an assistant"),
    Dict("role" => "user", "content" => "Give me a julia code for snake game"),
]
Out[2]:
In [3]:
# Query gpt-4 and print the assistant's reply.
response = call_openai_api(
    messages;
    model = "gpt-4",
    max_tokens = 2048,
    temperature = 1.0,
    top_p = 1.0,
    presence_penalty = 0.0,
    frequency_penalty = 0.0
)
# First choice holds the assistant message.
assistant_reply = response["choices"][1]["message"]["content"]
println(assistant_reply)
In [4]:
# Query gpt-4o and print the assistant's reply.
response = call_openai_api(
    messages;
    model = "gpt-4o",
    max_tokens = 2048,
    temperature = 1.0,
    top_p = 1.0,
    presence_penalty = 0.0,
    frequency_penalty = 0.0
)
# First choice holds the assistant message.
assistant_reply = response["choices"][1]["message"]["content"]
println(assistant_reply)
In [5]:
# Query gpt-4o-mini and print the assistant's reply.
response = call_openai_api(
    messages;
    model = "gpt-4o-mini",
    max_tokens = 2048,
    temperature = 1.0,
    top_p = 1.0,
    presence_penalty = 0.0,
    frequency_penalty = 0.0
)
# First choice holds the assistant message.
assistant_reply = response["choices"][1]["message"]["content"]
println(assistant_reply)
In [6]:
# Query gpt-4-turbo and print the assistant's reply.
response = call_openai_api(
    messages;
    model = "gpt-4-turbo",
    max_tokens = 2048,
    temperature = 1.0,
    top_p = 1.0,
    presence_penalty = 0.0,
    frequency_penalty = 0.0
)
# First choice holds the assistant message.
assistant_reply = response["choices"][1]["message"]["content"]
println(assistant_reply)
In [7]:
# Query gpt-3.5-turbo and print the assistant's reply.
response = call_openai_api(
    messages;
    model = "gpt-3.5-turbo",
    max_tokens = 2048,
    temperature = 1.0,
    top_p = 1.0,
    presence_penalty = 0.0,
    frequency_penalty = 0.0
)
# First choice holds the assistant message.
assistant_reply = response["choices"][1]["message"]["content"]
println(assistant_reply)