diff --git a/Cargo.toml b/Cargo.toml
index 2075e09..8ba936b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,7 @@
 poise = "0.5.2"
 tracing = "0.1.37"
 tracing-subscriber = "0.3.16"
 tracing-futures = "0.2.5"
-openai = "1.0.0-alpha.5"
+openai = "1.0.0-alpha.6"
 rand = "0.8.5"
 [dependencies.songbird]
diff --git a/src/personality.rs b/src/personality.rs
index 09c3790..f1c5c2c 100644
--- a/src/personality.rs
+++ b/src/personality.rs
@@ -1,6 +1,6 @@
 use anyhow::{Context, Result};
 use openai::{
-    chat::{ChatCompletion, ChatCompletionRequestMessage},
+    chat::{ChatCompletion, ChatCompletionMessage},
     models::ModelID,
 };
 use rand::seq::SliceRandom;
@@ -55,24 +55,24 @@ pub async fn get_sassy_commentary(title: &str) -> Result<String> {
     [
         system
             .into_iter()
-            .map(|s| ChatCompletionRequestMessage {
+            .map(|s| ChatCompletionMessage {
                 role: openai::chat::ChatCompletionMessageRole::System,
                 content: String::from(s),
                 name: None,
             })
             .collect::<Vec<_>>(),
         vec![
-            ChatCompletionRequestMessage {
+            ChatCompletionMessage {
                 role: openai::chat::ChatCompletionMessageRole::User,
                 content: String::from(example_prompt),
                 name: None,
             },
-            ChatCompletionRequestMessage {
+            ChatCompletionMessage {
                 role: openai::chat::ChatCompletionMessageRole::Assistant,
                 content: String::from(example_response),
                 name: None,
             },
-            ChatCompletionRequestMessage {
+            ChatCompletionMessage {
                 role: openai::chat::ChatCompletionMessageRole::User,
                 content: prompt,
                 name: None,