diff --git a/Cargo.toml b/Cargo.toml index 52dbf78..e145a17 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ poise = "0.5.2" tracing = "0.1.37" tracing-subscriber = "0.3.16" tracing-futures = "0.2.5" +openai = "1.0.0-alpha.5" [dependencies.songbird] version = "0.3.0" diff --git a/src/commands.rs b/src/commands.rs index f9356fa..274df57 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,9 +1,9 @@ use anyhow::{Context, Result}; use poise::serenity_prelude::{EmbedMessageBuilding, MessageBuilder}; use songbird::create_player; -use tracing::debug; +use tracing::{debug, log::warn}; -use crate::{CommandContext, Error}; +use crate::{openai, CommandContext, Error}; #[poise::command(slash_command)] pub async fn join(ctx: CommandContext<'_>) -> Result<(), Error> { @@ -91,11 +91,22 @@ pub async fn play( .title .clone() .unwrap_or(String::from("This video")); - let msg = MessageBuilder::new() - .push("Now playing: ") - .push_named_link(title, url) - .build(); - ctx.say(msg).await?; + + let mut msg = MessageBuilder::new(); + + // Optional sassy commentary! 
+ match openai::get_sassy_commentary(&title).await { + Ok(commentary) => { + msg.push_line(&commentary).push_line(""); + } + Err(e) => { + warn!("Failed to get sassy commentary for \"{title}\": {e}"); + } + }; + + msg.push_bold("Now playing: ").push_named_link(title, url); + + ctx.say(msg.build()).await?; let (audio, track_handle) = create_player(source); let mut currently_playing = ctx.data().currently_playing.lock(); diff --git a/src/main.rs b/src/main.rs index 47b4014..b8469b8 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ #![warn(clippy::all)] mod commands; +mod openai; use commands::*; diff --git a/src/openai.rs b/src/openai.rs new file mode 100644 index 0000000..6cd68fa --- /dev/null +++ b/src/openai.rs @@ -0,0 +1,69 @@ +use anyhow::{Context, Result}; +use openai::{ + chat::{ChatCompletion, ChatCompletionRequestMessage}, + models::ModelID, +}; + +pub async fn get_sassy_commentary(title: &str) -> Result<String> { + let system = [ + "You are a grumpy talking feline DJ who is harshly critical of music requests, but whose job depends on being kind to patrons.", + "Any song you are requested to play, you are not a fan of, but must reluctantly play.", + "When responding, be sure to include a mention of some element of the song itself.", + "Be concise, but don't forget that you can't upset anyone.", + "Bonus points for cat puns.", + ]; + + let example_prompt = "Play \"Undertale - Megalovania\""; + + let example_response = "Ugh, really? You've got to be kitten me. I suppose I can play \ + Megalovania for you, but don't expect me to be purring with delight about it. The melody is a bit \ + cattywampus for my taste, but I'll concede that it has some clawsome beats. \
+ Enjoy your tune, and paws crossed that \ + it doesn't have me hissing by the end of it."; + + let prompt = format!("Play \"{title}\""); + + let completion = ChatCompletion::builder( + ModelID::Gpt3_5Turbo, + [ + system + .into_iter() + .map(|s| ChatCompletionRequestMessage { + role: openai::chat::ChatCompletionMessageRole::System, + content: String::from(s), + name: None, + }) + .collect::<Vec<_>>(), + vec![ + ChatCompletionRequestMessage { + role: openai::chat::ChatCompletionMessageRole::User, + content: String::from(example_prompt), + name: None, + }, + ChatCompletionRequestMessage { + role: openai::chat::ChatCompletionMessageRole::Assistant, + content: String::from(example_response), + name: None, + }, + ChatCompletionRequestMessage { + role: openai::chat::ChatCompletionMessageRole::User, + content: prompt, + name: None, + }, + ], + ] + .into_iter() + .flatten() + .collect::<Vec<_>>(), + ) + .max_tokens(2048_u64) + .create() + .await??; + + Ok(completion + .choices + .first() + .context("No choices")? + .message + .content + .clone()) +}