Add AI-generated sassy commentary

Alex Page 2023-03-02 19:57:58 -05:00
parent 1342253a76
commit 9242d39f98
4 changed files with 89 additions and 7 deletions


@@ -10,6 +10,7 @@ poise = "0.5.2"
 tracing = "0.1.37"
 tracing-subscriber = "0.3.16"
 tracing-futures = "0.2.5"
+openai = "1.0.0-alpha.5"
 
 [dependencies.songbird]
 version = "0.3.0"
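Note: the openai crate needs an API key at runtime, and nothing in this diff configures one. A minimal sketch of what bot startup might include, assuming this crate version exposes openai::set_key and that the key lives in an OPENAI_KEY environment variable (both are assumptions, not shown in this commit):

// Hypothetical startup snippet, not part of this commit.
// Assumes openai::set_key exists in this crate version and that the key is
// supplied via the OPENAI_KEY environment variable; adapt to however the bot
// actually configures the crate.
use std::env;

fn configure_openai() {
    openai::set_key(env::var("OPENAI_KEY").expect("OPENAI_KEY must be set"));
}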


@@ -1,9 +1,9 @@
 use anyhow::{Context, Result};
 use poise::serenity_prelude::{EmbedMessageBuilding, MessageBuilder};
 use songbird::create_player;
-use tracing::debug;
+use tracing::{debug, log::warn};
 
-use crate::{CommandContext, Error};
+use crate::{openai, CommandContext, Error};
 
 #[poise::command(slash_command)]
 pub async fn join(ctx: CommandContext<'_>) -> Result<(), Error> {
@@ -91,11 +91,22 @@ pub async fn play(
         .title
         .clone()
         .unwrap_or(String::from("This video"));
-    let msg = MessageBuilder::new()
-        .push("Now playing: ")
-        .push_named_link(title, url)
-        .build();
-    ctx.say(msg).await?;
+    let mut msg = MessageBuilder::new();
+
+    // Optional sassy commentary!
+    match openai::get_sassy_commentary(&title).await {
+        Ok(commentary) => {
+            msg.push_line(&commentary).push_line("");
+        }
+        Err(e) => {
+            warn!("Failed to get sassy commentary for \"{title}\": {e}");
+        }
+    };
+
+    msg.push_bold("Now playing: ").push_named_link(title, url);
+
+    ctx.say(msg.build()).await?;
 
     let (audio, track_handle) = create_player(source);
     let mut currently_playing = ctx.data().currently_playing.lock();


@@ -1,6 +1,7 @@
 #![warn(clippy::all)]
 
 mod commands;
+mod openai;
 
 use commands::*;

src/openai.rs (new file, 69 lines added)

@@ -0,0 +1,69 @@
use anyhow::{Context, Result};
use openai::{
    chat::{ChatCompletion, ChatCompletionRequestMessage},
    models::ModelID,
};
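
/// Asks the OpenAI chat API for a short, reluctantly polite remark about the
/// requested track, delivered in the voice of a grumpy feline DJ. Returns an
/// error if the request fails or the response contains no choices.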
pub async fn get_sassy_commentary(title: &str) -> Result<String> {
    // System messages that establish the DJ persona.
    let system = [
        "You are a grumpy talking feline DJ who is harshly critical of music requests, but whose job depends on being kind to patrons.",
        "Any song you are requested to play, you are not a fan of, but must reluctantly play.",
        "When responding, be sure to include a mention of some element of the song itself.",
        "Be concise, but don't forget that you can't upset anyone.",
        "Bonus points for cat puns.",
    ];

    // One-shot example exchange that steers the tone of the reply.
    let example_prompt = "Play \"Undertale - Megalovania\"";
    let example_response = "Ugh, really? You've got to be kitten me. I suppose I can play \
        Megalovania for you, but don't expect me to be purring with delight about it. The melody is a bit \
        cattywampus for my taste, but I'll concede that it has some clawsome beats. Enjoy your tune, and paws crossed that \
        it doesn't have me hissing by the end of it.";

    // The actual request for the title being played.
    let prompt = format!("Play \"{title}\"");
    // Assemble the conversation: persona, example exchange, then the real request.
    let completion = ChatCompletion::builder(
        ModelID::Gpt3_5Turbo,
        [
            system
                .into_iter()
                .map(|s| ChatCompletionRequestMessage {
                    role: openai::chat::ChatCompletionMessageRole::System,
                    content: String::from(s),
                    name: None,
                })
                .collect::<Vec<_>>(),
            vec![
                ChatCompletionRequestMessage {
                    role: openai::chat::ChatCompletionMessageRole::User,
                    content: String::from(example_prompt),
                    name: None,
                },
                ChatCompletionRequestMessage {
                    role: openai::chat::ChatCompletionMessageRole::Assistant,
                    content: String::from(example_response),
                    name: None,
                },
                ChatCompletionRequestMessage {
                    role: openai::chat::ChatCompletionMessageRole::User,
                    content: prompt,
                    name: None,
                },
            ],
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>(),
    )
    .max_tokens(2048_u64)
    .create()
    // `create()` yields a nested Result, hence the double `?`.
    .await??;

    // Return the assistant's reply from the first choice.
    Ok(completion
        .choices
        .first()
        .context("No choices")?
        .message
        .content
        .clone())
}
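
For context, a quick way to exercise this helper from inside the crate might look like the sketch below. It assumes tokio's test macro is available (the bot already depends on tokio through poise and songbird) and that the OpenAI API key has been configured for the openai crate elsewhere; none of this is part of the commit.

// Hypothetical smoke test, e.g. appended to src/openai.rs; not part of this commit.
// Assumes the OpenAI API key is already configured for the openai crate and that
// network access is available when the test runs.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn sassy_commentary_smoke_test() -> Result<()> {
        let commentary = get_sassy_commentary("Daft Punk - Around the World").await?;
        assert!(!commentary.is_empty());
        println!("{commentary}");
        Ok(())
    }
}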