Initial recommit to poise

Tom 2022-02-03 14:55:56 +01:00
commit b6a3bea2c4
18 changed files with 3952 additions and 0 deletions

1
.env Normal file

@@ -0,0 +1 @@
DATABASE_URL=sqlite:data/database.db

3
.gitignore vendored Normal file

@@ -0,0 +1,3 @@
/target
/data/config
/.vscode

2496
Cargo.lock generated Normal file

File diff suppressed because it is too large

24
Cargo.toml Normal file

@@ -0,0 +1,24 @@
[package]
name = "ol_rusty"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
cached = "0.26"
chrono = "0.4"
config = "0.11"
futures = { version = "0.3.13", default-features = false }
glob = "0.3"
lazy_static = "1"
log = "0.4.14"
simplelog = "0.11.1"
poise = { git = "https://github.com/kangalioo/poise", branch = "develop", features = ["collector"] }
regex = { version = "1" }
reqwest = "0.11"
sqlx = { version = "0.5", features = ["runtime-tokio-rustls", "sqlite", "offline"] }
sea-orm = { version = "^0", features = [ "sqlx-sqlite", "runtime-tokio-rustls", "macros" ], default-features = false }
serde_json = "1"
serde = "1"
tokio = { version = "1.4.0", features = ["rt-multi-thread", "process"] }

BIN
data/database.db Normal file

Binary file not shown.


@@ -0,0 +1,47 @@
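-- Initial schema: Locations and Series lookup tables, the Sessions and Events tables that reference them, and an Updates bookkeeping table.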
CREATE TABLE IF NOT EXISTS Locations (
id TEXT NOT NULL,
name TEXT NOT NULL,
description TEXT,
PRIMARY KEY(id)
);
CREATE TABLE IF NOT EXISTS Series (
id TEXT NOT NULL,
name TEXT NOT NULL,
description TEXT,
PRIMARY KEY(id)
);
CREATE TABLE IF NOT EXISTS Sessions (
id TEXT NOT NULL,
name TEXT NOT NULL,
start INTEGER NOT NULL,
duration_minutes INTEGER NOT NULL,
location TEXT,
location_name TEXT,
series TEXT NOT NULL,
series_name TEXT NOT NULL,
PRIMARY KEY(id),
FOREIGN KEY(location) REFERENCES locations(id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS Events (
id TEXT NOT NULL,
start_date TEXT NOT NULL,
end_date TEXT,
description TEXT NOT NULL,
series TEXT NOT NULL,
series_name TEXT NOT NULL,
location TEXT,
location_name TEXT,
FOREIGN KEY(location) REFERENCES locations(id) ON DELETE CASCADE,
PRIMARY KEY(id),
FOREIGN KEY(series) REFERENCES series(id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS Updates (
window_start INTEGER NOT NULL,
time INTEGER NOT NULL,
date TEXT,
PRIMARY KEY(window_start)
);

152
src/commands/invites.rs Normal file

@@ -0,0 +1,152 @@
use crate::{Context, Error};
use serde::Deserialize;
// pub mod invites;
#[derive(Debug, Deserialize)]
struct InviteUser {
// #[serde(rename = "_id")]
// id: String,
#[serde(rename = "showUsername")]
name: String,
}
#[derive(Debug, Deserialize)]
struct InviteInfo {
// #[serde(rename = "_id")]
// id: String,
#[serde(rename = "fromUser")]
from_user: InviteUser,
#[serde(rename = "usedBy")]
used_by: Option<InviteUser>,
token: String,
high_tier: bool,
custom: bool,
#[serde(rename = "isUsed")]
used: bool,
}
async fn get_invite(invite: &str) -> Result<Option<InviteInfo>, Error> {
let client = reqwest::Client::new();
let req = client
.get(format!(
"https://api.morningstreams.com/api/invites/{}",
invite
))
.send()
.await?;
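// A 404 means the invite does not exist; any other non-200 status is also treated as "not found".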
if req.status().as_u16() == 404 {
// debug!("Error 404 on getting the invites, this just means invite not found");
Ok(None)
} else if req.status().as_u16() == 200 {
let data: InviteInfo = req.json().await?;
// debug!("invite data: {:#?}", data);
Ok(Some(data))
} else {
Ok(None)
}
}
#[derive(Debug, poise::SlashChoiceParameter)]
pub enum InviteActions {
#[name = "Does this invite exist?"]
Exist,
#[name = "Is this invite valid?"]
Valid,
// If no name is given, the variant name is used
#[name = "Get invite information"]
Info,
#[name = "Which user used the invite?"]
User,
}
#[poise::command(slash_command)]
pub async fn invites(
ctx: Context<'_>,
#[description = "Action to perform with invite"] action: InviteActions,
#[description = "The invite to check"] invite: String,
) -> Result<(), Error> {
let invite_res = get_invite(&invite).await;
match invite_res {
Err(e) => {
ctx.say(format!("Error getting invite: {}", e)).await?;
}
Ok(invite_opt) => {
ctx.send(|b| {
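// Build the reply in place: each action either sets plain text content or attaches an embed to the builder.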
match action {
InviteActions::Exist => {
b.content(match invite_opt {
None => "Invite not found",
Some(_) => "Invite found",
});
}
InviteActions::Valid => {
b.content(match invite_opt {
None => "Invite not found",
Some(i) => {
if !i.used {
"✅ Invite is valid and not used yet"
} else {
"❌ Invite is valid but is already used"
}
}
});
}
InviteActions::Info => {
b.content(match invite_opt {
None => "Invite not found".to_owned(),
Some(i) => {
if !i.used {
"Ooh this invite is still up for grabs, go quickly claim it!"
.to_owned()
} else {
match i.used_by {
None => {
"Invite is already used but can not find the user..."
.to_owned()
}
Some(user) => format!(
"This invite has already been used by {}",
user.name
),
}
}
}
});
}
InviteActions::User => {
match invite_opt {
None => b.content("Invite not found"),
Some(i) => {
b.embed(|e| {
e.title("Invite information");
e.description(format!("Invite: {}", i.token));
if i.used {
e.field("Used", "Yes", true);
if let Some(user) = i.used_by {
e.field("Used by", user.name, true);
}
} else {
e.field("Used", "No", true);
}
e.field(
"High tier",
if i.high_tier { "Yes" } else { "No" },
true,
);
e.field("Custom", if i.custom { "Yes" } else { "No" }, true);
e.field("Given out by", i.from_user.name, true);
e
});
b.content("Embed:")
}
};
}
}
b
})
.await?;
}
};
Ok(())
}

50
src/commands/mod.rs Normal file

@@ -0,0 +1,50 @@
use crate::{Context, Error};
use poise::serenity_prelude as serenity;
pub mod invites;
pub mod planning;
pub mod schedule;
pub mod utils;
/// Boop the bot!
#[poise::command(prefix_command, track_edits, slash_command)]
pub async fn boop(ctx: Context<'_>) -> Result<(), Error> {
let uuid_boop = ctx.id();
ctx.send(|m| {
m.content("I want some boops!").components(|c| {
c.create_action_row(|ar| {
ar.create_button(|b| {
b.style(serenity::ButtonStyle::Primary)
.label("Boop me!")
.custom_id(uuid_boop)
})
})
})
})
.await?;
let mut boop_count = 0;
while let Some(mci) = serenity::CollectComponentInteraction::new(ctx.discord())
.author_id(ctx.author().id)
.channel_id(ctx.channel_id())
.timeout(std::time::Duration::from_secs(120))
.filter(move |mci| mci.data.custom_id == uuid_boop.to_string())
.await
{
boop_count += 1;
let mut msg = mci.message.clone();
msg.edit(ctx.discord(), |m| {
m.content(format!("Boop count: {}", boop_count))
})
.await?;
mci.create_interaction_response(ctx.discord(), |ir| {
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
})
.await?;
}
Ok(())
}

249
src/commands/planning.rs Normal file

@@ -0,0 +1,249 @@
use super::utils;
use crate::{Context, Error};
use futures::{Stream, StreamExt};
#[allow(dead_code)]
async fn autocomplete_series(_ctx: Context<'_>, partial: String) -> impl Stream<Item = String> {
futures::stream::iter(&[
"Formula 1",
"Formula",
"MotoGP",
"Moto",
"IndyCar",
"World Rally Championship",
])
.filter(move |name| futures::future::ready(name.starts_with(&partial)))
.map(|name| name.to_string())
}
#[allow(dead_code)]
async fn autocomplete_session(_ctx: Context<'_>, partial: String) -> impl Stream<Item = String> {
futures::stream::iter(&[
"Race",
"Qualifying",
"Free practice 3",
"Free practice 2",
"Free practice 1",
"Sprint race",
])
.filter(move |name| futures::future::ready(name.starts_with(&partial)))
.map(|name| name.to_string())
}
/// Get a list of supported racing series
#[poise::command(slash_command)]
pub async fn series(
ctx: Context<'_>,
#[description = "Filter for series (These are suggestions)"]
#[autocomplete = "autocomplete_series"]
serie: Option<String>,
) -> Result<(), Error> {
let filter: String = match serie {
None => "%".to_string(),
Some(s) if s.is_empty() => "%".to_string(),
Some(s) => {
format!("%{}%", s)
}
};
let query = sqlx::query!("SELECT name FROM Series WHERE name like ?", filter)
.fetch_many(&ctx.data().database);
let serie_list = query
.filter_map(|result| async move { result.ok()?.right() })
.map(|record| record.name)
.collect()
.await;
let pages = utils::paginator(serie_list, 1000, ", ".to_string());
print!("{:?}", pages);
utils::paginate_string(ctx, pages).await?;
Ok(())
}
#[derive(Debug, poise::SlashChoiceParameter)]
pub enum Timeframe {
#[name = "Currently happening"]
Current,
#[name = "Currently happening or starting in the future"]
Future,
#[name = "Currently happening or already ended"]
Past,
#[name = "Everything"]
Everything,
}
#[derive(sqlx::FromRow)]
struct Session {
name: String,
start: i64,
duration_minutes: i64,
#[sqlx(rename = "series_name")]
series: String,
#[sqlx(rename = "location_name")]
location: Option<String>,
}
impl Session {
fn to_string(&self) -> String {
match &self.location {
None => format!(
"<t:{}:R> - <t:{}:R> | **{}** `{}`",
self.start,
(self.start + 60 * self.duration_minutes),
self.series,
self.name
),
Some(l) => format!(
"<t:{}:R> - <t:{}:R> | **{}** `{}` @*{}*",
self.start,
(self.start + 60 * self.duration_minutes),
self.series,
self.name,
l
),
}
}
}
/// Get a list of racing sessions
#[poise::command(slash_command)]
pub async fn sessions(
ctx: Context<'_>,
#[description = "Filter for the session"] filter: Option<String>,
#[description = "Filter sessions for when they are/were happening, defaults to future"]
timeframe: Option<Timeframe>,
) -> Result<(), Error> {
let filter_f: String = match filter {
None => "%".to_string(),
Some(s) if s.is_empty() => "%".to_string(),
Some(s) => {
format!("%{}%", s)
}
};
let mut base_query: String = "SELECT name, start, duration_minutes, series_name, location_name FROM sessions WHERE (series_name LIKE ? or location_name like ? or name like ?)".to_string();
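// Append a time filter for the requested timeframe; "Current" keeps sessions whose start..end window contains now, and the default keeps anything that has not finished yet.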
let time_query = match timeframe {
Some(Timeframe::Everything) => " ORDER BY abs(julianday() - julianday(start, 'unixepoch')) ASC",
Some(Timeframe::Current) => " AND (DATETIME(start, 'unixepoch') <= DATETIME() AND DATETIME((start+60*duration_minutes), 'unixepoch') >= DATETIME()) ORDER BY start ASC",
Some(Timeframe::Past) => " AND DATETIME(start, 'unixepoch') < DATETIME() ORDER BY start DESC",
_ => " AND DATETIME((start+60*duration_minutes), 'unixepoch') > DATETIME() ORDER BY start ASC",
};
base_query.push_str(time_query);
let stream: Vec<Session> = sqlx::query_as::<_, Session>(&base_query)
.bind(&filter_f)
.bind(&filter_f)
.bind(&filter_f)
.fetch_all(&ctx.data().database)
.await?;
let sessions: Vec<String> = stream.iter().map(|s| s.to_string()).collect();
let pages = utils::paginator(sessions, 1900, "\n".to_string());
utils::paginate_string(ctx, pages).await?;
Ok(())
}
#[derive(sqlx::FromRow)]
struct Event {
// name: String,
#[sqlx(rename = "start_date")]
start: String,
#[sqlx(rename = "end_date")]
end: Option<String>,
description: String,
#[sqlx(rename = "series_name")]
series: String,
#[sqlx(rename = "location_name")]
location: Option<String>,
}
impl Event {
fn to_string(&self) -> String {
let mut result: String = match &self.end {
None => format!("`{}`", self.start),
Some(end) => format!("`{}` - `{}`", self.start, end),
};
result.push_str(&format!(" | **{}** {}", self.series, self.description));
if let Some(loc) = &self.location {
result.push_str(&format!("Location: {}", loc));
};
return result;
}
}
/// Get a list of racing events
#[poise::command(slash_command)]
pub async fn events(
ctx: Context<'_>,
#[description = "Filter for the session"] filter: Option<String>,
#[description = "Filter events for when they are/were happening, defaults to future"]
timeframe: Option<Timeframe>,
) -> Result<(), Error> {
let filter_f: String = match filter {
None => "%".to_string(),
Some(s) if s.is_empty() => "%".to_string(),
Some(s) => {
format!("%{}%", s)
}
};
let mut base_query: String = "SELECT start_date, end_date, description, series_name, location_name FROM events WHERE (series_name LIKE ? or location_name like ? or description like ?)".to_string();
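// Same idea for events: dates are stored as text and compared against SQLite's date()/julianday(), so the filter and ordering work on the raw strings.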
let time_query = match timeframe {
Some(Timeframe::Everything) => " ORDER BY abs(julianday() - julianday(start_date)) ASC",
Some(Timeframe::Current) => " AND ((end_date is NULL and start_date == date()) OR (start_date <= date() AND end_date >= date())) ORDER BY start_date ASC, end_date ASC",
Some(Timeframe::Past) => " AND JULIANDAY(start_date) - JULIANDAY(date()) <= 0 ORDER BY start_date DESC",
_ => " AND start_date >= DATE() ORDER BY start_date ASC, end_date ASC",
};
base_query.push_str(time_query);
let stream: Vec<Event> = sqlx::query_as::<_, Event>(&base_query)
.bind(&filter_f)
.bind(&filter_f)
.bind(&filter_f)
.fetch_all(&ctx.data().database)
.await?;
let events: Vec<String> = stream.iter().map(|e| e.to_string()).collect();
let pages = utils::paginator(events, 1900, "\n".to_string());
utils::paginate_string(ctx, pages).await?;
Ok(())
}
/// Overall information about racing events and sessions
#[poise::command(slash_command)]
pub async fn planning(ctx: Context<'_>) -> Result<(), Error> {
ctx.say("Hey this is a parent command, how'd you get here?")
.await?;
Ok(())
}
pub fn get_command() -> poise::Command<
super::super::Data,
Box<(dyn std::error::Error + Sync + std::marker::Send + 'static)>,
> {
poise::Command {
subcommands: vec![
series(),
sessions(),
events(),
// Let's make sure poise isn't confused by the duplicate names!
// planning(),
],
..planning()
}
}

409
src/commands/schedule.rs Normal file

@@ -0,0 +1,409 @@
use crate::{Context, Error};
use cached::proc_macro::cached;
use futures::{Stream, StreamExt};
use log::*;
use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
use poise::serenity_prelude::CreateEmbed;
use reqwest::header::AUTHORIZATION;
use reqwest::Error as reqError;
use serde::Deserialize;
use std::cmp::Ordering::{Greater, Less};
#[derive(Debug, Clone)]
struct Event {
name: String,
series: String,
_lower_series: String,
session: String,
_lower_session: String,
date: DateTime<Utc>,
}
#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct F1Data {
#[serde(rename = "seasonContext")]
season_context: SeasonContext,
race: Race,
}
#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct SeasonContext {
timetables: Vec<TimeTable>,
}
#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct Race {
#[serde(rename = "meetingOfficialName")]
name: String,
#[serde(rename = "meetingCountryName")]
country: String,
}
#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct TimeTable {
state: String,
session: String,
#[serde(rename = "gmtOffset")]
offset: String,
#[serde(rename = "startTime")]
start: String,
#[serde(rename = "endTime")]
end: String,
}
impl Event {
fn create(name: String, series: String, session: String, date: DateTime<Utc>) -> Event {
Event {
name,
_lower_series: series.to_ascii_lowercase(),
series,
_lower_session: session.to_ascii_lowercase(),
session,
date,
}
}
fn deref(&self) -> Event {
Event {
name: self.name.to_string(),
_lower_series: self._lower_series.to_string(),
series: self.series.to_string(),
_lower_session: self._lower_session.to_string(),
session: self.session.to_string(),
date: self.date.clone(),
}
}
fn check_series(&self, series: String) -> bool {
return self._lower_series.contains(&series);
}
fn check_session(&self, session: String) -> bool {
return self._lower_session.contains(&session);
}
#[allow(dead_code)]
fn check(&self, series: String, session: String) -> bool {
if self._lower_session.contains(&session) {
if self._lower_series.contains(&series) {
return true;
}
}
return false;
}
}
fn parse_time(time: String) -> Option<DateTime<Utc>> {
let tim = NaiveDateTime::parse_from_str(&*time, "%Y-%m-%dT%H:%M:%S.%fZ");
match tim {
Ok(t) => Some(Utc.from_utc_datetime(&t)),
Err(e) => {
warn!("Error on parsing time: {}", e);
None
}
}
}
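// The F1 timetable gives a local start time plus a GMT offset string; stitch them together and convert to UTC.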
fn parse_f1_time(mut time: String, offset: &String) -> Option<DateTime<Utc>> {
time.push_str(offset);
let tim = DateTime::parse_from_str(&time, "%Y-%m-%dT%H:%M:%S%:z");
match tim {
Ok(t) => Some(t.with_timezone(&Utc)),
Err(e) => {
warn!("Error on parsing time: {}", e);
None
}
}
}
// Use cached to cache the requests, don't repeatedly redo this call. Caches for 1 hour.
#[cached(time = 3600)]
async fn get_api_events() -> Vec<Event> {
let token = super::super::SETTINGS
.read()
.unwrap()
.get_table("morningstreams")
.unwrap()
.get("token")
.expect("Config error, please set the morningstreams[token] value")
.clone()
.into_str()
.expect("Config error, please make sure morningstreams[token] is a string");
let client = reqwest::Client::new();
let req = client
.get("https://api.morningstreams.com/api/events")
.header(AUTHORIZATION, token)
.send()
.await;
#[derive(Debug, Deserialize)]
struct Data {
category: String,
name: String,
session: String,
date: String,
}
let result: Option<Vec<Event>> = match req {
Err(e) => {
warn!("Error getting schedule {}", e);
None
}
Ok(req) => {
info!("Did MS event request status code {}", req.status().as_u16());
if req.status().as_u16() == 404 {
warn!("404 on getting MS events");
None
} else if req.status().as_u16() == 200 {
let data: Result<Vec<Data>, reqError> = req.json().await;
match data {
Err(e) => {
warn!("Error parsing morningstreams event: {}", e);
None
}
Ok(dat) => {
let mut result: Vec<Event> = Vec::new();
for d in dat {
let t = parse_time(d.date.to_string());
if let Some(tim) = t {
result.push(Event::create(
d.name.to_string(),
d.category.to_string(),
d.session.to_string(),
tim,
));
}
}
if !result.is_empty() {
result.sort_by(|a, b| a.date.cmp(&b.date));
Some(result)
} else {
None
}
}
}
} else {
None
}
}
};
match result {
None => {
return Vec::new();
}
Some(events) => return events,
}
}
#[allow(dead_code)]
#[cached(time = 3600)]
async fn get_f1_events() -> Option<Vec<Event>> {
let token = super::super::SETTINGS
.read()
.unwrap()
.get_table("morningstreams")
.unwrap()
.get("token")
.expect("Config error, please set the morningstreams[token] value")
.clone()
.into_str()
.expect("Config error, please make sure morningstreams[token] is a string");
let client = reqwest::Client::new();
let req = client
.get("https://api.morningstreams.com/api/events/f1/event-tracker")
.header(AUTHORIZATION, token)
.send()
.await;
let result: Option<Vec<Event>> = match req {
Err(e) => {
warn!("Error getting schedule {}", e);
None
}
Ok(req) => {
info!("Did MS F1 request status code {}", req.status().as_u16());
if req.status().as_u16() == 404 {
warn!("404 on getting F1 events");
return None;
} else if req.status().as_u16() == 200 {
let data: Result<F1Data, reqError> = req.json().await;
match data {
Err(e) => {
warn!("Error parsing morningstreams event: {}", e);
None
}
Ok(dat) => {
// return Some(dat);
let mut events: Vec<Event> = Vec::new();
// let mut sessions: Vec<F1Session> = Vec::new();
for ses in dat.season_context.timetables {
if let Some(start) = parse_f1_time(ses.start, &ses.offset) {
events.push(Event::create(
dat.race.name.to_string(),
"Formula 1".to_string(),
ses.session,
start,
))
}
}
return Some(events);
}
}
} else {
None
}
}
};
return result;
}
#[cached(size = 5, time = 3600)]
async fn filter_events(series: String, session: String) -> (Option<Event>, Option<Event>) {
let mut events: Vec<Event> = get_api_events().await;
if let Some(mut e) = get_f1_events().await {
events.append(&mut e);
}
if events.is_empty() {
return (None, None);
} else {
let mut next_event: Option<&Event> = None;
let mut previous_event: Option<&Event> = None;
let now = Utc::now();
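// Single pass over all events: remember the most recent one before now and the soonest one after now that match both filters.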
for e in &events {
if e.check_series(series.to_string()) && e.check_session(session.to_string()) {
match now.cmp(&e.date) {
// Now is greater (after) event
Greater => {
if let Some(p) = previous_event {
if p.date.cmp(&e.date) == Less {
previous_event = Some(e)
};
} else {
previous_event = Some(e);
}
}
Less => {
if let Some(f) = next_event {
if f.date.cmp(&e.date) == Greater {
next_event = Some(e)
}
} else {
next_event = Some(e);
}
}
_ => {
next_event = Some(e);
previous_event = Some(e);
}
};
}
}
let first: Option<Event> = match previous_event {
None => None,
Some(e) => Some(e.deref()),
};
let second: Option<Event> = match next_event {
None => None,
Some(e) => Some(e.deref()),
};
(first, second)
}
}
#[allow(dead_code)]
fn build_embed<'a>(event: Event, e: &'a mut CreateEmbed) -> &'a mut CreateEmbed {
e.title(format!("{} | {}", event.series, event.name));
// e.description(format!("{}", event.session));
e.field("Session", &event.session, true);
e.field(
"Starts in",
format!("<t:{}:R>", &event.date.timestamp()),
true,
);
e.field(
"Date and time",
format!("<t:{}>", &event.date.timestamp()),
true,
);
e.timestamp(event.date);
e
}
#[allow(dead_code)]
async fn autocomplete_series(_ctx: Context<'_>, partial: String) -> impl Stream<Item = String> {
futures::stream::iter(&["Formula 1", "MotoGP", "IndyCar"])
.filter(move |name| futures::future::ready(name.starts_with(&partial)))
.map(|name| name.to_string())
}
#[allow(dead_code)]
async fn autocomplete_session(_ctx: Context<'_>, partial: String) -> impl Stream<Item = String> {
futures::stream::iter(&[
"Race",
"Qualifying",
"Free practice 3",
"Free practice 2",
"Free practice 1",
"Sprint race",
])
.filter(move |name| futures::future::ready(name.starts_with(&partial)))
.map(|name| name.to_string())
}
#[poise::command(slash_command)]
pub async fn schedule(
ctx: Context<'_>,
#[description = "Which series to look for"]
#[autocomplete = "autocomplete_series"]
series: Option<String>,
#[description = "Which session to look for"]
#[autocomplete = "autocomplete_session"]
session: Option<String>,
) -> Result<(), Error> {
let serie: String = match series {
None => "".to_string(),
Some(ser) => {
if vec![
"f1".to_string(),
"formula 1".to_string(),
"formula1".to_string(),
]
.contains(&ser.to_ascii_lowercase())
{
"formula 1".to_string()
} else {
ser.to_ascii_lowercase()
}
}
};
let session: String = match session {
None => "".to_string(),
Some(s) => s.to_ascii_lowercase(),
};
// Get the events (This will hopefully be cached)
let (previous_event, next_event) = filter_events(serie, session.to_string()).await;
// Do the event sending thingy...
if let Some(e) = next_event {
ctx.send(|b| b.embed(|em| build_embed(e, em))).await?;
} else if let Some(e) = previous_event {
ctx.send(|b| {
b.embed(|em| build_embed(e, em));
b.content("No future events found, showing most recent")
})
.await?;
} else {
ctx.say("No future events found, showing most recent")
.await?;
};
Ok(())
}

114
src/commands/utils.rs Normal file

@@ -0,0 +1,114 @@
use std::vec;
use crate::{Context, Error};
use poise::serenity_prelude as serenity;
pub fn paginator(input: Vec<String>, chunk_size: usize, join_string: String) -> Vec<String> {
if input.is_empty() {
return vec![];
}
let mut result: Vec<String> = vec![];
let mut part: String = "".to_string();
let filler = join_string.chars().count();
for i in input {
if part.is_empty() {
part = i;
} else if part.chars().count() + i.chars().count() + filler >= chunk_size {
// This item would overflow the current page: push the page and start a new one with this item.
result.push(part);
part = i;
} else {
part.push_str(&join_string);
part.push_str(&i);
}
}
result.push(part);
result
}
pub async fn paginate_string(ctx: Context<'_>, pages: Vec<String>) -> Result<(), Error> {
let uuid_command = ctx.id().to_string();
let page_count = pages.len();
match pages.len() {
0 => {
ctx.say("No data found :(").await?;
return Ok(());
}
1 => {
ctx.say(pages.get(0).unwrap()).await?;
return Ok(());
}
_ => {}
};
ctx.send(|m| {
m.content(format!(
"{}\n\nPage: {}/{}",
pages.get(0).unwrap(),
1,
page_count
))
.components(|c| {
c.create_action_row(|ar| {
ar.create_button(|b| {
b.style(serenity::ButtonStyle::Primary)
.label("Previous page")
.custom_id(format!("{}_previous", uuid_command))
});
ar.create_button(|b| {
b.style(serenity::ButtonStyle::Primary)
.label("Next page")
.custom_id(format!("{}_next", uuid_command))
});
ar.create_button(|b| {
b.style(serenity::ButtonStyle::Secondary)
.label("Reset")
.custom_id(format!("{}_close", uuid_command))
})
})
})
})
.await?;
// let interaction1 = if let ReplyHandle::Application { http, interaction } = msg.unwrap(){Some(interaction)} else {None};
// let interaction = interaction1.unwrap();
let mut page = 0;
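// Listen for button presses for up to 20 minutes; custom_ids are prefixed with this command's id so only buttons belonging to this invocation are handled.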
while let Some(mci) = serenity::CollectComponentInteraction::new(ctx.discord())
// .author_id(ctx.author().id)
.channel_id(ctx.channel_id())
.timeout(std::time::Duration::from_secs(1200))
// .filter(move |mci| mci.data.custom_id == uuid_command.to_string())
.await
{
if !mci.data.custom_id.contains(&uuid_command) {
continue;
}
if mci.data.custom_id.contains("_previous") {
if page == 0 {
page = page_count - 1;
} else {
page = (page - 1) % page_count;
}
} else if mci.data.custom_id.contains("_next") {
page = (page + 1) % page_count;
} else if mci.data.custom_id.contains("_close") {
page = 0;
}
let mut msg = mci.message.clone();
msg.edit(ctx.discord(), |m| {
m.content(format!(
"{}\n\nPage: {}/{}",
pages.get(page).unwrap(),
page + 1,
page_count
))
})
.await?;
mci.create_interaction_response(ctx.discord(), |ir| {
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
})
.await?;
}
Ok(())
}

52
src/entity/events.rs Normal file

@@ -0,0 +1,52 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "events")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub start_date: String,
pub end_date: Option<String>,
pub description: String,
pub series: String,
pub series_name: String,
pub location: Option<String>,
pub location_name: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::series::Entity",
from = "Column::Series",
to = "super::series::Column::Id",
on_update = "NoAction",
on_delete = "Cascade"
)]
Series,
#[sea_orm(
belongs_to = "super::locations::Entity",
from = "Column::Location",
to = "super::locations::Column::Id",
on_update = "NoAction",
on_delete = "Cascade"
)]
Locations,
}
impl Related<super::series::Entity> for Entity {
fn to() -> RelationDef {
Relation::Series.def()
}
}
impl Related<super::locations::Entity> for Entity {
fn to() -> RelationDef {
Relation::Locations.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

35
src/entity/locations.rs Normal file

@@ -0,0 +1,35 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "locations")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub name: String,
pub description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::sessions::Entity")]
Sessions,
#[sea_orm(has_many = "super::events::Entity")]
Events,
}
impl Related<super::sessions::Entity> for Entity {
fn to() -> RelationDef {
Relation::Sessions.def()
}
}
impl Related<super::events::Entity> for Entity {
fn to() -> RelationDef {
Relation::Events.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

8
src/entity/mod.rs Normal file

@@ -0,0 +1,8 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
pub mod prelude;
pub mod events;
pub mod locations;
pub mod series;
pub mod sessions;

6
src/entity/prelude.rs Normal file

@@ -0,0 +1,6 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
pub use super::events::Entity as Events;
pub use super::locations::Entity as Locations;
pub use super::series::Entity as Series;
pub use super::sessions::Entity as Sessions;

27
src/entity/series.rs Normal file

@@ -0,0 +1,27 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "series")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub name: String,
pub description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::events::Entity")]
Events,
}
impl Related<super::events::Entity> for Entity {
fn to() -> RelationDef {
Relation::Events.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

38
src/entity/sessions.rs Normal file

@@ -0,0 +1,38 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.5.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "sessions")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
pub name: String,
pub start: i32,
pub duration_minutes: Option<i32>,
pub location: Option<String>,
pub location_name: Option<String>,
pub series: String,
pub series_name: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::locations::Entity",
from = "Column::Location",
to = "super::locations::Column::Id",
on_update = "NoAction",
on_delete = "Cascade"
)]
Locations,
}
impl Related<super::locations::Entity> for Entity {
fn to() -> RelationDef {
Relation::Locations.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

241
src/main.rs Normal file

@@ -0,0 +1,241 @@
use config::{Config, File};
use glob::glob;
use lazy_static::lazy_static;
use log::LevelFilter;
use poise::serenity_prelude as serenity;
use poise::serenity_prelude::UserId;
use regex::Regex;
use simplelog::SimpleLogger;
use std::{collections::HashSet, sync::RwLock};
// mod entity;
// Types used by all command functions
type Error = Box<dyn std::error::Error + Send + Sync>;
type Context<'a> = poise::Context<'a, Data, Error>;
mod commands;
// Custom user data passed to all command functions
pub struct Data {
database: sqlx::SqlitePool,
}
lazy_static! {
static ref SETTINGS: RwLock<Config> = RwLock::new(Config::default());
}
/// Register application commands in this guild or globally
///
/// Run with no arguments to register in guild, run with argument "global" to register globally.
#[poise::command(
prefix_command,
hide_in_help,
// required_permissions = "MANAGE_MESSAGES | ADMINISTRATOR",
owners_only=true,
)]
async fn register(ctx: Context<'_>, #[flag] global: bool) -> Result<(), Error> {
poise::builtins::register_application_commands(ctx, global).await?;
Ok(())
}
/// Boop the bot!
#[poise::command(prefix_command, track_edits, slash_command)]
pub async fn boop(ctx: Context<'_>) -> Result<(), Error> {
let uuid_boop = ctx.id();
ctx.send(|m| {
m.content("I want some boops!").components(|c| {
c.create_action_row(|ar| {
ar.create_button(|b| {
b.style(serenity::ButtonStyle::Primary)
.label("Boop me!")
.custom_id(uuid_boop)
})
})
})
})
.await?;
let mut boop_count = 0;
while let Some(mci) = serenity::CollectComponentInteraction::new(ctx.discord())
.author_id(ctx.author().id)
.channel_id(ctx.channel_id())
.timeout(std::time::Duration::from_secs(1200))
.filter(move |mci| mci.data.custom_id == uuid_boop.to_string())
.await
{
boop_count += 1;
let mut msg = mci.message.clone();
msg.edit(ctx.discord(), |m| {
m.content(format!("Boop count: {}", boop_count))
})
.await?;
mci.create_interaction_response(ctx.discord(), |ir| {
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
})
.await?;
}
Ok(())
}
async fn on_error(error: poise::FrameworkError<'_, Data, Error>) {
// This is our custom error handler
// There are many errors that can occur, so we only handle the ones we want to customize
// and forward the rest to the default handler
match error {
poise::FrameworkError::Setup { error } => panic!("Failed to start bot: {:?}", error),
poise::FrameworkError::Command { error, ctx } => {
println!("Error in command `{}`: {:?}", ctx.command().name, error,);
}
error => {
if let Err(e) = poise::builtins::on_error(error).await {
println!("Error while handling error: {}", e)
}
}
}
}
/// Show this help menu
#[poise::command(prefix_command, track_edits, slash_command)]
async fn help(
ctx: Context<'_>,
#[description = "Specific command to show help about"]
#[autocomplete = "poise::builtins::autocomplete_command"]
command: Option<String>,
) -> Result<(), Error> {
poise::builtins::help(
ctx,
command.as_deref(),
poise::builtins::HelpConfiguration {
extra_text_at_bottom: "\
This bot is very much a work in progress. Commands will change and may break.",
show_context_menu_commands: true,
..Default::default()
},
)
.await?;
Ok(())
}
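// Read every file under data/config/ into the global SETTINGS store before anything else runs.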
fn setup_config() -> Result<(), Error> {
SETTINGS
.write()
.unwrap()
.merge(
glob("data/config/*")
.unwrap()
.map(|path| File::from(path.unwrap()))
.collect::<Vec<_>>(),
)
.unwrap();
println!("{:#?}", SETTINGS.read());
Ok(())
}
async fn app() -> Result<(), Error> {
let discord = SETTINGS.read().unwrap().get_table("discord").unwrap();
let mut owners: HashSet<UserId> = HashSet::new();
owners.insert(UserId(117992484310745097));
let prefix: String = discord
.get("prefix")
.expect("Config error, please set the discord[token] value")
.clone()
.into_str()
.expect("Config error, please make sure discord[token] is a string");
let options = poise::FrameworkOptions {
commands: vec![
help(),
register(),
commands::invites::invites(),
commands::schedule::schedule(),
commands::boop(),
commands::planning::get_command(),
// poise::Command {
// subcommands: vec![
// commands::planning::series(),
// commands::planning::sessions(),
// commands::planning::events(),
// // Let's make sure poise isn't confused by the duplicate names!
// // commands::planning::planning(),
// ],
// ..commands::planning::planning()
// },
],
prefix_options: poise::PrefixFrameworkOptions {
prefix: Some(prefix.into()),
edit_tracker: Some(poise::EditTracker::for_timespan(
std::time::Duration::from_secs(3600),
)),
additional_prefixes: vec![
poise::Prefix::Regex(Regex::new("(?i)ol'? rusty please").unwrap()),
poise::Prefix::Regex(Regex::new("(?i)Oi Rusty please").unwrap()),
],
..Default::default()
},
on_error: |error| Box::pin(on_error(error)),
pre_command: |ctx| {
Box::pin(async move {
println!("Executing command {}...", ctx.command().qualified_name);
})
},
post_command: |ctx| {
Box::pin(async move {
println!("Executed command {}!", ctx.command().qualified_name);
})
},
owners,
..Default::default()
};
// let discord = SETTINGS.read().unwrap().get_table("discord").unwrap();
let token = discord
.get("token")
.expect("Config error, please set the discord[token] value")
.clone()
.into_str()
.expect("Config error, please make sure discord[token] is a string");
let database = sqlx::sqlite::SqlitePoolOptions::new()
.max_connections(5)
.connect_with(
"sqlite:data/database.db"
.parse::<sqlx::sqlite::SqliteConnectOptions>()?
.create_if_missing(true),
)
.await?;
sqlx::migrate!("./migrations").run(&database).await?;
poise::Framework::build()
.token(token)
.user_data_setup(move |_ctx, _ready, _framework| {
Box::pin(async move { Ok(Data { database }) })
})
.options(options)
.run()
.await
.unwrap();
Ok(())
}
#[tokio::main]
async fn main() {
print!("Starting the bot");
setup_config().unwrap();
// env_logger::init();
// init_log();
SimpleLogger::init(LevelFilter::Warn, simplelog::Config::default()).unwrap();
if let Err(e) = app().await {
log::error!("{}", e);
std::process::exit(1);
}
}