Add viaplay_schedule command to get schedule for specific day
Clean up some comments of dead/old code
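The new command resolves "specific day" as an offset in days from today. Below is a minimal sketch (not part of this commit) of that date computation, assuming the chrono crate; `schedule_date_for_offset` is a hypothetical helper name used only for illustration:

    // Hypothetical helper illustrating the offset-to-date logic the new
    // /viaplay_schedule command uses before calling get_schedule_date (see diff below).
    use chrono::{Duration, NaiveDate, Utc};

    fn schedule_date_for_offset(offset: i8) -> NaiveDate {
        Utc::now()
            .date_naive()
            .checked_add_signed(Duration::days(offset.into()))
            .expect("small day offsets stay within the valid date range")
    }

    fn main() {
        // offset 0 = today, 1 = tomorrow, -1 = yesterday
        println!("{}", schedule_date_for_offset(1).format("%Y-%m-%d"));
    }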
This commit is contained in:
parent
5d2f9a777c
commit
df60ac6aae
@@ -20,50 +20,11 @@ pub enum Timeframe {
     Everything,
 }
-
-// #[derive(Debug, poise::ChoiceParameter)]
-// pub enum Source {
-//     #[name = "Get links for the Eurosport player"]
-//     Eurosport,
-//     // #[name = "Get links for the apocalympics Eurosport player"]
-//     // Olympics,
-//     #[name = "Get WRC links for the WRC player"]
-//     WRC,
-//     #[name = "Get Viaplay links for the Viaplay player"]
-//     Viaplay,
-//     // #[name = "F1 content for the weekend"]
-//     // F1,
-// }
-
-/// Get links for high tier commands.
-// #[poise::command(slash_command)]
-// pub async fn links2(
-//     ctx: Context<'_>,
-//     #[description = "Where to git the juicy links from?"] source: Source,
-//     #[description = "Filter sessions for when they are/were happening, defaults to future"]
-//     timeframe: Option<Timeframe>,
-//     #[description = "Content to filter on"] filter: Option<String>,
-// ) -> Result<(), Error> {
-//     if !utils::high_tier(ctx).await {
-//         ctx.say("This command can only be used in high tier channels for security")
-//             .await?;
-//         return Ok(());
-//     }
-
-//     match source {
-//         Source::Eurosport => eurosport::proc_eurosport(ctx, timeframe, filter).await,
-//         // Source::Olympics => eurosport::proc_olympics(ctx, timeframe, filter).await,
-//         Source::WRC => wrc::wrc(ctx).await,
-//         Source::Viaplay => viaplay::viaplay(ctx, timeframe, filter).await,
-//         // Source::F1 => f1::proc_f1(ctx, timeframe, filter).await,
-//     }
-
-//     // Ok(())
-// }
-
 #[poise::command(
     slash_command,
     subcommands(
         "viaplay::viaplay",
+        "viaplay::viaplay_schedule",
         "eurosport::eurosport",
         "wrc::wrc",
         "f1::f1",
@@ -2,7 +2,7 @@ use std::{collections::HashSet, fmt};

 use crate::{commands::utils, Context, Error};
 use cached::proc_macro::cached;
-use chrono::{DateTime, Utc};
+use chrono::{DateTime, Duration, NaiveDate, Utc};
 use futures::{Stream, StreamExt};
 use log::{info, warn};
 use reqwest::header::AUTHORIZATION;
@@ -10,19 +10,6 @@ use serde::Deserialize;

 use super::Timeframe;

-// const translations: HashMap<String, String> = HashMap::from([
-//     ("Valioliiga", "Premier league"),
-//     ("Gjensidige Kvindeliga", "Mutual Women's League (Norway)"),
-//     ("Tanskan 1. divisioona", "Danish 1st Division"),
-//     ("Bundesliiga", "Bundesliga"),
-//     ("2. Bundesliiga", "2. Bundesliga"),
-// ]);
-
-// #[derive(Deserialize, Clone)]
-// struct ViaplaySchedule {
-//     events: Vec<ViaplayEvent>,
-// }
-
 #[derive(Deserialize, Clone)]
 pub struct ViaplayEvent {
     content: Content,
@@ -80,8 +67,6 @@ impl ViaplayEvent {

 impl fmt::Display for ViaplayEvent {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        // Use `self.number` to refer to each positional data point.
-        // write!(f, "```md\n[{sport}]({title}) {synopsis}```(<t:{start}:R>-<t:{end}:R>) {desc}\nhttps://tom.al/ms/vp/{id}", sport=self.content.format.sport, title=self.content.title, synopsis=self.content.synopsis, start=self.times.start.timestamp(), end=self.times.end.timestamp(), desc=self.content.description, id=self.system.product_key)
         write!(f, "{}", self.to_string())
     }
 }
@@ -208,6 +193,59 @@ pub async fn get_schedule() -> Option<Vec<ViaplayEvent>> {
     result
 }

+#[cached(time = 3600)]
+pub async fn get_schedule_date(date: NaiveDate) -> Option<Vec<ViaplayEvent>> {
+    let client = reqwest::Client::new();
+    let req = client
+        .get(format!(
+            "https://content.viaplay.fi/pcdash-fi/urheilu?date={}",
+            date.format("%Y-%m-%d")
+        ))
+        .send()
+        .await;
+
+    let result: Option<Vec<ViaplayEvent>> = match req {
+        Err(e) => {
+            warn!("Error getting Viaplay schedule {}", e);
+            None
+        }
+        Ok(req) if req.status().as_u16() == 404 => {
+            warn!("404 on getting VP events");
+            None
+        }
+        Ok(req) if req.status().as_u16() == 200 => {
+            let data = req.json::<serde_json::Value>().await;
+            Some(
+                serde_json::from_value::<Vec<ViaplayEvent>>(
+                    data.unwrap_or(serde_json::Value::Null)
+                        .get("_embedded")
+                        .unwrap_or(&serde_json::Value::Null)
+                        .get("viaplay:blocks")
+                        .unwrap_or(&serde_json::Value::Null)
+                        .as_array()
+                        .unwrap_or(&vec![])
+                        .last()
+                        .unwrap_or(&serde_json::Value::Null)
+                        .get("_embedded")
+                        .unwrap_or(&serde_json::Value::Null)
+                        .get("viaplay:products")
+                        .unwrap_or(&serde_json::Value::Null)
+                        .clone(),
+                )
+                .unwrap_or(vec![]),
+            )
+        }
+        Ok(req) => {
+            warn!(
+                "Unhandled status when parsing viaplay request {}",
+                req.status()
+            );
+            None
+        }
+    };
+    result
+}
+
 #[cached(time = 3600)]
 pub async fn get_sports() -> Vec<String> {
     // let events = get_schedule();
@@ -222,15 +260,6 @@ pub async fn get_sports() -> Vec<String> {
         }
     }
-
-// #[allow(dead_code)]
-// async fn autocomplete_sport(_ctx: Context<'_>, partial: String) -> impl Stream<Item = String> {
-//     // futures::stream::iter(get_sports().iter())
-//     //     .filter(move |name| futures::future::ready(name.contains(&partial)))
-//     //     .map(|name| name.to_string())
-//     futures::stream::iter(get_sports().await)
-//         .filter(move |name| futures::future::ready(name.contains(&partial)))
-// }

 async fn autocomplete_sport<'a>(
     _ctx: Context<'_>,
     partial: &'a str,
@@ -280,19 +309,40 @@ pub async fn viaplay(
     Ok(())
 }

-// /// Another subcommand of `parent`
-// #[poise::command(slash_command)]
-// pub async fn child3(
-//     ctx: Context<'_>,
-//     #[description = "Where to git the juicy links from?"]
-//     _source: super::Source,
-//     #[description = "Filter sessions for when they are/were happening, defaults to future"]
-//     _timeframe: Option<super::Timeframe>,
-//     #[description = "Content to filter on"] filter: Option<String>,
-//     #[description = "Filter for which sport to list (only Viaplay)"]
-//     #[autocomplete = "autocomplete_sport"]
-//     sport: Option<String>,
-// ) -> Result<(), Error> {
-//     ctx.say("You invoked the second child command!").await?;
-//     Ok(())
-// }
+// Viaplay schedule for a given date
+#[poise::command(slash_command, ephemeral)]
+pub async fn viaplay_schedule(
+    ctx: Context<'_>,
+    #[description = "Offset in days from today (0 for current day)"] offset: Option<i8>,
+    #[description = "Content to filter on"] filter: Option<String>,
+) -> Result<(), Error> {
+    let offset: i8 = offset.unwrap_or_default();
+    let date = Utc::now()
+        .date_naive()
+        .checked_add_signed(Duration::days(offset.into()))
+        .expect("Expected an existing date as result");
+
+    let schedule = get_schedule_date(date).await;
+    let title = format!("`Viaplay schedule for {}`", date.format("%d-%m-%Y"));
+    match schedule {
+        None => {
+            ctx.say("Unable to get the events, try again later (it's cached so wait a bit...)")
+                .await?;
+        }
+        Some(evs) => {
+            info!("Found {} events from viaplay", evs.len());
+            let filtered: Vec<String> = evs
+                .into_iter()
+                .filter(|e| match &filter {
+                    None => true,
+                    Some(f) => e.filter(f.as_str()),
+                })
+                .map(|e| e.to_string())
+                .collect();
+            let pages = utils::paginator_title(title, filtered, 1900, "\n".to_string());
+            utils::paginate_string(ctx, pages).await?;
+        }
+    };
+
+    Ok(())
+}
@@ -44,6 +44,31 @@ pub fn paginator(input: Vec<String>, chunk_size: usize, join_string: String) ->
     return result;
 }

+pub fn paginator_title(
+    title: String,
+    input: Vec<String>,
+    chunk_size: usize,
+    join_string: String,
+) -> Vec<String> {
+    if input.len() == 0 {
+        return vec![];
+    }
+    let mut result: Vec<String> = vec![];
+    let mut part: String = title.clone();
+    let filler = &join_string.chars().count();
+    for i in input {
+        if part.chars().count() + i.chars().count() + filler >= chunk_size {
+            result.push(part);
+            part = title.to_string();
+            part.push_str(&i.to_string());
+        } else {
+            part.push_str(&join_string);
+            part.push_str(&i.to_string());
+        }
+    }
+    result.push(part);
+    return result;
+}
+
 pub async fn paginate_string(ctx: Context<'_>, pages: Vec<String>) -> Result<(), Error> {
     let uuid_command = ctx.id().to_string();
@@ -61,43 +86,40 @@ pub async fn paginate_string(ctx: Context<'_>, pages: Vec<String>) -> Result<(), Error> {
         _ => {}
     };

-    let reply_handle = ctx.send(|m| {
-        m.content(format!(
-            "{}\n\nPage: {}/{}",
-            pages.get(0).unwrap(),
-            1,
-            page_count
-        ))
-        .components(|c| {
-            c.create_action_row(|ar| {
-                ar.create_button(|b| {
-                    b.style(serenity::ButtonStyle::Primary)
-                        .label("Previous page")
-                        .custom_id(format!("{}_previous", uuid_command))
-                });
-                ar.create_button(|b| {
-                    b.style(serenity::ButtonStyle::Primary)
-                        .label("Next page")
-                        .custom_id(format!("{}_next", uuid_command))
-                });
-                ar.create_button(|b| {
-                    b.style(serenity::ButtonStyle::Secondary)
-                        .label("Reset")
-                        .custom_id(format!("{}_close", uuid_command))
-                })
-            })
-        })
-    })
-    .await?;
-    // let interaction1 = if let ReplyHandle::Application { http, interaction } = msg.unwrap(){Some(interaction)} else {None};
-    // let interaction = interaction1.unwrap();
+    let reply_handle = ctx
+        .send(|m| {
+            m.content(format!(
+                "{}\n\nPage: {}/{}",
+                pages.get(0).unwrap(),
+                1,
+                page_count
+            ))
+            .components(|c| {
+                c.create_action_row(|ar| {
+                    ar.create_button(|b| {
+                        b.style(serenity::ButtonStyle::Primary)
+                            .label("Previous page")
+                            .custom_id(format!("{}_previous", uuid_command))
+                    });
+                    ar.create_button(|b| {
+                        b.style(serenity::ButtonStyle::Primary)
+                            .label("Next page")
+                            .custom_id(format!("{}_next", uuid_command))
+                    });
+                    ar.create_button(|b| {
+                        b.style(serenity::ButtonStyle::Secondary)
+                            .label("Reset")
+                            .custom_id(format!("{}_close", uuid_command))
+                    })
+                })
+            })
+        })
+        .await?;

     let mut page = 0;
     while let Some(mci) = serenity::CollectComponentInteraction::new(ctx)
-        // .author_id(ctx.author().id)
         .channel_id(ctx.channel_id())
         .timeout(std::time::Duration::from_secs(1200))
-        // .filter(move |mci| mci.data.custom_id == uuid_command.to_string())
         .await
     {
         if !mci.data.custom_id.contains(&uuid_command) {
|
||||||
page = 0;
|
page = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
reply_handle.edit(ctx, |m| {
|
reply_handle
|
||||||
m.content(format!(
|
.edit(ctx, |m| {
|
||||||
"{}\n\nPage: {}/{}",
|
m.content(format!(
|
||||||
pages.get(page).unwrap(),
|
"{}\n\nPage: {}/{}",
|
||||||
page + 1,
|
pages.get(page).unwrap(),
|
||||||
page_count
|
page + 1,
|
||||||
))
|
page_count
|
||||||
})
|
))
|
||||||
.await?;
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
mci.create_interaction_response(ctx, |ir| {
|
mci.create_interaction_response(ctx, |ir| {
|
||||||
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
|
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
|
||||||
|
@ -149,7 +172,6 @@ pub async fn paginate_string_embed(
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
1 => {
|
1 => {
|
||||||
// ctx.say(pages.get(0).unwrap()).await?;
|
|
||||||
ctx.send(|m| m.embed(|e| e.title(title).description(pages.get(0).unwrap())))
|
ctx.send(|m| m.embed(|e| e.title(title).description(pages.get(0).unwrap())))
|
||||||
.await?;
|
.await?;
|
||||||
return Ok(());
|
return Ok(());
|
||||||
|
@ -157,47 +179,38 @@ pub async fn paginate_string_embed(
|
||||||
_ => {}
|
_ => {}
|
||||||
};
|
};
|
||||||
|
|
||||||
let reply_handle = ctx.send(|m| {
|
let reply_handle = ctx
|
||||||
// m.content(format!(
|
.send(|m| {
|
||||||
// "{}\n\nPage: {}/{}",
|
m.embed(|e| {
|
||||||
// pages.get(0).unwrap(),
|
e.title(format!("{} Page 1/{}", title, page_count))
|
||||||
// 1,
|
.description(pages.get(0).unwrap())
|
||||||
// page_count
|
})
|
||||||
// ))
|
.components(|c| {
|
||||||
m.embed(|e| {
|
c.create_action_row(|ar| {
|
||||||
e.title(format!("{} Page 1/{}", title, page_count))
|
ar.create_button(|b| {
|
||||||
.description(pages.get(0).unwrap())
|
b.style(serenity::ButtonStyle::Primary)
|
||||||
})
|
.label("Previous page")
|
||||||
.components(|c| {
|
.custom_id(format!("{}_previous", uuid_command))
|
||||||
c.create_action_row(|ar| {
|
});
|
||||||
ar.create_button(|b| {
|
ar.create_button(|b| {
|
||||||
b.style(serenity::ButtonStyle::Primary)
|
b.style(serenity::ButtonStyle::Primary)
|
||||||
.label("Previous page")
|
.label("Next page")
|
||||||
.custom_id(format!("{}_previous", uuid_command))
|
.custom_id(format!("{}_next", uuid_command))
|
||||||
});
|
});
|
||||||
ar.create_button(|b| {
|
ar.create_button(|b| {
|
||||||
b.style(serenity::ButtonStyle::Primary)
|
b.style(serenity::ButtonStyle::Secondary)
|
||||||
.label("Next page")
|
.label("Reset")
|
||||||
.custom_id(format!("{}_next", uuid_command))
|
.custom_id(format!("{}_close", uuid_command))
|
||||||
});
|
})
|
||||||
ar.create_button(|b| {
|
|
||||||
b.style(serenity::ButtonStyle::Secondary)
|
|
||||||
.label("Reset")
|
|
||||||
.custom_id(format!("{}_close", uuid_command))
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
.await?;
|
||||||
.await?;
|
|
||||||
// let interaction1 = if let ReplyHandle::Application { http, interaction } = msg.unwrap(){Some(interaction)} else {None};
|
|
||||||
// let interaction = interaction1.unwrap();
|
|
||||||
|
|
||||||
let mut page = 0;
|
let mut page = 0;
|
||||||
while let Some(mci) = serenity::CollectComponentInteraction::new(ctx)
|
while let Some(mci) = serenity::CollectComponentInteraction::new(ctx)
|
||||||
// .author_id(ctx.author().id)
|
|
||||||
.channel_id(ctx.channel_id())
|
.channel_id(ctx.channel_id())
|
||||||
.timeout(std::time::Duration::from_secs(1200))
|
.timeout(std::time::Duration::from_secs(1200))
|
||||||
// .filter(move |mci| mci.data.custom_id == uuid_command.to_string())
|
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
if !mci.data.custom_id.contains(&uuid_command) {
|
if !mci.data.custom_id.contains(&uuid_command) {
|
||||||
|
@ -216,13 +229,14 @@ pub async fn paginate_string_embed(
|
||||||
page = 0;
|
page = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
reply_handle.edit(ctx, |m| {
|
reply_handle
|
||||||
m.embed(|e| {
|
.edit(ctx, |m| {
|
||||||
e.title(format!("{} Page {}/{}", title, page + 1, page_count))
|
m.embed(|e| {
|
||||||
.description(pages.get(page).unwrap())
|
e.title(format!("{} Page {}/{}", title, page + 1, page_count))
|
||||||
|
.description(pages.get(page).unwrap())
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
.await?;
|
||||||
.await?;
|
|
||||||
|
|
||||||
mci.create_interaction_response(ctx, |ir| {
|
mci.create_interaction_response(ctx, |ir| {
|
||||||
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
|
ir.kind(serenity::InteractionResponseType::DeferredUpdateMessage)
|
||||||
|