Implemented watchdog

Lukas Wölfer
2025-07-22 01:39:41 +02:00
parent 8bae1fdfbb
commit 618a6c1d02
6 changed files with 373 additions and 211 deletions

src/dance_info.rs (new file, 156 lines added)

@@ -0,0 +1,156 @@
use std::collections::HashMap;
#[derive(serde::Deserialize, Debug, PartialEq, Eq)]
pub enum DanceRole {
Leader,
Follower,
}
impl DanceRole {
pub const fn as_str(&self) -> &str {
match self {
Self::Leader => "Leader",
Self::Follower => "Follower",
}
}
#[allow(dead_code)]
pub const fn other(&self) -> Self {
match self {
Self::Leader => Self::Follower,
Self::Follower => Self::Leader,
}
}
}
#[derive(serde::Deserialize, Debug, PartialEq, Eq)]
pub enum DanceRank {
Newcomer,
Novice,
Intermediate,
#[serde(rename = "Advance")]
Advanced,
#[serde(rename = "All Star")]
AllStars,
Champions,
}
impl DanceRank {
pub const fn as_str(&self) -> &str {
match self {
Self::Newcomer => "Newcomer",
Self::Novice => "Novice",
Self::Intermediate => "Intermediate",
Self::Advanced => "Advanced",
Self::AllStars => "All-Stars",
Self::Champions => "Champions",
}
}
}
#[derive(serde::Deserialize, Debug)]
enum OptionalDanceRank {
#[serde(rename = "N/A")]
NotAvailable,
#[serde(untagged)]
Rank(DanceRank),
}
#[derive(serde::Deserialize, Debug)]
enum OptionalDancePoints {
#[serde(rename = "N/A")]
NotAvailable,
#[serde(untagged)]
Points(u16),
}
/// Raw shape of the lookup response. Field names (including the "dominate"
/// spelling) are kept as-is so the derived `Deserialize` matches the JSON keys
/// without needing rename attributes.
#[derive(serde::Deserialize, Debug)]
struct DanceInfoParser {
pub dancer_first: String,
pub dancer_last: String,
pub short_dominate_role: DanceRole,
#[allow(dead_code)]
pub short_non_dominate_role: DanceRole,
pub dominate_role_highest_level_points: u16,
pub dominate_role_highest_level: DanceRank,
pub non_dominate_role_highest_level_points: OptionalDancePoints,
pub non_dominate_role_highest_level: OptionalDanceRank,
}
#[derive(Debug)]
pub struct CompState {
pub rank: DanceRank,
pub points: u16,
}
pub struct DanceInfo {
pub firstname: String,
pub lastname: String,
pub dominant_role: DanceRole,
pub dominant_role_comp: CompState,
pub non_dominant_role_comp: Option<CompState>,
}
impl DanceInfo {
pub fn name(&self) -> String {
format!("{} {}", self.firstname, self.lastname)
}
#[allow(dead_code)]
pub const fn non_dominant_role(&self) -> DanceRole {
self.dominant_role.other()
}
}
impl From<DanceInfoParser> for DanceInfo {
fn from(value: DanceInfoParser) -> Self {
let non_dominant_role_comp = if let OptionalDanceRank::Rank(r) =
value.non_dominate_role_highest_level
&& let OptionalDancePoints::Points(l) = value.non_dominate_role_highest_level_points
{
Some(CompState { rank: r, points: l })
} else {
None
};
Self {
firstname: value.dancer_first,
lastname: value.dancer_last,
dominant_role: value.short_dominate_role,
dominant_role_comp: CompState {
rank: value.dominate_role_highest_level,
points: value.dominate_role_highest_level_points,
},
non_dominant_role_comp,
}
}
}
#[derive(thiserror::Error, Debug)]
pub enum DanceInfoError {
#[error("Failed to build client: {0}")]
ClientBuild(reqwest::Error),
#[error("Request error: {0}")]
Request(reqwest::Error),
#[error("Failed to parse response: {0}")]
JsonParse(reqwest::Error),
}
/// Looks up a dancer by WSDC number via the points.worldsdc.com lookup endpoint.
pub async fn fetch_wsdc_info(id: u32) -> Result<DanceInfo, DanceInfoError> {
let client = reqwest::ClientBuilder::new()
.build()
.map_err(DanceInfoError::ClientBuild)?;
let mut params = HashMap::new();
params.insert("q", id.to_string());
let response = client
.request(
reqwest::Method::POST,
"https://points.worldsdc.com/lookup2020/find",
)
.form(&params)
.send()
.await
.map_err(DanceInfoError::Request)?;
let x: DanceInfoParser = response.json().await.map_err(DanceInfoError::JsonParse)?;
Ok(x.into())
}
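
As a quick sanity check on the parser above, the deserialization path can be exercised with a hand-written payload. The JSON below is hypothetical (the commit does not show the actual response of the lookup endpoint) and serde_json is assumed as a dev-dependency; the test only demonstrates that the derived Deserialize expects keys matching the field names, that "Advance" and "N/A" map onto the renamed variants, and that a missing non-dominant record collapses to None.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_hypothetical_lookup_payload() {
        // Hypothetical payload; keys mirror DanceInfoParser's field names.
        let json = r#"{
            "dancer_first": "Jane",
            "dancer_last": "Doe",
            "short_dominate_role": "Leader",
            "short_non_dominate_role": "Follower",
            "dominate_role_highest_level_points": 25,
            "dominate_role_highest_level": "Advance",
            "non_dominate_role_highest_level_points": "N/A",
            "non_dominate_role_highest_level": "N/A"
        }"#;
        let parsed: DanceInfoParser = serde_json::from_str(json).expect("test JSON is valid");
        let info: DanceInfo = parsed.into();
        assert_eq!(info.name(), "Jane Doe");
        assert_eq!(info.dominant_role_comp.rank, DanceRank::Advanced);
        assert_eq!(info.dominant_role_comp.points, 25);
        assert!(info.non_dominant_role_comp.is_none());
    }
}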

src/main.rs (modified)

@@ -1,16 +1,32 @@
#![warn(clippy::all, clippy::cargo, clippy::nursery, clippy::pedantic)]
#![warn(clippy::unwrap_used, clippy::expect_used)]
#![warn(clippy::print_stderr, clippy::print_stdout)]
#![allow(
clippy::multiple_crate_versions,
reason = "Don't know how to fix this, should be fine"
)]
#![allow(
clippy::cast_possible_truncation,
clippy::cast_precision_loss,
clippy::cast_sign_loss,
clippy::cast_possible_wrap,
reason = "Disable this for most of the time, enable this for cleanup later"
)]
use mwbot::{
Bot,
generators::{
Generator, SortDirection, categories::CategoryMemberSort, querypage::QueryPage,
search::Search,
},
generators::{Generator, SortDirection, categories::CategoryMemberSort},
};
use reqwest::RequestBuilder;
use serde::{Deserialize, Deserializer};
use std::{collections::HashMap, error::Error, path::Path};
use std::{error::Error, path::Path};
mod old_style;
use crate::watchdog::watch_wanted;
mod dance_info;
mod watchdog;
mod wikiinfo;
#[allow(dead_code)]
#[allow(clippy::print_stdout, reason = "We want to print here")]
fn list_teacher_pages(bot: &Bot) -> tokio::sync::mpsc::Receiver<Result<mwbot::Page, mwbot::Error>> {
let category_title = "Category:Teachers";
let pages = mwbot::generators::categories::CategoryMembers::new(category_title)
@@ -22,145 +38,31 @@ fn list_teacher_pages(bot: &Bot) -> tokio::sync::mpsc::Receiver<Result<mwbot::Pa
pages.generate(bot)
}
#[derive(serde::Deserialize, Debug, PartialEq, Eq)]
enum DanceRole {
Leader,
Follower,
}
fn main() -> Result<(), Box<dyn Error>> {
tracing_subscriber::fmt()
.with_level(true)
.with_max_level(tracing::Level::INFO)
.init();
#[derive(serde::Deserialize, Debug, PartialEq, Eq)]
enum DanceRank {
Novice,
Intermediate,
Advanced,
#[serde(rename = "All-Stars")]
AllStars,
Champions,
}
#[derive(serde::Deserialize, Debug)]
enum OptionalDanceRank {
#[serde(rename = "N/A")]
NotAvailable,
#[serde(untagged)]
Rank(DanceRank),
}
#[derive(serde::Deserialize, Debug)]
enum OptionalDanceLevel {
#[serde(rename = "N/A")]
NotAvailable,
#[serde(untagged)]
Level(u16),
}
#[derive(serde::Deserialize, Debug)]
struct DanceInfo {
dancer_first: String,
dancer_last: String,
short_dominate_role: DanceRole,
short_non_dominate_role: DanceRole,
dominate_role_highest_level_points: u16,
dominate_role_highest_level: DanceRank,
non_dominate_role_highest_level_points: OptionalDanceLevel,
non_dominate_role_highest_level: OptionalDanceRank,
}
async fn fetch_wsdc_info(id: u32) {
let client = reqwest::ClientBuilder::new().build().unwrap();
let mut params = HashMap::new();
params.insert("q", id.to_string());
let response = client
.request(
reqwest::Method::POST,
"https://points.worldsdc.com/lookup2020/find",
)
.form(&params)
.send()
.await
.unwrap();
let x: DanceInfo = response.json().await.unwrap();
dbg!(x);
}
async fn wanted_ids(bot: &Bot) -> Vec<u32> {
let mut gene = QueryPage::new("Wantedpages").generate(bot);
let mut result = vec![];
while let Some(x) = gene.recv().await {
let p = match x {
Ok(p) => p,
Err(e) => {
eprintln!("Could not get search result: {e}");
continue;
}
};
if let Ok(n) = parse_wsdc_page_name(p.title()) {
result.push(n);
}
}
result
}
fn parse_wsdc_page_name(name: &str) -> Result<u32, ()> {
if !name.starts_with("WSDC/") {
eprintln!("{name} is a wrong match");
return Err(());
}
match name.trim_start_matches("WSDC/").parse::<u32>() {
Ok(n) => Ok(n),
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(o) => o,
Err(e) => {
eprintln!("Page {name} does not fit: {e}");
Err(())
}
}
}
async fn index_wsdc_ids(bot: &Bot) -> Vec<u32> {
let mut gene = Search::new("WSDC/").generate(bot);
let mut result = vec![];
while let Some(x) = gene.recv().await {
let p = match x {
Ok(p) => p,
Err(e) => {
eprintln!("Could not get search result: {e}");
continue;
}
};
if let Ok(n) = parse_wsdc_page_name(&p.title()) {
result.push(n);
}
}
result
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
let bot = match Bot::from_path(Path::new("./mwbot.toml")).await {
Ok(x) => x,
Err(e) => {
dbg!(e);
tracing::error!("Could not start runtime: {e}");
return Ok(());
}
};
// dbg!(index_wsdc_ids(&bot).await);
// dbg!(wanted_ids(&bot).await);
// fetch_wsdc_info(1234).await;
// fetch_wsdc_info(1010).await;
fetch_wsdc_info(18080).await;
Ok(())
// // Monitor changes on these pages
// for page_title in pages {
// let mut stream = bot.watch_page(&page_title).await?;
// tokio::spawn(async move {
// while let Some(change) = stream.next().await {
// println!("Change detected on {}: {:?}", page_title, change);
// }
// });
// }
// // Keep the main thread alive to continue monitoring
// loop {
// tokio::time::sleep(tokio::time::Duration::from_secs(60)).await;
// }
rt.block_on(async {
let bot = match Bot::from_path(Path::new("./mwbot.toml")).await {
Ok(x) => x,
Err(e) => {
dbg!(e);
return Ok(());
}
};
watch_wanted(bot).await;
Ok(())
})
}

src/watchdog.rs (new file, 94 lines added)

@@ -0,0 +1,94 @@
use std::time::Duration;
use crate::{
dance_info::{DanceInfo, fetch_wsdc_info},
wikiinfo::wanted_ids,
};
use mwbot::{
Bot,
parsoid::{self, Template, Wikicode, map::IndexMap},
};
use mwbot::{SaveOptions, parsoid::WikinodeIterator};
use tracing::{Instrument, Level};
#[derive(thiserror::Error, Debug)]
enum InfoCompileError {
#[error("Could not compile wikipage: {0}")]
CompileError(#[from] parsoid::Error),
}
/// Renders the fetched WSDC data as a page body holding a single WSDCBox template call.
fn page_from_info(info: DanceInfo) -> Result<Wikicode, InfoCompileError> {
let mut params = IndexMap::new();
params.insert("name".to_string(), info.name());
params.insert(
"dominant_role".to_string(),
info.dominant_role.as_str().to_string(),
);
params.insert(
"allowed_rank".to_string(),
info.dominant_role_comp.rank.as_str().to_string(),
);
params.insert(
"dominant_rank".to_string(),
info.dominant_role_comp.rank.as_str().to_string(),
);
params.insert(
"dominant_points".to_string(),
info.dominant_role_comp.points.to_string(),
);
if let Some(u) = info.non_dominant_role_comp {
params.insert("non_dominant_rank".to_string(), u.rank.as_str().to_string());
params.insert("non_dominant_points".to_string(), u.points.to_string());
}
let t = Template::new("Template:WSDCBox", &params)?;
let result = Wikicode::new("");
result.append(&t);
Ok(result)
}
/// Polls for wanted WSDC/<id> pages every 30 seconds and creates each one.
pub async fn watch_wanted(bot: Bot) {
let span = tracing::span!(Level::INFO, "wanted_watchdog");
// Holding the guard from span.enter() across .await points yields broken traces,
// so the whole polling loop is instrumented as a future instead.
async move {
loop {
tracing::info!("Watchdog check...");
let wanted = wanted_ids(bot.clone()).await;
for (id, page) in wanted {
generate_page(id, page).await;
}
tokio::time::sleep(Duration::from_secs(30)).await;
}
}
.instrument(span)
.await;
}
async fn generate_page(id: u32, page: mwbot::Page) {
tracing::info!("Taking care of {id}");
let info = match fetch_wsdc_info(id).await {
Ok(o) => o,
Err(e) => {
tracing::error!("Error fetching wsdc info for {id}: {e}");
return;
}
};
let code = match page_from_info(info) {
Ok(o) => o,
Err(e) => {
tracing::error!("Creating wikicode for {id}: {e}");
return;
}
};
match page
.save(
code,
&SaveOptions::summary("Created WSDC info from worldsdc.com")
.mark_as_bot(true)
.mark_as_minor(false),
)
.await
{
Ok(_) => (),
Err(e) => {
tracing::error!("Could not save page for {id}: {e}");
}
}
}
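
For orientation: with the hypothetical Jane Doe record from the dance_info.rs sketch above (Advanced Leader, 25 points, no non-dominant record), page_from_info builds a page body that Parsoid should serialize to roughly the following single transclusion; parameter order follows the IndexMap insertion order, while exact whitespace and the handling of the "Template:" prefix are left to Parsoid. Note that, as written, allowed_rank and dominant_rank always carry the same value.

{{WSDCBox|name=Jane Doe|dominant_role=Leader|allowed_rank=Advanced|dominant_rank=Advanced|dominant_points=25}}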

src/wikiinfo.rs (new file, 62 lines added)

@@ -0,0 +1,62 @@
use mwbot::{
Bot, Page,
generators::{Generator, querypage::QueryPage, search::Search},
};
/// Scans the Wantedpages query page for missing "WSDC/<id>" titles and returns
/// each parsed id together with its (not yet existing) page handle.
pub async fn wanted_ids(bot: Bot) -> Vec<(u32, Page)> {
let mut gene = QueryPage::new("Wantedpages").generate(&bot);
let mut result = vec![];
while let Some(x) = gene.recv().await {
let p = match x {
Ok(p) => p,
Err(e) => {
tracing::error!("Could not get search result: {e}");
continue;
}
};
if let Ok(n) = parse_wsdc_page_name(p.title()) {
result.push((n, p));
}
}
result
}
#[derive(thiserror::Error, Debug)]
enum TitleParseError {
#[error("Does not contain the correct prefix")]
NoPrefix,
#[error("Possibly incorrect page")]
Sketchy,
}
fn parse_wsdc_page_name(name: &str) -> Result<u32, TitleParseError> {
if !name.starts_with("WSDC/") {
return Err(TitleParseError::NoPrefix);
}
match name.trim_start_matches("WSDC/").parse::<u32>() {
Ok(n) => Ok(n),
Err(e) => {
tracing::error!("Page {name} does not fit: {e}");
Err(TitleParseError::Sketchy)
}
}
}
#[allow(dead_code)]
pub async fn index_wsdc_ids(bot: &Bot) -> Vec<u32> {
let mut gene = Search::new("WSDC/").generate(bot);
let mut result = vec![];
while let Some(x) = gene.recv().await {
let p = match x {
Ok(p) => p,
Err(e) => {
tracing::error!("Could not get search result: {e}");
continue;
}
};
if let Ok(n) = parse_wsdc_page_name(p.title()) {
result.push(n);
}
}
result
}
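
Because parse_wsdc_page_name is a pure function, its contract is easy to pin down with a few assertions; a minimal test sketch (titles hypothetical) could look like this:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn accepts_wsdc_subpages_and_rejects_other_titles() {
        // "WSDC/<number>" yields the numeric id.
        assert!(matches!(parse_wsdc_page_name("WSDC/12345"), Ok(12345)));
        // Titles without the prefix are rejected outright.
        assert!(matches!(
            parse_wsdc_page_name("Main Page"),
            Err(TitleParseError::NoPrefix)
        ));
        // Prefixed but non-numeric titles are flagged as sketchy.
        assert!(matches!(
            parse_wsdc_page_name("WSDC/not-a-number"),
            Err(TitleParseError::Sketchy)
        ));
    }
}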