author    | eta <eta@theta.eu.org> | 2020-06-15T20·03+0100
committer | eta <eta@theta.eu.org> | 2020-06-16T13·30+0000
commit    | c3abbb5e2daeec07d03f1addd4a6c905af7a02ac
tree      | 416ba1be719dd3ebbd15ee41b6f8a2c1b944bd43 /fun/tvldb/src
parent    | 4c22cf316933613215e83f70a6bb2c556d42e02c
feat(tvldb): Import the tvldb/paroxysm source, add a Nix derivation r/993
- This imports the tvldb (actually a thing called 'paroxysm') code from
  https://git.theta.eu.org/eta/paroxysm into the monorepo.
- Additionally, I did a nix thing, yay! \o/ (well, with tazjin's help)
- 3p/default.nix needed modifying to whitelist pgsql.

Change-Id: Icdf13ca221650dde376f632bd2dd8a087af451bf
Reviewed-on: https://cl.tvl.fyi/c/depot/+/389
Reviewed-by: tazjin <mail@tazj.in>
Diffstat (limited to 'fun/tvldb/src')
-rw-r--r-- | fun/tvldb/src/cfg.rs     |  11
-rw-r--r-- | fun/tvldb/src/keyword.rs | 182
-rw-r--r-- | fun/tvldb/src/main.rs    | 353
-rw-r--r-- | fun/tvldb/src/models.rs  |  33
-rw-r--r-- | fun/tvldb/src/schema.rs  |  18
5 files changed, 597 insertions, 0 deletions
diff --git a/fun/tvldb/src/cfg.rs b/fun/tvldb/src/cfg.rs
new file mode 100644
index 000000000000..038795a6f114
--- /dev/null
+++ b/fun/tvldb/src/cfg.rs
@@ -0,0 +1,11 @@
+use std::collections::HashSet;
+
+#[derive(Deserialize)]
+pub struct Config {
+    pub database_url: String,
+    pub irc_config_path: String,
+    #[serde(default)]
+    pub admins: HashSet<String>,
+    #[serde(default)]
+    pub log_filter: Option<String>,
+}
diff --git a/fun/tvldb/src/keyword.rs b/fun/tvldb/src/keyword.rs
new file mode 100644
index 000000000000..78dc36f35f12
--- /dev/null
+++ b/fun/tvldb/src/keyword.rs
@@ -0,0 +1,182 @@
+use crate::models::{Entry, Keyword, NewEntry, NewKeyword};
+use diesel::pg::PgConnection;
+use diesel::prelude::*;
+use failure::Error;
+use std::borrow::Cow;
+
+pub struct KeywordDetails {
+    pub keyword: Keyword,
+    pub entries: Vec<Entry>,
+}
+impl KeywordDetails {
+    pub fn learn(&mut self, nick: &str, text: &str, dbc: &PgConnection) -> Result<usize, Error> {
+        let now = ::chrono::Utc::now().naive_utc();
+        let ins = NewEntry {
+            keyword_id: self.keyword.id,
+            idx: (self.entries.len() + 1) as _,
+            text,
+            creation_ts: now,
+            created_by: nick,
+        };
+        let new = {
+            use crate::schema::entries;
+            ::diesel::insert_into(entries::table)
+                .values(ins)
+                .get_result(dbc)?
+        };
+        self.entries.push(new);
+        Ok(self.entries.len())
+    }
+    pub fn process_moves(&mut self, moves: &[(i32, i32)], dbc: &PgConnection) -> Result<(), Error> {
+        for (oid, new_idx) in moves {
+            {
+                use crate::schema::entries::dsl::*;
+                ::diesel::update(entries.filter(id.eq(oid)))
+                    .set(idx.eq(new_idx))
+                    .execute(dbc)?;
+            }
+        }
+        self.entries = Self::get_entries(self.keyword.id, dbc)?;
+        Ok(())
+    }
+    pub fn swap(&mut self, idx_a: usize, idx_b: usize, dbc: &PgConnection) -> Result<(), Error> {
+        let mut moves = vec![];
+        for ent in self.entries.iter() {
+            if ent.idx == idx_a as i32 {
+                moves.push((ent.id, idx_b as i32));
+            }
+            if ent.idx == idx_b as i32 {
+                moves.push((ent.id, idx_a as i32));
+            }
+        }
+        if moves.len() != 2 {
+            Err(format_err!("Invalid swap operation."))?;
+        }
+        self.process_moves(&moves, dbc)?;
+        Ok(())
+    }
+    pub fn update(&mut self, idx: usize, val: &str, dbc: &PgConnection) -> Result<(), Error> {
+        let ent = self
+            .entries
+            .get_mut(idx.saturating_sub(1))
+            .ok_or(format_err!("No such element to update."))?;
+        {
+            use crate::schema::entries::dsl::*;
+            ::diesel::update(entries.filter(id.eq(ent.id)))
+                .set(text.eq(val))
+                .execute(dbc)?;
+        }
+        ent.text = val.to_string();
+        Ok(())
+    }
+    pub fn delete(&mut self, idx: usize, dbc: &PgConnection) -> Result<(), Error> {
+        // step 1: delete the element
+        {
+            let ent = self
+                .entries
+                .get(idx.saturating_sub(1))
+                .ok_or(format_err!("No such element to delete."))?;
+            {
+                use crate::schema::entries::dsl::*;
+                ::diesel::delete(entries.filter(id.eq(ent.id))).execute(dbc)?;
+            }
+        }
+        // step 2: move all the elements in front of it back one
+        let mut moves = vec![];
+        for ent in self.entries.iter() {
+            if idx > ent.idx as _ {
+                moves.push((ent.id, ent.idx.saturating_sub(1)));
+            }
+        }
+        self.process_moves(&moves, dbc)?;
+        Ok(())
+    }
+    pub fn add_zwsp_to_name(name: &str) -> Option<String> {
+        let second_index = name.char_indices().nth(1).map(|(i, _)| i)?;
+        let (start, end) = name.split_at(second_index);
+        Some(format!("{}\u{200B}{}", start, end))
+    }
+    pub fn format_entry(&self, idx: usize) -> Option<String> {
+        if let Some(ent) = self.entries.get(idx.saturating_sub(1)) {
+            let gen_clr = if self.keyword.chan == "*" {
+                "\x0307"
+            } else {
+                ""
+            };
+            let zwsp_name = Self::add_zwsp_to_name(&self.keyword.name)
+                .unwrap_or_else(|| self.keyword.name.clone());
+            Some(format!(
+                "\x02{}{}\x0f\x0315[{}/{}]\x0f: {} \x0f\x0314[{}]\x0f",
+                gen_clr,
+                zwsp_name,
+                idx,
+                self.entries.len(),
+                ent.text,
+                ent.creation_ts.date()
+            ))
+        } else {
+            None
+        }
+    }
+    pub fn get_or_create(word: &str, c: &str, dbc: &PgConnection) -> Result<Self, Error> {
+        if let Some(ret) = Self::get(word, c, dbc)? {
+            Ok(ret)
+        } else {
+            Ok(Self::create(word, c, dbc)?)
+        }
+    }
+    pub fn create(word: &str, c: &str, dbc: &PgConnection) -> Result<Self, Error> {
+        let val = NewKeyword {
+            name: word,
+            chan: c,
+        };
+        let ret: Keyword = {
+            use crate::schema::keywords;
+            ::diesel::insert_into(keywords::table)
+                .values(val)
+                .get_result(dbc)?
+        };
+        Ok(KeywordDetails {
+            keyword: ret,
+            entries: vec![],
+        })
+    }
+    fn get_entries(kid: i32, dbc: &PgConnection) -> Result<Vec<Entry>, Error> {
+        let entries: Vec<Entry> = {
+            use crate::schema::entries::dsl::*;
+            entries
+                .filter(keyword_id.eq(kid))
+                .order_by(idx.asc())
+                .load(dbc)?
+        };
+        Ok(entries)
+    }
+    pub fn get<'a, T: Into<Cow<'a, str>>>(
+        word: T,
+        c: &str,
+        dbc: &PgConnection,
+    ) -> Result<Option<Self>, Error> {
+        let word = word.into();
+        let keyword: Option<Keyword> = {
+            use crate::schema::keywords::dsl::*;
+            keywords
+                .filter(name.ilike(word).and(chan.eq(c).or(chan.eq("*"))))
+                .first(dbc)
+                .optional()?
+        };
+        if let Some(k) = keyword {
+            let entries = Self::get_entries(k.id, dbc)?;
+            if let Some(e0) = entries.get(0) {
+                if e0.text.starts_with("see: ") {
+                    return Self::get(e0.text.replace("see: ", ""), c, dbc);
+                }
+            }
+            Ok(Some(KeywordDetails {
+                keyword: k,
+                entries,
+            }))
+        } else {
+            Ok(None)
+        }
+    }
+}
diff --git a/fun/tvldb/src/main.rs b/fun/tvldb/src/main.rs
new file mode 100644
index 000000000000..9fe7584b61b7
--- /dev/null
+++ b/fun/tvldb/src/main.rs
@@ -0,0 +1,353 @@
+extern crate irc;
+extern crate serde;
+#[macro_use]
+extern crate serde_derive;
+#[macro_use]
+extern crate diesel;
+extern crate chrono;
+extern crate config;
+extern crate env_logger;
+#[macro_use]
+extern crate log;
+#[macro_use]
+extern crate failure;
+extern crate regex;
+#[macro_use]
+extern crate lazy_static;
+extern crate rand;
+
+use crate::cfg::Config;
+use crate::keyword::KeywordDetails;
+use diesel::pg::PgConnection;
+use diesel::r2d2::{ConnectionManager, Pool};
+use failure::Error;
+use irc::client::prelude::*;
+use rand::rngs::ThreadRng;
+use rand::{thread_rng, Rng};
+use regex::{Captures, Regex};
+use std::collections::HashMap;
+use std::fmt::Display;
+
+mod cfg;
+mod keyword;
+mod models;
+mod schema;
+
+pub struct App {
+    client: IrcClient,
+    pg: Pool<ConnectionManager<PgConnection>>,
+    rng: ThreadRng,
+    cfg: Config,
+    last_msgs: HashMap<String, HashMap<String, String>>,
+}
+
+impl App {
+    pub fn report_error<T: Display>(
+        &mut self,
+        nick: &str,
+        chan: &str,
+        msg: T,
+    ) -> Result<(), Error> {
+        self.client
+            .send_notice(nick, format!("[{}] \x0304Error:\x0f {}", chan, msg))?;
+        Ok(())
+    }
+    pub fn keyword_from_captures(
+        &mut self,
+        learn: &::regex::Captures,
+        nick: &str,
+        chan: &str,
+    ) -> Result<KeywordDetails, Error> {
+        let db = self.pg.get()?;
+        debug!("Fetching keyword for captures: {:?}", learn);
+        let subj = &learn["subj"];
+        let learn_chan = if learn.name("gen").is_some() {
+            "*"
+        } else {
+            chan
+        };
+        if !chan.starts_with("#") && learn_chan != "*" {
+            Err(format_err!("Only general entries may be taught via PM."))?;
+        }
+        debug!("Fetching keyword '{}' for chan {}", subj, learn_chan);
+        let kwd = KeywordDetails::get_or_create(subj, learn_chan, &db)?;
+        if kwd.keyword.chan == "*" && !self.cfg.admins.contains(nick) {
+            Err(format_err!(
+                "Only administrators can create or modify general entries."
+            ))?;
+        }
+        Ok(kwd)
+    }
+    pub fn handle_move(
+        &mut self,
+        target: &str,
+        nick: &str,
+        chan: &str,
+        mv: Captures,
+    ) -> Result<(), Error> {
+        let db = self.pg.get()?;
+        let idx = &mv["idx"];
+        let idx = match idx[1..(idx.len() - 1)].parse::<usize>() {
+            Ok(i) => i,
+            Err(e) => Err(format_err!("Could not parse index: {}", e))?,
+        };
+        let new_idx = match mv["new_idx"].parse::<i32>() {
+            Ok(i) => i,
+            Err(e) => Err(format_err!("Could not parse target index: {}", e))?,
+        };
+        let mut kwd = self.keyword_from_captures(&mv, nick, chan)?;
+        if new_idx < 0 {
+            kwd.delete(idx, &db)?;
+            self.client.send_notice(
+                target,
+                format!("\x02{}\x0f: Deleted entry {}.", kwd.keyword.name, idx),
+            )?;
+        } else {
+            kwd.swap(idx, new_idx as _, &db)?;
+            self.client.send_notice(
+                target,
+                format!(
+                    "\x02{}\x0f: Swapped entries {} and {}.",
+                    kwd.keyword.name, idx, new_idx
+                ),
+            )?;
+        }
+        Ok(())
+    }
+    pub fn handle_learn(
+        &mut self,
+        target: &str,
+        nick: &str,
+        chan: &str,
+        learn: Captures,
+    ) -> Result<(), Error> {
+        let db = self.pg.get()?;
+        let val = &learn["val"];
+        let mut kwd = self.keyword_from_captures(&learn, nick, chan)?;
+        let idx = kwd.learn(nick, val, &db)?;
+        self.client
+            .send_notice(target, kwd.format_entry(idx).unwrap())?;
+        Ok(())
+    }
+    pub fn handle_insert_last_quote(
+        &mut self,
+        target: &str,
+        nick: &str,
+        chan: &str,
+        qlast: Captures,
+    ) -> Result<(), Error> {
+        let db = self.pg.get()?;
+        let mut kwd = self.keyword_from_captures(&qlast, nick, chan)?;
+        let chan_lastmsgs = self
+            .last_msgs
+            .entry(chan.to_string())
+            .or_insert(HashMap::new());
+        let val = if let Some(last) = chan_lastmsgs.get(&kwd.keyword.name.to_ascii_lowercase()) {
+            format!("<{}> {}", &kwd.keyword.name, last)
+        } else {
+            Err(format_err!("I dunno what {} said...", kwd.keyword.name))?
+        };
+        let idx = kwd.learn(nick, &val, &db)?;
+        self.client
+            .send_notice(target, kwd.format_entry(idx).unwrap())?;
+        Ok(())
+    }
+    pub fn handle_increment(
+        &mut self,
+        target: &str,
+        nick: &str,
+        chan: &str,
+        icr: Captures,
+    ) -> Result<(), Error> {
+        let db = self.pg.get()?;
+        let mut kwd = self.keyword_from_captures(&icr, nick, chan)?;
+        let is_incr = &icr["incrdecr"] == "++";
+        let now = chrono::Utc::now().naive_utc().date();
+        let mut idx = None;
+        for (i, ent) in kwd.entries.iter().enumerate() {
+            if ent.creation_ts.date() == now {
+                if let Ok(val) = ent.text.parse::<i32>() {
+                    let val = if is_incr { val + 1 } else { val - 1 };
+                    idx = Some((i + 1, val));
+                }
+            }
+        }
+        if let Some((i, val)) = idx {
+            kwd.update(i, &val.to_string(), &db)?;
+            self.client.send_notice(target, kwd.format_entry(i).unwrap())?;
+        } else {
+            let val = if is_incr { 1 } else { -1 };
+            let idx = kwd.learn(nick, &val.to_string(), &db)?;
+            self.client
+                .send_notice(target, kwd.format_entry(idx).unwrap())?;
+        }
+        Ok(())
+    }
+    pub fn handle_query(
+        &mut self,
+        target: &str,
+        nick: &str,
+        chan: &str,
+        query: Captures,
+    ) -> Result<(), Error> {
+        let db = self.pg.get()?;
+        let subj = &query["subj"];
+        let idx = match query.name("idx") {
+            Some(i) => {
+                let i = i.as_str();
+                match &i[1..(i.len() - 1)] {
+                    "*" => Some(-1),
+                    x => x.parse::<usize>().map(|x| x as i32).ok(),
+                }
+            }
+            None => None,
+        };
+        debug!("Querying {} with idx {:?}", subj, idx);
+        match KeywordDetails::get(subj, chan, &db)? {
+            Some(kwd) => {
+                if let Some(mut idx) = idx {
+                    if idx == -1 {
+                        // 'get all entries' ('*' parses into this)
+                        for i in 0..kwd.entries.len() {
+                            self.client.send_notice(
+                                nick,
+                                format!("[{}] {}", chan, kwd.format_entry(i + 1).unwrap()),
+                            )?;
+                        }
+                    } else {
+                        if idx == 0 {
+                            idx = 1;
+                        }
+                        if let Some(ent) = kwd.format_entry(idx as _) {
+                            self.client.send_notice(target, ent)?;
+                        } else {
+                            let pluralised = if kwd.entries.len() == 1 {
+                                "entry"
+                            } else {
+                                "entries"
+                            };
+                            self.client.send_notice(
+                                target,
+                                format!(
+                                    "\x02{}\x0f: only has \x02\x0304{}\x0f {}",
+                                    subj,
+                                    kwd.entries.len(),
+                                    pluralised
+                                ),
+                            )?;
+                        }
+                    }
+                } else {
+                    let entry = if kwd.entries.len() < 2 {
+                        1 // because [1, 1) does not a range make
+                    } else {
+                        self.rng.gen_range(1, kwd.entries.len())
+                    };
+                    if let Some(ent) = kwd.format_entry(entry) {
+                        self.client.send_notice(target, ent)?;
+                    } else {
+                        self.client
+                            .send_notice(target, format!("\x02{}\x0f: no entries yet", subj))?;
+                    }
+                }
+            }
+            None => {
+                self.client
+                    .send_notice(target, format!("\x02{}\x0f: never heard of it", subj))?;
+            }
+        }
+        Ok(())
+    }
+    pub fn handle_privmsg(&mut self, from: &str, chan: &str, msg: &str) -> Result<(), Error> {
+        lazy_static! {
+            static ref LEARN_RE: Regex =
+                Regex::new(r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*):\s*(?P<val>.*)"#).unwrap();
+            static ref QUERY_RE: Regex =
+                Regex::new(r#"^\?\?\s*(?P<subj>[^\[:]*)(?P<idx>\[[^\]]+\])?"#).unwrap();
+            static ref QLAST_RE: Regex = Regex::new(r#"^\?\?\s*(?P<subj>[^\[:]*)!"#).unwrap();
+            static ref INCREMENT_RE: Regex =
+                Regex::new(r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*)(?P<incrdecr>\+\+|\-\-)"#)
+                    .unwrap();
+            static ref MOVE_RE: Regex = Regex::new(
+                r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*)(?P<idx>\[[^\]]+\])->(?P<new_idx>.*)"#
+            )
+            .unwrap();
+        }
+        let nick = from.split("!").next().ok_or(format_err!(
+            "Received PRIVMSG from a source without nickname (failed to split n!u@h)"
+        ))?;
+        let target = if chan.starts_with("#") { chan } else { nick };
+        debug!("[{}] <{}> {}", chan, nick, msg);
+        if let Some(learn) = LEARN_RE.captures(msg) {
+            self.handle_learn(target, nick, chan, learn)?;
+        } else if let Some(qlast) = QLAST_RE.captures(msg) {
+            self.handle_insert_last_quote(target, nick, chan, qlast)?;
+        } else if let Some(mv) = MOVE_RE.captures(msg) {
+            self.handle_move(target, nick, chan, mv)?;
+        } else if let Some(icr) = INCREMENT_RE.captures(msg) {
+            self.handle_increment(target, nick, chan, icr)?;
+        } else if let Some(query) = QUERY_RE.captures(msg) {
+            self.handle_query(target, nick, chan, query)?;
+        } else {
+            let chan_lastmsgs = self
+                .last_msgs
+                .entry(chan.to_string())
+                .or_insert(HashMap::new());
+            chan_lastmsgs.insert(nick.to_string().to_ascii_lowercase(), msg.to_string());
+        }
+        Ok(())
+    }
+    pub fn handle_msg(&mut self, m: Message) -> Result<(), Error> {
+        match m.command {
+            Command::PRIVMSG(channel, message) => {
+                if let Some(src) = m.prefix {
+                    if let Err(e) = self.handle_privmsg(&src, &channel, &message) {
+                        warn!("error handling command in {} (src {}): {}", channel, src, e);
+                        if let Some(nick) = src.split("!").next() {
+                            self.report_error(nick, &channel, e)?;
+                        }
+                    }
+                }
+            }
+            Command::INVITE(nick, channel) => {
+                if self.cfg.admins.contains(&nick) {
+                    info!("Joining {} after admin invite", channel);
+                    self.client.send_join(channel)?;
+                }
+            }
+            _ => {}
+        }
+        Ok(())
+    }
+}
+fn main() -> Result<(), Error> {
+    println!("[+] loading configuration");
+    let default_log_filter = "paroxysm=info".to_string();
+    let mut settings = config::Config::default();
+    settings.merge(config::Environment::with_prefix("PARX"))?;
+    let cfg: Config = settings.try_into()?;
+    let env = env_logger::Env::new()
+        .default_filter_or(cfg.log_filter.clone().unwrap_or(default_log_filter));
+    env_logger::init_from_env(env);
+    info!("paroxysm starting up");
+    info!("connecting to database at {}", cfg.database_url);
+    let pg = Pool::new(ConnectionManager::new(&cfg.database_url))?;
+    info!("connecting to IRC using config {}", cfg.irc_config_path);
+    let client = IrcClient::new(&cfg.irc_config_path)?;
+    client.identify()?;
+    let st = client.stream();
+    let mut app = App {
+        client,
+        pg,
+        cfg,
+        rng: thread_rng(),
+        last_msgs: HashMap::new(),
+    };
+    info!("running!");
+    st.for_each_incoming(|m| {
+        if let Err(e) = app.handle_msg(m) {
+            warn!("Error processing message: {}", e);
+        }
+    })?;
+    Ok(())
+}
diff --git a/fun/tvldb/src/models.rs b/fun/tvldb/src/models.rs
new file mode 100644
index 000000000000..78ddce0af96c
--- /dev/null
+++ b/fun/tvldb/src/models.rs
@@ -0,0 +1,33 @@
+use crate::schema::{entries, keywords};
+use chrono::NaiveDateTime;
+
+#[derive(Queryable)]
+pub struct Keyword {
+    pub id: i32,
+    pub name: String,
+    pub chan: String,
+}
+#[derive(Queryable)]
+pub struct Entry {
+    pub id: i32,
+    pub keyword_id: i32,
+    pub idx: i32,
+    pub text: String,
+    pub creation_ts: NaiveDateTime,
+    pub created_by: String,
+}
+#[derive(Insertable)]
+#[table_name = "keywords"]
+pub struct NewKeyword<'a> {
+    pub name: &'a str,
+    pub chan: &'a str,
+}
+#[derive(Insertable)]
+#[table_name = "entries"]
+pub struct NewEntry<'a> {
+    pub keyword_id: i32,
+    pub idx: i32,
+    pub text: &'a str,
+    pub creation_ts: NaiveDateTime,
+    pub created_by: &'a str,
+}
diff --git a/fun/tvldb/src/schema.rs b/fun/tvldb/src/schema.rs
new file mode 100644
index 000000000000..ef4044531ee7
--- /dev/null
+++ b/fun/tvldb/src/schema.rs
@@ -0,0 +1,18 @@
+table! {
+    entries (id) {
+        id -> Int4,
+        keyword_id -> Int4,
+        idx -> Int4,
+        text -> Varchar,
+        creation_ts -> Timestamp,
+        created_by -> Varchar,
+    }
+}
+
+table! {
+    keywords (id) {
+        id -> Int4,
+        name -> Varchar,
+        chan -> Varchar,
+    }
+}
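
The bot's command grammar lives entirely in the lazy_static regexes inside handle_privmsg above: "??foo: bar" teaches an entry, "??foo" and "??foo[2]" query, "??foo++"/"??foo--" keep a per-day counter, and "??foo[1]->2" moves entries (a negative target index deletes). As a quick standalone illustration of that grammar (not part of this commit; it only needs the regex crate, which main.rs already depends on):

use regex::Regex;

fn main() {
    // LEARN_RE and QUERY_RE copied verbatim from handle_privmsg in main.rs.
    let learn_re =
        Regex::new(r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*):\s*(?P<val>.*)"#).unwrap();
    let query_re =
        Regex::new(r#"^\?\?\s*(?P<subj>[^\[:]*)(?P<idx>\[[^\]]+\])?"#).unwrap();

    // "??tvl: The Virus Lounge" parses as a learn command for keyword "tvl" ...
    let caps = learn_re.captures("??tvl: The Virus Lounge").unwrap();
    assert_eq!(&caps["subj"], "tvl");
    assert_eq!(&caps["val"], "The Virus Lounge");

    // ... and "??tvl[2]" as a query for entry 2 of that keyword.
    let caps = query_re.captures("??tvl[2]").unwrap();
    assert_eq!(&caps["subj"], "tvl");
    assert_eq!(caps.name("idx").map(|m| m.as_str()), Some("[2]"));

    println!("command grammar behaves as expected");
}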
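
One detail that is easy to miss in the diff: the format string in add_zwsp_to_name contains a zero-width space, which is invisible in the rendered patch, so it is spelled with its \u{200B} escape above. A small self-contained sketch of what that helper does (presumably so that quoting a keyword named after someone's nick does not ping them on IRC):

fn add_zwsp_to_name(name: &str) -> Option<String> {
    // Split after the first character and re-join with a zero-width space.
    let second_index = name.char_indices().nth(1).map(|(i, _)| i)?;
    let (start, end) = name.split_at(second_index);
    Some(format!("{}\u{200B}{}", start, end))
}

fn main() {
    let decorated = add_zwsp_to_name("tazjin").unwrap();
    // Same visible text, but one extra (invisible) character after the 't'.
    assert_eq!(decorated.chars().count(), "tazjin".chars().count() + 1);
    assert_eq!(decorated.chars().nth(1), Some('\u{200B}'));
    println!("{:?}", decorated);
}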