feat: now have two scripts that can run on a timer
parent cb363e6558
commit ca6ae993c5
7 changed files with 538 additions and 239 deletions
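For context, the timer pattern involved here is the one visible in the diff below: background tasks spawned with tokio::spawn from the ready handler, each looping over a job and a tokio::time::sleep on its own interval (bulk_check on timing_update, fetch_accounts on timing_fetch). A minimal, self-contained sketch of that pattern, with placeholder task bodies and intervals rather than the bot's real ones:

use std::{sync::Arc, time::Duration};

// Sketch only: two independent loops, each on its own timer, mirroring the
// bulk_check / fetch_accounts tasks in the diff. Requires the tokio crate
// with the "macros", "rt-multi-thread" and "time" features enabled.
#[tokio::main]
async fn main() {
    // Stand-in for the shared Context the real tasks clone an Arc of.
    let shared = Arc::new(String::from("shared state"));

    let s1 = Arc::clone(&shared);
    tokio::spawn(async move {
        loop {
            println!("update task sees: {s1}");
            // Placeholder interval; the bot reads its own timing from config.
            tokio::time::sleep(Duration::from_secs(60)).await;
        }
    });

    let s2 = Arc::clone(&shared);
    tokio::spawn(async move {
        loop {
            println!("fetch task sees: {s2}");
            tokio::time::sleep(Duration::from_secs(3600)).await;
        }
    });

    // Keep the runtime alive briefly so the loops get to run in this sketch;
    // the real bot blocks on client.start() instead.
    tokio::time::sleep(Duration::from_secs(5)).await;
}

In the diff itself the spawns are additionally guarded by an is_loop_running AtomicBool so a gateway reconnect does not start duplicate loops.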
src/main.rs (266 changed lines)
@@ -1,53 +1,49 @@
use dotenvy::dotenv;
use serenity::{
async_trait,
client::{Context, EventHandler},
model::{
gateway::{GatewayIntents, Ready},
guild::Member,
id::GuildId,
guild,
prelude::RoleId,
},
prelude::TypeMapKey,
Client,
};
use std::{
env,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
time::Duration,
};
use std::sync::Arc;

use skynet_discord_bot::{db_init, get_config, get_server_config, get_server_member, Config, DataBase};
use tokio::sync::RwLock;

struct Handler {
is_loop_running: AtomicBool,
}
struct Handler;

#[async_trait]
impl EventHandler for Handler {
async fn guild_member_addition(&self, ctx: Context, mut new_member: Member) {
let config_lock = {
async fn guild_member_addition(&self, ctx: Context, mut new_member: guild::Member) {
let db_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()
data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
};
let config = config_lock.read().await;

let members_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Members>().expect("Expected Members in TypeMap.").clone()
let db = db_lock.read().await;
let config = match get_server_config(&db, &new_member.guild_id).await {
None => return,
Some(x) => x,
};
let members = members_lock.read().await;

if members.contains(&new_member.user.name) {
if get_server_member(&db, &new_member.guild_id, &new_member).await.is_some() {
let mut roles = vec![];

if !new_member.roles.contains(&config.member_role_past) {
roles.push(config.member_role_past);
if let Some(role) = &config.role_past {
let role = RoleId::from(*role as u64);
if !new_member.roles.contains(&role) {
roles.push(role.to_owned());
}
}
if !new_member.roles.contains(&config.member_role_current) {
roles.push(config.member_role_current);

if let Some(role) = &config.role_current {
let role = RoleId::from(*role as u64);
if !new_member.roles.contains(&role) {
roles.push(role.to_owned());
}
}

if let Err(e) = new_member.add_roles(&ctx, &roles).await {
@@ -56,171 +52,32 @@ impl EventHandler for Handler {
}
}

async fn ready(&self, ctx: Context, ready: Ready) {
let ctx = Arc::new(ctx);
println!("{} is connected!", ready.user.name);

let config_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()
};
let config = config_lock.read().await;
let timing_update = config.timing_update;
let timing_fetch = config.timing_fetch;

if !self.is_loop_running.load(Ordering::Relaxed) {
// We have to clone the Arc, as it gets moved into the new thread.
let ctx1 = Arc::clone(&ctx);
// tokio::spawn creates a new green thread that can run in parallel with the rest of
// the application.
tokio::spawn(async move {
loop {
// We clone Context again here, because Arc is owned, so it moves to the
// new function.
bulk_check(Arc::clone(&ctx1)).await;
tokio::time::sleep(Duration::from_secs(timing_update)).await;
}
});

let ctx2 = Arc::clone(&ctx);
tokio::spawn(async move {
loop {
fetch_accounts(Arc::clone(&ctx2)).await;
tokio::time::sleep(Duration::from_secs(timing_fetch)).await;
}
});

// Now that the loop is running, we set the bool to true
self.is_loop_running.swap(true, Ordering::Relaxed);
}
}
}

#[derive(Default, Debug)]
struct MembersCount {
members: i32,
members_current: i32,
}
struct MemberCounter;
impl TypeMapKey for MemberCounter {
type Value = Arc<RwLock<MembersCount>>;
}

struct Members;
impl TypeMapKey for Members {
type Value = Arc<RwLock<Vec<String>>>;
}
async fn bulk_check(ctx: Arc<Context>) {
let config_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()
};
let config = config_lock.read().await;

let members_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Members>().expect("Expected Members in TypeMap.").clone()
};
let members = members_lock.read().await;

let mut roles_set = [0, 0, 0];
let mut res = MembersCount {
members: 0,
members_current: 0,
};
if let Ok(x) = config.server.members(&ctx, None, None).await {
for mut member in x {
if members.contains(&member.user.name) {
let mut roles = vec![];

if !member.roles.contains(&config.member_role_past) {
roles_set[0] += 1;
roles.push(config.member_role_past);
}
if !member.roles.contains(&config.member_role_current) {
roles_set[1] += 1;
roles.push(config.member_role_current);
}

if let Err(e) = member.add_roles(&ctx, &roles).await {
println!("{:?}", e);
}
} else if member.roles.contains(&config.member_role_current) {
roles_set[2] += 1;
// if they are not a current member and have the role then remove it
if let Err(e) = member.remove_role(&ctx, &config.member_role_current).await {
println!("{:?}", e);
}
}

if member.roles.contains(&config.member_role_past) {
res.members += 1;
}
if member.roles.contains(&config.member_role_current) {
res.members_current += 1;
}
}
}
// small bit of logging to note changes over time
println!("Changes: New: +{}, Current: +{}/-{}", roles_set[0], roles_set[1], roles_set[2]);

{
let data_read = ctx.data.read().await;
let counter_lock = data_read.get::<MemberCounter>().expect("Expected MemberCounter in TypeMap.").clone();
// The MembersCount is wrapped in an RwLock; since we want to write to it, we will
// open the lock in write mode.
let mut counter = counter_lock.write().await;

// And we write the updated member counts to it.
counter.members_current = res.members_current;
counter.members = res.members;
};
}

async fn fetch_accounts(ctx: Arc<Context>) {
let config_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()
};
let config = config_lock.read().await;
let auth = &config.auth;
let ldap_api = &config.ldap_api;
let url = format!("{}/ldap/discord?auth={}", ldap_api, auth);
if let Ok(result) = surf::get(url).recv_json::<Vec<String>>().await {
let members_lock = {
let data_read = ctx.data.read().await;
data_read.get::<Members>().expect("Expected Members in TypeMap.").clone()
};
let mut accounts = members_lock.write().await;
*accounts = result;
async fn ready(&self, _ctx: Context, ready: Ready) {
println!("[Main] {} is connected!", ready.user.name);
}
}

#[tokio::main]
async fn main() {
let config = get_config();
let db = match db_init(&config).await {
Ok(x) => x,
Err(_) => return,
};

// Intents are a bitflag, bitwise operations can be used to dictate which intents to use
let intents = GatewayIntents::GUILDS | GatewayIntents::GUILD_MESSAGES | GatewayIntents::MESSAGE_CONTENT | GatewayIntents::GUILD_MEMBERS;
// Build our client.
let mut client = Client::builder(&config.discord_token, intents)
.event_handler(Handler {
is_loop_running: AtomicBool::new(false),
})
.event_handler(Handler {})
.await
.expect("Error creating client");

{
let mut data = client.data.write().await;

// will keep track of how many past and current members we have
data.insert::<MemberCounter>(Arc::new(RwLock::new(MembersCount::default())));

// a list of all current members
data.insert::<Members>(Arc::new(RwLock::new(vec![])));

// make config available to all; strangely it's easier to keep it in a shared lock state.
data.insert::<Config>(Arc::new(RwLock::new(config)));
data.insert::<DataBase>(Arc::new(RwLock::new(db)));
}

// Finally, start a single shard, and start listening to events.
@@ -231,62 +88,3 @@ async fn main() {
println!("Client error: {:?}", why);
}
}

struct Config {
server: GuildId,
member_role_current: RoleId,
member_role_past: RoleId,
ldap_api: String,
auth: String,
timing_update: u64,
timing_fetch: u64,
discord_token: String,
}
impl TypeMapKey for Config {
type Value = Arc<RwLock<Config>>;
}
fn get_config() -> Config {
dotenv().ok();

// reasonable defaults
let mut config = Config {
server: Default::default(),
member_role_current: Default::default(),
member_role_past: Default::default(),
ldap_api: "https://api.account.skynet.ie".to_string(),
auth: "".to_string(),
timing_update: 0,
timing_fetch: 0,
discord_token: "".to_string(),
};

if let Ok(x) = env::var("DISCORD_SERVER") {
config.server = GuildId::from(str_to_num::<u64>(&x));
}
if let Ok(x) = env::var("DISCORD_ROLE_CURRENT") {
config.member_role_current = RoleId::from(str_to_num::<u64>(&x));
}
if let Ok(x) = env::var("DISCORD_ROLE_PAST") {
config.member_role_past = RoleId::from(str_to_num::<u64>(&x));
}
if let Ok(x) = env::var("LDAP_API") {
config.ldap_api = x.trim().to_string();
}
if let Ok(x) = env::var("LDAP_DISCORD_AUTH") {
config.auth = x.trim().to_string();
}
if let Ok(x) = env::var("DISCORD_TIMING_UPDATE") {
config.timing_update = str_to_num::<u64>(&x);
}
if let Ok(x) = env::var("DISCORD_TIMING_FETCH") {
config.timing_fetch = str_to_num::<u64>(&x);
}
if let Ok(x) = env::var("DISCORD_TOKEN") {
config.discord_token = x.trim().to_string();
}
config
}

fn str_to_num<T: std::str::FromStr + Default>(x: &str) -> T {
x.trim().parse::<T>().unwrap_or_default()
}
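Both versions of the file above share state through serenity's TypeMap: a unit struct implements TypeMapKey, the value is inserted into client.data at startup, and handlers clone the Arc back out of ctx.data. A minimal sketch of that pattern with an illustrative Counter key (not one of the bot's real keys):

use serenity::{client::Context, prelude::TypeMapKey};
use std::sync::Arc;
use tokio::sync::RwLock;

// Illustrative key: any unit struct can act as a TypeMap key; the associated
// type declares what is stored under it (here a counter, in the bot a Config
// or database handle).
struct Counter;
impl TypeMapKey for Counter {
    type Value = Arc<RwLock<u64>>;
}

// Clone the Arc out under a short read lock on ctx.data, then lock the value
// itself: the same two-step shape the handlers above use for Config, DataBase
// and Members.
async fn bump(ctx: &Context) -> u64 {
    let lock = {
        let data_read = ctx.data.read().await;
        data_read.get::<Counter>().expect("Counter inserted at startup").clone()
    };
    let mut value = lock.write().await;
    *value += 1;
    *value
}

The matching insert happens once at startup, e.g. client.data.write().await.insert::<Counter>(Arc::new(RwLock::new(0))), which is what the data.insert::<Config>() and data.insert::<DataBase>() calls in main() do for the bot's own keys.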