Update dependencies, remove unnecessary RwLock on database

commit 7b5626c279 (parent 7526a82bb7)
20 changed files with 1114 additions and 983 deletions
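
The substance of the commit, before the per-file hunks: the sqlx connection pool stored in serenity's TypeMap had been wrapped in a tokio::sync::RwLock it never needed, since Pool<Sqlite> is already Send + Sync and handles its own synchronization. A minimal sketch of the resulting pattern, reusing the DataBase key from this diff (pool_from_ctx is a made-up illustration, not code from the repository):

use std::sync::Arc;

use serenity::{client::Context, prelude::TypeMapKey};
use sqlx::{Pool, Sqlite};

pub struct DataBase;

impl TypeMapKey for DataBase {
  // Previously Arc<RwLock<Pool<Sqlite>>>; the extra lock added nothing because
  // the pool already hands out connections safely through a shared reference.
  type Value = Arc<Pool<Sqlite>>;
}

// Hypothetical helper: how a handler fetches the pool once the lock is gone.
async fn pool_from_ctx(ctx: &Context) -> Arc<Pool<Sqlite>> {
  let data_read = ctx.data.read().await;
  data_read.get::<DataBase>().expect("Expected DataBase in TypeMap.").clone()
}

Call sites that previously needed a second db_lock.read().await now pass &db straight to the query helpers, which is what the repeated db_lock to db hunks below do.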
@@ -7,4 +7,5 @@ fn_params_layout = "Compressed"
 struct_lit_width = 0
 tab_spaces = 2
 use_small_heuristics = "Max"
-imports_granularity = "Crate"
+# imports_granularity = "Crate"
+
Cargo.lock (generated, 1889 lines changed)
File diff suppressed because it is too large.

Cargo.toml (25 lines changed)
@@ -21,11 +21,19 @@ name = "cleanup_committee"

 [dependencies]
 # discord library
-serenity = { version = "0.12", default-features = false, features = ["client", "gateway", "rustls_backend", "model", "cache"] }
+serenity = { version = "0.12", default-features = false, features = [
+  "client",
+  "gateway",
+  "rustls_backend",
+  "model",
+  "cache",
+] }
 tokio = { version = "1", features = ["macros", "rt-multi-thread", "full"] }

 # wolves api
-wolves_oxidised = { git = "https://forgejo.skynet.ie/Skynet/wolves-oxidised.git", features = ["unstable"] }
+wolves_oxidised = { git = "https://forgejo.skynet.ie/Skynet/wolves-oxidised.git", features = [
+  "unstable",
+] }
 # wolves_oxidised = { path = "../wolves-oxidised", features = ["unstable"] }

 # to make the http requests
@@ -47,13 +55,12 @@ chrono = "0.4"
 lettre = "0.11"
 maud = "0.27"

-toml = "0.8.23"
+toml = "0.9.5"
 serde = "1.0"

 # for image conversion
-eyre = "0.6.8"
-color-eyre = "0.6.2"
-usvg-text-layout = "0.29.0"
-usvg = "0.29.0"
-resvg = "0.29.0"
-tiny-skia = "0.8.3"
+eyre = "0.6.12"
+color-eyre = "0.6.5"
+usvg = "0.45.1"
+resvg = "0.45.1"
+tiny-skia = "0.11.4"

@@ -41,7 +41,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   if let Err(why) = client.start().await {

@@ -69,13 +69,11 @@ impl EventHandler for Handler {
   async fn guild_members_chunk(&self, ctx: Context, chunk: GuildMembersChunkEvent) {
     if (chunk.chunk_index + 1) == chunk.chunk_count {
       println!("Cache built successfully!");
-      let db_lock = {
+      let db = {
         let data_read = ctx.data.read().await;
         data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
       };
-
-      let db = db_lock.read().await;

       let config_lock = {
         let data_read = ctx.data.read().await;
         data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()

@@ -36,7 +36,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   if let Err(why) = client.start().await {

@@ -38,7 +38,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   if let Err(why) = client.start().await {

@@ -35,7 +35,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   if let Err(why) = client.start().await {

@@ -50,11 +50,10 @@ impl EventHandler for Handler {
     let ctx = Arc::new(ctx);
     println!("{} is connected!", ready.user.name);

-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -40,7 +40,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   if let Err(why) = client.start().await {

@@ -79,13 +79,11 @@ impl EventHandler for Handler {
 }

 async fn check_bulk(ctx: &Context) {
-  let db_lock = {
+  let db = {
     let data_read = ctx.data.read().await;
     data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
   };
-
-  let db = db_lock.read().await;

   for server_config in get_server_config_bulk(&db).await {
     normal::update_server(ctx, &server_config, &[], &[]).await;
   }

@@ -56,11 +56,10 @@ pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
     return "Please provide a valid channel for ``Bot Channel``".to_string();
   };

-  let db_lock = {
+  let db = {
     let data_read = ctx.data.read().await;
     data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
   };
-  let db = db_lock.read().await;

   let server_data = Servers {
     server: command.guild_id.unwrap_or_default(),

@@ -27,11 +27,10 @@ pub mod committee {
       false
     };

-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let mut cs = vec![];
     // pull it from a DB

@@ -95,11 +94,10 @@ pub mod servers {
   use std::collections::HashMap;

   pub async fn run(_command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let mut committees = HashMap::new();
     if let Some(x) = get_committees(&db).await {

@@ -23,11 +23,10 @@ pub(crate) mod user {
   use sqlx::Error;

   pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -229,11 +228,10 @@ pub(crate) mod server {
       return String::from("Expected Server ID");
     };

-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     match add_server(&db, &g_id, &server_minecraft).await {
       Ok(_) => {}

@@ -290,11 +288,10 @@ pub(crate) mod server {
       Some(x) => x,
     };

-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let servers = get_minecraft_config_server(&db, g_id).await;

@@ -366,11 +363,10 @@ pub(crate) mod server {
       return String::from("Expected Server ID");
     };

-    let db_lock = {
-      let data_read = ctx.data.read().await;
-      data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
+    let db = {
+      let data = ctx.data.read().await;
+      data.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     match server_remove(&db, &g_id, &server_minecraft).await {
       Ok(_) => {}

@@ -62,11 +62,10 @@ pub mod edit {
       false
     };

-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let server = command.guild_id.unwrap_or_default();
     let server_data = RoleAdder {

@@ -18,11 +18,10 @@ pub(crate) mod admin {
   use super::*;

   pub async fn run(_command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -69,11 +68,10 @@ pub(crate) mod user {
   use sqlx::{Pool, Sqlite};

   pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_toml = get_config_icons::minimal();

@@ -145,11 +143,10 @@ pub(crate) mod user {
   use sqlx::{Pool, Sqlite};

   pub async fn run(_command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_toml = get_config_icons::minimal();

@@ -21,11 +21,10 @@ pub mod link {
   use serenity::all::{CommandDataOption, CommandDataOptionValue, CommandInteraction};

   pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -314,11 +313,10 @@ pub mod verify {
   use sqlx::Error;

   pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Database in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     // check if user has used /link_wolves
     let details = if let Some(x) = get_verify_from_db(&db, &command.user.id).await {

@@ -494,11 +492,10 @@ pub mod unlink {
   use sqlx::{Pool, Sqlite};

   pub async fn run(command: &CommandInteraction, ctx: &Context) -> String {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Databse in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     // dosent matter if there is one or not, it will be removed regardless
     delete_link(&db, &command.user.id).await;

@@ -12,11 +12,10 @@ use sqlx::{
   Error, FromRow, Pool, Row, Sqlite,
 };
 use std::{str::FromStr, sync::Arc};
-use tokio::sync::RwLock;

 pub struct DataBase;
 impl TypeMapKey for DataBase {
-  type Value = Arc<RwLock<Pool<Sqlite>>>;
+  type Value = Arc<Pool<Sqlite>>;
 }

 #[derive(Debug, Clone, Deserialize, Serialize)]

@@ -4,11 +4,11 @@
 use std::{
   ffi::OsStr,
   path::{Path, PathBuf},
+  sync::Arc,
 };

 // use clap::builder::OsStr;
 use color_eyre::{eyre::bail, Result};
-use usvg_text_layout::TreeTextToPath;

 #[derive(Debug, Clone)]
 pub struct Args {

@@ -37,7 +37,7 @@ enum ColorType {

 #[derive(Debug, Clone)]
 pub struct Renderer {
-  fontdb: usvg_text_layout::fontdb::Database,
+  fontdb: Arc<usvg::fontdb::Database>,
   colors: ColorType,
   size: (u32, u32),
   pub count: u64,

@@ -45,40 +45,35 @@ pub struct Renderer {

 impl Renderer {
   pub fn new(args: &Args) -> Result<Self> {
-    let mut db = usvg_text_layout::fontdb::Database::new();
+    let mut db = usvg::fontdb::Database::default();
     db.load_system_fonts();

     let mut this = Self {
-      fontdb: db,
+      fontdb: Arc::new(db),
       colors: ColorType::None,
       size: (args.width, args.height),
       count: 0,
     };

-    let colors = if args.colors.contains(':') {
+    this.colors = if args.colors.contains(':') {
       //? object
-      let obj = args
-        .colors
-        .split(',')
-        .map(|s| {
-          let s = s.split(':').collect::<Vec<&str>>();
-
-          if s.len() < 2 {
-            dbg!("Invalid color object, try checking help");
-            return None;
-          }
-
-          Some((s[0].to_string(), s[1].to_string()))
-        })
-        .collect::<Vec<Option<(String, String)>>>();
+      let obj = args.colors.split(',').map(|s| {
+        let mut iter = s.split(':');
+        let [Some(a), Some(b), None] = std::array::from_fn(|_| iter.next()) else {
+          dbg!("Invalid color object, try checking help");
+          return None;
+        };
+
+        Some((a.to_string(), b.to_string()))
+      });

-      let mut colors = Vec::new();
-
-      for c in obj.into_iter().flatten() {
-        std::fs::create_dir_all(args.output.join(&c.0))?;
-
-        colors.push(c);
-      }
+      let colors = obj
+        .flatten()
+        .map(|c| {
+          std::fs::create_dir_all(args.output.join(&c.0))?;
+          Ok(c)
+        })
+        .collect::<std::io::Result<_>>()?;

       ColorType::Object(colors)
     } else {

@@ -88,17 +83,17 @@ impl Renderer {
       // })
       // .collect::<Vec<String>>();

-      let mut colors = Vec::new();
-
-      for color in args.colors.split(',') {
-        std::fs::create_dir_all(args.output.join(color))?;
-
-        colors.push(color.to_string())
-      }
+      let colors = args
+        .colors
+        .split(',')
+        .map(|color| -> std::io::Result<String> {
+          std::fs::create_dir_all(args.output.join(color))?;
+          Ok(color.to_string())
+        })
+        .collect::<std::io::Result<_>>()?;

       ColorType::Array(colors)
     };

-    this.colors = colors;
     Ok(this)
   }

@@ -144,10 +139,11 @@ impl Renderer {
     let opt = usvg::Options {
       // Get file's absolute directory.
       resources_dir: std::fs::canonicalize(fi).ok().and_then(|p| p.parent().map(|p| p.to_path_buf())),
+      fontdb: self.fontdb.clone(),
       ..Default::default()
     };

-    let mut tree = match usvg::Tree::from_data(svg.as_bytes(), &opt) {
+    let tree = match usvg::Tree::from_data(svg.as_bytes(), &opt) {
       Ok(v) => v,
       Err(_) => {
         dbg!("Failed to parse {fi:?}");

@@ -155,14 +151,17 @@ impl Renderer {
       }
     };

-    tree.convert_text(&self.fontdb);
-
     let mut pixmap = tiny_skia::Pixmap::new(self.size.0, self.size.1).unwrap();
+    let scale = {
+      let x = tree.size().width() / self.size.0 as f32;
+      let y = tree.size().height() / self.size.0 as f32;
+      x.min(y)
+    };

     // log::info!("Rendering {fo:?}");

     //? maybe handle this and possibly throw error if its none
-    let _ = resvg::render(&tree, usvg::FitTo::Size(self.size.0, self.size.1), tiny_skia::Transform::default(), pixmap.as_mut());
+    resvg::render(&tree, usvg::Transform::default().post_scale(scale, scale), &mut pixmap.as_mut());

     pixmap.save_png(fo)?;
     self.count += 1;
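
Taken together, the renderer hunks above migrate from the usvg/resvg 0.29 API (separate usvg-text-layout crate, resvg::render with FitTo) to 0.45, where the font database travels inside usvg::Options and render takes a plain transform. A condensed sketch of the new flow, assuming the 0.45-era signatures used in the diff; render_svg is a made-up helper, not code from the repository:

use std::{path::Path, sync::Arc};

use color_eyre::{eyre::bail, Result};

// Hypothetical helper (not in the diff): the flow the hunks above add up to.
fn render_svg(svg: &str, fontdb: Arc<usvg::fontdb::Database>, size: (u32, u32), out: &Path) -> Result<()> {
  // Fonts are passed through Options; there is no separate
  // usvg-text-layout pass or tree.convert_text() call any more.
  let opt = usvg::Options { fontdb, ..Default::default() };

  let tree = match usvg::Tree::from_data(svg.as_bytes(), &opt) {
    Ok(v) => v,
    Err(e) => bail!("failed to parse SVG: {e}"),
  };

  let mut pixmap = tiny_skia::Pixmap::new(size.0, size.1).unwrap();

  // render() takes a transform instead of FitTo; scale the tree to fit the pixmap.
  let scale = (size.0 as f32 / tree.size().width()).min(size.1 as f32 / tree.size().height());
  resvg::render(&tree, usvg::Transform::default().post_scale(scale, scale), &mut pixmap.as_mut());

  pixmap.save_png(out)?;
  Ok(())
}
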
@@ -17,13 +17,11 @@ pub mod normal {
   }

   pub async fn update_server(ctx: &Context, server: &Servers, remove_roles: &[Option<RoleId>], members_changed: &[UserId]) {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Database in TypeMap.").clone()
     };
-
-    let db = db_lock.read().await;

     let Servers {
       server,
       role_past,

@@ -174,13 +172,11 @@ pub mod committee {
   use std::collections::HashMap;

   pub async fn check_committee(ctx: &Context) {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
     };
-
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;
       data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()

@@ -69,11 +69,10 @@ pub mod cns {
   }

   pub async fn get_wolves(ctx: &Context) {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Database in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -224,11 +223,10 @@ pub mod committees {
   }

   pub async fn get_cns(ctx: &Context) {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Database in TypeMap.").clone()
     };
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;

@@ -106,23 +106,20 @@ pub fn get_config() -> Config {
     }
   }
   if let Ok(x) = env::var("COMMITTEE_ROLE") {
-    if let Ok(x) = x.trim().parse::<u64>() {
+    let x = x.trim().parse().unwrap();
     config.committee_role = RoleId::new(x);
-    }
   }
   if let Ok(x) = env::var("COMMITTEE_CATEGORY") {
     for part in x.split(',') {
-      if let Ok(x) = part.trim().parse::<u64>() {
+      let x = part.trim().parse().unwrap();
       config.committee_category.push(ChannelId::new(x));
-      }
     }
   }

   if let Ok(x) = env::var("COMPSOC_DISCORD") {
-    if let Ok(x) = x.trim().parse::<u64>() {
+    let x = x.trim().parse().unwrap();
     config.compsoc_server = GuildId::new(x);
-    }
   }

   config
 }

src/main.rs (10 lines changed)

@@ -42,13 +42,11 @@ impl EventHandler for Handler {

   // handles previously linked accounts joining the server
   async fn guild_member_addition(&self, ctx: Context, new_member: Member) {
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
     };
-
-    let db = db_lock.read().await;

     let config_lock = {
       let data_read = ctx.data.read().await;
       data_read.get::<Config>().expect("Expected Config in TypeMap.").clone()

@@ -110,13 +108,11 @@ Sign up on [UL Wolves]({}) and go to https://discord.com/channels/{}/{} and use
   // handles role updates
   async fn guild_member_update(&self, ctx: Context, _old_data: Option<Member>, new_data: Option<Member>, _: GuildMemberUpdateEvent) {
     // get config/db
-    let db_lock = {
+    let db = {
       let data_read = ctx.data.read().await;
       data_read.get::<DataBase>().expect("Expected Config in TypeMap.").clone()
     };
-
-    let db = db_lock.read().await;

     // check if the role changed is part of the oens for this server
     if let Some(x) = new_data {
       on_role_change(&db, &ctx, x).await;

@@ -305,7 +301,7 @@ async fn main() {
     let mut data = client.data.write().await;

     data.insert::<Config>(Arc::new(RwLock::new(config)));
-    data.insert::<DataBase>(Arc::new(RwLock::new(db)));
+    data.insert::<DataBase>(Arc::new(db));
   }

   // Finally, start a single shard, and start listening to events.