diff --git a/Cargo.lock b/Cargo.lock index 736ae57c..81030516 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -566,7 +566,9 @@ name = "arcadia-shared" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "indexmap", + "reqwest", "serde", "sqlx", ] @@ -608,9 +610,9 @@ dependencies = [ "env_logger", "envconfig", "futures", - "indexmap", "log", "parking_lot", + "percent-encoding", "reqwest", "serde", "serde_bencode", diff --git a/backend/api/.env.example b/backend/api/.env.example index 37257028..44ef84bb 100644 --- a/backend/api/.env.example +++ b/backend/api/.env.example @@ -29,7 +29,7 @@ POSTGRES_USER=arcadia POSTGRES_PASSWORD=password # Connection string for the database. -DATABASE_URL=postgresql://arcadia:password@localhost:5432/arcadia +DATABASE_URL=postgresql://arcadia:password@localhost:4321/arcadia ## Arcadia Configuration diff --git a/backend/api/src/handlers/affiliated_artists/create_affiliated_artists.rs b/backend/api/src/handlers/affiliated_artists/create_affiliated_artists.rs index cf8b6175..02b7bc66 100644 --- a/backend/api/src/handlers/affiliated_artists/create_affiliated_artists.rs +++ b/backend/api/src/handlers/affiliated_artists/create_affiliated_artists.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/affiliated_artists/remove_affiliated_artists.rs b/backend/api/src/handlers/affiliated_artists/remove_affiliated_artists.rs index 42d93d78..6dc7a50d 100644 --- a/backend/api/src/handlers/affiliated_artists/remove_affiliated_artists.rs +++ b/backend/api/src/handlers/affiliated_artists/remove_affiliated_artists.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{web::Data, HttpResponse}; use actix_web_lab::extract::Query; use arcadia_common::error::Result; diff --git 
a/backend/api/src/handlers/announces/handle_announce.rs b/backend/api/src/handlers/announces/handle_announce.rs index 0649bd4e..fbcfa013 100644 --- a/backend/api/src/handlers/announces/handle_announce.rs +++ b/backend/api/src/handlers/announces/handle_announce.rs @@ -1,164 +1,158 @@ -use crate::services::announce_service::is_torrent_client_allowed; -use crate::Arcadia; -use actix_web::{ - dev, - web::{Data, Path}, - FromRequest, HttpRequest, HttpResponse, ResponseError, -}; -use arcadia_common::{ - actix::HttpResponseBuilderExt, - error::announce::Error as AnnounceError, - models::tracker::announce::{Announce, AnnounceResponse, TorrentEvent}, -}; -use arcadia_storage::{redis::RedisPoolInterface, sqlx::types::ipnetwork::IpNetwork}; -use std::future::{self, Ready}; +// use crate::services::announce_service::is_torrent_client_allowed; +// use crate::Arcadia; +// use actix_web::{ +// dev, +// web::{Data, Path}, +// FromRequest, HttpRequest, HttpResponse, ResponseError, +// }; +// use arcadia_common::{ +// actix::HttpResponseBuilderExt, +// error::announce::Error as AnnounceError, +// models::tracker::announce::{Announce, AnnounceResponse, TorrentEvent}, +// }; +// use arcadia_storage::{redis::RedisPoolInterface, sqlx::types::ipnetwork::IpNetwork}; +// use std::future::{self, Ready}; -type Result = std::result::Result; +// type Result = std::result::Result; -#[derive(Debug)] -pub struct UserAgent(pub String); +// #[derive(Debug)] +// pub struct UserAgent(pub String); -impl std::ops::Deref for UserAgent { - type Target = str; +// impl std::ops::Deref for UserAgent { +// type Target = str; - fn deref(&self) -> &Self::Target { - self.0.deref() - } -} +// fn deref(&self) -> &Self::Target { +// self.0.deref() +// } +// } -#[derive(Debug, thiserror::Error)] -pub enum UserAgentExtractError { - #[error("no user agent")] - NoUserAgent, +// #[derive(Debug, thiserror::Error)] +// pub enum UserAgentExtractError { +// #[error("no user agent")] +// NoUserAgent, - #[error("not 
decodable as utf-8")] - ToStrError(#[from] actix_web::http::header::ToStrError), -} +// #[error("not decodable as utf-8")] +// ToStrError(#[from] actix_web::http::header::ToStrError), +// } -impl ResponseError for UserAgentExtractError { - fn error_response(&self) -> HttpResponse { - log::error!("The request generated this error: {self}"); - HttpResponse::BadRequest().body(format!("{self}")) - } -} +// impl ResponseError for UserAgentExtractError { +// fn error_response(&self) -> HttpResponse { +// log::error!("The request generated this error: {self}"); +// HttpResponse::BadRequest().body(format!("{self}")) +// } +// } -impl FromRequest for UserAgent { - type Error = UserAgentExtractError; - type Future = Ready>; +// impl FromRequest for UserAgent { +// type Error = UserAgentExtractError; +// type Future = Ready>; - fn from_request(req: &HttpRequest, _: &mut dev::Payload) -> Self::Future { - let user_agent = req - .headers() - .get("User-Agent") - .ok_or(UserAgentExtractError::NoUserAgent) - .and_then(|s| Ok(UserAgent(String::from(s.to_str()?)))); +// fn from_request(req: &HttpRequest, _: &mut dev::Payload) -> Self::Future { +// let agent = req +// .headers() +// .get("User-Agent") +// .ok_or(UserAgentExtractError::NoUserAgent) +// .and_then(|s| Ok(UserAgent(String::from(s.to_str()?)))); - future::ready(user_agent) - } -} +// future::ready(agent) +// } +// } -pub async fn exec( - arc: Data>, - passkey: Path, - user_agent: Option, - ann: Announce, - conn: dev::ConnectionInfo, -) -> Result { - if !is_torrent_client_allowed(&ann.peer_id, &arc.tracker.allowed_torrent_clients.clients) { - return Err(AnnounceError::TorrentClientNotInWhitelist); - } +// pub async fn exec( +// arc: Data>, +// passkey: Path, +// agent: Option, +// ann: Announce, +// conn: dev::ConnectionInfo, +// ) -> Result { +// if !is_torrent_client_allowed(&ann.peer_id, &arc.tracker.allowed_torrent_clients.clients) { +// return Err(AnnounceError::TorrentClientNotInWhitelist); +// } - let current_user = 
arc.pool.find_user_with_passkey(&passkey).await?; +// let current_user = arc.pool.find_user_with_passkey(&passkey).await?; - let torrent = arc.pool.find_torrent_with_id(&ann.info_hash).await?; +// let torrent = arc.pool.find_torrent_with_id(&ann.info_hash).await?; - let ip = conn - .realip_remote_addr() - .and_then(|ip| ip.parse::().ok()) - .unwrap(); +// let ip = conn +// .realip_remote_addr() +// .and_then(|ip| ip.parse::().ok()) +// .unwrap(); - if let Some(TorrentEvent::Stopped) = ann.event { - arc.pool - .remove_peer(&torrent.id, &ann.peer_id, &ip, ann.port) - .await; - //return HttpResponse::Ok().into(); - todo!(); - } +// if let Some(TorrentEvent::Stopped) = ann.event { +// arc.pool +// .remove_peer(&torrent.id, &ann.peer_id, &ip, ann.port) +// .await; +// //return HttpResponse::Ok().into(); +// todo!(); +// } - if let Some(TorrentEvent::Completed) = ann.event { - let _ = arc.pool.increment_torrent_completed(torrent.id).await; - } +// if let Some(TorrentEvent::Completed) = ann.event { +// let _ = arc.pool.increment_torrent_completed(torrent.id).await; +// } - let (old_real_uploaded, old_real_downloaded) = arc - .pool - .insert_or_update_peer( - &torrent.id, - &ip, - ¤t_user.id, - &ann, - user_agent.as_deref(), - ) - .await; +// let (old_real_uploaded, old_real_downloaded) = arc +// .pool +// .insert_or_update_peer(&torrent.id, &ip, ¤t_user.id, &ann, agent.as_deref()) +// .await; - let peers = arc - .pool - .find_torrent_peers(&torrent.id, ¤t_user.id) - .await; +// let peers = arc +// .pool +// .find_torrent_peers(&torrent.id, ¤t_user.id) +// .await; - // assuming that the client either sends both downloaded/uploaded - // or none of them - if let (Some(real_uploaded), Some(real_downloaded)) = (ann.uploaded, ann.downloaded) { - let upload_factor = if arc.tracker.global_upload_factor != 1.0 { - arc.tracker.global_upload_factor - } else { - torrent.upload_factor - }; - let upload_to_credit = - ((real_uploaded as i64 - old_real_uploaded) as f64 * 
upload_factor).ceil() as i64; +// // assuming that the client either sends both downloaded/uploaded +// // or none of them +// if let (Some(real_uploaded), Some(real_downloaded)) = (ann.uploaded, ann.downloaded) { +// let upload_factor = if arc.tracker.global_upload_factor != 1.0 { +// arc.tracker.global_upload_factor +// } else { +// torrent.upload_factor +// }; +// let upload_to_credit = +// ((real_uploaded as i64 - old_real_uploaded) as f64 * upload_factor).ceil() as i64; - let download_factor = if arc.tracker.global_download_factor != 1.0 { - arc.tracker.global_download_factor - } else { - torrent.download_factor - }; - let download_to_credit = - ((real_downloaded as i64 - old_real_downloaded) as f64 * download_factor).ceil() as i64; - let real_uploaded_to_credit = real_uploaded as i64 - old_real_uploaded; - let real_downloaded_to_credit = real_downloaded as i64 - old_real_downloaded; +// let download_factor = if arc.tracker.global_download_factor != 1.0 { +// arc.tracker.global_download_factor +// } else { +// torrent.download_factor +// }; +// let download_to_credit = +// ((real_downloaded as i64 - old_real_downloaded) as f64 * download_factor).ceil() as i64; +// let real_uploaded_to_credit = real_uploaded as i64 - old_real_uploaded; +// let real_downloaded_to_credit = real_downloaded as i64 - old_real_downloaded; - // if the client restarted, without sending a "stop" event, keeping the same ip/port - // calculated upload/download might be negative - if real_uploaded_to_credit >= 0 && real_downloaded_to_credit >= 0 { - let _ = arc - .pool - .credit_user_upload_download( - upload_to_credit, - download_to_credit, - real_uploaded_to_credit, - real_downloaded_to_credit, - current_user.id, - ) - .await; - } - } +// // if the client restarted, without sending a "stop" event, keeping the same ip/port +// // calculated upload/download might be negative +// if real_uploaded_to_credit >= 0 && real_downloaded_to_credit >= 0 { +// let _ = arc +// .pool +// 
.credit_user_upload_download( +// upload_to_credit, +// download_to_credit, +// real_uploaded_to_credit, +// real_downloaded_to_credit, +// current_user.id, +// ) +// .await; +// } +// } - if ann.left == Some(0u64) { - let _ = arc - .pool - .update_total_seedtime( - current_user.id, - torrent.id, - arc.tracker.announce_interval, - arc.tracker.announce_interval_grace_period, - ) - .await; - } +// if ann.left == Some(0u64) { +// let _ = arc +// .pool +// .update_total_seedtime( +// current_user.id, +// torrent.id, +// arc.tracker.announce_interval, +// arc.tracker.announce_interval_grace_period, +// ) +// .await; +// } - let resp = AnnounceResponse { - peers, - interval: arc.tracker.announce_interval, - ..Default::default() - }; +// let resp = AnnounceResponse { +// peers, +// interval: arc.tracker.announce_interval, +// ..Default::default() +// }; - Ok(HttpResponse::Ok().bencode(resp)) -} +// Ok(HttpResponse::Ok().bencode(resp)) +// } diff --git a/backend/api/src/handlers/announces/mod.rs b/backend/api/src/handlers/announces/mod.rs index 052a11a4..9908712c 100644 --- a/backend/api/src/handlers/announces/mod.rs +++ b/backend/api/src/handlers/announces/mod.rs @@ -1,8 +1,8 @@ -pub mod handle_announce; +// pub mod handle_announce; -use actix_web::web::{get, resource, ServiceConfig}; -use arcadia_storage::redis::RedisPoolInterface; +// use actix_web::web::{get, resource, ServiceConfig}; +// use arcadia_storage::redis::RedisPoolInterface; -pub fn config(cfg: &mut ServiceConfig) { - cfg.service(resource("/{passkey}").route(get().to(self::handle_announce::exec::))); -} +// pub fn config(cfg: &mut ServiceConfig) { +// cfg.service(resource("/{passkey}").route(get().to(self::handle_announce::exec::))); +// } diff --git a/backend/api/src/handlers/artists/create_artists.rs b/backend/api/src/handlers/artists/create_artists.rs index 42dabc64..8c351f40 100644 --- a/backend/api/src/handlers/artists/create_artists.rs +++ b/backend/api/src/handlers/artists/create_artists.rs @@ -1,4 
+1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/collages/create_collage.rs b/backend/api/src/handlers/collages/create_collage.rs index c4e5d6e2..4e9b966a 100644 --- a/backend/api/src/handlers/collages/create_collage.rs +++ b/backend/api/src/handlers/collages/create_collage.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/collages/create_collage_entries.rs b/backend/api/src/handlers/collages/create_collage_entries.rs index a6c4e80a..3bc9fb1b 100644 --- a/backend/api/src/handlers/collages/create_collage_entries.rs +++ b/backend/api/src/handlers/collages/create_collage_entries.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/conversations/create_conversation.rs b/backend/api/src/handlers/conversations/create_conversation.rs index 4fc53f88..ddaf84d1 100644 --- a/backend/api/src/handlers/conversations/create_conversation.rs +++ b/backend/api/src/handlers/conversations/create_conversation.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/conversations/create_conversation_message.rs b/backend/api/src/handlers/conversations/create_conversation_message.rs index 20c58b30..1f81b6b3 100644 --- a/backend/api/src/handlers/conversations/create_conversation_message.rs +++ b/backend/api/src/handlers/conversations/create_conversation_message.rs @@ -1,4 +1,4 @@ -use 
crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/conversations/get_conversation.rs b/backend/api/src/handlers/conversations/get_conversation.rs index ee227031..e42ec4d3 100644 --- a/backend/api/src/handlers/conversations/get_conversation.rs +++ b/backend/api/src/handlers/conversations/get_conversation.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Query}, HttpResponse, diff --git a/backend/api/src/handlers/edition_groups/create_edition_group.rs b/backend/api/src/handlers/edition_groups/create_edition_group.rs index 209dc83b..011569b8 100644 --- a/backend/api/src/handlers/edition_groups/create_edition_group.rs +++ b/backend/api/src/handlers/edition_groups/create_edition_group.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/external_db/get_isbn_data.rs b/backend/api/src/handlers/external_db/get_isbn_data.rs index 4f74159b..5a210906 100644 --- a/backend/api/src/handlers/external_db/get_isbn_data.rs +++ b/backend/api/src/handlers/external_db/get_isbn_data.rs @@ -1,5 +1,5 @@ use crate::{ - handlers::scrapers::ExternalDBData, middlewares::jwt_middleware::Authdata, + handlers::scrapers::ExternalDBData, middlewares::auth_middleware::Authdata, services::external_db_service::check_if_existing_title_group_with_link_exists, Arcadia, }; use actix_web::{ diff --git a/backend/api/src/handlers/forum/create_forum_post.rs b/backend/api/src/handlers/forum/create_forum_post.rs index b6d3ce20..abbee612 100644 --- a/backend/api/src/handlers/forum/create_forum_post.rs +++ b/backend/api/src/handlers/forum/create_forum_post.rs @@ 
-1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/forum/create_forum_thread.rs b/backend/api/src/handlers/forum/create_forum_thread.rs index 3385782b..4980cb76 100644 --- a/backend/api/src/handlers/forum/create_forum_thread.rs +++ b/backend/api/src/handlers/forum/create_forum_thread.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/gifts/create_gift.rs b/backend/api/src/handlers/gifts/create_gift.rs index a9002eec..94fe0bce 100644 --- a/backend/api/src/handlers/gifts/create_gift.rs +++ b/backend/api/src/handlers/gifts/create_gift.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/invitations/create_invitation.rs b/backend/api/src/handlers/invitations/create_invitation.rs index de06083c..e7ac9909 100644 --- a/backend/api/src/handlers/invitations/create_invitation.rs +++ b/backend/api/src/handlers/invitations/create_invitation.rs @@ -1,5 +1,5 @@ use crate::{ - middlewares::jwt_middleware::Authdata, services::email_service::EmailService, Arcadia, + middlewares::auth_middleware::Authdata, services::email_service::EmailService, Arcadia, }; use actix_web::{ web::{Data, Json}, diff --git a/backend/api/src/handlers/master_groups/create_master_group.rs b/backend/api/src/handlers/master_groups/create_master_group.rs index ab59d5a9..e0af9a1d 100644 --- a/backend/api/src/handlers/master_groups/create_master_group.rs +++ b/backend/api/src/handlers/master_groups/create_master_group.rs @@ -1,4 +1,4 @@ -use 
crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/scrapers/mod.rs b/backend/api/src/handlers/scrapers/mod.rs index dd171146..637fb419 100644 --- a/backend/api/src/handlers/scrapers/mod.rs +++ b/backend/api/src/handlers/scrapers/mod.rs @@ -11,5 +11,5 @@ pub struct ExternalDBData { pub title_group: Option, pub edition_group: Option, pub affiliated_artists: Vec, // pub series: UserCreatedSeries - pub existing_title_group_id: Option, + pub existing_title_group_id: Option, } diff --git a/backend/api/src/handlers/search/search_torrents.rs b/backend/api/src/handlers/search/search_torrents.rs index 27f94623..62997e47 100644 --- a/backend/api/src/handlers/search/search_torrents.rs +++ b/backend/api/src/handlers/search/search_torrents.rs @@ -3,7 +3,7 @@ use actix_web::{ HttpResponse, }; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::Result; use arcadia_storage::{ models::torrent::{TorrentSearch, TorrentSearchResults}, diff --git a/backend/api/src/handlers/series/create_series.rs b/backend/api/src/handlers/series/create_series.rs index 5d828cd3..1f23d0f3 100644 --- a/backend/api/src/handlers/series/create_series.rs +++ b/backend/api/src/handlers/series/create_series.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/staff_pms/create_staff_pm.rs b/backend/api/src/handlers/staff_pms/create_staff_pm.rs index 73c6a86c..36bcf8d6 100644 --- a/backend/api/src/handlers/staff_pms/create_staff_pm.rs +++ b/backend/api/src/handlers/staff_pms/create_staff_pm.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use 
crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/staff_pms/create_staff_pm_message.rs b/backend/api/src/handlers/staff_pms/create_staff_pm_message.rs index 30821dd3..457eac63 100644 --- a/backend/api/src/handlers/staff_pms/create_staff_pm_message.rs +++ b/backend/api/src/handlers/staff_pms/create_staff_pm_message.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/staff_pms/get_staff_pm.rs b/backend/api/src/handlers/staff_pms/get_staff_pm.rs index 8ab13202..9200ff39 100644 --- a/backend/api/src/handlers/staff_pms/get_staff_pm.rs +++ b/backend/api/src/handlers/staff_pms/get_staff_pm.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Path}, HttpResponse, diff --git a/backend/api/src/handlers/staff_pms/list_staff_pms.rs b/backend/api/src/handlers/staff_pms/list_staff_pms.rs index 5de33dc4..347a716d 100644 --- a/backend/api/src/handlers/staff_pms/list_staff_pms.rs +++ b/backend/api/src/handlers/staff_pms/list_staff_pms.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{web::Data, HttpResponse}; use arcadia_common::error::Result; use arcadia_storage::models::staff_pm::StaffPmOverview; diff --git a/backend/api/src/handlers/staff_pms/resolve_staff_pm.rs b/backend/api/src/handlers/staff_pms/resolve_staff_pm.rs index ef4a526c..c0b3ab0e 100644 --- a/backend/api/src/handlers/staff_pms/resolve_staff_pm.rs +++ b/backend/api/src/handlers/staff_pms/resolve_staff_pm.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use 
crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Path}, HttpResponse, diff --git a/backend/api/src/handlers/subscriptions/create_subscription.rs b/backend/api/src/handlers/subscriptions/create_subscription.rs index 3928fba7..4ae5b696 100644 --- a/backend/api/src/handlers/subscriptions/create_subscription.rs +++ b/backend/api/src/handlers/subscriptions/create_subscription.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Query}, HttpResponse, diff --git a/backend/api/src/handlers/subscriptions/remove_subscription.rs b/backend/api/src/handlers/subscriptions/remove_subscription.rs index bd38ec7b..896b0bd6 100644 --- a/backend/api/src/handlers/subscriptions/remove_subscription.rs +++ b/backend/api/src/handlers/subscriptions/remove_subscription.rs @@ -1,6 +1,6 @@ use crate::{ handlers::subscriptions::create_subscription::AddSubscriptionQuery, - middlewares::jwt_middleware::Authdata, Arcadia, + middlewares::auth_middleware::Authdata, Arcadia, }; use actix_web::{ web::{Data, Query}, diff --git a/backend/api/src/handlers/title_groups/create_title_group.rs b/backend/api/src/handlers/title_groups/create_title_group.rs index 86654c76..61e7e128 100644 --- a/backend/api/src/handlers/title_groups/create_title_group.rs +++ b/backend/api/src/handlers/title_groups/create_title_group.rs @@ -9,7 +9,7 @@ use arcadia_storage::{ use futures::future::join_all; use crate::{ - handlers::external_db::get_tmdb_data::get_tmdb_rating, middlewares::jwt_middleware::Authdata, + handlers::external_db::get_tmdb_data::get_tmdb_rating, middlewares::auth_middleware::Authdata, Arcadia, }; use arcadia_common::error::Result; diff --git a/backend/api/src/handlers/title_groups/create_title_group_comment.rs b/backend/api/src/handlers/title_groups/create_title_group_comment.rs index 4473ca79..58ef4779 100644 --- 
a/backend/api/src/handlers/title_groups/create_title_group_comment.rs +++ b/backend/api/src/handlers/title_groups/create_title_group_comment.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/title_groups/edit_title_group.rs b/backend/api/src/handlers/title_groups/edit_title_group.rs index b02af2b4..3d823b28 100644 --- a/backend/api/src/handlers/title_groups/edit_title_group.rs +++ b/backend/api/src/handlers/title_groups/edit_title_group.rs @@ -10,7 +10,7 @@ use arcadia_storage::{ redis::RedisPoolInterface, }; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::{Error, Result}; #[utoipa::path( diff --git a/backend/api/src/handlers/title_groups/get_title_group.rs b/backend/api/src/handlers/title_groups/get_title_group.rs index 5268d7c8..92b1b754 100644 --- a/backend/api/src/handlers/title_groups/get_title_group.rs +++ b/backend/api/src/handlers/title_groups/get_title_group.rs @@ -8,12 +8,12 @@ use arcadia_storage::{ use serde::Deserialize; use utoipa::IntoParams; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::Result; #[derive(Debug, Deserialize, IntoParams)] pub struct GetTitleGroupQuery { - pub id: i64, + pub id: i32, } #[utoipa::path( diff --git a/backend/api/src/handlers/torrent_requests/create_torrent_request.rs b/backend/api/src/handlers/torrent_requests/create_torrent_request.rs index 49468cdc..ad415ba9 100644 --- a/backend/api/src/handlers/torrent_requests/create_torrent_request.rs +++ b/backend/api/src/handlers/torrent_requests/create_torrent_request.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use 
crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/torrent_requests/create_torrent_request_vote.rs b/backend/api/src/handlers/torrent_requests/create_torrent_request_vote.rs index 4ba0cfda..a03af4e1 100644 --- a/backend/api/src/handlers/torrent_requests/create_torrent_request_vote.rs +++ b/backend/api/src/handlers/torrent_requests/create_torrent_request_vote.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/torrent_requests/edit_torrent_request.rs b/backend/api/src/handlers/torrent_requests/edit_torrent_request.rs index f65591e8..6a3d53af 100644 --- a/backend/api/src/handlers/torrent_requests/edit_torrent_request.rs +++ b/backend/api/src/handlers/torrent_requests/edit_torrent_request.rs @@ -10,7 +10,7 @@ use arcadia_storage::{ redis::RedisPoolInterface, }; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::{Error, Result}; #[utoipa::path( diff --git a/backend/api/src/handlers/torrent_requests/fill_torrent_request.rs b/backend/api/src/handlers/torrent_requests/fill_torrent_request.rs index 6d1a7f2f..c8eb9a76 100644 --- a/backend/api/src/handlers/torrent_requests/fill_torrent_request.rs +++ b/backend/api/src/handlers/torrent_requests/fill_torrent_request.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/torrents/create_torrent.rs b/backend/api/src/handlers/torrents/create_torrent.rs index 2f3ed7ed..e9b320ab 100644 --- a/backend/api/src/handlers/torrents/create_torrent.rs +++ 
b/backend/api/src/handlers/torrents/create_torrent.rs @@ -1,7 +1,7 @@ use actix_multipart::form::MultipartForm; use actix_web::{web::Data, HttpResponse}; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::Result; use arcadia_storage::{ models::torrent::{Torrent, UploadedTorrent}, diff --git a/backend/api/src/handlers/torrents/create_torrent_report.rs b/backend/api/src/handlers/torrents/create_torrent_report.rs index 56b612c3..db69a100 100644 --- a/backend/api/src/handlers/torrents/create_torrent_report.rs +++ b/backend/api/src/handlers/torrents/create_torrent_report.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/torrents/delete_torrent.rs b/backend/api/src/handlers/torrents/delete_torrent.rs index a58459ed..8b36175d 100644 --- a/backend/api/src/handlers/torrents/delete_torrent.rs +++ b/backend/api/src/handlers/torrents/delete_torrent.rs @@ -4,7 +4,7 @@ use actix_web::{ }; use serde_json::json; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::{Error, Result}; use arcadia_storage::{ models::{torrent::TorrentToDelete, user::UserClass}, diff --git a/backend/api/src/handlers/torrents/download_dottorrent_file.rs b/backend/api/src/handlers/torrents/download_dottorrent_file.rs index 3402220f..d86a958d 100644 --- a/backend/api/src/handlers/torrents/download_dottorrent_file.rs +++ b/backend/api/src/handlers/torrents/download_dottorrent_file.rs @@ -9,12 +9,12 @@ use arcadia_storage::redis::RedisPoolInterface; use serde::Deserialize; use utoipa::{IntoParams, ToSchema}; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use 
arcadia_common::error::Result; #[derive(Debug, Deserialize, IntoParams, ToSchema)] pub struct DownloadTorrentQuery { - id: i64, + id: i32, } #[utoipa::path( diff --git a/backend/api/src/handlers/torrents/edit_torrent.rs b/backend/api/src/handlers/torrents/edit_torrent.rs index 5f41ec2a..fbe01488 100644 --- a/backend/api/src/handlers/torrents/edit_torrent.rs +++ b/backend/api/src/handlers/torrents/edit_torrent.rs @@ -3,7 +3,7 @@ use actix_web::{ HttpResponse, }; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::{Error, Result}; use arcadia_storage::{ models::{ diff --git a/backend/api/src/handlers/torrents/get_registered_torrents.rs b/backend/api/src/handlers/torrents/get_registered_torrents.rs index 51c5cb43..a3434b11 100644 --- a/backend/api/src/handlers/torrents/get_registered_torrents.rs +++ b/backend/api/src/handlers/torrents/get_registered_torrents.rs @@ -1,6 +1,6 @@ use actix_web::{web::Data, HttpResponse}; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::error::{Error, Result}; use arcadia_storage::{ models::{torrent::TorrentMinimal, user::UserClass}, diff --git a/backend/api/src/handlers/torrents/get_upload_information.rs b/backend/api/src/handlers/torrents/get_upload_information.rs index 465fd047..1c070c23 100644 --- a/backend/api/src/handlers/torrents/get_upload_information.rs +++ b/backend/api/src/handlers/torrents/get_upload_information.rs @@ -3,7 +3,7 @@ use arcadia_storage::redis::RedisPoolInterface; use serde::{Deserialize, Serialize}; use utoipa::ToSchema; -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use arcadia_common::{error::Result, services::torrent_service::get_announce_url}; #[derive(Debug, Serialize, Deserialize, ToSchema)] diff --git 
a/backend/api/src/handlers/tracker/get_torrents.rs b/backend/api/src/handlers/tracker/get_torrents.rs new file mode 100644 index 00000000..3c9a8e96 --- /dev/null +++ b/backend/api/src/handlers/tracker/get_torrents.rs @@ -0,0 +1,9 @@ +use crate::{handlers::tracker::binary_response, Arcadia}; +use actix_web::{web::Data, HttpResponse}; +use arcadia_common::error::Result; +use arcadia_storage::redis::RedisPoolInterface; + +pub async fn exec(arc: Data>) -> Result { + let torrents = arc.pool.find_torrents().await?; + binary_response(&torrents) +} diff --git a/backend/api/src/handlers/tracker/get_users.rs b/backend/api/src/handlers/tracker/get_users.rs index 2b850a85..f5a2634a 100644 --- a/backend/api/src/handlers/tracker/get_users.rs +++ b/backend/api/src/handlers/tracker/get_users.rs @@ -1,16 +1,7 @@ -use crate::Arcadia; +use crate::{handlers::tracker::binary_response, Arcadia}; use actix_web::{web::Data, HttpResponse}; use arcadia_common::error::Result; use arcadia_storage::redis::RedisPoolInterface; -use serde::Serialize; - -#[inline] -fn binary_response(value: &T) -> Result { - let bytes = serde_bencode::to_bytes(value).expect("error encoding to binary"); - Ok(HttpResponse::Ok() - .content_type("application/octet-stream") - .body(bytes)) -} pub async fn exec(arc: Data>) -> Result { let users = arc.pool.find_users().await?; diff --git a/backend/api/src/handlers/tracker/mod.rs b/backend/api/src/handlers/tracker/mod.rs index aea69b02..c112e2cb 100644 --- a/backend/api/src/handlers/tracker/mod.rs +++ b/backend/api/src/handlers/tracker/mod.rs @@ -1,8 +1,24 @@ +pub mod get_torrents; pub mod get_users; -use actix_web::web::{get, resource, ServiceConfig}; +use actix_web::{ + web::{get, resource, ServiceConfig}, + HttpResponse, +}; +use arcadia_common::error::Result; use arcadia_storage::redis::RedisPoolInterface; +use bincode::config; +// TODO: protect by only allowing requests from tracker's ip pub fn config(cfg: &mut ServiceConfig) { 
cfg.service(resource("/users").route(get().to(self::get_users::exec::))); + cfg.service(resource("/torrents").route(get().to(self::get_torrents::exec::))); +} + +fn binary_response(value: &T) -> Result { + let config = config::standard(); + let bytes = bincode::encode_to_vec(value, config).expect("error encoding to bincode"); + Ok(HttpResponse::Ok() + .content_type("application/octet-stream") + .body(bytes)) } diff --git a/backend/api/src/handlers/user_applications/get_user_applications.rs b/backend/api/src/handlers/user_applications/get_user_applications.rs index 55cae3d7..e86bf8f7 100644 --- a/backend/api/src/handlers/user_applications/get_user_applications.rs +++ b/backend/api/src/handlers/user_applications/get_user_applications.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Query}, HttpResponse, diff --git a/backend/api/src/handlers/user_applications/update_user_application_status.rs b/backend/api/src/handlers/user_applications/update_user_application_status.rs index 2a428c05..9a20916f 100644 --- a/backend/api/src/handlers/user_applications/update_user_application_status.rs +++ b/backend/api/src/handlers/user_applications/update_user_application_status.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/users/create_api_key.rs b/backend/api/src/handlers/users/create_api_key.rs index d5e02fc3..6843c681 100644 --- a/backend/api/src/handlers/users/create_api_key.rs +++ b/backend/api/src/handlers/users/create_api_key.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/users/edit_user.rs 
b/backend/api/src/handlers/users/edit_user.rs index 753342b8..60fb8287 100644 --- a/backend/api/src/handlers/users/edit_user.rs +++ b/backend/api/src/handlers/users/edit_user.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/users/get_me.rs b/backend/api/src/handlers/users/get_me.rs index 696a5010..3e2f688e 100644 --- a/backend/api/src/handlers/users/get_me.rs +++ b/backend/api/src/handlers/users/get_me.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{web::Data, HttpResponse}; use arcadia_common::error::Result; use arcadia_storage::{ @@ -31,7 +31,7 @@ pub async fn exec( ) -> Result { let mut current_user = arc.pool.find_user_with_id(user.sub).await?; current_user.password_hash = String::from(""); - let peers = arc.pool.get_user_peers(current_user.id).await; + // let peers = arc.pool.get_user_peers(current_user.id).await; let user_warnings = arc.pool.find_user_warnings(current_user.id).await; let search_title_group = TorrentSearchTitleGroup { name: String::from(""), @@ -73,7 +73,7 @@ pub async fn exec( Ok(HttpResponse::Ok().json(json!({ "user": current_user, - "peers":peers, + "peers": "[]",//peers, "user_warnings": user_warnings, "unread_conversations_amount": unread_conversations_amount, "unread_notifications_amount":unread_notifications_amount, diff --git a/backend/api/src/handlers/users/get_registered_users.rs b/backend/api/src/handlers/users/get_registered_users.rs index 7f39f408..95384f72 100644 --- a/backend/api/src/handlers/users/get_registered_users.rs +++ b/backend/api/src/handlers/users/get_registered_users.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use 
actix_web::{web::Data, HttpResponse}; use arcadia_common::error::{Error, Result}; use arcadia_storage::{ diff --git a/backend/api/src/handlers/users/get_user.rs b/backend/api/src/handlers/users/get_user.rs index 1b4f645a..3618a5ee 100644 --- a/backend/api/src/handlers/users/get_user.rs +++ b/backend/api/src/handlers/users/get_user.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Query}, HttpResponse, diff --git a/backend/api/src/handlers/users/get_user_conversations.rs b/backend/api/src/handlers/users/get_user_conversations.rs index be33c0da..2abe8a88 100644 --- a/backend/api/src/handlers/users/get_user_conversations.rs +++ b/backend/api/src/handlers/users/get_user_conversations.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{web::Data, HttpResponse}; use arcadia_common::error::Result; use arcadia_storage::{models::conversation::ConversationsOverview, redis::RedisPoolInterface}; diff --git a/backend/api/src/handlers/users/warn_user.rs b/backend/api/src/handlers/users/warn_user.rs index 32d9e86c..05c7a875 100644 --- a/backend/api/src/handlers/users/warn_user.rs +++ b/backend/api/src/handlers/users/warn_user.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, Json}, HttpResponse, diff --git a/backend/api/src/handlers/wiki/create_wiki_article.rs b/backend/api/src/handlers/wiki/create_wiki_article.rs index 26c2332e..9526802e 100644 --- a/backend/api/src/handlers/wiki/create_wiki_article.rs +++ b/backend/api/src/handlers/wiki/create_wiki_article.rs @@ -1,4 +1,4 @@ -use crate::{middlewares::jwt_middleware::Authdata, Arcadia}; +use crate::{middlewares::auth_middleware::Authdata, Arcadia}; use actix_web::{ web::{Data, 
Json}, HttpResponse, diff --git a/backend/api/src/middlewares/jwt_middleware.rs b/backend/api/src/middlewares/auth_middleware.rs similarity index 100% rename from backend/api/src/middlewares/jwt_middleware.rs rename to backend/api/src/middlewares/auth_middleware.rs diff --git a/backend/api/src/middlewares/mod.rs b/backend/api/src/middlewares/mod.rs index ca782f2d..2a709c0c 100644 --- a/backend/api/src/middlewares/mod.rs +++ b/backend/api/src/middlewares/mod.rs @@ -1 +1 @@ -pub mod jwt_middleware; +pub mod auth_middleware; diff --git a/backend/api/src/routes.rs b/backend/api/src/routes.rs index 993ba040..620aec3d 100644 --- a/backend/api/src/routes.rs +++ b/backend/api/src/routes.rs @@ -3,7 +3,7 @@ use actix_web_httpauth::middleware::HttpAuthentication; use arcadia_storage::redis::RedisPoolInterface; use crate::handlers::affiliated_artists::config as AffiliatedArtistsConfig; -use crate::handlers::announces::config as AnnouncesConfig; +// use crate::handlers::announces::config as AnnouncesConfig; use crate::handlers::artists::config as ArtistsConfig; use crate::handlers::auth::config as AuthConfig; use crate::handlers::collages::config as CollagesConfig; @@ -26,10 +26,10 @@ use crate::handlers::tracker::config as TrackerConfig; use crate::handlers::user_applications::config as UserApplicationsConfig; use crate::handlers::users::config as UsersConfig; use crate::handlers::wiki::config as WikiConfig; -use crate::middlewares::jwt_middleware::authenticate_user; +use crate::middlewares::auth_middleware::authenticate_user; pub fn init(cfg: &mut web::ServiceConfig) { - cfg.service(scope("/announce").configure(AnnouncesConfig::)); + // cfg.service(scope("/announce").configure(AnnouncesConfig::)); cfg.service( web::scope("/api") diff --git a/backend/common/src/models/tracker/peer.rs b/backend/common/src/models/tracker/peer.rs index 6f37886a..cba3de78 100644 --- a/backend/common/src/models/tracker/peer.rs +++ b/backend/common/src/models/tracker/peer.rs @@ -4,7 +4,7 @@ use 
chrono::{DateTime, Local}; pub struct Peer { pub id: i64, pub user_id: i32, - pub torrent_id: i64, + pub torrent_id: i32, pub peer_id: [u8; 20], pub ip: Option, pub port: u16, diff --git a/backend/periodic-tasks/.env.example b/backend/periodic-tasks/.env.example index 6ee63bbf..94e2cf67 100644 --- a/backend/periodic-tasks/.env.example +++ b/backend/periodic-tasks/.env.example @@ -12,7 +12,7 @@ RUST_LOG="debug,sqlx=info" # Connection string for the database. -DATABASE_URL=postgresql://arcadia:password@localhost:5432/arcadia +DATABASE_URL=postgresql://arcadia:password@localhost:4321/arcadia # Interval for tracker announcements (in seconds). ARCADIA_TRACKER_ANNOUNCE_INTERVAL=1800 diff --git a/backend/periodic-tasks/src/main.rs b/backend/periodic-tasks/src/main.rs index 25e7537b..75589c20 100644 --- a/backend/periodic-tasks/src/main.rs +++ b/backend/periodic-tasks/src/main.rs @@ -1,16 +1,16 @@ -use arcadia_periodic_tasks::{periodic_tasks::scheduler::run_periodic_tasks, store::Store}; -use std::{env, sync::Arc}; +// use arcadia_periodic_tasks::{periodic_tasks::scheduler::run_periodic_tasks, store::Store}; +// use std::{env, sync::Arc}; #[tokio::main] async fn main() { - if env::var("ENV").unwrap_or("".to_string()) != "Docker" { - dotenvy::from_filename(".env").expect("cannot load env from a file"); - } + // if env::var("ENV").unwrap_or("".to_string()) != "Docker" { + // dotenvy::from_filename(".env").expect("cannot load env from a file"); + // } env_logger::init_from_env(env_logger::Env::default().default_filter_or("debug")); - let store = Arc::new(Store::new().await); - if let Err(e) = run_periodic_tasks(store).await { - eprintln!("Error running cron tasks: {e:?}"); - } + // let store = Arc::new(Store::new().await); + // if let Err(e) = run_periodic_tasks(store).await { + // eprintln!("Error running cron tasks: {e:?}"); + // } } diff --git a/backend/periodic-tasks/src/periodic_tasks/peers.rs b/backend/periodic-tasks/src/periodic_tasks/peers.rs index eb9a8adf..190199b3 
100644 --- a/backend/periodic-tasks/src/periodic_tasks/peers.rs +++ b/backend/periodic-tasks/src/periodic_tasks/peers.rs @@ -1,16 +1,16 @@ -use arcadia_storage::connection_pool::ConnectionPool; -use std::sync::Arc; +// use arcadia_storage::connection_pool::ConnectionPool; +// use std::sync::Arc; -pub async fn remove_inactive_peers( - pool: Arc, - announce_interval: u32, - announce_grace_period: u32, -) { - let removed_peers_amount = pool - .remove_inactive_peers((announce_interval + announce_grace_period) as f64) - .await; - log::info!( - "Removed {} inactive peers from the database", - removed_peers_amount.unwrap() - ) -} +// pub async fn remove_inactive_peers( +// pool: Arc, +// announce_interval: u32, +// announce_grace_period: u32, +// ) { +// let removed_peers_amount = pool +// .remove_inactive_peers((announce_interval + announce_grace_period) as f64) +// .await; +// log::info!( +// "Removed {} inactive peers from the database", +// removed_peers_amount.unwrap() +// ) +// } diff --git a/backend/periodic-tasks/src/periodic_tasks/scheduler.rs b/backend/periodic-tasks/src/periodic_tasks/scheduler.rs index ec76858f..344dda17 100644 --- a/backend/periodic-tasks/src/periodic_tasks/scheduler.rs +++ b/backend/periodic-tasks/src/periodic_tasks/scheduler.rs @@ -1,57 +1,57 @@ -use std::{env, sync::Arc}; -use tokio_cron_scheduler::{Job, JobScheduler}; +// use std::{env, sync::Arc}; +// use tokio_cron_scheduler::{Job, JobScheduler}; -use crate::{periodic_tasks::peers::remove_inactive_peers, store::Store}; +// use crate::{periodic_tasks::peers::remove_inactive_peers, store::Store}; -use super::torrents::update_torrent_seeders_leechers; +// use super::torrents::update_torrent_seeders_leechers; -pub async fn run_periodic_tasks(store: Arc) -> Result<(), Box> { - let sched = JobScheduler::new().await?; +pub async fn run_periodic_tasks(/*store: Arc*/) -> Result<(), Box> { + // let sched = JobScheduler::new().await?; - let update_torrent_seeders_leechers_interval = - 
env::var("TASK_INTERVAL_UPDATE_TORRENT_SEEDERS_LEECHERS") - .expect("env var TASK_INTERVAL_UPDATE_TORRENT_SEEDERS_LEECHERS is missing"); + // let update_torrent_seeders_leechers_interval = + // env::var("TASK_INTERVAL_UPDATE_TORRENT_SEEDERS_LEECHERS") + // .expect("env var TASK_INTERVAL_UPDATE_TORRENT_SEEDERS_LEECHERS is missing"); - let pool_1 = Arc::clone(&store.pool); - let job1 = match Job::new_async( - update_torrent_seeders_leechers_interval.as_str(), - move |_uuid, _l| Box::pin(update_torrent_seeders_leechers(Arc::clone(&pool_1))), - ) { - Ok(job) => job, - Err(e) => { - return Err(format!( - "Error creating job for updating torrents seeders and leechers: {e}" - ) - .into()); - } - }; - sched.add(job1).await?; + // let pool_1 = Arc::clone(&store.pool); + // let job1 = match Job::new_async( + // update_torrent_seeders_leechers_interval.as_str(), + // move |_uuid, _l| Box::pin(update_torrent_seeders_leechers(Arc::clone(&pool_1))), + // ) { + // Ok(job) => job, + // Err(e) => { + // return Err(format!( + // "Error creating job for updating torrents seeders and leechers: {e}" + // ) + // .into()); + // } + // }; + // sched.add(job1).await?; - // this interval should be often enough - // let cleanup_interval_seconds = arc.tracker_announce_interval * 2; - let remove_inactive_peers_interval = env::var("TASK_INTERVAL_REMOVE_INACTIVE_PEERS") - .expect("env var TASK_INTERVAL_REMOVE_INACTIVE_PEERS is missing"); + // // this interval should be often enough + // // let cleanup_interval_seconds = arc.tracker_announce_interval * 2; + // let remove_inactive_peers_interval = env::var("TASK_INTERVAL_REMOVE_INACTIVE_PEERS") + // .expect("env var TASK_INTERVAL_REMOVE_INACTIVE_PEERS is missing"); - // cleaning old peers is also done when the client sends a "stop" event - // but it doesn't always do it, so we need to clean the ones that are gone without sending this event - let pool_2 = Arc::clone(&store.pool); - let announce_interval = store.env.tracker.announce_interval; - let 
announce_interval_grace_period = store.env.tracker.announce_interval_grace_period; - let job2 = match Job::new_async(remove_inactive_peers_interval.as_str(), move |_uuid, _l| { - Box::pin(remove_inactive_peers( - Arc::clone(&pool_2), - announce_interval, - announce_interval_grace_period, - )) - }) { - Ok(job) => job, - Err(e) => { - return Err(format!("Error creating job for cleaning inactive peers: {e}").into()); - } - }; - sched.add(job2).await?; + // // cleaning old peers is also done when the client sends a "stop" event + // // but it doesn't always do it, so we need to clean the ones that are gone without sending this event + // let pool_2 = Arc::clone(&store.pool); + // let announce_interval = store.env.tracker.announce_interval; + // let announce_interval_grace_period = store.env.tracker.announce_interval_grace_period; + // let job2 = match Job::new_async(remove_inactive_peers_interval.as_str(), move |_uuid, _l| { + // Box::pin(remove_inactive_peers( + // Arc::clone(&pool_2), + // announce_interval, + // announce_interval_grace_period, + // )) + // }) { + // Ok(job) => job, + // Err(e) => { + // return Err(format!("Error creating job for cleaning inactive peers: {e}").into()); + // } + // }; + // sched.add(job2).await?; - sched.start().await?; + // sched.start().await?; Ok(()) } diff --git a/backend/periodic-tasks/src/periodic_tasks/torrents.rs b/backend/periodic-tasks/src/periodic_tasks/torrents.rs index 670a3c0f..7c7fa3e8 100644 --- a/backend/periodic-tasks/src/periodic_tasks/torrents.rs +++ b/backend/periodic-tasks/src/periodic_tasks/torrents.rs @@ -1,6 +1,6 @@ -use arcadia_storage::connection_pool::ConnectionPool; -use std::sync::Arc; +// use arcadia_storage::connection_pool::ConnectionPool; +// use std::sync::Arc; -pub async fn update_torrent_seeders_leechers(pool: Arc) { - let _ = pool.update_torrent_seeders_leechers().await; -} +// pub async fn update_torrent_seeders_leechers(pool: Arc) { +// // let _ = pool.update_torrent_seeders_leechers().await; 
+// } diff --git a/backend/storage/.sqlx/query-0315da131b68d6bfdd9fc440b18112d1512edb00dc2a1a11f6fe8438ca518727.json b/backend/storage/.sqlx/query-0315da131b68d6bfdd9fc440b18112d1512edb00dc2a1a11f6fe8438ca518727.json index daf451b2..912a5afc 100644 --- a/backend/storage/.sqlx/query-0315da131b68d6bfdd9fc440b18112d1512edb00dc2a1a11f6fe8438ca518727.json +++ b/backend/storage/.sqlx/query-0315da131b68d6bfdd9fc440b18112d1512edb00dc2a1a11f6fe8438ca518727.json @@ -12,7 +12,7 @@ "parameters": { "Left": [ "Int4", - "Int8" + "Int4" ] }, "nullable": [ diff --git a/backend/storage/.sqlx/query-0793c255ebc855df14cb8f984507f33fc4397354a732c97e22b71c02c474ebf2.json b/backend/storage/.sqlx/query-0793c255ebc855df14cb8f984507f33fc4397354a732c97e22b71c02c474ebf2.json index d7819d77..a4c36775 100644 --- a/backend/storage/.sqlx/query-0793c255ebc855df14cb8f984507f33fc4397354a732c97e22b71c02c474ebf2.json +++ b/backend/storage/.sqlx/query-0793c255ebc855df14cb8f984507f33fc4397354a732c97e22b71c02c474ebf2.json @@ -5,7 +5,7 @@ "columns": [], "parameters": { "Left": [ - "Int8", + "Int4", "Int4", "Int8" ] diff --git a/backend/storage/.sqlx/query-108ccfa244fb15e63f65c8a69d05b8c5fe3f57854967ded2cfe7af277e1de6b9.json b/backend/storage/.sqlx/query-108ccfa244fb15e63f65c8a69d05b8c5fe3f57854967ded2cfe7af277e1de6b9.json index 3095412c..80923aaa 100644 --- a/backend/storage/.sqlx/query-108ccfa244fb15e63f65c8a69d05b8c5fe3f57854967ded2cfe7af277e1de6b9.json +++ b/backend/storage/.sqlx/query-108ccfa244fb15e63f65c8a69d05b8c5fe3f57854967ded2cfe7af277e1de6b9.json @@ -5,7 +5,7 @@ "columns": [], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [] diff --git a/backend/storage/.sqlx/query-10cd0ae1bfc1a98c305abea9814e191f914613204f795cde72f43920f5284f3c.json b/backend/storage/.sqlx/query-10cd0ae1bfc1a98c305abea9814e191f914613204f795cde72f43920f5284f3c.json index c7180925..dbe059f8 100644 --- a/backend/storage/.sqlx/query-10cd0ae1bfc1a98c305abea9814e191f914613204f795cde72f43920f5284f3c.json +++ 
b/backend/storage/.sqlx/query-10cd0ae1bfc1a98c305abea9814e191f914613204f795cde72f43920f5284f3c.json @@ -31,12 +31,12 @@ { "ordinal": 5, "name": "title_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 6, "name": "refers_to_torrent_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 7, @@ -47,9 +47,9 @@ "parameters": { "Left": [ "Text", - "Int8", "Int4", - "Int8", + "Int4", + "Int4", "Int8" ] }, diff --git a/backend/storage/.sqlx/query-1ba3d0500ac2e8ea8e74eb40aaae30120571f581ff3597c25cf04502dd7c9fc9.json b/backend/storage/.sqlx/query-1ba3d0500ac2e8ea8e74eb40aaae30120571f581ff3597c25cf04502dd7c9fc9.json index 4e86f820..0fdd3316 100644 --- a/backend/storage/.sqlx/query-1ba3d0500ac2e8ea8e74eb40aaae30120571f581ff3597c25cf04502dd7c9fc9.json +++ b/backend/storage/.sqlx/query-1ba3d0500ac2e8ea8e74eb40aaae30120571f581ff3597c25cf04502dd7c9fc9.json @@ -203,6 +203,11 @@ "ordinal": 37, "name": "staff_note", "type_info": "Text" + }, + { + "ordinal": 38, + "name": "can_download", + "type_info": "Bool" } ], "parameters": { @@ -253,6 +258,7 @@ false, false, false, + false, false ] }, diff --git a/backend/storage/.sqlx/query-2247b3a68a150a372b41698398ec0ba22ed6ea87babbd4f9be825821201986f7.json b/backend/storage/.sqlx/query-2247b3a68a150a372b41698398ec0ba22ed6ea87babbd4f9be825821201986f7.json index 060637dd..02330795 100644 --- a/backend/storage/.sqlx/query-2247b3a68a150a372b41698398ec0ba22ed6ea87babbd4f9be825821201986f7.json +++ b/backend/storage/.sqlx/query-2247b3a68a150a372b41698398ec0ba22ed6ea87babbd4f9be825821201986f7.json @@ -26,13 +26,13 @@ { "ordinal": 4, "name": "reported_torrent_id", - "type_info": "Int8" + "type_info": "Int4" } ], "parameters": { "Left": [ "Int4", - "Int8", + "Int4", "Text" ] }, diff --git a/backend/storage/.sqlx/query-2912c2e0c16b5dd6d8e6d0a58450f5f394ed490d19c9783405109726ee86e18b.json b/backend/storage/.sqlx/query-2912c2e0c16b5dd6d8e6d0a58450f5f394ed490d19c9783405109726ee86e18b.json index 18366209..fedc6307 100644 --- 
a/backend/storage/.sqlx/query-2912c2e0c16b5dd6d8e6d0a58450f5f394ed490d19c9783405109726ee86e18b.json +++ b/backend/storage/.sqlx/query-2912c2e0c16b5dd6d8e6d0a58450f5f394ed490d19c9783405109726ee86e18b.json @@ -6,12 +6,12 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, "name": "master_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 2, @@ -231,7 +231,7 @@ ], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [ diff --git a/backend/storage/.sqlx/query-2c69e15f31de9fc456939274ff1bec03793e0cbb8adf594ba9a9c09b6b135f0c.json b/backend/storage/.sqlx/query-2c69e15f31de9fc456939274ff1bec03793e0cbb8adf594ba9a9c09b6b135f0c.json index a546127a..552c6e7c 100644 --- a/backend/storage/.sqlx/query-2c69e15f31de9fc456939274ff1bec03793e0cbb8adf594ba9a9c09b6b135f0c.json +++ b/backend/storage/.sqlx/query-2c69e15f31de9fc456939274ff1bec03793e0cbb8adf594ba9a9c09b6b135f0c.json @@ -11,7 +11,7 @@ { "ordinal": 1, "name": "title_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 2, @@ -427,7 +427,7 @@ { "ordinal": 21, "name": "filled_by_torrent_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 22, diff --git a/backend/storage/.sqlx/query-3a17fd18a4051cc3455eba2c42c5967c6f8982f8de51100dff1dbeb3f9ccbe31.json b/backend/storage/.sqlx/query-3a17fd18a4051cc3455eba2c42c5967c6f8982f8de51100dff1dbeb3f9ccbe31.json index 1ff9c440..a156027e 100644 --- a/backend/storage/.sqlx/query-3a17fd18a4051cc3455eba2c42c5967c6f8982f8de51100dff1dbeb3f9ccbe31.json +++ b/backend/storage/.sqlx/query-3a17fd18a4051cc3455eba2c42c5967c6f8982f8de51100dff1dbeb3f9ccbe31.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, diff --git a/backend/storage/.sqlx/query-3b7e1a692b986ace00f9e897dc90f872e80e68ed2d3e31fd64ae0dd04f72f69a.json b/backend/storage/.sqlx/query-3b7e1a692b986ace00f9e897dc90f872e80e68ed2d3e31fd64ae0dd04f72f69a.json index bc66b698..9bd8267a 
100644 --- a/backend/storage/.sqlx/query-3b7e1a692b986ace00f9e897dc90f872e80e68ed2d3e31fd64ae0dd04f72f69a.json +++ b/backend/storage/.sqlx/query-3b7e1a692b986ace00f9e897dc90f872e80e68ed2d3e31fd64ae0dd04f72f69a.json @@ -5,7 +5,7 @@ "columns": [], "parameters": { "Left": [ - "Int8", + "Int4", "Int4" ] }, diff --git a/backend/storage/.sqlx/query-49f9cf141cc03ec493d7aedc1d83790fea64a61959c7b94fe4aecc12c7d4cc9e.json b/backend/storage/.sqlx/query-49f9cf141cc03ec493d7aedc1d83790fea64a61959c7b94fe4aecc12c7d4cc9e.json index 7c45d5a4..77875597 100644 --- a/backend/storage/.sqlx/query-49f9cf141cc03ec493d7aedc1d83790fea64a61959c7b94fe4aecc12c7d4cc9e.json +++ b/backend/storage/.sqlx/query-49f9cf141cc03ec493d7aedc1d83790fea64a61959c7b94fe4aecc12c7d4cc9e.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, @@ -41,7 +41,7 @@ { "ordinal": 7, "name": "edition_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 8, @@ -461,7 +461,7 @@ ], "parameters": { "Left": [ - "Int8", + "Int4", "Text", "Varchar", "Text", diff --git a/backend/storage/.sqlx/query-4e362764b8f74fdced061ff73a3f59612e03035f1a9edaa33449602322fc9af6.json b/backend/storage/.sqlx/query-4e362764b8f74fdced061ff73a3f59612e03035f1a9edaa33449602322fc9af6.json index 3149cdd9..bc93fc77 100644 --- a/backend/storage/.sqlx/query-4e362764b8f74fdced061ff73a3f59612e03035f1a9edaa33449602322fc9af6.json +++ b/backend/storage/.sqlx/query-4e362764b8f74fdced061ff73a3f59612e03035f1a9edaa33449602322fc9af6.json @@ -203,6 +203,11 @@ "ordinal": 37, "name": "staff_note", "type_info": "Text" + }, + { + "ordinal": 38, + "name": "can_download", + "type_info": "Bool" } ], "parameters": { @@ -248,6 +253,7 @@ false, false, false, + false, false ] }, diff --git a/backend/storage/.sqlx/query-548e6f93067c88de9c7ae4e5a184ccc60bcf872eab55cfcd8219fe8a4e9f3e7f.json b/backend/storage/.sqlx/query-548e6f93067c88de9c7ae4e5a184ccc60bcf872eab55cfcd8219fe8a4e9f3e7f.json deleted file mode 
100644 index 252baf42..00000000 --- a/backend/storage/.sqlx/query-548e6f93067c88de9c7ae4e5a184ccc60bcf872eab55cfcd8219fe8a4e9f3e7f.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO peers(torrent_id, peer_id, ip, port, user_id, real_uploaded, real_downloaded, user_agent, status)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9::peer_status_enum)\n ON CONFLICT (torrent_id, peer_id, ip, port) DO UPDATE\n SET\n last_seen_at = NOW(),\n real_uploaded = $6,\n real_downloaded = $7,\n status = $9::peer_status_enum\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Bytea", - "Inet", - "Int4", - "Int4", - "Int8", - "Int8", - "Text", - { - "Custom": { - "name": "peer_status_enum", - "kind": { - "Enum": [ - "seeding", - "leeching" - ] - } - } - } - ] - }, - "nullable": [] - }, - "hash": "548e6f93067c88de9c7ae4e5a184ccc60bcf872eab55cfcd8219fe8a4e9f3e7f" -} diff --git a/backend/storage/.sqlx/query-5f77d48e7072420c7ffded38cf73477cb1f8c18d3ea38097c5afbf8cb65febdc.json b/backend/storage/.sqlx/query-5f77d48e7072420c7ffded38cf73477cb1f8c18d3ea38097c5afbf8cb65febdc.json index c4aabd9f..ce73a949 100644 --- a/backend/storage/.sqlx/query-5f77d48e7072420c7ffded38cf73477cb1f8c18d3ea38097c5afbf8cb65febdc.json +++ b/backend/storage/.sqlx/query-5f77d48e7072420c7ffded38cf73477cb1f8c18d3ea38097c5afbf8cb65febdc.json @@ -6,12 +6,12 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, "name": "master_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 2, @@ -231,8 +231,8 @@ ], "parameters": { "Left": [ - "Int8", - "Int8", + "Int4", + "Int4", "Text", "TextArray", "Text", diff --git a/backend/storage/.sqlx/query-9a2c40e32e99578394e376c8389fae1805a901c5f38fbe6988a1739f09818463.json b/backend/storage/.sqlx/query-68d484ba5626b404315ba07d4fcf536f1ecda16c3f190bcb9483df9fe47b0f1c.json similarity index 67% rename from 
backend/storage/.sqlx/query-9a2c40e32e99578394e376c8389fae1805a901c5f38fbe6988a1739f09818463.json rename to backend/storage/.sqlx/query-68d484ba5626b404315ba07d4fcf536f1ecda16c3f190bcb9483df9fe47b0f1c.json index 05b048c6..ed17ab2c 100644 --- a/backend/storage/.sqlx/query-9a2c40e32e99578394e376c8389fae1805a901c5f38fbe6988a1739f09818463.json +++ b/backend/storage/.sqlx/query-68d484ba5626b404315ba07d4fcf536f1ecda16c3f190bcb9483df9fe47b0f1c.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n id,\n passkey,\n TRUE AS \"can_download!\",\n 0::int4 AS \"num_seeding!\",\n 0::int4 AS \"num_leeching!\"\n FROM users\n ", + "query": "\n SELECT\n id,\n passkey as \"passkey: Passkey\",\n TRUE AS \"can_download!\",\n 0::int4 AS \"num_seeding!\",\n 0::int4 AS \"num_leeching!\"\n FROM users\n ", "describe": { "columns": [ { @@ -10,7 +10,7 @@ }, { "ordinal": 1, - "name": "passkey", + "name": "passkey: Passkey", "type_info": "Varchar" }, { @@ -40,5 +40,5 @@ null ] }, - "hash": "9a2c40e32e99578394e376c8389fae1805a901c5f38fbe6988a1739f09818463" + "hash": "68d484ba5626b404315ba07d4fcf536f1ecda16c3f190bcb9483df9fe47b0f1c" } diff --git a/backend/storage/.sqlx/query-6a511bc45afdaa9205a2fa2ff360a9acd9a0e5c66e41d012c88db347f8991a07.json b/backend/storage/.sqlx/query-6a511bc45afdaa9205a2fa2ff360a9acd9a0e5c66e41d012c88db347f8991a07.json index 1e6cc59c..18dd78d0 100644 --- a/backend/storage/.sqlx/query-6a511bc45afdaa9205a2fa2ff360a9acd9a0e5c66e41d012c88db347f8991a07.json +++ b/backend/storage/.sqlx/query-6a511bc45afdaa9205a2fa2ff360a9acd9a0e5c66e41d012c88db347f8991a07.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, @@ -41,7 +41,7 @@ { "ordinal": 7, "name": "edition_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 8, @@ -461,7 +461,7 @@ ], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [ diff --git 
a/backend/storage/.sqlx/query-7c6dc5e429c39d634ed6386c2473b772457499c9e9dce5b57f8b90fd2d69ce8b.json b/backend/storage/.sqlx/query-7c6dc5e429c39d634ed6386c2473b772457499c9e9dce5b57f8b90fd2d69ce8b.json index 01f33e99..70e14fcf 100644 --- a/backend/storage/.sqlx/query-7c6dc5e429c39d634ed6386c2473b772457499c9e9dce5b57f8b90fd2d69ce8b.json +++ b/backend/storage/.sqlx/query-7c6dc5e429c39d634ed6386c2473b772457499c9e9dce5b57f8b90fd2d69ce8b.json @@ -11,7 +11,7 @@ { "ordinal": 1, "name": "title_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 2, @@ -427,7 +427,7 @@ { "ordinal": 21, "name": "filled_by_torrent_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 22, @@ -438,7 +438,7 @@ "parameters": { "Left": [ "Int8", - "Int8", + "Int4", "Text", "Varchar", "Text", diff --git a/backend/storage/.sqlx/query-7da30fb9252c3cc090c82890cfb6ae1539df66561292d0da594f4f02d4c9c416.json b/backend/storage/.sqlx/query-7da30fb9252c3cc090c82890cfb6ae1539df66561292d0da594f4f02d4c9c416.json deleted file mode 100644 index 3794f677..00000000 --- a/backend/storage/.sqlx/query-7da30fb9252c3cc090c82890cfb6ae1539df66561292d0da594f4f02d4c9c416.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT real_uploaded, real_downloaded\n FROM peers\n WHERE torrent_id = $1 AND peer_id = $2 AND ip = $3 AND port = $4 AND user_id = $5\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "real_uploaded", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "real_downloaded", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Bytea", - "Inet", - "Int4", - "Int4" - ] - }, - "nullable": [ - false, - false - ] - }, - "hash": "7da30fb9252c3cc090c82890cfb6ae1539df66561292d0da594f4f02d4c9c416" -} diff --git a/backend/storage/.sqlx/query-7f7ca07123ffd7752b648765cf968380803863cdbf6b52db581536bec5711731.json b/backend/storage/.sqlx/query-7f7ca07123ffd7752b648765cf968380803863cdbf6b52db581536bec5711731.json index 
772737d3..c9bcf40c 100644 --- a/backend/storage/.sqlx/query-7f7ca07123ffd7752b648765cf968380803863cdbf6b52db581536bec5711731.json +++ b/backend/storage/.sqlx/query-7f7ca07123ffd7752b648765cf968380803863cdbf6b52db581536bec5711731.json @@ -11,7 +11,7 @@ ], "parameters": { "Left": [ - "Int8", + "Int4", "Int8" ] }, diff --git a/backend/storage/.sqlx/query-8b8150b88e99b7cd93eb9f7ca0aab778ccf21f823e30d96acb6d5c6a19b62a4f.json b/backend/storage/.sqlx/query-8b8150b88e99b7cd93eb9f7ca0aab778ccf21f823e30d96acb6d5c6a19b62a4f.json index a0614650..59e19d25 100644 --- a/backend/storage/.sqlx/query-8b8150b88e99b7cd93eb9f7ca0aab778ccf21f823e30d96acb6d5c6a19b62a4f.json +++ b/backend/storage/.sqlx/query-8b8150b88e99b7cd93eb9f7ca0aab778ccf21f823e30d96acb6d5c6a19b62a4f.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, diff --git a/backend/storage/.sqlx/query-8c678307a3962e0a192b292877cc5170f194251c2fa8bad5cf3c53532ea6a627.json b/backend/storage/.sqlx/query-8c678307a3962e0a192b292877cc5170f194251c2fa8bad5cf3c53532ea6a627.json deleted file mode 100644 index 33757733..00000000 --- a/backend/storage/.sqlx/query-8c678307a3962e0a192b292877cc5170f194251c2fa8bad5cf3c53532ea6a627.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n ip,\n port,\n user_agent,\n MIN(first_seen_at) as \"first_seen_at!\",\n MAX(last_seen_at) as \"last_seen_at!\",\n SUM(real_uploaded)::BIGINT as \"real_uploaded!\",\n SUM(real_downloaded)::BIGINT as \"real_downloaded!\",\n status::peer_status_enum as \"status!: PeerStatus\"\n FROM peers\n WHERE user_id = $1\n GROUP BY (peer_id, ip, port, user_agent, status)\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "ip", - "type_info": "Inet" - }, - { - "ordinal": 1, - "name": "port", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "user_agent", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "first_seen_at!", - "type_info": "Timestamptz" - }, 
- { - "ordinal": 4, - "name": "last_seen_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 5, - "name": "real_uploaded!", - "type_info": "Int8" - }, - { - "ordinal": 6, - "name": "real_downloaded!", - "type_info": "Int8" - }, - { - "ordinal": 7, - "name": "status!: PeerStatus", - "type_info": { - "Custom": { - "name": "peer_status_enum", - "kind": { - "Enum": [ - "seeding", - "leeching" - ] - } - } - } - } - ], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [ - false, - false, - true, - null, - null, - null, - null, - false - ] - }, - "hash": "8c678307a3962e0a192b292877cc5170f194251c2fa8bad5cf3c53532ea6a627" -} diff --git a/backend/storage/.sqlx/query-17a66ce5222c12349791df72eeea87a4df730db5dd22ab5a286d6f5aed5060d0.json b/backend/storage/.sqlx/query-979326b2fbccce1e8237d42e6b25c38b32d1dd0b49cb636d85bebe10a0624545.json similarity index 78% rename from backend/storage/.sqlx/query-17a66ce5222c12349791df72eeea87a4df730db5dd22ab5a286d6f5aed5060d0.json rename to backend/storage/.sqlx/query-979326b2fbccce1e8237d42e6b25c38b32d1dd0b49cb636d85bebe10a0624545.json index c208a61f..12d606e3 100644 --- a/backend/storage/.sqlx/query-17a66ce5222c12349791df72eeea87a4df730db5dd22ab5a286d6f5aed5060d0.json +++ b/backend/storage/.sqlx/query-979326b2fbccce1e8237d42e6b25c38b32d1dd0b49cb636d85bebe10a0624545.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT jsonb_agg(data)\n FROM (\n SELECT jsonb_build_object(\n 'id', tg.id, 'content_type', tg.content_type, 'name', tg.name, 'platform', tg.platform, 'covers', tg.covers,\n 'original_release_date', tg.original_release_date,\n 'edition_groups', COALESCE(\n jsonb_agg(\n jsonb_build_object(\n 'id', eg.id,\n 'name', eg.name,\n 'release_date', eg.release_date,\n 'distributor', eg.distributor,\n 'source', eg.source,\n 'additional_information', eg.additional_information\n )\n ) FILTER (WHERE eg.id IS NOT NULL),\n '[]'::jsonb\n )\n ) as data\n FROM title_groups tg\n LEFT JOIN edition_groups eg ON 
eg.title_group_id = tg.id\n LEFT JOIN (\n SELECT edition_group_id, MAX(created_at) as created_at\n FROM torrents\n GROUP BY edition_group_id\n ) AS latest_torrent ON latest_torrent.edition_group_id = eg.id\n WHERE ($1::BIGINT IS NOT NULL AND tg.id = $1)\n OR ($2::TEXT IS NOT NULL AND (tg.name ILIKE '%' || $2 || '%' OR $2 = ANY(tg.name_aliases)))\n AND ($3::content_type_enum IS NULL OR tg.content_type = $3::content_type_enum)\n GROUP BY tg.id\n ORDER BY MAX(latest_torrent.created_at) DESC NULLS LAST\n LIMIT $4\n ) AS subquery;\n ", + "query": "\n SELECT jsonb_agg(data)\n FROM (\n SELECT jsonb_build_object(\n 'id', tg.id, 'content_type', tg.content_type, 'name', tg.name, 'platform', tg.platform, 'covers', tg.covers,\n 'original_release_date', tg.original_release_date,\n 'edition_groups', COALESCE(\n jsonb_agg(\n jsonb_build_object(\n 'id', eg.id,\n 'name', eg.name,\n 'release_date', eg.release_date,\n 'distributor', eg.distributor,\n 'source', eg.source,\n 'additional_information', eg.additional_information\n )\n ) FILTER (WHERE eg.id IS NOT NULL),\n '[]'::jsonb\n )\n ) as data\n FROM title_groups tg\n LEFT JOIN edition_groups eg ON eg.title_group_id = tg.id\n LEFT JOIN (\n SELECT edition_group_id, MAX(created_at) as created_at\n FROM torrents\n GROUP BY edition_group_id\n ) AS latest_torrent ON latest_torrent.edition_group_id = eg.id\n WHERE ($1::INT IS NOT NULL AND tg.id = $1)\n OR ($2::TEXT IS NOT NULL AND (tg.name ILIKE '%' || $2 || '%' OR $2 = ANY(tg.name_aliases)))\n AND ($3::content_type_enum IS NULL OR tg.content_type = $3::content_type_enum)\n GROUP BY tg.id\n ORDER BY MAX(latest_torrent.created_at) DESC NULLS LAST\n LIMIT $4\n ) AS subquery;\n ", "describe": { "columns": [ { @@ -11,7 +11,7 @@ ], "parameters": { "Left": [ - "Int8", + "Int4", "Text", { "Custom": { @@ -37,5 +37,5 @@ null ] }, - "hash": "17a66ce5222c12349791df72eeea87a4df730db5dd22ab5a286d6f5aed5060d0" + "hash": "979326b2fbccce1e8237d42e6b25c38b32d1dd0b49cb636d85bebe10a0624545" } diff --git 
a/backend/storage/.sqlx/query-9a005f6e33977b003ca157711fb83fe4f96aca8348097fce5db85bb0fb709d5a.json b/backend/storage/.sqlx/query-9a005f6e33977b003ca157711fb83fe4f96aca8348097fce5db85bb0fb709d5a.json new file mode 100644 index 00000000..6f25f2c1 --- /dev/null +++ b/backend/storage/.sqlx/query-9a005f6e33977b003ca157711fb83fe4f96aca8348097fce5db85bb0fb709d5a.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n upload_factor,\n download_factor,\n seeders,\n leechers,\n completed\n FROM torrents\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "upload_factor", + "type_info": "Float8" + }, + { + "ordinal": 2, + "name": "download_factor", + "type_info": "Float8" + }, + { + "ordinal": 3, + "name": "seeders", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "leechers", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "completed", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "9a005f6e33977b003ca157711fb83fe4f96aca8348097fce5db85bb0fb709d5a" +} diff --git a/backend/storage/.sqlx/query-9c9d3ae5bb9942df95aafbf9e09a78b82609e444a801306e711c7820df28bfdf.json b/backend/storage/.sqlx/query-9c9d3ae5bb9942df95aafbf9e09a78b82609e444a801306e711c7820df28bfdf.json index 470c0620..9eec3e47 100644 --- a/backend/storage/.sqlx/query-9c9d3ae5bb9942df95aafbf9e09a78b82609e444a801306e711c7820df28bfdf.json +++ b/backend/storage/.sqlx/query-9c9d3ae5bb9942df95aafbf9e09a78b82609e444a801306e711c7820df28bfdf.json @@ -5,7 +5,7 @@ "columns": [], "parameters": { "Left": [ - "Int8", + "Int4", "Int4", "Text" ] diff --git a/backend/storage/.sqlx/query-a124658d1272c9b0a189d20c403ad515b080537e23a90b5112d32cede791cf53.json b/backend/storage/.sqlx/query-a124658d1272c9b0a189d20c403ad515b080537e23a90b5112d32cede791cf53.json index 27f7bb4f..a519d15e 100644 --- 
a/backend/storage/.sqlx/query-a124658d1272c9b0a189d20c403ad515b080537e23a90b5112d32cede791cf53.json +++ b/backend/storage/.sqlx/query-a124658d1272c9b0a189d20c403ad515b080537e23a90b5112d32cede791cf53.json @@ -5,8 +5,8 @@ "columns": [], "parameters": { "Left": [ - "Int8", - "Int8" + "Int4", + "Int4" ] }, "nullable": [] diff --git a/backend/storage/.sqlx/query-a45ff60d399a52538e67de35897d248d11539d4805fd53b8d27f3d1a0568ce92.json b/backend/storage/.sqlx/query-a45ff60d399a52538e67de35897d248d11539d4805fd53b8d27f3d1a0568ce92.json index cd518ff1..9a0095c6 100644 --- a/backend/storage/.sqlx/query-a45ff60d399a52538e67de35897d248d11539d4805fd53b8d27f3d1a0568ce92.json +++ b/backend/storage/.sqlx/query-a45ff60d399a52538e67de35897d248d11539d4805fd53b8d27f3d1a0568ce92.json @@ -11,7 +11,7 @@ ], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [ diff --git a/backend/storage/.sqlx/query-a7c034f161a7f462aec1a0a525e6bf71aaf3364c91358659f5320f0551d23b08.json b/backend/storage/.sqlx/query-a7c034f161a7f462aec1a0a525e6bf71aaf3364c91358659f5320f0551d23b08.json index e69f5212..83eb7b12 100644 --- a/backend/storage/.sqlx/query-a7c034f161a7f462aec1a0a525e6bf71aaf3364c91358659f5320f0551d23b08.json +++ b/backend/storage/.sqlx/query-a7c034f161a7f462aec1a0a525e6bf71aaf3364c91358659f5320f0551d23b08.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, @@ -16,7 +16,7 @@ ], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [ diff --git a/backend/storage/.sqlx/query-b074dd0846be331a99667fd0b1379d186986fa5a302897da06de8d9af540f109.json b/backend/storage/.sqlx/query-b074dd0846be331a99667fd0b1379d186986fa5a302897da06de8d9af540f109.json index 7e474c89..69012073 100644 --- a/backend/storage/.sqlx/query-b074dd0846be331a99667fd0b1379d186986fa5a302897da06de8d9af540f109.json +++ b/backend/storage/.sqlx/query-b074dd0846be331a99667fd0b1379d186986fa5a302897da06de8d9af540f109.json @@ -36,12 +36,12 @@ { "ordinal": 6, "name": 
"title_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 7, "name": "master_group_id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 8, @@ -54,8 +54,8 @@ "Int4", "Int8", "Int8", - "Int8", - "Int8", + "Int4", + "Int4", "Int8", "Text" ] diff --git a/backend/storage/.sqlx/query-b2f82503135861043c852764286a61813f7e25f5dac44c1837f959f3dfb22faa.json b/backend/storage/.sqlx/query-b2f82503135861043c852764286a61813f7e25f5dac44c1837f959f3dfb22faa.json index 164d2071..47c1e6e6 100644 --- a/backend/storage/.sqlx/query-b2f82503135861043c852764286a61813f7e25f5dac44c1837f959f3dfb22faa.json +++ b/backend/storage/.sqlx/query-b2f82503135861043c852764286a61813f7e25f5dac44c1837f959f3dfb22faa.json @@ -203,6 +203,11 @@ "ordinal": 37, "name": "staff_note", "type_info": "Text" + }, + { + "ordinal": 38, + "name": "can_download", + "type_info": "Bool" } ], "parameters": { @@ -248,6 +253,7 @@ false, false, false, + false, false ] }, diff --git a/backend/storage/.sqlx/query-b66113aed26d21612ef4b20df44b5f15471c2d69ee07c8717c6a25c0edaf3d91.json b/backend/storage/.sqlx/query-b66113aed26d21612ef4b20df44b5f15471c2d69ee07c8717c6a25c0edaf3d91.json index 5b5b7661..85a26223 100644 --- a/backend/storage/.sqlx/query-b66113aed26d21612ef4b20df44b5f15471c2d69ee07c8717c6a25c0edaf3d91.json +++ b/backend/storage/.sqlx/query-b66113aed26d21612ef4b20df44b5f15471c2d69ee07c8717c6a25c0edaf3d91.json @@ -21,7 +21,7 @@ ], "parameters": { "Left": [ - "Int8" + "Int4" ] }, "nullable": [ diff --git a/backend/storage/.sqlx/query-be1671602d0e2d5c5f430c344e239d8ef022c48b0a6326f095b2c61cadbfef9c.json b/backend/storage/.sqlx/query-be1671602d0e2d5c5f430c344e239d8ef022c48b0a6326f095b2c61cadbfef9c.json deleted file mode 100644 index 699d8fdf..00000000 --- a/backend/storage/.sqlx/query-be1671602d0e2d5c5f430c344e239d8ef022c48b0a6326f095b2c61cadbfef9c.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH peer_counts AS (\n SELECT\n torrent_id,\n COUNT(CASE WHEN 
status = 'seeding' THEN 1 END) AS current_seeders,\n COUNT(CASE WHEN status = 'leeching' THEN 1 END) AS current_leechers\n FROM\n peers\n GROUP BY\n torrent_id\n )\n UPDATE torrents AS t\n SET\n seeders = COALESCE(pc.current_seeders, 0),\n leechers = COALESCE(pc.current_leechers, 0)\n FROM\n torrents AS t_alias -- Use an alias for the table in the FROM clause to avoid ambiguity\n LEFT JOIN\n peer_counts AS pc ON t_alias.id = pc.torrent_id\n WHERE\n t.id = t_alias.id AND\n t.deleted_at IS NULL;\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "be1671602d0e2d5c5f430c344e239d8ef022c48b0a6326f095b2c61cadbfef9c" -} diff --git a/backend/storage/.sqlx/query-c5ef39624904dbc651eb31e96568a2d98359844aa26ccb5f9b99952d3ccc3d3c.json b/backend/storage/.sqlx/query-c5ef39624904dbc651eb31e96568a2d98359844aa26ccb5f9b99952d3ccc3d3c.json index ebcbdab6..d0abefbb 100644 --- a/backend/storage/.sqlx/query-c5ef39624904dbc651eb31e96568a2d98359844aa26ccb5f9b99952d3ccc3d3c.json +++ b/backend/storage/.sqlx/query-c5ef39624904dbc651eb31e96568a2d98359844aa26ccb5f9b99952d3ccc3d3c.json @@ -203,6 +203,11 @@ "ordinal": 37, "name": "staff_note", "type_info": "Text" + }, + { + "ordinal": 38, + "name": "can_download", + "type_info": "Bool" } ], "parameters": { @@ -248,6 +253,7 @@ false, false, false, + false, false ] }, diff --git a/backend/storage/.sqlx/query-c888cb000555bfb6f90440d3545d6b87dbe652b364e1713bb85bcc484c8679af.json b/backend/storage/.sqlx/query-c888cb000555bfb6f90440d3545d6b87dbe652b364e1713bb85bcc484c8679af.json index 197fa628..c327ea62 100644 --- a/backend/storage/.sqlx/query-c888cb000555bfb6f90440d3545d6b87dbe652b364e1713bb85bcc484c8679af.json +++ b/backend/storage/.sqlx/query-c888cb000555bfb6f90440d3545d6b87dbe652b364e1713bb85bcc484c8679af.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - "type_info": "Int8" + "type_info": "Int4" }, { "ordinal": 1, diff --git 
a/backend/storage/.sqlx/query-cd8465a3d4c43769c4b0cba93780d4d2fe7899d9ad2db1da3efd6cf30055daaf.json b/backend/storage/.sqlx/query-cd8465a3d4c43769c4b0cba93780d4d2fe7899d9ad2db1da3efd6cf30055daaf.json deleted file mode 100644 index 80f6d311..00000000 --- a/backend/storage/.sqlx/query-cd8465a3d4c43769c4b0cba93780d4d2fe7899d9ad2db1da3efd6cf30055daaf.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT peers.ip AS ip, peers.port AS port\n FROM peers\n WHERE\n torrent_id = $1\n AND\n peers.user_id != $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "ip", - "type_info": "Inet" - }, - { - "ordinal": 1, - "name": "port", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int4" - ] - }, - "nullable": [ - false, - false - ] - }, - "hash": "cd8465a3d4c43769c4b0cba93780d4d2fe7899d9ad2db1da3efd6cf30055daaf" -} diff --git a/backend/storage/.sqlx/query-d3f04789f86ed230881478e2bc87541633817ad7ae0c1dd2d651801a4b130216.json b/backend/storage/.sqlx/query-d3f04789f86ed230881478e2bc87541633817ad7ae0c1dd2d651801a4b130216.json deleted file mode 100644 index 2f3bfe5c..00000000 --- a/backend/storage/.sqlx/query-d3f04789f86ed230881478e2bc87541633817ad7ae0c1dd2d651801a4b130216.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM peers WHERE last_seen_at < NOW() - INTERVAL '1 second' * $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Float8" - ] - }, - "nullable": [] - }, - "hash": "d3f04789f86ed230881478e2bc87541633817ad7ae0c1dd2d651801a4b130216" -} diff --git a/backend/storage/.sqlx/query-dac758645dbb1e022a77b807d9cc1d422f6fe660f56b95362a562151ec871f67.json b/backend/storage/.sqlx/query-dac758645dbb1e022a77b807d9cc1d422f6fe660f56b95362a562151ec871f67.json index 3b0b01cf..b7edd110 100644 --- a/backend/storage/.sqlx/query-dac758645dbb1e022a77b807d9cc1d422f6fe660f56b95362a562151ec871f67.json +++ 
b/backend/storage/.sqlx/query-dac758645dbb1e022a77b807d9cc1d422f6fe660f56b95362a562151ec871f67.json @@ -5,9 +5,9 @@ "columns": [], "parameters": { "Left": [ - "Int8", + "Int4", "Text", - "Int8" + "Int4" ] }, "nullable": [] diff --git a/backend/storage/.sqlx/query-e950019665911379059ebde0a096a9ed9ad9fe233f0b28d5b4bc4515920aed83.json b/backend/storage/.sqlx/query-e950019665911379059ebde0a096a9ed9ad9fe233f0b28d5b4bc4515920aed83.json index 7367ee16..b602ba0e 100644 --- a/backend/storage/.sqlx/query-e950019665911379059ebde0a096a9ed9ad9fe233f0b28d5b4bc4515920aed83.json +++ b/backend/storage/.sqlx/query-e950019665911379059ebde0a096a9ed9ad9fe233f0b28d5b4bc4515920aed83.json @@ -6,7 +6,7 @@ "parameters": { "Left": [ "Int4", - "Int8" + "Int4" ] }, "nullable": [] diff --git a/backend/storage/.sqlx/query-ee9381234cebd63cc00031da475bcff495d7f095b7b599f2fbdd0b042ff3a568.json b/backend/storage/.sqlx/query-ee9381234cebd63cc00031da475bcff495d7f095b7b599f2fbdd0b042ff3a568.json deleted file mode 100644 index 66dd6381..00000000 --- a/backend/storage/.sqlx/query-ee9381234cebd63cc00031da475bcff495d7f095b7b599f2fbdd0b042ff3a568.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM peers WHERE\n (torrent_id, peer_id, ip, port) = ($1, $2, $3, $4)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Bytea", - "Inet", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "ee9381234cebd63cc00031da475bcff495d7f095b7b599f2fbdd0b042ff3a568" -} diff --git a/backend/storage/.sqlx/query-f4784529bf6bae03c0d4a24b4d1d0d095ab90239c1eef295ed2b6af82282c18c.json b/backend/storage/.sqlx/query-f4784529bf6bae03c0d4a24b4d1d0d095ab90239c1eef295ed2b6af82282c18c.json index 835d6c10..b5e2d671 100644 --- a/backend/storage/.sqlx/query-f4784529bf6bae03c0d4a24b4d1d0d095ab90239c1eef295ed2b6af82282c18c.json +++ b/backend/storage/.sqlx/query-f4784529bf6bae03c0d4a24b4d1d0d095ab90239c1eef295ed2b6af82282c18c.json @@ -6,7 +6,7 @@ { "ordinal": 0, "name": "id", - 
"type_info": "Int8" + "type_info": "Int4" } ], "parameters": { diff --git a/backend/storage/migrations/20250312215600_initdb.sql b/backend/storage/migrations/20250312215600_initdb.sql index 3cfacece..872abe11 100644 --- a/backend/storage/migrations/20250312215600_initdb.sql +++ b/backend/storage/migrations/20250312215600_initdb.sql @@ -44,11 +44,12 @@ CREATE TABLE users ( warned BOOLEAN NOT NULL DEFAULT FALSE, banned BOOLEAN NOT NULL DEFAULT FALSE, staff_note TEXT NOT NULL DEFAULT '', + can_download BOOLEAN NOT NULL DEFAULT FALSE, UNIQUE(passkey) ); INSERT INTO users (username, email, password_hash, registered_from_ip, settings, passkey) -VALUES ('creator', 'none@domain.com', 'none', '127.0.0.1', '{}'::jsonb, 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); +VALUES ('creator', 'none@domain.com', 'none', '127.0.0.1', '{}'::jsonb, 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); CREATE TABLE api_keys ( id BIGSERIAL PRIMARY KEY, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), @@ -123,7 +124,7 @@ CREATE TABLE similar_artists ( FOREIGN KEY (artist_2_id) REFERENCES artists(id) ON DELETE CASCADE ); CREATE TABLE master_groups ( - id BIGSERIAL PRIMARY KEY, + id SERIAL PRIMARY KEY, name VARCHAR(255), -- name_aliases VARCHAR(255)[], created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), @@ -141,8 +142,8 @@ CREATE TABLE master_groups ( SET NULL ); CREATE TABLE similar_master_groups ( - group_1_id BIGINT NOT NULL, - group_2_id BIGINT NOT NULL, + group_1_id INT NOT NULL, + group_2_id INT NOT NULL, PRIMARY KEY (group_1_id, group_2_id), FOREIGN KEY (group_1_id) REFERENCES master_groups(id) ON DELETE CASCADE, FOREIGN KEY (group_2_id) REFERENCES master_groups(id) ON DELETE CASCADE @@ -251,8 +252,8 @@ CREATE TYPE language_enum AS ENUM( 'Other' ); CREATE TABLE title_groups ( - id BIGSERIAL PRIMARY KEY, - master_group_id BIGINT, + id SERIAL PRIMARY KEY, + master_group_id INT, name TEXT NOT NULL, name_aliases TEXT [], created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), @@ -309,7 
+310,7 @@ CREATE TYPE artist_role_enum AS ENUM ( ); CREATE TABLE affiliated_artists ( id BIGSERIAL PRIMARY KEY, - title_group_id BIGINT NOT NULL, + title_group_id INT NOT NULL, artist_id BIGINT NOT NULL, roles artist_role_enum[] NOT NULL, nickname VARCHAR(255), @@ -342,8 +343,8 @@ CREATE TYPE source_enum AS ENUM ( 'Physical Book' ); CREATE TABLE edition_groups ( - id BIGSERIAL PRIMARY KEY, - title_group_id BIGINT NOT NULL, + id SERIAL PRIMARY KEY, + title_group_id INT NOT NULL, name TEXT, release_date TIMESTAMP WITH TIME ZONE NOT NULL, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), @@ -428,14 +429,14 @@ CREATE TYPE video_codec_enum AS ENUM( CREATE TYPE features_enum AS ENUM('HDR', 'HDR 10', 'HDR 10+', 'DV', 'Commentary', 'Remux', '3D', 'Cue', 'OCR'); CREATE TYPE extras_enum AS ENUM('booklet', 'manual', 'behind_the_scenes', 'deleted_scenes', 'featurette', 'trailer', 'other'); CREATE TABLE torrents ( - id BIGSERIAL PRIMARY KEY, + id SERIAL PRIMARY KEY, upload_factor FLOAT NOT NULL DEFAULT 1.0, download_factor FLOAT NOT NULL DEFAULT 1.0, seeders BIGINT NOT NULL DEFAULT 0, leechers BIGINT NOT NULL DEFAULT 0, completed BIGINT NOT NULL DEFAULT 0, snatched BIGINT NOT NULL DEFAULT 0, - edition_group_id BIGINT NOT NULL, + edition_group_id INT NOT NULL, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), created_by_id INT NOT NULL, @@ -489,8 +490,8 @@ CREATE TABLE title_group_comments ( created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), created_by_id INT NOT NULL, - title_group_id BIGINT NOT NULL, - refers_to_torrent_id BIGINT, + title_group_id INT NOT NULL, + refers_to_torrent_id INT, answers_to_comment_id BIGINT, FOREIGN KEY (created_by_id) REFERENCES users(id) ON DELETE CASCADE, FOREIGN KEY (title_group_id) REFERENCES title_groups(id) ON DELETE CASCADE, @@ -499,12 +500,12 @@ CREATE TABLE title_group_comments ( ); CREATE 
TABLE torrent_requests ( id BIGSERIAL PRIMARY KEY, - title_group_id BIGINT NOT NULL, + title_group_id INT NOT NULL, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), created_by_id INT NOT NULL, filled_by_user_id INT, - filled_by_torrent_id BIGINT, + filled_by_torrent_id INT, filled_at TIMESTAMP WITH TIME ZONE, edition_name TEXT, source source_enum[] NOT NULL DEFAULT ARRAY[]::source_enum[], @@ -543,36 +544,38 @@ CREATE TABLE torrent_reports ( reported_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), reported_by_id INT NOT NULL, description TEXT NOT NULL, - reported_torrent_id BIGINT NOT NULL, + reported_torrent_id INT NOT NULL, FOREIGN KEY (reported_by_id) REFERENCES users(id) ON DELETE CASCADE, FOREIGN KEY (reported_torrent_id) REFERENCES torrents(id) ON DELETE CASCADE ); - -CREATE TYPE peer_status_enum AS ENUM('seeding', 'leeching'); CREATE TABLE peers ( - id BIGINT GENERATED ALWAYS AS IDENTITY, - user_id INT NOT NULL, - torrent_id BIGINT NOT NULL, - peer_id BYTEA NOT NULL CHECK(octet_length(peer_id) = 20), - ip INET NOT NULL, - port INTEGER NOT NULL, - first_seen_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP , - last_seen_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, - real_uploaded BIGINT NOT NULL DEFAULT 0, - real_downloaded BIGINT NOT NULL DEFAULT 0, - user_agent TEXT, - status peer_status_enum NOT NULL, - - PRIMARY KEY (id), - - FOREIGN KEY (torrent_id) REFERENCES torrents(id) ON DELETE CASCADE, - FOREIGN KEY (user_id) REFERENCES users(id), - - UNIQUE (torrent_id, peer_id, ip, port) + peer_id bytea NOT NULL, + ip bytea NOT NULL, + port smallint NOT NULL, + agent varchar(64) NOT NULL, + uploaded bigint NOT NULL, + downloaded bigint NOT NULL, + "left" bigint NOT NULL, + seeder boolean NOT NULL, + created_at timestamp without time zone DEFAULT NULL, + updated_at timestamp without time zone DEFAULT NULL, + torrent_id integer NOT NULL, + user_id integer NOT 
NULL, + connectable boolean NOT NULL DEFAULT FALSE, + active boolean NOT NULL, + visible boolean NOT NULL, + PRIMARY KEY (user_id, torrent_id, peer_id) ); +CREATE INDEX peers_idx_seeder_user_id ON peers (seeder, user_id); +CREATE INDEX peers_torrent_id_foreign ON peers (torrent_id); +CREATE INDEX peers_active_index ON peers (active); +ALTER TABLE peers +ADD CONSTRAINT peers_torrent_id_foreign FOREIGN KEY (torrent_id) REFERENCES torrents (id) ON DELETE CASCADE ON UPDATE CASCADE; +ALTER TABLE peers +ADD CONSTRAINT peers_user_id_foreign FOREIGN KEY (user_id) REFERENCES users (id) ON UPDATE CASCADE; CREATE TABLE torrent_activities ( id BIGSERIAL PRIMARY KEY, - torrent_id BIGINT NOT NULL, + torrent_id INT NOT NULL, user_id INT NOT NULL, snatched_at TIMESTAMP WITH TIME ZONE, first_seen_seeding_at TIMESTAMP WITH TIME ZONE, @@ -607,7 +610,7 @@ CREATE TYPE entity_role_enum AS ENUM ( ); CREATE TABLE affiliated_entities ( id BIGSERIAL PRIMARY KEY, - title_group_id BIGINT NOT NULL, + title_group_id INT NOT NULL, entity_id BIGINT NOT NULL, created_by_id INT NOT NULL, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), @@ -647,8 +650,8 @@ CREATE TABLE collage_entry ( collage_id BIGINT NOT NULL REFERENCES collage(id), artist_id BIGINT REFERENCES artists(id), entity_id BIGINT REFERENCES entities(id), - title_group_id BIGINT REFERENCES title_groups(id), - master_group_id BIGINT REFERENCES master_groups(id), + title_group_id INT REFERENCES title_groups(id), + master_group_id INT REFERENCES master_groups(id), note TEXT ); -- prevent duplicate entries in a collage @@ -812,7 +815,7 @@ CREATE TYPE notification_reason_enum AS ENUM ( ); CREATE TABLE subscriptions ( id BIGSERIAL PRIMARY KEY, - title_group_id BIGINT, + title_group_id INT, artist_id BIGINT, forum_thread_id BIGINT, forum_sub_category_id BIGINT, @@ -832,8 +835,8 @@ CREATE TABLE notifications ( reason notification_reason_enum NOT NULL, message TEXT, read_status BOOLEAN NOT NULL DEFAULT FALSE, - title_group_id BIGINT, - 
torrent_id BIGINT, + title_group_id INT, + torrent_id INT, artist_id BIGINT, -- collage_id BIGINT, forum_thread_id BIGINT, @@ -927,7 +930,7 @@ ORDER BY p_external_link TEXT DEFAULT NULL ) RETURNS TABLE ( - title_group_id BIGINT, + title_group_id INT, title_group_data JSONB ) LANGUAGE plpgsql diff --git a/backend/storage/migrations/fixtures/fixtures.sql b/backend/storage/migrations/fixtures/fixtures.sql index 3a53ac49..c7474fcf 100644 --- a/backend/storage/migrations/fixtures/fixtures.sql +++ b/backend/storage/migrations/fixtures/fixtures.sql @@ -27,11 +27,11 @@ INSERT INTO public._sqlx_migrations VALUES (20250312215600, 'initdb', '2025-09-1 -- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: arcadia -- -INSERT INTO public.users VALUES (1, 'creator', NULL, 'none@domain.com', 'none', '127.0.0.1', '2025-09-17 12:42:13.702455+00', '', 0, 0, 1, 1, 0, 0, '2025-09-17 12:42:13.702455+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{}', 'aa', false, false, ''); -INSERT INTO public.users VALUES (5, 'waterbottle', 'https://i.pinimg.com/736x/a6/27/12/a6271204df8d387c3e614986c106f549.jpg', 'user2@example.com', 'hashedpassword2', '192.168.1.2', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'fqmslfjqmlsfj', false, false, ''''''); -INSERT INTO public.users VALUES (3, 'coolguy', 'https://i.pinimg.com/474x/c1/5a/6c/c15a6c91515e22f6ea8b766f89c12f0c.jpg', 'user3@example.com', 'hashedpassword3', '192.168.1.3', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'qnsvmqfmlqsdm', false, false, ''''''); -INSERT INTO public.users VALUES (2, 'picolo', 'https://img.freepik.com/premium-vector/random-people-line-art-vector_567805-63.jpg', 
'user1@example.com', '$argon2id$v=19$m=19456,t=2,p=1$s4XJtCUk9IrGgNsTfP6Ofw$ktoGbBEoFaVgdiTn19Gh9h45LjFiv7AUEL5KHhzm4d0', '192.168.1.1', '2025-03-30 16:24:57.388152+00', '', 10000, 0, 1, 1, 0, 0, '2025-09-17 09:27:11.336576+00', 'staff', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 999999410, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'qmofqmlskdfnnns', false, false, ''''''); -INSERT INTO public.users VALUES (4, 'test', NULL, 'test@test.tsttt', '$argon2id$v=19$m=19456,t=2,p=1$yaA+WqA4OfSyAqR3iXhDng$/Ngv7VeJvVNHli9rBgQG0d/O2W+qoI2yHhQxZSxxW2M', '127.0.0.1', '2025-04-10 19:15:51.036818+00', '', 979900000000, 0, 1, 1, 0, 0, '2025-09-17 09:15:44.322914+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99999000, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'mqnmnqmlngqsklf', false, false, ''''''); +INSERT INTO public.users VALUES (1, 'creator', NULL, 'none@domain.com', 'none', '127.0.0.1', '2025-09-17 12:42:13.702455+00', '', 0, 0, 1, 1, 0, 0, '2025-09-17 12:42:13.702455+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{}', '11111111111111111111111111111111', false, false, ''); +INSERT INTO public.users VALUES (5, 'waterbottle', 'https://i.pinimg.com/736x/a6/27/12/a6271204df8d387c3e614986c106f549.jpg', 'user2@example.com', 'hashedpassword2', '192.168.1.2', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', '22222222222222222222222222222222', false, false, ''''''); +INSERT INTO public.users VALUES (3, 'coolguy', 'https://i.pinimg.com/474x/c1/5a/6c/c15a6c91515e22f6ea8b766f89c12f0c.jpg', 'user3@example.com', 'hashedpassword3', '192.168.1.3', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": 
"sidebar_right"}}', '33333333333333333333333333333333', false, false, ''''''); +INSERT INTO public.users VALUES (2, 'picolo', 'https://img.freepik.com/premium-vector/random-people-line-art-vector_567805-63.jpg', 'user1@example.com', '$argon2id$v=19$m=19456,t=2,p=1$s4XJtCUk9IrGgNsTfP6Ofw$ktoGbBEoFaVgdiTn19Gh9h45LjFiv7AUEL5KHhzm4d0', '192.168.1.1', '2025-03-30 16:24:57.388152+00', '', 10000, 0, 1, 1, 0, 0, '2025-09-17 09:27:11.336576+00', 'staff', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 999999410, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', '44444444444444444444444444444444', false, false, ''''''); +INSERT INTO public.users VALUES (4, 'test', NULL, 'test@test.tsttt', '$argon2id$v=19$m=19456,t=2,p=1$yaA+WqA4OfSyAqR3iXhDng$/Ngv7VeJvVNHli9rBgQG0d/O2W+qoI2yHhQxZSxxW2M', '127.0.0.1', '2025-04-10 19:15:51.036818+00', '', 979900000000, 0, 1, 1, 0, 0, '2025-09-17 09:15:44.322914+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99999000, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', '55555555555555555555555555555555', false, false, ''''''); -- @@ -1621,4 +1621,3 @@ SELECT pg_catalog.setval('public.wiki_articles_id_seq', 1, true); -- -- PostgreSQL database dump complete -- - diff --git a/backend/storage/src/models/artist.rs b/backend/storage/src/models/artist.rs index 75ee989d..9d124c4a 100644 --- a/backend/storage/src/models/artist.rs +++ b/backend/storage/src/models/artist.rs @@ -104,7 +104,7 @@ pub enum ArtistRole { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct AffiliatedArtist { pub id: i64, - pub title_group_id: i64, + pub title_group_id: i32, pub artist_id: i64, pub roles: Vec, pub nickname: Option, // for example: name of the character the actor is playing @@ -121,7 +121,7 @@ pub struct AffiliatedArtistLite { #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct UserCreatedAffiliatedArtist { - pub title_group_id: i64, + pub title_group_id: i32, pub artist_id: i64, pub roles: 
Vec, pub nickname: Option, @@ -139,7 +139,7 @@ pub struct ArtistAndTitleGroupsLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct AffiliatedArtistHierarchy { pub id: i64, - pub title_group_id: i64, + pub title_group_id: i32, pub artist_id: i64, pub roles: Vec, pub nickname: Option, diff --git a/backend/storage/src/models/collage.rs b/backend/storage/src/models/collage.rs index 20fe035a..386109ae 100644 --- a/backend/storage/src/models/collage.rs +++ b/backend/storage/src/models/collage.rs @@ -60,8 +60,8 @@ pub struct CollageEntry { pub created_by_id: i32, pub artist_id: Option, pub entity_id: Option, - pub title_group_id: Option, - pub master_group_id: Option, + pub title_group_id: Option, + pub master_group_id: Option, pub collage_id: i64, pub note: Option, } @@ -70,8 +70,8 @@ pub struct CollageEntry { pub struct UserCreatedCollageEntry { pub artist_id: Option, pub entity_id: Option, - pub title_group_id: Option, - pub master_group_id: Option, + pub title_group_id: Option, + pub master_group_id: Option, pub collage_id: i64, pub note: Option, } @@ -86,9 +86,9 @@ pub struct CollageEntryHierarchy { pub artist: Option, pub entity_id: Option, pub entity: Option, - pub title_group_id: Option, + pub title_group_id: Option, pub title_group: Option, - pub master_group_id: Option, + pub master_group_id: Option, pub master_group: Option, pub collage_id: i64, pub note: Option, diff --git a/backend/storage/src/models/edition_group.rs b/backend/storage/src/models/edition_group.rs index f60802fc..0331bfb5 100644 --- a/backend/storage/src/models/edition_group.rs +++ b/backend/storage/src/models/edition_group.rs @@ -59,8 +59,8 @@ pub enum Source { // Every attribute is specific to the edition, no information should be entered about the torrents or the title #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditionGroup { - pub id: i64, - pub title_group_id: i64, + pub id: i32, + pub title_group_id: i32, pub name: Option, // edition 
name, not title name, (also, for Collections, includes the optional subscription level/tier) #[schema(value_type = String, format = DateTime)] pub release_date: DateTime, // public release, (also, for Collections, date of the last (chronologically) item included) @@ -94,13 +94,13 @@ pub struct UserCreatedEditionGroup { pub source: Option, #[schema(value_type = HashMap)] pub additional_information: Option, - pub title_group_id: i64, + pub title_group_id: i32, } #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditionGroupHierarchyLite { - pub id: i64, - pub title_group_id: i64, + pub id: i32, + pub title_group_id: i32, pub name: Option, #[schema(value_type = String, format = DateTime)] pub release_date: DateTime, @@ -114,8 +114,8 @@ pub struct EditionGroupHierarchyLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditionGroupHierarchy { - pub id: i64, - pub title_group_id: i64, + pub id: i32, + pub title_group_id: i32, pub name: Option, #[schema(value_type = String, format = DateTime)] pub release_date: DateTime, @@ -136,7 +136,7 @@ pub struct EditionGroupHierarchy { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditionGroupInfoLite { - pub id: i64, + pub id: i32, pub name: Option, #[schema(value_type = String, format = DateTime)] pub release_date: DateTime, diff --git a/backend/storage/src/models/entity.rs b/backend/storage/src/models/entity.rs index 1f183987..37015644 100644 --- a/backend/storage/src/models/entity.rs +++ b/backend/storage/src/models/entity.rs @@ -41,7 +41,7 @@ pub struct EntityLite { #[derive(Debug, Deserialize, Serialize, FromRow, ToSchema)] pub struct AffiliatedEntity { pub id: i64, - pub title_group_id: i64, + pub title_group_id: i32, pub entity_id: i64, pub created_by_id: i32, #[schema(value_type = String, format = DateTime)] @@ -51,7 +51,7 @@ pub struct AffiliatedEntity { #[derive(Debug, Deserialize, Serialize, ToSchema)] pub struct AffiliatedEntityHierarchy { pub id: 
i64, - pub title_group_id: i64, + pub title_group_id: i32, pub entity_id: i64, pub created_by_id: i32, #[schema(value_type = String, format = DateTime)] diff --git a/backend/storage/src/models/master_group.rs b/backend/storage/src/models/master_group.rs index 53f123c4..5c09b85d 100644 --- a/backend/storage/src/models/master_group.rs +++ b/backend/storage/src/models/master_group.rs @@ -19,7 +19,7 @@ use utoipa::ToSchema; // or adding some sort of cache/search engine on top, that has the data deserialized #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct MasterGroup { - pub id: i64, + pub id: i32, pub name: Option, // pub name_aliases: Vec, #[schema(value_type = String, format = DateTime)] @@ -39,14 +39,14 @@ pub struct MasterGroup { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct MasterGroupLite { - pub id: i64, + pub id: i32, pub name: Option, } #[derive(Debug, Serialize, Deserialize)] pub struct SimilarMasterGroups { - pub group_1_id: i64, - pub group_2_id: i64, + pub group_1_id: i32, + pub group_2_id: i32, } #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] diff --git a/backend/storage/src/models/notification.rs b/backend/storage/src/models/notification.rs index 68a82b9f..e3943b51 100644 --- a/backend/storage/src/models/notification.rs +++ b/backend/storage/src/models/notification.rs @@ -20,8 +20,8 @@ pub struct Notification { pub reason: NotificationReason, pub message: Option, pub read_status: bool, - pub title_group_id: Option, - pub torrent_id: Option, + pub title_group_id: Option, + pub torrent_id: Option, pub artist_id: Option, pub collage_id: Option, pub forum_thread_id: Option, diff --git a/backend/storage/src/models/peer.rs b/backend/storage/src/models/peer.rs index 784dcf1e..0c515499 100644 --- a/backend/storage/src/models/peer.rs +++ b/backend/storage/src/models/peer.rs @@ -21,6 +21,6 @@ pub struct Peer { pub last_seen_at: DateTime, pub real_uploaded: i64, pub real_downloaded: i64, - pub user_agent: 
Option, + pub agent: Option, pub status: PeerStatus, } diff --git a/backend/storage/src/models/subscription.rs b/backend/storage/src/models/subscription.rs index 04ff9e31..b8b6e8a1 100644 --- a/backend/storage/src/models/subscription.rs +++ b/backend/storage/src/models/subscription.rs @@ -7,7 +7,7 @@ pub struct Subscription { pub id: i64, pub subscribed_at: DateTime, pub subscriber_id: i64, - pub title_group_id: i64, + pub title_group_id: i32, pub artist_id: i64, pub forum_thread_id: i64, pub forum_sub_category_id: i64, diff --git a/backend/storage/src/models/title_group.rs b/backend/storage/src/models/title_group.rs index 7d18f96a..16d9ae61 100644 --- a/backend/storage/src/models/title_group.rs +++ b/backend/storage/src/models/title_group.rs @@ -108,8 +108,8 @@ pub struct PublicRating { // no specific information should be entered about the editions or the torrents #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TitleGroup { - pub id: i64, - pub master_group_id: Option, // only if master groups are needed for this type of content + pub id: i32, + pub master_group_id: Option, // only if master groups are needed for this type of content pub name: String, pub name_aliases: Vec, #[schema(value_type = String, format = DateTime)] @@ -167,13 +167,13 @@ pub struct UserCreatedTitleGroup { pub series_id: Option, pub screenshots: Vec, // one of them should be given, if master groups are required for this type of content - pub master_group_id: Option, + pub master_group_id: Option, // pub master_group: Option, } #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TitleGroupHierarchyLite { - pub id: i64, + pub id: i32, pub name: String, pub covers: Vec, pub category: Option, @@ -188,7 +188,7 @@ pub struct TitleGroupHierarchyLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TitleGroupLite { - pub id: i64, + pub id: i32, pub name: String, pub content_type: ContentType, #[schema(value_type = String, format = 
DateTime)] @@ -200,7 +200,7 @@ pub struct TitleGroupLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema, sqlx::Decode)] pub struct TitleGroupInfoLite { - pub id: i64, + pub id: i32, pub name: String, pub content_type: ContentType, } @@ -221,8 +221,8 @@ pub struct TitleGroupAndAssociatedData { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditedTitleGroup { - pub id: i64, - pub master_group_id: Option, + pub id: i32, + pub master_group_id: Option, pub name: String, pub name_aliases: Vec, pub description: String, diff --git a/backend/storage/src/models/title_group_comment.rs b/backend/storage/src/models/title_group_comment.rs index f1c3e51c..0d37e571 100644 --- a/backend/storage/src/models/title_group_comment.rs +++ b/backend/storage/src/models/title_group_comment.rs @@ -14,16 +14,16 @@ pub struct TitleGroupComment { #[schema(value_type = String, format = DateTime)] pub updated_at: DateTime, pub created_by_id: i32, - pub title_group_id: i64, - pub refers_to_torrent_id: Option, + pub title_group_id: i32, + pub refers_to_torrent_id: Option, pub answers_to_comment_id: Option, } #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct UserCreatedTitleGroupComment { pub content: String, - pub title_group_id: i64, - pub refers_to_torrent_id: Option, + pub title_group_id: i32, + pub refers_to_torrent_id: Option, pub answers_to_comment_id: Option, } @@ -36,8 +36,8 @@ pub struct TitleGroupCommentHierarchy { #[schema(value_type = String, format = DateTime)] pub updated_at: DateTime, pub created_by_id: i32, - pub title_group_id: i64, - pub refers_to_torrent_id: Option, + pub title_group_id: i32, + pub refers_to_torrent_id: Option, pub answers_to_comment_id: Option, pub created_by: UserLiteAvatar, } diff --git a/backend/storage/src/models/torrent.rs b/backend/storage/src/models/torrent.rs index 3bc37399..a6e620ac 100644 --- a/backend/storage/src/models/torrent.rs +++ b/backend/storage/src/models/torrent.rs @@ -306,14 +306,14 @@ impl 
FromStr for Features { #[derive(Debug, Serialize, FromRow, ToSchema)] pub struct Torrent { - pub id: i64, + pub id: i32, pub upload_factor: f64, pub download_factor: f64, pub seeders: i64, pub leechers: i64, pub completed: i64, pub snatched: i64, - pub edition_group_id: i64, + pub edition_group_id: i32, #[schema(value_type = String, format = DateTime)] pub created_at: DateTime, #[schema(value_type = String, format = DateTime)] @@ -373,8 +373,8 @@ pub struct UploadedTorrent { pub languages: Text, #[schema(value_type = String)] pub container: Text, - #[schema(value_type = i64)] - pub edition_group_id: Text, + #[schema(value_type = i32)] + pub edition_group_id: Text, #[schema(value_type = i32)] pub duration: Option>, #[schema(value_type = AudioCodec)] @@ -401,8 +401,8 @@ pub struct UploadedTorrent { #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct EditedTorrent { - pub id: i64, - pub edition_group_id: i64, + pub id: i32, + pub edition_group_id: i32, pub extras: Vec, pub release_name: Option, pub release_group: Option, @@ -476,14 +476,14 @@ pub struct TorrentSearch { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TorrentHierarchyLite { - pub id: i64, + pub id: i32, pub upload_factor: f64, pub download_factor: f64, pub seeders: i64, pub leechers: i64, pub completed: i64, pub snatched: i64, - pub edition_group_id: i64, + pub edition_group_id: i32, #[schema(value_type = String, format = DateTime)] pub created_at: DateTime, pub extras: Vec, @@ -523,14 +523,14 @@ pub struct TorrentHierarchyLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TorrentHierarchy { - pub id: i64, + pub id: i32, pub upload_factor: f64, pub download_factor: f64, pub seeders: i64, pub leechers: i64, pub completed: i64, pub snatched: i64, - pub edition_group_id: i64, + pub edition_group_id: i32, #[schema(value_type = String, format = DateTime)] pub created_at: DateTime, #[schema(value_type = String, format = DateTime)] @@ -575,14 +575,14 
@@ pub struct TorrentSearchResults { #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct TorrentToDelete { - pub id: i64, + pub id: i32, pub reason: String, pub displayed_reason: Option, } #[derive(Debug, Serialize, FromRow, ToSchema)] pub struct TorrentMinimal { - pub id: i64, + pub id: i32, #[schema(value_type = String, format = DateTime)] pub created_at: DateTime, // TODO: remove Option<>, this should never be null, but without it, the deserialization with sqlx fails somehow diff --git a/backend/storage/src/models/torrent_activity.rs b/backend/storage/src/models/torrent_activity.rs index b3baf564..195993d4 100644 --- a/backend/storage/src/models/torrent_activity.rs +++ b/backend/storage/src/models/torrent_activity.rs @@ -6,7 +6,7 @@ use utoipa::ToSchema; #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TorrentActivity { pub id: i64, - pub torrent_id: i64, + pub torrent_id: i32, pub user_id: i32, #[schema(value_type = String, format = DateTime)] pub snatched_at: DateTime, diff --git a/backend/storage/src/models/torrent_report.rs b/backend/storage/src/models/torrent_report.rs index 75a0e3f2..8dd6035f 100644 --- a/backend/storage/src/models/torrent_report.rs +++ b/backend/storage/src/models/torrent_report.rs @@ -9,12 +9,12 @@ pub struct TorrentReport { #[schema(value_type = String, format = DateTime)] pub reported_at: DateTime, pub reported_by_id: i32, - pub reported_torrent_id: i64, + pub reported_torrent_id: i32, pub description: String, } #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct UserCreatedTorrentReport { - pub reported_torrent_id: i64, + pub reported_torrent_id: i32, pub description: String, } diff --git a/backend/storage/src/models/torrent_request.rs b/backend/storage/src/models/torrent_request.rs index 43b54880..11cbf0ad 100644 --- a/backend/storage/src/models/torrent_request.rs +++ b/backend/storage/src/models/torrent_request.rs @@ -20,14 +20,14 @@ use super::torrent::{ #[derive(Debug, Serialize, 
Deserialize, FromRow, ToSchema)] pub struct TorrentRequest { pub id: i64, - pub title_group_id: i64, + pub title_group_id: i32, #[schema(value_type = String, format = DateTime)] pub created_at: DateTime, #[schema(value_type = String, format = DateTime)] pub updated_at: DateTime, pub created_by_id: i32, pub filled_by_user_id: Option, - pub filled_by_torrent_id: Option, + pub filled_by_torrent_id: Option, #[schema(value_type = String, format = DateTime)] pub filled_at: Option>, pub edition_name: Option, @@ -52,7 +52,7 @@ pub struct TorrentRequest { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct UserCreatedTorrentRequest { - pub title_group_id: i64, + pub title_group_id: i32, pub edition_name: Option, pub release_group: Option, pub description: Option, @@ -73,7 +73,7 @@ pub struct UserCreatedTorrentRequest { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct EditedTorrentRequest { pub id: i64, - pub title_group_id: i64, + pub title_group_id: i32, pub edition_name: Option, pub release_group: Option, pub description: Option, @@ -116,7 +116,7 @@ pub struct TorrentRequestWithTitleGroupLite { #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] pub struct TorrentRequestFill { pub torrent_request_id: i64, - pub torrent_id: i64, + pub torrent_id: i32, } #[derive(Debug, Serialize, Deserialize, ToSchema)] diff --git a/backend/storage/src/models/user.rs b/backend/storage/src/models/user.rs index 3e14d0be..c9ecff26 100644 --- a/backend/storage/src/models/user.rs +++ b/backend/storage/src/models/user.rs @@ -81,6 +81,7 @@ pub struct User { pub banned: bool, pub staff_note: String, pub passkey: String, + pub can_download: bool, } #[derive(Debug, Clone, Serialize, Deserialize, sqlx::Type, ToSchema, PartialEq, Eq)] diff --git a/backend/storage/src/repositories/announce_repository.rs b/backend/storage/src/repositories/announce_repository.rs index 3779da1c..d31b3c45 100644 --- a/backend/storage/src/repositories/announce_repository.rs 
+++ b/backend/storage/src/repositories/announce_repository.rs @@ -72,7 +72,7 @@ impl ConnectionPool { pub async fn update_total_seedtime( &self, user_id: i32, - torrent_id: i64, + torrent_id: i32, announce_interval: u32, grace_period: u32, ) -> Result { @@ -109,7 +109,7 @@ impl ConnectionPool { #[derive(sqlx::FromRow)] pub struct TorrentCompact { - pub id: i64, + pub id: i32, pub upload_factor: f64, pub download_factor: f64, } diff --git a/backend/storage/src/repositories/auth_repository.rs b/backend/storage/src/repositories/auth_repository.rs index f8f13e65..e1374408 100644 --- a/backend/storage/src/repositories/auth_repository.rs +++ b/backend/storage/src/repositories/auth_repository.rs @@ -42,7 +42,7 @@ impl ConnectionPool { // TODO: check if the passkey already exists let passkey: String = rng .sample_iter(&Alphanumeric) - .take(33) + .take(32) .map(char::from) .collect(); diff --git a/backend/storage/src/repositories/notification_repository.rs b/backend/storage/src/repositories/notification_repository.rs index ca9458e1..406ac285 100644 --- a/backend/storage/src/repositories/notification_repository.rs +++ b/backend/storage/src/repositories/notification_repository.rs @@ -4,8 +4,8 @@ use sqlx::{Postgres, Transaction}; use std::{borrow::Borrow, collections::HashMap}; pub struct NotificationItemsIds { - pub title_group_id: Option, - pub torrent_id: Option, + pub title_group_id: Option, + pub torrent_id: Option, #[allow(dead_code)] pub artist_id: Option, #[allow(dead_code)] diff --git a/backend/storage/src/repositories/peer_repository.rs b/backend/storage/src/repositories/peer_repository.rs index fe94ed25..25bf41fa 100644 --- a/backend/storage/src/repositories/peer_repository.rs +++ b/backend/storage/src/repositories/peer_repository.rs @@ -1,161 +1,161 @@ use crate::connection_pool::ConnectionPool; -use crate::models::peer::PeerStatus; -use arcadia_common::error::Result; -use arcadia_common::models::tracker::announce::{Announce, Peer}; -use 
sqlx::types::ipnetwork::IpNetwork; -use std::borrow::Borrow; +// use crate::models::peer::PeerStatus; +// use arcadia_common::error::Result; +// use arcadia_common::models::tracker::announce::{Announce, Peer}; +// use sqlx::types::ipnetwork::IpNetwork; +// use std::borrow::Borrow; -use crate::models; +// use crate::models; impl ConnectionPool { - pub async fn get_user_peers(&self, user_id: i32) -> Vec { - sqlx::query_as!( - models::peer::Peer, - r#" - SELECT - ip, - port, - user_agent, - MIN(first_seen_at) as "first_seen_at!", - MAX(last_seen_at) as "last_seen_at!", - SUM(real_uploaded)::BIGINT as "real_uploaded!", - SUM(real_downloaded)::BIGINT as "real_downloaded!", - status::peer_status_enum as "status!: PeerStatus" - FROM peers - WHERE user_id = $1 - GROUP BY (peer_id, ip, port, user_agent, status) - "#, - user_id - ) - .fetch_all(self.borrow()) - .await - .expect("failed to retrieve peers") - } + // pub async fn get_user_peers(&self, user_id: i32) -> Vec { + // sqlx::query_as!( + // models::peer::Peer, + // r#" + // SELECT + // ip, + // port, + // agent, + // MIN(first_seen_at) as "first_seen_at!", + // MAX(last_seen_at) as "last_seen_at!", + // SUM(real_uploaded)::BIGINT as "real_uploaded!", + // SUM(real_downloaded)::BIGINT as "real_downloaded!", + // status::peer_status_enum as "status!: PeerStatus" + // FROM peers + // WHERE user_id = $1 + // GROUP BY (peer_id, ip, port, agent, status) + // "#, + // user_id + // ) + // .fetch_all(self.borrow()) + // .await + // .expect("failed to retrieve peers") + // } - pub async fn remove_peer( - &self, - torrent_id: &i64, - peer_id: &[u8; 20], - ip: &IpNetwork, - port: u16, - ) { - sqlx::query!( - r#" - DELETE FROM peers WHERE - (torrent_id, peer_id, ip, port) = ($1, $2, $3, $4) - "#, - torrent_id, - peer_id, - ip, - port as i32 - ) - .execute(self.borrow()) - .await - .expect("failed removing peer from table"); - } + // pub async fn remove_peer( + // &self, + // torrent_id: &i32, + // peer_id: &[u8; 20], + // ip: 
&IpNetwork, + // port: u16, + // ) { + // sqlx::query!( + // r#" + // DELETE FROM peers WHERE + // (torrent_id, peer_id, ip, port) = ($1, $2, $3, $4) + // "#, + // torrent_id, + // peer_id, + // ip, + // port as i32 + // ) + // .execute(self.borrow()) + // .await + // .expect("failed removing peer from table"); + // } // returns uploaded/downloaded before the update - pub async fn insert_or_update_peer( - &self, - torrent_id: &i64, - ip: &IpNetwork, - user_id: &i32, - ann: &Announce, - user_agent: Option<&str>, - ) -> (i64, i64) { - let existing = sqlx::query!( - r#" - SELECT real_uploaded, real_downloaded - FROM peers - WHERE torrent_id = $1 AND peer_id = $2 AND ip = $3 AND port = $4 AND user_id = $5 - "#, - torrent_id, - &ann.peer_id, - ip, - ann.port as i32, - user_id - ) - .fetch_optional(self.borrow()) - .await - .expect("failed"); + // pub async fn insert_or_update_peer( + // &self, + // torrent_id: &i32, + // ip: &IpNetwork, + // user_id: &i32, + // ann: &Announce, + // agent: Option<&str>, + // ) -> (i64, i64) { + // let existing = sqlx::query!( + // r#" + // SELECT real_uploaded, real_downloaded + // FROM peers + // WHERE torrent_id = $1 AND peer_id = $2 AND ip = $3 AND port = $4 AND user_id = $5 + // "#, + // torrent_id, + // &ann.peer_id, + // ip, + // ann.port as i32, + // user_id + // ) + // .fetch_optional(self.borrow()) + // .await + // .expect("failed"); - let peer_status = if ann.left.unwrap() == 0 { - PeerStatus::Seeding - } else { - PeerStatus::Leeching - }; + // let peer_status = if ann.left.unwrap() == 0 { + // PeerStatus::Seeding + // } else { + // PeerStatus::Leeching + // }; - sqlx::query!( - r#" - INSERT INTO peers(torrent_id, peer_id, ip, port, user_id, real_uploaded, real_downloaded, user_agent, status) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9::peer_status_enum) - ON CONFLICT (torrent_id, peer_id, ip, port) DO UPDATE - SET - last_seen_at = NOW(), - real_uploaded = $6, - real_downloaded = $7, - status = $9::peer_status_enum - "#, - 
torrent_id, - &ann.peer_id, - ip, - ann.port as i32, - user_id, - ann.uploaded.unwrap_or(0) as i64, - ann.downloaded.unwrap_or(0) as i64, - user_agent, - peer_status as PeerStatus - ) - .execute(self.borrow()) - .await - .expect("failed"); + // sqlx::query!( + // r#" + // INSERT INTO peers(torrent_id, peer_id, ip, port, user_id, real_uploaded, real_downloaded, agent, status) + // VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9::peer_status_enum) + // ON CONFLICT (torrent_id, peer_id, ip, port) DO UPDATE + // SET + // last_seen_at = NOW(), + // real_uploaded = $6, + // real_downloaded = $7, + // status = $9::peer_status_enum + // "#, + // torrent_id, + // &ann.peer_id, + // ip, + // ann.port as i32, + // user_id, + // ann.uploaded.unwrap_or(0) as i64, + // ann.downloaded.unwrap_or(0) as i64, + // agent, + // peer_status as PeerStatus + // ) + // .execute(self.borrow()) + // .await + // .expect("failed"); - existing - .map(|row| (row.real_uploaded, row.real_downloaded)) - .unwrap_or((0, 0)) - } + // existing + // .map(|row| (row.real_uploaded, row.real_downloaded)) + // .unwrap_or((0, 0)) + // } - pub async fn find_torrent_peers(&self, torrent_id: &i64, user_id: &i32) -> Vec { - let peers = sqlx::query!( - r#" - SELECT peers.ip AS ip, peers.port AS port - FROM peers - WHERE - torrent_id = $1 - AND - peers.user_id != $2 - "#, - torrent_id, - user_id - ) - .fetch_all(self.borrow()) - .await - .expect("failed"); + // pub async fn find_torrent_peers(&self, torrent_id: &i32, user_id: &i32) -> Vec { + // let peers = sqlx::query!( + // r#" + // SELECT peers.ip AS ip, peers.port AS port + // FROM peers + // WHERE + // torrent_id = $1 + // AND + // peers.user_id != $2 + // "#, + // torrent_id, + // user_id + // ) + // .fetch_all(self.borrow()) + // .await + // .expect("failed"); - peers - .into_iter() - .map(|p| { - let std::net::IpAddr::V4(ipv4) = p.ip.ip() else { - panic!("oops"); - }; + // peers + // .into_iter() + // .map(|p| { + // let std::net::IpAddr::V4(ipv4) = p.ip.ip() 
else { + // panic!("oops"); + // }; - Peer { - ip: ipv4, - port: p.port as u16, - } - }) - .collect::>() - } + // Peer { + // ip: ipv4, + // port: p.port as u16, + // } + // }) + // .collect::>() + // } - pub async fn remove_inactive_peers(&self, seconds_since_last_announce: f64) -> Result { - let removed_peers_amount = sqlx::query!( - r#"DELETE FROM peers WHERE last_seen_at < NOW() - INTERVAL '1 second' * $1"#, - seconds_since_last_announce - ) - .execute(self.borrow()) - .await? - .rows_affected(); + // pub async fn remove_inactive_peers(&self, seconds_since_last_announce: f64) -> Result { + // let removed_peers_amount = sqlx::query!( + // r#"DELETE FROM peers WHERE last_seen_at < NOW() - INTERVAL '1 second' * $1"#, + // seconds_since_last_announce + // ) + // .execute(self.borrow()) + // .await? + // .rows_affected(); - Ok(removed_peers_amount) - } + // Ok(removed_peers_amount) + // } } diff --git a/backend/storage/src/repositories/title_group_repository.rs b/backend/storage/src/repositories/title_group_repository.rs index 44fa3794..86760a5a 100644 --- a/backend/storage/src/repositories/title_group_repository.rs +++ b/backend/storage/src/repositories/title_group_repository.rs @@ -64,7 +64,7 @@ impl ConnectionPool { pub async fn find_title_group_hierarchy( &self, - title_group_id: i64, + title_group_id: i32, user_id: i32, ) -> Result { let title_group = sqlx::query!(r#"WITH torrent_data AS ( @@ -285,7 +285,7 @@ impl ConnectionPool { } pub async fn find_title_group_info_lite( &self, - title_group_id: Option, + title_group_id: Option, title_group_name: Option<&str>, title_group_content_type: &Option, limit: u32, @@ -318,7 +318,7 @@ impl ConnectionPool { FROM torrents GROUP BY edition_group_id ) AS latest_torrent ON latest_torrent.edition_group_id = eg.id - WHERE ($1::BIGINT IS NOT NULL AND tg.id = $1) + WHERE ($1::INT IS NOT NULL AND tg.id = $1) OR ($2::TEXT IS NOT NULL AND (tg.name ILIKE '%' || $2 || '%' OR $2 = ANY(tg.name_aliases))) AND ($3::content_type_enum IS 
NULL OR tg.content_type = $3::content_type_enum) GROUP BY tg.id @@ -339,7 +339,7 @@ impl ConnectionPool { .unwrap_or_else(|| serde_json::Value::Array(vec![]))) } - pub async fn find_title_group(&self, title_group_id: i64) -> Result { + pub async fn find_title_group(&self, title_group_id: i32) -> Result { let title_group = sqlx::query_as!( TitleGroup, r#" @@ -366,7 +366,7 @@ impl ConnectionPool { pub async fn update_title_group( &self, edited_title_group: &EditedTitleGroup, - title_group_id: i64, + title_group_id: i32, ) -> Result { let updated_title_group = sqlx::query_as!( TitleGroup, @@ -428,8 +428,8 @@ impl ConnectionPool { pub async fn does_title_group_with_link_exist( &self, external_link: &str, - ) -> Result> { - let title_group_id: Option = sqlx::query_scalar!( + ) -> Result> { + let title_group_id: Option = sqlx::query_scalar!( r#" SELECT id FROM title_groups diff --git a/backend/storage/src/repositories/torrent_repository.rs b/backend/storage/src/repositories/torrent_repository.rs index 29416628..24313fff 100644 --- a/backend/storage/src/repositories/torrent_repository.rs +++ b/backend/storage/src/repositories/torrent_repository.rs @@ -20,7 +20,7 @@ use std::{borrow::Borrow, str::FromStr}; #[derive(sqlx::FromRow)] struct TitleGroupInfoLite { - id: i64, + id: i32, #[allow(dead_code)] name: String, } @@ -201,7 +201,7 @@ impl ConnectionPool { Ok(uploaded_torrent) } - pub async fn find_torrent(&self, torrent_id: i64) -> Result { + pub async fn find_torrent(&self, torrent_id: i32) -> Result { let torrent = sqlx::query_as!( Torrent, r#" @@ -241,7 +241,7 @@ impl ConnectionPool { pub async fn update_torrent( &self, edited_torrent: &EditedTorrent, - torrent_id: i64, + torrent_id: i32, ) -> Result { let updated_torrent = sqlx::query_as!( Torrent, @@ -322,7 +322,7 @@ impl ConnectionPool { pub async fn get_torrent( &self, user_id: i32, - torrent_id: i64, + torrent_id: i32, tracker_name: &str, frontend_url: &str, tracker_url: &str, @@ -508,39 +508,39 @@ impl 
ConnectionPool { Ok(()) } - pub async fn update_torrent_seeders_leechers(&self) -> Result<()> { - let _ = sqlx::query!( - r#" - WITH peer_counts AS ( - SELECT - torrent_id, - COUNT(CASE WHEN status = 'seeding' THEN 1 END) AS current_seeders, - COUNT(CASE WHEN status = 'leeching' THEN 1 END) AS current_leechers - FROM - peers - GROUP BY - torrent_id - ) - UPDATE torrents AS t - SET - seeders = COALESCE(pc.current_seeders, 0), - leechers = COALESCE(pc.current_leechers, 0) - FROM - torrents AS t_alias -- Use an alias for the table in the FROM clause to avoid ambiguity - LEFT JOIN - peer_counts AS pc ON t_alias.id = pc.torrent_id - WHERE - t.id = t_alias.id AND - t.deleted_at IS NULL; - "# - ) - .execute(self.borrow()) - .await?; + // pub async fn update_torrent_seeders_leechers(&self) -> Result<()> { + // let _ = sqlx::query!( + // r#" + // WITH peer_counts AS ( + // SELECT + // torrent_id, + // COUNT(CASE WHEN status = 'seeding' THEN 1 END) AS current_seeders, + // COUNT(CASE WHEN status = 'leeching' THEN 1 END) AS current_leechers + // FROM + // peers + // GROUP BY + // torrent_id + // ) + // UPDATE torrents AS t + // SET + // seeders = COALESCE(pc.current_seeders, 0), + // leechers = COALESCE(pc.current_leechers, 0) + // FROM + // torrents AS t_alias -- Use an alias for the table in the FROM clause to avoid ambiguity + // LEFT JOIN + // peer_counts AS pc ON t_alias.id = pc.torrent_id + // WHERE + // t.id = t_alias.id AND + // t.deleted_at IS NULL; + // "# + // ) + // .execute(self.borrow()) + // .await?; - Ok(()) - } + // Ok(()) + // } - pub async fn increment_torrent_completed(&self, torrent_id: i64) -> Result<()> { + pub async fn increment_torrent_completed(&self, torrent_id: i32) -> Result<()> { let _ = sqlx::query!( r#" UPDATE torrents diff --git a/backend/storage/src/repositories/torrent_request_repository.rs b/backend/storage/src/repositories/torrent_request_repository.rs index 2b8d1374..27954698 100644 --- 
a/backend/storage/src/repositories/torrent_request_repository.rs +++ b/backend/storage/src/repositories/torrent_request_repository.rs @@ -58,7 +58,7 @@ impl ConnectionPool { pub async fn fill_torrent_request( &self, - torrent_id: i64, + torrent_id: i32, torrent_request_id: i64, current_user_id: i32, ) -> Result<()> { diff --git a/backend/storage/src/repositories/tracker_repository.rs b/backend/storage/src/repositories/tracker_repository.rs index a83a40ce..58290960 100644 --- a/backend/storage/src/repositories/tracker_repository.rs +++ b/backend/storage/src/repositories/tracker_repository.rs @@ -1,13 +1,14 @@ use crate::connection_pool::ConnectionPool; use arcadia_common::error::Result; -use arcadia_shared::tracker::models::user::{Passkey, User}; +use arcadia_shared::tracker::models::{torrent::Torrent, user::Passkey, user::User}; use std::borrow::Borrow; +use std::collections::HashMap; // This file contains functions for Arcadia's tracker // but not necessarily related to the tracker itself directly impl ConnectionPool { - pub async fn find_users(&self) -> Result> { + pub async fn find_users(&self) -> Result> { // TODO: fix this // query_as!() doesn't work as it requires the FromString trait // which is implemented, but somehow still throws an error @@ -15,7 +16,7 @@ impl ConnectionPool { r#" SELECT id, - passkey, + passkey as "passkey: Passkey", TRUE AS "can_download!", 0::int4 AS "num_seeding!", 0::int4 AS "num_leeching!" 
@@ -26,20 +27,53 @@ impl ConnectionPool { .await .expect("could not get users"); - let users = rows - .into_iter() - .map(|r| User { - id: r.id as u32, - passkey: r - .passkey - .parse::() - .expect("invalid passkey in database"), + let mut map: HashMap = HashMap::with_capacity(rows.len()); + for r in rows { + let id = r.id as u32; + let user = User { can_download: r.can_download, num_seeding: r.num_seeding as u32, num_leeching: r.num_leeching as u32, - }) - .collect(); + }; + map.insert(id, user); + } - Ok(users) + Ok(map) + } + + pub async fn find_torrents(&self) -> Result> { + // TODO: fix this + // query_as!() doesn't work as it requires the FromString trait + // which is implemented, but somehow still throws an error + let rows = sqlx::query!( + r#" + SELECT + id, + upload_factor, + download_factor, + seeders, + leechers, + completed + FROM torrents + "# + ) + .fetch_all(self.borrow()) + .await + .expect("could not get torrents"); + + let mut map: HashMap = HashMap::with_capacity(rows.len()); + for r in rows { + let id = r.id as u32; + let torrent = Torrent { + upload_factor: r.upload_factor, + download_factor: r.download_factor, + seeders: r.seeders, + leechers: r.leechers, + completed: r.completed, + }; + map.insert(id, torrent); + } + + Ok(map) } } diff --git a/frontend/src/api-schema/schema.d.ts b/frontend/src/api-schema/schema.d.ts index e0ea4b89..451d30e0 100644 --- a/frontend/src/api-schema/schema.d.ts +++ b/frontend/src/api-schema/schema.d.ts @@ -1506,7 +1506,7 @@ export interface components { /** Format: int64 */ real_uploaded: number; status: components["schemas"]["PeerStatus"]; - user_agent?: string | null; + agent?: string | null; }; /** @enum {string} */ PeerStatus: "Seeding" | "Leeching"; diff --git a/frontend/src/components/user/PeerTable.vue b/frontend/src/components/user/PeerTable.vue index 26f66bc4..af2c45bc 100644 --- a/frontend/src/components/user/PeerTable.vue +++ b/frontend/src/components/user/PeerTable.vue @@ -12,7 +12,7 @@ - +