more foundations for the new tracker and changed user id to i32 (rust) and INT (sql)
Author: FrenchGithubUser
Date: 2025-10-07 21:08:18 +02:00
parent ab8c3f3bd1
commit ae614b3ce2
86 changed files with 902 additions and 615 deletions

---
Cargo.lock (generated)

@@ -460,6 +460,12 @@ dependencies = [
  "windows-sys 0.60.2",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
+
 [[package]]
 name = "arbitrary"
 version = "1.4.2"
@@ -553,6 +559,16 @@ dependencies = [
  "tokio-cron-scheduler",
 ]
 
+[[package]]
+name = "arcadia-shared"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "indexmap",
+ "serde",
+ "sqlx",
+]
+
 [[package]]
 name = "arcadia-storage"
 version = "0.1.0"
@@ -560,6 +576,7 @@ dependencies = [
  "actix-multipart",
  "actix-web",
  "arcadia-common",
+ "arcadia-shared",
  "argon2",
  "bip_metainfo",
  "chrono 0.4.41",
@@ -582,10 +599,18 @@ version = "0.1.0"
 dependencies = [
  "actix-web",
  "actix-web-httpauth",
+ "anyhow",
+ "arcadia-shared",
  "dotenvy",
  "env_logger",
  "envconfig",
  "futures",
+ "indexmap",
+ "log",
+ "parking_lot",
+ "serde",
+ "serde_bencode",
+ "strum",
  "thiserror 2.0.16",
  "tokio",
  "utoipa",

---

@@ -3,7 +3,7 @@ members = [
     "backend/api",
     "backend/common",
     "backend/periodic-tasks",
-    "backend/storage", "tracker/arcadia_tracker",
+    "backend/storage", "tracker/arcadia_tracker", "shared",
 ]
 resolver = "2"

---

@@ -42,3 +42,8 @@ TASK_INTERVAL_REMOVE_INACTIVE_PEERS="0 0 * * * *"
 # Required for TMDB access, must create a new account with themoviedb.org
 # TMDB_API_KEY="your token"
 # COMIC_VINCE_API_KEY="your api key"
+
+# ----------- Tracker
+# Used for the backend to make requests to the tracker
+# and vice-versa
+ARCADIA_TRACKER_API_KEY=change_me

---

@@ -3,3 +3,8 @@ ENV=Docker
 # Docker buildkit support
 DOCKER_BUILDKIT=1
 COMPOSE_DOCKER_CLI_BUILD=1
+
+# ----------- Tracker
+# Used for the backend to make requests to the tracker
+# and vice-versa
+ARCADIA_TRACKER_API_KEY=change_me

---

@@ -79,3 +79,8 @@ TASK_INTERVAL_REMOVE_INACTIVE_PEERS="0 0 * * * *"
 # SMTP_PASSWORD=your-app-password
 # SMTP_FROM_EMAIL=noreply@yourtracker.com
 # SMTP_FROM_NAME=Arcadia Tracker
+
+# ----------- Tracker
+# Used for the backend to make requests to the tracker
+# and vice-versa
+ARCADIA_TRACKER_API_KEY=change_me
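
The same `ARCADIA_TRACKER_API_KEY` entry is added to all three environment templates. Below is a minimal sketch of how the backend could read it with the envconfig crate (which the api crate pulls in, per the Cargo.lock hunk above); the struct and field names are hypothetical, not taken from this commit:

```rust
use envconfig::Envconfig;

// Hypothetical config struct; only the ARCADIA_TRACKER_API_KEY variable name
// comes from the .env templates in this diff.
#[derive(Envconfig)]
pub struct TrackerEnv {
    // Shared secret for backend <-> tracker requests (and vice-versa).
    #[envconfig(from = "ARCADIA_TRACKER_API_KEY")]
    pub arcadia_tracker_api_key: String,
}

fn main() {
    // Expects the variable to be set, e.g. from one of the .env files above.
    match TrackerEnv::init_from_env() {
        Ok(env) => println!("tracker key loaded ({} chars)", env.arcadia_tracker_api_key.len()),
        Err(e) => eprintln!("missing tracker env vars: {e}"),
    }
}
```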

---

@@ -68,15 +68,7 @@ pub async fn exec<R: RedisPoolInterface>(
         return Err(AnnounceError::TorrentClientNotInWhitelist);
     }
 
-    let passkey = u128::from_str_radix(&passkey, 16).map_err(|_| AnnounceError::InvalidPassKey)?;
-    let passkey_upper = (passkey >> 64) as i64;
-    let passkey_lower = passkey as i64;
-
-    let current_user = arc
-        .pool
-        .find_user_with_passkey(passkey_upper, passkey_lower)
-        .await?;
+    let current_user = arc.pool.find_user_with_passkey(&passkey).await?;
 
     let torrent = arc.pool.find_torrent_with_id(&ann.info_hash).await?;
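
For context on what the removed lines did: the 32-character hex passkey was parsed into a `u128` and split into two signed 64-bit halves so it could be matched against the old `passkey_upper`/`passkey_lower` BIGINT columns; the new code passes the string straight to `find_user_with_passkey`. A standalone sketch of the old split, checked against the fixture values that appear later in this commit:

```rust
// Reconstruction of the removed conversion: hex passkey -> u128 -> two i64 halves.
fn split_passkey(passkey: &str) -> Option<(i64, i64)> {
    let value = u128::from_str_radix(passkey, 16).ok()?;
    let upper = (value >> 64) as i64; // high 64 bits, reinterpreted as signed
    let lower = value as i64; // low 64 bits, reinterpreted as signed
    Some((upper, lower))
}

fn main() {
    // Fixture passkey used throughout the tests in this commit.
    let (upper, lower) = split_passkey("d2037c66dd3e13044e0d2f9b891c3837").unwrap();
    // These are exactly the passkey_upper/passkey_lower values the old fixtures stored.
    assert_eq!((upper, lower), (-3313668119574211836, 5624203854722381879));
}
```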

---

@@ -28,11 +28,7 @@ pub async fn exec<R: RedisPoolInterface + 'static>(
     user: Authdata,
 ) -> Result<HttpResponse> {
     let current_user = arc.pool.find_user_with_id(user.sub).await?;
-    let announce_url = get_announce_url(
-        current_user.passkey_upper,
-        current_user.passkey_lower,
-        arc.tracker.url.as_ref(),
-    );
+    let announce_url = get_announce_url(current_user.passkey, arc.tracker.url.as_ref());
 
     Ok(HttpResponse::Ok().json(UploadInformation { announce_url }))
 }

---

@@ -20,7 +20,7 @@ use utoipa::IntoParams;
 #[derive(Debug, Deserialize, IntoParams)]
 pub struct GetUserQuery {
-    id: i64,
+    id: i32,
 }
 
 #[utoipa::path(

---

@@ -15,7 +15,7 @@ use jsonwebtoken::{decode, errors::ErrorKind, DecodingKey, Validation};
 #[derive(Debug, Clone)]
 pub struct Authdata {
-    pub sub: i64,
+    pub sub: i32,
     pub class: UserClass,
 }

---

@@ -11,12 +11,12 @@ pub static AUTH_TOKEN_LONG_DURATION: LazyLock<Duration> = LazyLock::new(|| Durat
 #[derive(Serialize, Deserialize)]
 pub struct InvalidationEntry {
-    user_id: i64,
+    user_id: i32,
     token_invalidation_ts: i64,
 }
 
 impl InvalidationEntry {
-    pub fn new(user_id: i64) -> Self {
+    pub fn new(user_id: i32) -> Self {
         let now = Utc::now();
         Self {
@@ -35,7 +35,7 @@ impl<R: RedisPoolInterface> Auth<R> {
         Self { redis_pool }
     }
 
-    pub async fn invalidate(&self, user_id: i64) -> Result<()> {
+    pub async fn invalidate(&self, user_id: i32) -> Result<()> {
         let entry = InvalidationEntry::new(user_id);
 
         let mut redis = self.redis_pool.connection().await?;
@@ -51,7 +51,7 @@ impl<R: RedisPoolInterface> Auth<R> {
         Ok(())
     }
 
-    pub async fn is_invalidated(&self, user_id: i64, iat: i64) -> Result<bool> {
+    pub async fn is_invalidated(&self, user_id: i32, iat: i64) -> Result<bool> {
         let mut redis = self.redis_pool.connection().await?;
 
         let Some(entry) = redis.get(user_id).await? else {
             return Ok(false);
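
For context, the rule these signatures implement: `invalidate` records a per-user cutoff timestamp, and a token is rejected when its issued-at (`iat`) claim predates that cutoff. A minimal illustration of the rule with a `HashMap` standing in for Redis (the store and comparison below are illustrative, not the crate's actual API):

```rust
use std::collections::HashMap;

// Illustrative stand-in for the Redis store, keyed by the (now i32) user id.
struct InvalidationStore {
    entries: HashMap<i32, i64>, // user_id -> token_invalidation_ts
}

impl InvalidationStore {
    fn invalidate(&mut self, user_id: i32, now_ts: i64) {
        self.entries.insert(user_id, now_ts);
    }

    // A token is invalidated when it was issued before the stored cutoff.
    fn is_invalidated(&self, user_id: i32, iat: i64) -> bool {
        self.entries.get(&user_id).is_some_and(|cutoff| iat < *cutoff)
    }
}

fn main() {
    let mut store = InvalidationStore { entries: HashMap::new() };
    store.invalidate(42, 1_700_000_000);
    assert!(store.is_invalidated(42, 1_699_999_999)); // issued before the cutoff
    assert!(!store.is_invalidated(42, 1_700_000_001)); // issued after the cutoff
}
```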

---

@@ -1,5 +1,5 @@
 INSERT INTO
-    users (banned, username, email, password_hash, registered_from_ip, passkey_upper, passkey_lower)
+    users (banned, username, email, password_hash, registered_from_ip, passkey)
 VALUES
     -- passkey d2037c66dd3e13044e0d2f9b891c3837
-    (true, 'test_user', 'test_email@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', '-3313668119574211836', '5624203854722381879')
+    (true, 'test_user', 'test_email@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', 'mqdfkjqmsdkf')

---

@@ -2,4 +2,4 @@ INSERT INTO
     users (username, email, password_hash, registered_from_ip, passkey_upper, passkey_lower, class)
 VALUES
     -- passkey d2037c66dd3e13044e0d2f9b891c3837
-    ('test_user', 'test_email@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', '-3313668119574211836', '5624203854722381879', 'newbie')
+    ('test_user', 'test_email@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', 'mqdslkfmkldf', 'newbie')

---

@@ -2,4 +2,4 @@ INSERT INTO
     users (username, email, password_hash, registered_from_ip, passkey_upper, passkey_lower, class)
 VALUES
     -- passkey d2037c66dd3e13044e0d2f9b891c3838
-    ('test_user2', 'test_email2@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', '-3313668119574211836', '5624203854722381880', 'staff')
+    ('test_user2', 'test_email2@testdomain.com', '$argon2id$v=19$m=19456,t=2,p=1$WM6V9pJ2ya7+N+NNIUtolg$n128u9idizCHLwZ9xhKaxOttLaAVZZgvfRZlRAnfyKk', '10.10.4.88', 'cmqklsdfmj', 'staff')

---

@@ -1,401 +1,401 @@
 pub mod common;
 pub mod mocks;
-use std::sync::Arc;
+// use std::sync::Arc;
-use actix_web::test;
+// use actix_web::test;
-use arcadia_api::OpenSignups;
+// use arcadia_api::OpenSignups;
-use arcadia_common::models::tracker::announce;
+// use arcadia_common::models::tracker::announce;
-use arcadia_storage::connection_pool::ConnectionPool;
+// use arcadia_storage::connection_pool::ConnectionPool;
-use mocks::mock_redis::MockRedisPool;
+// use mocks::mock_redis::MockRedisPool;
-use serde::Deserialize;
+// use serde::Deserialize;
-use serde_json::Value;
+// use serde_json::Value;
-use sqlx::PgPool;
+// use sqlx::PgPool;
-use crate::common::auth_header;
+// use crate::common::auth_header;
-#[derive(Debug, Deserialize)]
+// #[derive(Debug, Deserialize)]
-struct WrappedError {
+// struct WrappedError {
-#[serde(rename = "failure reason")]
+// #[serde(rename = "failure reason")]
-_failure_reason: String,
+// _failure_reason: String,
-}
+// }
-#[sqlx::test(fixtures("with_test_user"), migrations = "../storage/migrations")]
+// #[sqlx::test(fixtures("with_test_user"), migrations = "../storage/migrations")]
-async fn test_announce_unknown_passkey(pool: PgPool) {
+// async fn test_announce_unknown_passkey(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let service = common::create_test_app(
+// let service = common::create_test_app(
-pool,
+// pool,
-MockRedisPool::default(),
+// MockRedisPool::default(),
-OpenSignups::Enabled,
+// OpenSignups::Enabled,
-1.0,
+// 1.0,
-1.0,
+// 1.0,
-)
+// )
-.await;
+// .await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/33333333333333333333333333333333?",
+// "/announce/33333333333333333333333333333333?",
-"info_hash=%7C%B3%C6y%9A%FFm%5C%3B%10%A6S%1FF%07%D9%C9%0E%C0%A7&",
+// "info_hash=%7C%B3%C6y%9A%FFm%5C%3B%10%A6S%1FF%07%D9%C9%0E%C0%A7&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=0&",
+// "uploaded=0&",
-"downloaded=0&",
+// "downloaded=0&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let resp = test::call_service(&service, req).await;
+// let resp = test::call_service(&service, req).await;
-// Should fail because the passkey is invalid
+// // Should fail because the passkey is invalid
-assert!(
+// assert!(
-resp.status().is_client_error(),
+// resp.status().is_client_error(),
-"status {} is not client error",
+// "status {} is not client error",
-resp.status()
+// resp.status()
-);
+// );
-// Any error is okay, as long as it has "failure reason" populated.
+// // Any error is okay, as long as it has "failure reason" populated.
-common::read_body_bencode::<WrappedError, _>(resp)
+// common::read_body_bencode::<WrappedError, _>(resp)
-.await
+// .await
-.expect("expected failure message");
+// .expect("expected failure message");
-}
+// }
-#[sqlx::test(fixtures("with_test_user"), migrations = "../storage/migrations")]
+// #[sqlx::test(fixtures("with_test_user"), migrations = "../storage/migrations")]
-async fn test_announce_unknown_torrent(pool: PgPool) {
+// async fn test_announce_unknown_torrent(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let service = common::create_test_app(
+// let service = common::create_test_app(
-pool,
+// pool,
-MockRedisPool::default(),
+// MockRedisPool::default(),
-OpenSignups::Enabled,
+// OpenSignups::Enabled,
-1.0,
+// 1.0,
-1.0,
+// 1.0,
-)
+// )
-.await;
+// .await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%7C%B3%C6y%9A%FFm%5C%3B%10%A6S%1FF%07%D9%C9%0E%C0%A7&",
+// "info_hash=%7C%B3%C6y%9A%FFm%5C%3B%10%A6S%1FF%07%D9%C9%0E%C0%A7&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=0&",
+// "uploaded=0&",
-"downloaded=0&",
+// "downloaded=0&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let resp = test::call_service(&service, req).await;
+// let resp = test::call_service(&service, req).await;
-// Should fail because there is no torrent matching infohash.
+// // Should fail because there is no torrent matching infohash.
-assert!(
+// assert!(
-resp.status().is_client_error(),
+// resp.status().is_client_error(),
-"status {} is not client error",
+// "status {} is not client error",
-resp.status()
+// resp.status()
-);
+// );
-// Any error is okay, as long as it has "failure reason" populated.
+// // Any error is okay, as long as it has "failure reason" populated.
-common::read_body_bencode::<WrappedError, _>(resp)
+// common::read_body_bencode::<WrappedError, _>(resp)
-.await
+// .await
-.expect("expected failure message");
+// .expect("expected failure message");
-}
+// }
-#[sqlx::test(
+// #[sqlx::test(
-fixtures(
+// fixtures(
-"with_test_user",
+// "with_test_user",
-"with_test_title_group",
+// "with_test_title_group",
-"with_test_edition_group",
+// "with_test_edition_group",
-"with_test_torrent"
+// "with_test_torrent"
-),
+// ),
-migrations = "../storage/migrations"
+// migrations = "../storage/migrations"
-)]
+// )]
-async fn test_announce_known_torrent(pool: PgPool) {
+// async fn test_announce_known_torrent(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let service = common::create_test_app(
+// let service = common::create_test_app(
-pool,
+// pool,
-MockRedisPool::default(),
+// MockRedisPool::default(),
-OpenSignups::Enabled,
+// OpenSignups::Enabled,
-1.0,
+// 1.0,
-1.0,
+// 1.0,
-)
+// )
-.await;
+// .await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
+// "info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=0&",
+// "uploaded=0&",
-"downloaded=0&",
+// "downloaded=0&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let resp = test::call_service(&service, req).await;
+// let resp = test::call_service(&service, req).await;
-// Should succeed because there is both a matching user and info hash.
+// // Should succeed because there is both a matching user and info hash.
-assert!(
+// assert!(
-resp.status().is_success(),
+// resp.status().is_success(),
-"status {} is not success",
+// "status {} is not success",
-resp.status()
+// resp.status()
-);
+// );
-let resp = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
+// let resp = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
-.await
+// .await
-.expect("could not deserialize announce response");
+// .expect("could not deserialize announce response");
-// There are no peers, so should be empty.
+// // There are no peers, so should be empty.
-assert!(resp.peers.is_empty());
+// assert!(resp.peers.is_empty());
-}
+// }
-#[sqlx::test(
+// #[sqlx::test(
-fixtures(
+// fixtures(
-"with_test_user",
+// "with_test_user",
-"with_test_title_group",
+// "with_test_title_group",
-"with_test_edition_group",
+// "with_test_edition_group",
-"with_test_torrent",
+// "with_test_torrent",
-"with_test_user2",
+// "with_test_user2",
-"with_test_peers"
+// "with_test_peers"
-),
+// ),
-migrations = "../storage/migrations"
+// migrations = "../storage/migrations"
-)]
+// )]
-async fn test_announce_known_torrent_with_peers(pool: PgPool) {
+// async fn test_announce_known_torrent_with_peers(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let (service, user) =
+// let (service, user) =
-common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
+// common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
+// "info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=42&",
+// "uploaded=42&",
-"downloaded=43&",
+// "downloaded=43&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let resp = test::call_service(&service, req).await;
+// let resp = test::call_service(&service, req).await;
-// Should succeed because there is both a matching user and info hash.
+// // Should succeed because there is both a matching user and info hash.
-assert!(
+// assert!(
-resp.status().is_success(),
+// resp.status().is_success(),
-"status {} is not success",
+// "status {} is not success",
-resp.status()
+// resp.status()
-);
+// );
-let resp = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
+// let resp = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
-.await
+// .await
-.expect("could not deserialize announce response");
+// .expect("could not deserialize announce response");
-// Fixture sets up two non-self peers.
+// // Fixture sets up two non-self peers.
-assert!(resp.peers.len() == 2);
+// assert!(resp.peers.len() == 2);
-for announce::Peer { ip, port } in &resp.peers {
+// for announce::Peer { ip, port } in &resp.peers {
-assert_ne!(
+// assert_ne!(
-(ip, port),
+// (ip, port),
-(&std::net::Ipv4Addr::new(10, 10, 4, 88), &6968),
+// (&std::net::Ipv4Addr::new(10, 10, 4, 88), &6968),
-"announce response contains self in peer list"
+// "announce response contains self in peer list"
-);
+// );
-assert_ne!(
+// assert_ne!(
-(ip, port),
+// (ip, port),
-(&std::net::Ipv4Addr::new(10, 10, 4, 91), &26),
+// (&std::net::Ipv4Addr::new(10, 10, 4, 91), &26),
-"peer by the same user is included in peer list"
+// "peer by the same user is included in peer list"
-);
+// );
-}
+// }
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.insert_header(auth_header(&user.token))
+// .insert_header(auth_header(&user.token))
-.uri("/api/users/me")
+// .uri("/api/users/me")
-.to_request();
+// .to_request();
-let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
+// let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
-assert_eq!(body["user"]["real_uploaded"].as_u64().unwrap(), 42);
+// assert_eq!(body["user"]["real_uploaded"].as_u64().unwrap(), 42);
-// should be 44 because users start with 1 byte downloaded at account creation
+// // should be 44 because users start with 1 byte downloaded at account creation
-assert_eq!(body["user"]["real_downloaded"].as_u64().unwrap(), 44);
+// assert_eq!(body["user"]["real_downloaded"].as_u64().unwrap(), 44);
-}
+// }
-#[sqlx::test(
+// #[sqlx::test(
-fixtures(
+// fixtures(
-"with_test_user",
+// "with_test_user",
-"with_test_title_group",
+// "with_test_title_group",
-"with_test_edition_group",
+// "with_test_edition_group",
-"with_test_torrent",
+// "with_test_torrent",
-"with_test_user2",
+// "with_test_user2",
-"with_test_peers"
+// "with_test_peers"
-),
+// ),
-migrations = "../storage/migrations"
+// migrations = "../storage/migrations"
-)]
+// )]
-async fn test_announce_global_factor_manipulation(pool: PgPool) {
+// async fn test_announce_global_factor_manipulation(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let (service, user) =
+// let (service, user) =
-common::create_test_app_and_login(pool, MockRedisPool::default(), 2.0, 0.5).await;
+// common::create_test_app_and_login(pool, MockRedisPool::default(), 2.0, 0.5).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
+// "info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=10&",
+// "uploaded=10&",
-"downloaded=10&",
+// "downloaded=10&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let _ = test::call_service(&service, req).await;
+// let _ = test::call_service(&service, req).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.insert_header(auth_header(&user.token))
+// .insert_header(auth_header(&user.token))
-.uri("/api/users/me")
+// .uri("/api/users/me")
-.to_request();
+// .to_request();
-let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
+// let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
-assert_eq!(body["user"]["uploaded"].as_u64().unwrap(), 20);
+// assert_eq!(body["user"]["uploaded"].as_u64().unwrap(), 20);
-// should be 6 because users start with 1 byte downloaded at account creation
+// // should be 6 because users start with 1 byte downloaded at account creation
-assert_eq!(body["user"]["downloaded"].as_u64().unwrap(), 6);
+// assert_eq!(body["user"]["downloaded"].as_u64().unwrap(), 6);
-}
+// }
-#[sqlx::test(
+// #[sqlx::test(
-fixtures(
+// fixtures(
-"with_test_user",
+// "with_test_user",
-"with_test_title_group",
+// "with_test_title_group",
-"with_test_edition_group",
+// "with_test_edition_group",
-"with_test_torrent_custom_up_down_factors",
+// "with_test_torrent_custom_up_down_factors",
-"with_test_user2",
+// "with_test_user2",
-"with_test_peers"
+// "with_test_peers"
-),
+// ),
-migrations = "../storage/migrations"
+// migrations = "../storage/migrations"
-)]
+// )]
-async fn test_announce_torrent_specific_factor_manipulation(pool: PgPool) {
+// async fn test_announce_torrent_specific_factor_manipulation(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let (service, user) =
+// let (service, user) =
-common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
+// common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
+// "info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=10&",
+// "uploaded=10&",
-"downloaded=10&",
+// "downloaded=10&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let _ = test::call_service(&service, req).await;
+// let _ = test::call_service(&service, req).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.insert_header(auth_header(&user.token))
+// .insert_header(auth_header(&user.token))
-.uri("/api/users/me")
+// .uri("/api/users/me")
-.to_request();
+// .to_request();
-let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
+// let body = common::call_and_read_body_json::<Value, _>(&service, req).await;
-assert_eq!(body["user"]["uploaded"].as_u64().unwrap(), 20);
+// assert_eq!(body["user"]["uploaded"].as_u64().unwrap(), 20);
-// should be 6 because users start with 1 byte downloaded at account creation
+// // should be 6 because users start with 1 byte downloaded at account creation
-assert_eq!(body["user"]["downloaded"].as_u64().unwrap(), 6);
+// assert_eq!(body["user"]["downloaded"].as_u64().unwrap(), 6);
-}
+// }
-#[sqlx::test(
+// #[sqlx::test(
-fixtures(
+// fixtures(
-"with_test_user",
+// "with_test_user",
-"with_test_title_group",
+// "with_test_title_group",
-"with_test_edition_group",
+// "with_test_edition_group",
-"with_test_torrent"
+// "with_test_torrent"
-),
+// ),
-migrations = "../storage/migrations"
+// migrations = "../storage/migrations"
-)]
+// )]
-async fn test_peers_after_announce(pool: PgPool) {
+// async fn test_peers_after_announce(pool: PgPool) {
-let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
+// let pool = Arc::new(ConnectionPool::with_pg_pool(pool));
-let (service, user) =
+// let (service, user) =
-common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
+// common::create_test_app_and_login(pool, MockRedisPool::default(), 1.0, 1.0).await;
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri(concat!(
+// .uri(concat!(
-"/announce/d2037c66dd3e13044e0d2f9b891c3837?",
+// "/announce/d2037c66dd3e13044e0d2f9b891c3837?",
-"info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
+// "info_hash=%11%223DUfw%88%99%AA%BB%CC%DD%EE%FF%00%11%223D&",
-"peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
+// "peer_id=-lt0F01-%3D%91%BB%AC%5C%C69%C0%EDmux&",
-"key=1ab4e687&",
+// "key=1ab4e687&",
-"compact=1&",
+// "compact=1&",
-"port=6968&",
+// "port=6968&",
-"uploaded=100&",
+// "uploaded=100&",
-"downloaded=100&",
+// "downloaded=100&",
-"left=14&",
+// "left=14&",
-"event=started"
+// "event=started"
-))
+// ))
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.to_request();
+// .to_request();
-let resp = test::call_service(&service, req).await;
+// let resp = test::call_service(&service, req).await;
-// Should succeed because there is both a matching user and info hash.
+// // Should succeed because there is both a matching user and info hash.
-assert!(
+// assert!(
-resp.status().is_success(),
+// resp.status().is_success(),
-"status {} is not success",
+// "status {} is not success",
-resp.status()
+// resp.status()
-);
+// );
-let _ = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
+// let _ = common::read_body_bencode::<announce::AnnounceResponse, _>(resp)
-.await
+// .await
-.expect("could not deserialize announce response");
+// .expect("could not deserialize announce response");
-let req = test::TestRequest::get()
+// let req = test::TestRequest::get()
-.uri("/api/users/me")
+// .uri("/api/users/me")
-.insert_header(("X-Forwarded-For", "10.10.4.88"))
+// .insert_header(("X-Forwarded-For", "10.10.4.88"))
-.insert_header(auth_header(&user.token))
+// .insert_header(auth_header(&user.token))
-.to_request();
+// .to_request();
-#[derive(Debug, PartialEq, Deserialize)]
+// #[derive(Debug, PartialEq, Deserialize)]
-struct Peer {
+// struct Peer {
-pub ip: String,
+// pub ip: String,
-pub port: i16,
+// pub port: i16,
-pub real_uploaded: i64,
+// pub real_uploaded: i64,
-pub real_downloaded: i64,
+// pub real_downloaded: i64,
-}
+// }
-#[derive(Debug, PartialEq, Deserialize)]
+// #[derive(Debug, PartialEq, Deserialize)]
-struct Profile {
+// struct Profile {
-pub peers: Vec<Peer>,
+// pub peers: Vec<Peer>,
-}
+// }
-let resp = common::call_and_read_body_json::<Profile, _>(&service, req).await;
+// let resp = common::call_and_read_body_json::<Profile, _>(&service, req).await;
-assert_eq!(
+// assert_eq!(
-resp.peers,
+// resp.peers,
-vec![Peer {
+// vec![Peer {
-ip: String::from("10.10.4.88/32"),
+// ip: String::from("10.10.4.88/32"),
-port: 6968,
+// port: 6968,
-real_uploaded: 100,
+// real_uploaded: 100,
-real_downloaded: 100,
+// real_downloaded: 100,
-}]
+// }]
-);
+// );
-}
+// }

---

@@ -122,7 +122,7 @@ async fn test_upload_torrent(pool: PgPool) {
     #[derive(Debug, Deserialize)]
     struct Torrent {
         edition_group_id: i64,
-        created_by_id: i64,
+        created_by_id: i32,
     }
     let torrent = common::call_and_read_body_json_with_status::<Torrent, _>(

---

@@ -114,7 +114,7 @@ pub enum Error {
     UserNotFound(String),
 
     #[error("user with id '{0}' not found")]
-    UserWithIdNotFound(i64),
+    UserWithIdNotFound(i32),
 
     #[error("wrong username or password")]
     WrongUsernameOrPassword,

---

@@ -3,7 +3,7 @@ use chrono::{DateTime, Local};
 #[derive(Debug)]
 pub struct Peer {
     pub id: i64,
-    pub user_id: i64,
+    pub user_id: i32,
     pub torrent_id: i64,
     pub peer_id: [u8; 20],
     pub ip: Option<std::net::Ipv4Addr>,
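
The split between `i64` and `i32` fields in `Peer` mirrors the schema: sqlx checks Postgres-to-Rust types at compile time, mapping INT4 (`INT`/`SERIAL`) to `i32` and INT8 (`BIGINT`/`BIGSERIAL`) to `i64`, which is why the Rust structs and the SQL migration further down have to switch together. A hypothetical lookup against this schema (the query is illustrative, not from the diff):

```rust
use sqlx::PgPool;

// peers.id stays BIGINT (i64) while peers.user_id becomes INT (i32); decoding
// user_id into anything but i32 would now fail sqlx's type checking.
async fn peer_owner_id(pool: &PgPool, peer_id: i64) -> sqlx::Result<i32> {
    sqlx::query_scalar("SELECT user_id FROM peers WHERE id = $1")
        .bind(peer_id)
        .fetch_one(pool)
        .await
}
```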

---

@@ -1,7 +1,5 @@
-pub fn get_announce_url(passkey_upper: i64, passkey_lower: i64, tracker_url: &str) -> String {
-    let passkey = ((passkey_upper as u64 as u128) << 64) | (passkey_lower as u64 as u128);
-    format!("{tracker_url}announce/{passkey:x}")
+pub fn get_announce_url(passkey: String, tracker_url: &str) -> String {
+    format!("{tracker_url}announce/{passkey}")
 }
 
 pub fn looks_like_url(s: &str) -> bool {
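
The removed lines show why the helper needed two arguments: the signed halves had to be widened to `u64` first (so sign extension does not smear bits across the `u128`), recombined, and hex-formatted. A sketch demonstrating that the old reassembly and the new plain string produce the same URL for the fixture passkey (the tracker URL here is a made-up placeholder):

```rust
// Old reassembly, reconstructed from the removed lines above.
fn old_announce_url(passkey_upper: i64, passkey_lower: i64, tracker_url: &str) -> String {
    let passkey = ((passkey_upper as u64 as u128) << 64) | (passkey_lower as u64 as u128);
    format!("{tracker_url}announce/{passkey:x}")
}

// New version, as in the diff: the passkey is already stored as a string.
fn new_announce_url(passkey: String, tracker_url: &str) -> String {
    format!("{tracker_url}announce/{passkey}")
}

fn main() {
    // Both representations of the fixture passkey used in this commit.
    let old = old_announce_url(-3313668119574211836, 5624203854722381879, "https://t.example/");
    let new = new_announce_url("d2037c66dd3e13044e0d2f9b891c3837".into(), "https://t.example/");
    assert_eq!(old, new);
}
```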

---

@@ -21,3 +21,4 @@ strum = { version = "0.27", features = ["derive"] }
 musicbrainz_rs = "0.9.1"
 rand = "0.9.0"
 utoipa = { version = "5.3.1", features = ["actix_extras"] }
+arcadia-shared = { path = "../../shared" }

---

@@ -5,7 +5,7 @@ CREATE TYPE user_class_enum AS ENUM (
 );
 
 CREATE TABLE users (
-    id BIGSERIAL PRIMARY KEY,
+    id SERIAL PRIMARY KEY,
     username VARCHAR(20) UNIQUE NOT NULL,
     avatar TEXT,
     email VARCHAR(255) UNIQUE NOT NULL,
@@ -40,22 +40,21 @@ CREATE TABLE users (
     bonus_points BIGINT NOT NULL DEFAULT 0,
     freeleech_tokens INT NOT NULL DEFAULT 0,
     settings JSONB NOT NULL DEFAULT '{}',
-    passkey_upper BIGINT NOT NULL,
-    passkey_lower BIGINT NOT NULL,
+    passkey VARCHAR(33) NOT NULL,
     warned BOOLEAN NOT NULL DEFAULT FALSE,
     banned BOOLEAN NOT NULL DEFAULT FALSE,
     staff_note TEXT NOT NULL DEFAULT '',
-    UNIQUE(passkey_upper, passkey_lower)
+    UNIQUE(passkey)
 );
 
-INSERT INTO users (username, email, password_hash, registered_from_ip, settings, passkey_upper, passkey_lower)
-VALUES ('creator', 'none@domain.com', 'none', '127.0.0.1', '{}'::jsonb, '1', '1');
+INSERT INTO users (username, email, password_hash, registered_from_ip, settings, passkey)
+VALUES ('creator', 'none@domain.com', 'none', '127.0.0.1', '{}'::jsonb, 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa');
 
 CREATE TABLE api_keys (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     name VARCHAR(30) NOT NULL,
     value VARCHAR(40) NOT NULL UNIQUE,
-    user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE
+    user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE
 );
 
 CREATE TYPE user_application_status_enum AS ENUM (
     'pending',
@@ -77,27 +76,27 @@ CREATE TABLE invitations (
     expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
     invitation_key VARCHAR(50) NOT NULL,
     message TEXT NOT NULL,
-    sender_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    sender_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
     receiver_email VARCHAR(255) NOT NULL,
     user_application_id BIGINT REFERENCES user_applications(id) ON DELETE SET NULL,
-    receiver_id BIGINT REFERENCES users(id) ON DELETE SET NULL
+    receiver_id INT REFERENCES users(id) ON DELETE SET NULL
 );
 
 CREATE TABLE user_warnings (
     id BIGSERIAL PRIMARY KEY,
-    user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     expires_at TIMESTAMP WITH TIME ZONE,
     reason TEXT NOT NULL,
     ban boolean NOT NULL,
-    created_by_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE
+    created_by_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE
 );
 
 CREATE TABLE gifts (
     id BIGSERIAL PRIMARY KEY,
     sent_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     message TEXT NOT NULL,
-    sender_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
-    receiver_id BIGINT NOT NULL REFERENCES users(id) ON DELETE SET NULL,
+    sender_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    receiver_id INT NOT NULL REFERENCES users(id) ON DELETE SET NULL,
     bonus_points BIGINT NOT NULL DEFAULT 0,
     freeleech_tokens INT NOT NULL DEFAULT 0
 );
@@ -106,7 +105,7 @@ CREATE TABLE artists (
     name VARCHAR(255) UNIQUE NOT NULL,
     description TEXT NOT NULL,
     pictures TEXT [] NOT NULL,
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     title_groups_amount INT NOT NULL DEFAULT 0,
     edition_groups_amount INT NOT NULL DEFAULT 0,
@@ -129,7 +128,7 @@ CREATE TABLE master_groups (
     -- name_aliases VARCHAR(255)[],
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     -- description TEXT NOT NULL,
     -- original_language VARCHAR(50) NOT NULL,
     -- country_from VARCHAR(50) NOT NULL,
@@ -155,7 +154,7 @@ CREATE TABLE series (
     tags TEXT [] NOT NULL,
     covers TEXT [] NOT NULL,
     banners TEXT [] NOT NULL,
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     FOREIGN KEY (created_by_id) REFERENCES users(id) ON DELETE CASCADE
@@ -258,7 +257,7 @@ CREATE TABLE title_groups (
     name_aliases TEXT [],
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     description TEXT NOT NULL,
     platform platform_enum,
     original_language language_enum,
@@ -314,7 +313,7 @@ CREATE TABLE affiliated_artists (
     artist_id BIGINT NOT NULL,
     roles artist_role_enum[] NOT NULL,
     nickname VARCHAR(255),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     FOREIGN KEY (title_group_id) REFERENCES title_groups(id) ON DELETE CASCADE,
     FOREIGN KEY (artist_id) REFERENCES artists(id) ON DELETE CASCADE,
@@ -349,7 +348,7 @@ CREATE TABLE edition_groups (
     release_date TIMESTAMP WITH TIME ZONE NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     description TEXT,
     distributor VARCHAR(255),
     covers TEXT [] NOT NULL,
@@ -439,7 +438,7 @@ CREATE TABLE torrents (
     edition_group_id BIGINT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     info_hash BYTEA NOT NULL CHECK(octet_length(info_hash) = 20),
     info_dict BYTEA NOT NULL,
     languages language_enum[] NOT NULL,
@@ -485,7 +484,7 @@ CREATE TABLE torrents (
 CREATE TABLE deleted_torrents (
     LIKE torrents INCLUDING CONSTRAINTS, -- INCLUDING DEFAULTS INCLUDING INDEXES,
     deleted_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    deleted_by_id BIGINT NOT NULL,
+    deleted_by_id INT NOT NULL,
     reason TEXT NOT NULL,
 
     FOREIGN KEY (deleted_by_id) REFERENCES users(id)
@@ -495,7 +494,7 @@ CREATE TABLE title_group_comments (
     content TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     title_group_id BIGINT NOT NULL,
     refers_to_torrent_id BIGINT,
     answers_to_comment_id BIGINT,
@@ -509,8 +508,8 @@ CREATE TABLE torrent_requests (
     title_group_id BIGINT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
-    filled_by_user_id BIGINT,
+    created_by_id INT NOT NULL,
+    filled_by_user_id INT,
     filled_by_torrent_id BIGINT,
     filled_at TIMESTAMP WITH TIME ZONE,
     edition_name TEXT,
@@ -539,7 +538,7 @@ CREATE TABLE torrent_request_votes(
     id BIGSERIAL PRIMARY KEY,
     torrent_request_id BIGINT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     bounty_upload BIGINT NOT NULL DEFAULT 0,
     bounty_bonus_points BIGINT NOT NULL DEFAULT 0,
     FOREIGN KEY (torrent_request_id) REFERENCES torrent_requests(id) ON DELETE CASCADE,
@@ -548,7 +547,7 @@ CREATE TABLE torrent_request_votes(
 CREATE TABLE torrent_reports (
     id BIGSERIAL PRIMARY KEY,
     reported_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    reported_by_id BIGINT NOT NULL,
+    reported_by_id INT NOT NULL,
     description TEXT NOT NULL,
     reported_torrent_id BIGINT NOT NULL,
     FOREIGN KEY (reported_by_id) REFERENCES users(id) ON DELETE CASCADE,
@@ -558,7 +557,7 @@ CREATE TABLE torrent_reports (
 CREATE TYPE peer_status_enum AS ENUM('seeding', 'leeching');
 
 CREATE TABLE peers (
     id BIGINT GENERATED ALWAYS AS IDENTITY,
-    user_id BIGINT NOT NULL,
+    user_id INT NOT NULL,
     torrent_id BIGINT NOT NULL,
     peer_id BYTEA NOT NULL CHECK(octet_length(peer_id) = 20),
     ip INET NOT NULL,
@@ -580,7 +579,7 @@ CREATE TABLE peers (
 CREATE TABLE torrent_activities (
     id BIGSERIAL PRIMARY KEY,
     torrent_id BIGINT NOT NULL,
-    user_id BIGINT NOT NULL,
+    user_id INT NOT NULL,
     snatched_at TIMESTAMP WITH TIME ZONE,
     first_seen_seeding_at TIMESTAMP WITH TIME ZONE,
     last_seen_seeding_at TIMESTAMP WITH TIME ZONE,
@@ -596,7 +595,7 @@ CREATE TABLE entities (
     name VARCHAR(255) NOT NULL,
     description TEXT NOT NULL,
     pictures TEXT[] NOT NULL,
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     title_groups_amount INT NOT NULL DEFAULT 0,
     edition_groups_amount INT NOT NULL DEFAULT 0,
@@ -616,7 +615,7 @@ CREATE TABLE affiliated_entities (
     id BIGSERIAL PRIMARY KEY,
     title_group_id BIGINT NOT NULL,
     entity_id BIGINT NOT NULL,
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     roles entity_role_enum[] NOT NULL,
     FOREIGN KEY (title_group_id) REFERENCES title_groups(id) ON DELETE CASCADE,
@@ -638,7 +637,7 @@ CREATE TYPE collage_type_enum AS ENUM (
 CREATE TABLE collage (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     name VARCHAR NOT NULL,
     cover TEXT,
     description TEXT NOT NULL,
@@ -650,7 +649,7 @@ CREATE TABLE collage (
 CREATE TABLE collage_entry (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL REFERENCES users(id),
+    created_by_id INT NOT NULL REFERENCES users(id),
     collage_id BIGINT NOT NULL REFERENCES collage(id),
     artist_id BIGINT REFERENCES artists(id),
     entity_id BIGINT REFERENCES entities(id),
@@ -717,7 +716,7 @@ CREATE TABLE forum_categories (
     id SERIAL PRIMARY KEY,
     name TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     FOREIGN KEY (created_by_id) REFERENCES users(id)
 );
@@ -727,7 +726,7 @@ CREATE TABLE forum_sub_categories (
     forum_category_id INT NOT NULL,
     name TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT,
+    created_by_id INT,
     threads_amount BIGINT NOT NULL DEFAULT 0,
     posts_amount BIGINT NOT NULL DEFAULT 0,
     forbidden_classes VARCHAR(50) [] NOT NULL DEFAULT ARRAY[]::VARCHAR(50)[],
@@ -741,7 +740,7 @@ CREATE TABLE forum_threads (
     forum_sub_category_id INT NOT NULL,
     name TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     posts_amount BIGINT NOT NULL DEFAULT 0,
     sticky BOOLEAN NOT NULL DEFAULT FALSE,
     locked BOOLEAN NOT NULL DEFAULT FALSE,
@@ -755,7 +754,7 @@ CREATE TABLE forum_posts (
     forum_thread_id BIGINT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     content TEXT NOT NULL,
     sticky BOOLEAN NOT NULL DEFAULT FALSE,
@@ -767,9 +766,9 @@ CREATE TABLE wiki_articles (
     id BIGSERIAL PRIMARY KEY,
     title TEXT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    updated_by_id BIGINT NOT NULL,
+    updated_by_id INT NOT NULL,
     body TEXT NOT NULL,
     FOREIGN KEY (created_by_id) REFERENCES users(id)
@@ -778,8 +777,8 @@ CREATE TABLE conversations (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
     subject VARCHAR(255) NOT NULL,
-    sender_id BIGINT NOT NULL,
-    receiver_id BIGINT NOT NULL,
+    sender_id INT NOT NULL,
+    receiver_id INT NOT NULL,
     sender_last_seen_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
     receiver_last_seen_at TIMESTAMP WITH TIME ZONE,
@@ -790,7 +789,7 @@ CREATE TABLE conversation_messages (
     id BIGSERIAL PRIMARY KEY,
     conversation_id BIGINT NOT NULL,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
-    created_by_id BIGINT NOT NULL,
+    created_by_id INT NOT NULL,
     content TEXT NOT NULL,
     FOREIGN KEY (conversation_id) REFERENCES conversations(id),
@@ -800,14 +799,14 @@ CREATE TABLE staff_pms (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
     subject TEXT NOT NULL,
-    created_by_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    created_by_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
     resolved BOOLEAN NOT NULL DEFAULT FALSE
 );
 
 CREATE TABLE staff_pm_messages (
     id BIGSERIAL PRIMARY KEY,
     staff_pm_id BIGINT NOT NULL REFERENCES staff_pms(id) ON DELETE CASCADE,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    created_by_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    created_by_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
     content TEXT NOT NULL
 );
 
 CREATE TYPE notification_reason_enum AS ENUM (
@@ -835,7 +834,7 @@ CREATE TABLE subscriptions (
 CREATE TABLE notifications (
     id BIGSERIAL PRIMARY KEY,
     created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
-    receiver_id BIGINT NOT NULL,
+    receiver_id INT NOT NULL,
     reason notification_reason_enum NOT NULL,
     message TEXT,
     read_status BOOLEAN NOT NULL DEFAULT FALSE,
@@ -927,9 +926,9 @@ ORDER BY
     p_order TEXT DEFAULT 'desc',
     p_limit BIGINT DEFAULT NULL,
     p_offset BIGINT DEFAULT NULL,
-    p_torrent_created_by_id BIGINT DEFAULT NULL,
-    p_torrent_snatched_by_id BIGINT DEFAULT NULL,
-    p_requesting_user_id BIGINT DEFAULT NULL,
+    p_torrent_created_by_id INT DEFAULT NULL,
+    p_torrent_snatched_by_id INT DEFAULT NULL,
+    p_requesting_user_id INT DEFAULT NULL,
     p_external_link TEXT DEFAULT NULL
 )
 RETURNS TABLE (
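
With this migration the passkey lives in a single `VARCHAR(33)` column under a plain `UNIQUE` constraint instead of a two-column BIGINT pair. The diff does not show how passkeys are minted; a hypothetical generator producing a value that fits the new column, using the rand crate already present in the workspace, could look like:

```rust
// Hypothetical passkey generation; not part of this commit.
fn generate_passkey() -> String {
    let value: u128 = rand::random();
    // 32 lowercase hex characters, zero-padded, e.g. "d2037c66dd3e13044e0d2f9b891c3837".
    format!("{value:032x}")
}

fn main() {
    let passkey = generate_passkey();
    assert_eq!(passkey.len(), 32); // fits VARCHAR(33)
}
```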

---

@@ -27,11 +27,11 @@ INSERT INTO public._sqlx_migrations VALUES (20250312215600, 'initdb', '2025-09-1
 -- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: arcadia
 --
 
-INSERT INTO public.users VALUES (1, 'creator', NULL, 'none@domain.com', 'none', '127.0.0.1', '2025-09-17 12:42:13.702455+00', '', 0, 0, 1, 1, 0, 0, '2025-09-17 12:42:13.702455+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{}', 1, 1, false, false, '');
+INSERT INTO public.users VALUES (1, 'creator', NULL, 'none@domain.com', 'none', '127.0.0.1', '2025-09-17 12:42:13.702455+00', '', 0, 0, 1, 1, 0, 0, '2025-09-17 12:42:13.702455+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{}', 'aa', false, false, '');
-INSERT INTO public.users VALUES (5, 'waterbottle', 'https://i.pinimg.com/736x/a6/27/12/a6271204df8d387c3e614986c106f549.jpg', 'user2@example.com', 'hashedpassword2', '192.168.1.2', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 5493004881313328037, 2566432999990446913, false, false, '''''');
+INSERT INTO public.users VALUES (5, 'waterbottle', 'https://i.pinimg.com/736x/a6/27/12/a6271204df8d387c3e614986c106f549.jpg', 'user2@example.com', 'hashedpassword2', '192.168.1.2', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'fqmslfjqmlsfj', false, false, '''''');
-INSERT INTO public.users VALUES (3, 'coolguy', 'https://i.pinimg.com/474x/c1/5a/6c/c15a6c91515e22f6ea8b766f89c12f0c.jpg', 'user3@example.com', 'hashedpassword3', '192.168.1.3', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 2274483400846363122, 1270934296711348124, false, false, '''''');
+INSERT INTO public.users VALUES (3, 'coolguy', 'https://i.pinimg.com/474x/c1/5a/6c/c15a6c91515e22f6ea8b766f89c12f0c.jpg', 'user3@example.com', 'hashedpassword3', '192.168.1.3', '2025-03-30 16:24:57.388152+00', '', 0, 0, 1, 1, 0, 0, '2025-03-30 16:24:57.388152+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'qnsvmqfmlqsdm', false, false, '''''');
-INSERT INTO public.users VALUES (2, 'picolo', 'https://img.freepik.com/premium-vector/random-people-line-art-vector_567805-63.jpg', 'user1@example.com', '$argon2id$v=19$m=19456,t=2,p=1$s4XJtCUk9IrGgNsTfP6Ofw$ktoGbBEoFaVgdiTn19Gh9h45LjFiv7AUEL5KHhzm4d0', '192.168.1.1', '2025-03-30 16:24:57.388152+00', '', 10000, 0, 1, 1, 0, 0, '2025-09-17 09:27:11.336576+00', 'staff', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 999999410, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', -197409747985172542, 1837889239438807682, false, false, '''''');
+INSERT INTO public.users VALUES (2, 'picolo', 'https://img.freepik.com/premium-vector/random-people-line-art-vector_567805-63.jpg', 'user1@example.com', '$argon2id$v=19$m=19456,t=2,p=1$s4XJtCUk9IrGgNsTfP6Ofw$ktoGbBEoFaVgdiTn19Gh9h45LjFiv7AUEL5KHhzm4d0', '192.168.1.1', '2025-03-30 16:24:57.388152+00', '', 10000, 0, 1, 1, 0, 0, '2025-09-17 09:27:11.336576+00', 'staff', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 999999410, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'qmofqmlskdfnnns', false, false, '''''');
-INSERT INTO public.users VALUES (4, 'test', NULL, 'test@test.tsttt', '$argon2id$v=19$m=19456,t=2,p=1$yaA+WqA4OfSyAqR3iXhDng$/Ngv7VeJvVNHli9rBgQG0d/O2W+qoI2yHhQxZSxxW2M', '127.0.0.1', '2025-04-10 19:15:51.036818+00', '', 979900000000, 0, 1, 1, 0, 0, '2025-09-17 09:15:44.322914+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99999000, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', -7167291202215854785, 1526268353104531819, false, false, '''''');
+INSERT INTO public.users VALUES (4, 'test', NULL, 'test@test.tsttt', '$argon2id$v=19$m=19456,t=2,p=1$yaA+WqA4OfSyAqR3iXhDng$/Ngv7VeJvVNHli9rBgQG0d/O2W+qoI2yHhQxZSxxW2M', '127.0.0.1', '2025-04-10 19:15:51.036818+00', '', 979900000000, 0, 1, 1, 0, 0, '2025-09-17 09:15:44.322914+00', 'newbie', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99999000, 0, '{"site_appearance": {"item_detail_layout": "sidebar_right"}}', 'mqnmnqmlngqsklf', false, false, '''''');
 
 --

View File

@@ -11,7 +11,7 @@ pub struct Artist {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub description: String, pub description: String,
pub pictures: Vec<String>, pub pictures: Vec<String>,
pub title_groups_amount: i32, pub title_groups_amount: i32,
@@ -110,7 +110,7 @@ pub struct AffiliatedArtist {
pub nickname: Option<String>, // for example: name of the character the actor is playing pub nickname: Option<String>, // for example: name of the character the actor is playing
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
} }
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)]
@@ -145,6 +145,6 @@ pub struct AffiliatedArtistHierarchy {
pub nickname: Option<String>, pub nickname: Option<String>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub artist: Artist, pub artist: Artist,
} }
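
All of these created_by_id / user-id fields narrow from i64 to i32 because the SQL columns referencing users.id become INT. With sqlx's compile-time-checked queries, Postgres INT4 maps to i32 and INT8 to i64, so every Rust field has to change in lockstep with the migration or the query macros stop compiling. A minimal sketch of that mapping (illustrative names, not from this commit):

// Postgres <-> Rust mapping enforced by sqlx:
//   INT    (int4) <-> i32
//   BIGINT (int8) <-> i64
#[derive(sqlx::FromRow)]
struct CreatedBy {
    created_by_id: i32, // users.id is now INT
}

async fn created_by(pool: &sqlx::PgPool, artist_id: i64) -> sqlx::Result<CreatedBy> {
    sqlx::query_as::<_, CreatedBy>("SELECT created_by_id FROM artists WHERE id = $1")
        .bind(artist_id) // artists.id itself stays BIGINT/i64
        .fetch_one(pool)
        .await
}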

View File

@@ -33,7 +33,7 @@ pub struct Collage {
pub id: i64, pub id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub name: String, pub name: String,
pub cover: Option<String>, pub cover: Option<String>,
pub description: String, pub description: String,
@@ -57,7 +57,7 @@ pub struct CollageEntry {
pub id: i64, pub id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub artist_id: Option<i64>, pub artist_id: Option<i64>,
pub entity_id: Option<i64>, pub entity_id: Option<i64>,
pub title_group_id: Option<i64>, pub title_group_id: Option<i64>,
@@ -81,7 +81,7 @@ pub struct CollageEntryHierarchy {
pub id: i64, pub id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub artist_id: Option<i64>, pub artist_id: Option<i64>,
pub artist: Option<ArtistLite>, pub artist: Option<ArtistLite>,
pub entity_id: Option<i64>, pub entity_id: Option<i64>,
@@ -105,7 +105,7 @@ pub struct CollageSearchResult {
pub id: i64, pub id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub created_by: UserLite, pub created_by: UserLite,
pub name: String, pub name: String,
pub cover: Option<String>, pub cover: Option<String>,

View File

@@ -13,8 +13,8 @@ pub struct Conversation {
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub subject: String, pub subject: String,
pub sender_id: i64, pub sender_id: i32,
pub receiver_id: i64, pub receiver_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub sender_last_seen_at: DateTime<Utc>, pub sender_last_seen_at: DateTime<Utc>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
@@ -24,7 +24,7 @@ pub struct Conversation {
#[derive(Debug, Serialize, Deserialize, ToSchema)] #[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct UserCreatedConversation { pub struct UserCreatedConversation {
pub subject: String, pub subject: String,
pub receiver_id: i64, pub receiver_id: i32,
pub first_message: UserCreatedConversationMessage, pub first_message: UserCreatedConversationMessage,
} }
@@ -34,7 +34,7 @@ pub struct ConversationMessage {
pub conversation_id: i64, pub conversation_id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub content: String, pub content: String,
} }
@@ -81,8 +81,8 @@ pub struct ConversationOverview {
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub subject: String, pub subject: String,
pub sender_id: i64, pub sender_id: i32,
pub receiver_id: i64, pub receiver_id: i32,
pub correspondant: UserLite, pub correspondant: UserLite,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub sender_last_seen_at: DateTime<Utc>, pub sender_last_seen_at: DateTime<Utc>,

View File

@@ -68,7 +68,7 @@ pub struct EditionGroup {
pub created_at: DateTime<Utc>, // database entry creation pub created_at: DateTime<Utc>, // database entry creation
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub description: Option<String>, // specific to the edition pub description: Option<String>, // specific to the edition
pub distributor: Option<String>, // web: [web stores/distributors], physical: [shop if specific edition ?] pub distributor: Option<String>, // web: [web stores/distributors], physical: [shop if specific edition ?]
pub covers: Vec<String>, pub covers: Vec<String>,
@@ -123,7 +123,7 @@ pub struct EditionGroupHierarchy {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub description: Option<String>, pub description: Option<String>,
pub distributor: Option<String>, pub distributor: Option<String>,
pub covers: Vec<String>, pub covers: Vec<String>,

View File

@@ -28,7 +28,7 @@ pub struct Entity {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub description: String, pub description: String,
pub pictures: Vec<String>, pub pictures: Vec<String>,
} }
@@ -43,7 +43,7 @@ pub struct AffiliatedEntity {
pub id: i64, pub id: i64,
pub title_group_id: i64, pub title_group_id: i64,
pub entity_id: i64, pub entity_id: i64,
pub created_by_id: i64, pub created_by_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub roles: Vec<EntityRole>, pub roles: Vec<EntityRole>,
@@ -53,7 +53,7 @@ pub struct AffiliatedEntityHierarchy {
pub id: i64, pub id: i64,
pub title_group_id: i64, pub title_group_id: i64,
pub entity_id: i64, pub entity_id: i64,
pub created_by_id: i64, pub created_by_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub roles: Vec<EntityRole>, pub roles: Vec<EntityRole>,

View File

@@ -11,7 +11,7 @@ pub struct ForumCategory {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
} }
#[derive(Debug, Deserialize, Serialize, FromRow, ToSchema)] #[derive(Debug, Deserialize, Serialize, FromRow, ToSchema)]
@@ -21,7 +21,7 @@ pub struct ForumSubCategory {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub threads_amount: i64, pub threads_amount: i64,
pub posts_amount: i64, pub posts_amount: i64,
pub forbidden_classes: Vec<String>, pub forbidden_classes: Vec<String>,
@@ -34,7 +34,7 @@ pub struct ForumThread {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub posts_amount: i64, pub posts_amount: i64,
pub sticky: bool, pub sticky: bool,
pub locked: bool, pub locked: bool,
@@ -55,7 +55,7 @@ pub struct ForumPost {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub content: String, pub content: String,
pub sticky: bool, pub sticky: bool,
} }
@@ -125,7 +125,7 @@ pub struct ForumThreadAndPosts {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub posts_amount: i64, pub posts_amount: i64,
pub sticky: bool, pub sticky: bool,
pub locked: bool, pub locked: bool,
@@ -153,7 +153,7 @@ pub struct ForumPostAndThreadName {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub content: String, pub content: String,
pub sticky: bool, pub sticky: bool,
pub forum_thread_name: String, pub forum_thread_name: String,

View File

@@ -9,8 +9,8 @@ pub struct Gift {
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub sent_at: DateTime<Utc>, pub sent_at: DateTime<Utc>,
pub message: String, pub message: String,
pub sender_id: i64, pub sender_id: i32,
pub receiver_id: i64, pub receiver_id: i32,
pub bonus_points: i64, pub bonus_points: i64,
pub freeleech_tokens: i32, pub freeleech_tokens: i32,
} }
@@ -18,7 +18,7 @@ pub struct Gift {
#[derive(Debug, Serialize, Deserialize, ToSchema)] #[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct UserCreatedGift { pub struct UserCreatedGift {
pub message: String, pub message: String,
pub receiver_id: i64, pub receiver_id: i32,
pub bonus_points: i64, pub bonus_points: i64,
pub freeleech_tokens: i32, pub freeleech_tokens: i32,
} }

View File

@@ -12,9 +12,9 @@ pub struct Invitation {
pub expires_at: DateTime<Local>, pub expires_at: DateTime<Local>,
pub message: String, pub message: String,
pub invitation_key: String, pub invitation_key: String,
pub sender_id: i64, pub sender_id: i32,
pub receiver_email: String, pub receiver_email: String,
pub receiver_id: Option<i64>, pub receiver_id: Option<i32>,
pub user_application_id: Option<i64>, pub user_application_id: Option<i64>,
} }

View File

@@ -26,7 +26,7 @@ pub struct MasterGroup {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
// pub description: String, // pub description: String,
// pub original_language: String, // pub original_language: String,
// pub country_from: String, // pub country_from: String,

View File

@@ -16,7 +16,7 @@ pub enum NotificationReason {
pub struct Notification { pub struct Notification {
pub id: i64, pub id: i64,
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub receiver_id: i64, pub receiver_id: i32,
pub reason: NotificationReason, pub reason: NotificationReason,
pub message: Option<String>, pub message: Option<String>,
pub read_status: bool, pub read_status: bool,

View File

@@ -14,7 +14,7 @@ pub struct Series {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub covers: Vec<String>, pub covers: Vec<String>,
pub banners: Option<Vec<String>>, pub banners: Option<Vec<String>>,
pub tags: Vec<String>, pub tags: Vec<String>,
@@ -47,7 +47,7 @@ pub struct SeriesSearchResult {
pub name: String, pub name: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub covers: Vec<String>, pub covers: Vec<String>,
pub banners: Option<Vec<String>>, pub banners: Option<Vec<String>>,
pub tags: Vec<String>, pub tags: Vec<String>,

View File

@@ -9,7 +9,7 @@ pub struct StaffPm {
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub subject: String, pub subject: String,
pub created_by_id: i64, pub created_by_id: i32,
pub resolved: bool, pub resolved: bool,
} }
@@ -19,7 +19,7 @@ pub struct StaffPmMessage {
pub staff_pm_id: i64, pub staff_pm_id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub content: String, pub content: String,
} }

View File

@@ -116,7 +116,7 @@ pub struct TitleGroup {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub description: String, pub description: String,
pub platform: Option<Platform>, pub platform: Option<Platform>,
pub original_language: Option<Language>, pub original_language: Option<Language>,

View File

@@ -13,7 +13,7 @@ pub struct TitleGroupComment {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub title_group_id: i64, pub title_group_id: i64,
pub refers_to_torrent_id: Option<i64>, pub refers_to_torrent_id: Option<i64>,
pub answers_to_comment_id: Option<i64>, pub answers_to_comment_id: Option<i64>,
@@ -35,7 +35,7 @@ pub struct TitleGroupCommentHierarchy {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub title_group_id: i64, pub title_group_id: i64,
pub refers_to_torrent_id: Option<i64>, pub refers_to_torrent_id: Option<i64>,
pub answers_to_comment_id: Option<i64>, pub answers_to_comment_id: Option<i64>,

View File

@@ -318,7 +318,7 @@ pub struct Torrent {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub extras: Vec<Extras>, pub extras: Vec<Extras>,
pub release_name: Option<String>, pub release_name: Option<String>,
pub release_group: Option<String>, pub release_group: Option<String>,
@@ -431,8 +431,8 @@ pub struct TorrentSearchTitleGroup {
pub struct TorrentSearchTorrent { pub struct TorrentSearchTorrent {
pub reported: Option<bool>, pub reported: Option<bool>,
pub staff_checked: Option<bool>, pub staff_checked: Option<bool>,
pub created_by_id: Option<i64>, pub created_by_id: Option<i32>,
pub snatched_by_id: Option<i64>, pub snatched_by_id: Option<i32>,
} }
#[derive(Debug, Deserialize, Serialize, ToSchema, Display)] #[derive(Debug, Deserialize, Serialize, ToSchema, Display)]
@@ -532,7 +532,7 @@ pub struct TorrentHierarchy {
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Local>, pub updated_at: DateTime<Local>,
pub created_by_id: Option<i64>, pub created_by_id: Option<i32>,
pub created_by: Option<UserLite>, pub created_by: Option<UserLite>,
pub extras: Vec<Extras>, pub extras: Vec<Extras>,
pub release_name: Option<String>, pub release_name: Option<String>,

View File

@@ -7,7 +7,7 @@ use utoipa::ToSchema;
pub struct TorrentActivity { pub struct TorrentActivity {
pub id: i64, pub id: i64,
pub torrent_id: i64, pub torrent_id: i64,
pub user_id: i64, pub user_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub snatched_at: DateTime<Local>, pub snatched_at: DateTime<Local>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]

View File

@@ -8,7 +8,7 @@ pub struct TorrentReport {
pub id: i64, pub id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub reported_at: DateTime<Local>, pub reported_at: DateTime<Local>,
pub reported_by_id: i64, pub reported_by_id: i32,
pub reported_torrent_id: i64, pub reported_torrent_id: i64,
pub description: String, pub description: String,
} }

View File

@@ -25,8 +25,8 @@ pub struct TorrentRequest {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
pub filled_by_user_id: Option<i64>, pub filled_by_user_id: Option<i32>,
pub filled_by_torrent_id: Option<i64>, pub filled_by_torrent_id: Option<i64>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub filled_at: Option<DateTime<Utc>>, pub filled_at: Option<DateTime<Utc>>,

View File

@@ -11,7 +11,7 @@ pub struct TorrentRequestVote {
pub torrent_request_id: i64, pub torrent_request_id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub bounty_upload: i64, pub bounty_upload: i64,
pub bounty_bonus_points: i64, pub bounty_bonus_points: i64,
} }
@@ -29,7 +29,7 @@ pub struct TorrentRequestVoteHierarchy {
pub torrent_request_id: i64, pub torrent_request_id: i64,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Local>, pub created_at: DateTime<Local>,
pub created_by_id: i64, pub created_by_id: i32,
pub created_by: UserLite, pub created_by: UserLite,
pub bounty_upload: i64, pub bounty_upload: i64,
pub bounty_bonus_points: i64, pub bounty_bonus_points: i64,

View File

@@ -40,7 +40,7 @@ use super::title_group::TitleGroupHierarchyLite;
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)]
pub struct User { pub struct User {
pub id: i64, pub id: i32,
pub username: String, pub username: String,
pub avatar: Option<String>, pub avatar: Option<String>,
pub email: String, pub email: String,
@@ -80,8 +80,7 @@ pub struct User {
pub warned: bool, pub warned: bool,
pub banned: bool, pub banned: bool,
pub staff_note: String, pub staff_note: String,
pub passkey_upper: i64, pub passkey: String,
pub passkey_lower: i64,
} }
#[derive(Debug, Clone, Serialize, Deserialize, sqlx::Type, ToSchema, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, sqlx::Type, ToSchema, PartialEq, Eq)]
@@ -116,7 +115,7 @@ pub struct LoginResponse {
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Claims { pub struct Claims {
pub sub: i64, pub sub: i32,
pub exp: i64, pub exp: i64,
pub iat: i64, pub iat: i64,
pub class: UserClass, pub class: UserClass,
@@ -136,7 +135,7 @@ pub struct EditedUser {
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)]
pub struct PublicUser { pub struct PublicUser {
pub id: i64, pub id: i32,
pub username: String, pub username: String,
pub avatar: Option<String>, pub avatar: Option<String>,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
@@ -173,7 +172,7 @@ pub struct PublicUser {
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema, Decode)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema, Decode)]
pub struct UserLite { pub struct UserLite {
pub id: i64, pub id: i32,
pub username: String, pub username: String,
pub warned: bool, pub warned: bool,
pub banned: bool, pub banned: bool,
@@ -181,7 +180,7 @@ pub struct UserLite {
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)]
pub struct UserLiteAvatar { pub struct UserLiteAvatar {
pub id: i64, pub id: i32,
pub username: String, pub username: String,
pub banned: bool, pub banned: bool,
pub avatar: Option<String>, pub avatar: Option<String>,
@@ -207,19 +206,19 @@ pub struct PublicProfile {
#[derive(Debug, Serialize, Deserialize, ToSchema, FromRow)] #[derive(Debug, Serialize, Deserialize, ToSchema, FromRow)]
pub struct UserWarning { pub struct UserWarning {
pub id: i64, pub id: i64,
pub user_id: i64, pub user_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
#[schema(value_type = Option<String>, format = DateTime)] #[schema(value_type = Option<String>, format = DateTime)]
pub expires_at: Option<DateTime<Utc>>, pub expires_at: Option<DateTime<Utc>>,
pub reason: String, pub reason: String,
pub created_by_id: i64, pub created_by_id: i32,
pub ban: bool, // whether or not this warning bans the user pub ban: bool, // whether or not this warning bans the user
} }
#[derive(Debug, Serialize, Deserialize, ToSchema)] #[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct UserCreatedUserWarning { pub struct UserCreatedUserWarning {
pub user_id: i64, pub user_id: i32,
#[schema(value_type = Option<String>, format = DateTime)] #[schema(value_type = Option<String>, format = DateTime)]
pub expires_at: Option<DateTime<Utc>>, pub expires_at: Option<DateTime<Utc>>,
pub reason: String, pub reason: String,
@@ -234,7 +233,7 @@ pub struct APIKey {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub name: String, pub name: String,
pub value: String, pub value: String,
pub user_id: i64, pub user_id: i32,
} }
#[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)] #[derive(Debug, Serialize, Deserialize, FromRow, ToSchema)]
@@ -244,7 +243,6 @@ pub struct UserCreatedAPIKey {
#[derive(Debug, Serialize, Deserialize, ToSchema)] #[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct UserMinimal { pub struct UserMinimal {
pub id: i64, pub id: i32,
pub passkey_upper: i64, pub passkey: String,
pub passkey_lower: i64,
} }
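
For reference, the retired two-column scheme existed because Postgres has no native unsigned 128-bit type: the u128 passkey was split across two BIGINTs and reassembled on read. A sketch of that round trip (reference only, not part of this commit):

fn split(passkey: u128) -> (i64, i64) {
    ((passkey >> 64) as i64, passkey as i64)
}

fn join(upper: i64, lower: i64) -> u128 {
    // cast through u64 first to avoid sign extension
    ((upper as u64 as u128) << 64) | (lower as u64 as u128)
}

fn demo() {
    let pk: u128 = 0x0123_4567_89ab_cdef_0123_4567_89ab_cdef;
    let (hi, lo) = split(pk);
    assert_eq!(join(hi, lo), pk);
}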

View File

@@ -11,10 +11,10 @@ pub struct WikiArticle {
pub title: String, pub title: String,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub created_by_id: i64, pub created_by_id: i32,
#[schema(value_type = String, format = DateTime)] #[schema(value_type = String, format = DateTime)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub updated_by_id: i64, pub updated_by_id: i32,
pub body: String, pub body: String,
} }

View File

@@ -6,23 +6,18 @@ use crate::connection_pool::ConnectionPool;
#[derive(sqlx::FromRow)] #[derive(sqlx::FromRow)]
pub struct UserCompact { pub struct UserCompact {
pub id: i64, pub id: i32,
} }
impl ConnectionPool { impl ConnectionPool {
pub async fn find_user_with_passkey( pub async fn find_user_with_passkey(&self, passkey: &str) -> Result<UserCompact, Error> {
&self,
passkey_upper: i64,
passkey_lower: i64,
) -> Result<UserCompact, Error> {
sqlx::query_as!( sqlx::query_as!(
UserCompact, UserCompact,
r#" r#"
SELECT id FROM users SELECT id FROM users
WHERE (passkey_upper, passkey_lower) = ($1, $2) WHERE passkey = $1
"#, "#,
passkey_upper, passkey
passkey_lower
) )
.fetch_one(self.borrow()) .fetch_one(self.borrow())
.await .await
@@ -52,7 +47,7 @@ impl ConnectionPool {
downloaded: i64, downloaded: i64,
real_uploaded: i64, real_uploaded: i64,
real_downloaded: i64, real_downloaded: i64,
user_id: i64, user_id: i32,
) -> Result<PgQueryResult, Error> { ) -> Result<PgQueryResult, Error> {
sqlx::query!( sqlx::query!(
r#" r#"
@@ -76,7 +71,7 @@ impl ConnectionPool {
pub async fn update_total_seedtime( pub async fn update_total_seedtime(
&self, &self,
user_id: i64, user_id: i32,
torrent_id: i64, torrent_id: i64,
announce_interval: u32, announce_interval: u32,
grace_period: u32, grace_period: u32,

View File

@@ -14,7 +14,7 @@ impl ConnectionPool {
pub async fn create_artists( pub async fn create_artists(
&self, &self,
artists: &Vec<UserCreatedArtist>, artists: &Vec<UserCreatedArtist>,
current_user_id: i64, current_user_id: i32,
) -> Result<Vec<Artist>> { ) -> Result<Vec<Artist>> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()
@@ -53,7 +53,7 @@ impl ConnectionPool {
pub async fn create_artists_affiliation( pub async fn create_artists_affiliation(
&self, &self,
artists: &Vec<UserCreatedAffiliatedArtist>, artists: &Vec<UserCreatedAffiliatedArtist>,
current_user_id: i64, current_user_id: i32,
) -> Result<Vec<AffiliatedArtistHierarchy>> { ) -> Result<Vec<AffiliatedArtistHierarchy>> {
let values: Vec<String> = (0..artists.len()) let values: Vec<String> = (0..artists.len())
.map(|i| { .map(|i| {

View File

@@ -37,12 +37,14 @@ impl ConnectionPool {
invitation: &Invitation, invitation: &Invitation,
open_signups: &bool, open_signups: &bool,
) -> Result<User> { ) -> Result<User> {
let mut rng = rand::rng(); let rng = rand::rng();
let passkey = rng.random::<u128>(); // TODO: check if the passkey already exists
let passkey_upper = (passkey >> 64) as i64; let passkey: String = rng
let passkey_lower = passkey as i64; .sample_iter(&Alphanumeric)
.take(32) // the shared Passkey type is exactly 32 bytes; 33 would fail its FromStr
.map(char::from)
.collect();
// Check username availability first // Check username availability first
if self.does_username_exist(&user.username).await? { if self.does_username_exist(&user.username).await? {
@@ -55,8 +57,8 @@ impl ConnectionPool {
let registered_user = sqlx::query_as_unchecked!( let registered_user = sqlx::query_as_unchecked!(
User, User,
r#" r#"
INSERT INTO users (username, email, password_hash, registered_from_ip, settings, passkey_upper, passkey_lower) INSERT INTO users (username, email, password_hash, registered_from_ip, settings, passkey)
VALUES ($1, $2, $3, $4, $5, $6, $7) VALUES ($1, $2, $3, $4, $5, $6)
RETURNING * RETURNING *
"#, "#,
&user.username, &user.username,
@@ -64,8 +66,7 @@ impl ConnectionPool {
password_hash, password_hash,
from_ip, from_ip,
settings, settings,
passkey_upper, passkey
passkey_lower,
) )
.fetch_one(self.borrow()) .fetch_one(self.borrow())
.await .await
@@ -127,7 +128,7 @@ impl ConnectionPool {
Ok(user) Ok(user)
} }
pub async fn find_user_with_id(&self, id: i64) -> Result<User> { pub async fn find_user_with_id(&self, id: i32) -> Result<User> {
sqlx::query_as_unchecked!( sqlx::query_as_unchecked!(
User, User,
r#" r#"
@@ -144,7 +145,7 @@ impl ConnectionPool {
pub async fn create_api_key( pub async fn create_api_key(
&self, &self,
created_api_key: &UserCreatedAPIKey, created_api_key: &UserCreatedAPIKey,
current_user_id: i64, current_user_id: i32,
) -> Result<APIKey> { ) -> Result<APIKey> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()
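
With the string scheme, generation is a 32-byte alphanumeric sample, and the TODO about duplicates is cheap to honor with a UNIQUE index on users.passkey plus a retry loop. A hedged sketch (rand 0.9-style paths assumed; `passkey_exists` is a hypothetical helper, not in this commit):

use rand::{distr::Alphanumeric, Rng};

fn generate_passkey() -> String {
    rand::rng()
        .sample_iter(&Alphanumeric)
        .take(32) // arcadia_shared's Passkey expects exactly 32 bytes
        .map(char::from)
        .collect()
}

// Retry until unused, with the UNIQUE index as the real guarantee:
// loop {
//     let passkey = generate_passkey();
//     if !passkey_exists(&passkey).await? { break passkey; }
// }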

View File

@@ -14,7 +14,7 @@ impl ConnectionPool {
pub async fn create_collage( pub async fn create_collage(
&self, &self,
collage: &UserCreatedCollage, collage: &UserCreatedCollage,
user_id: i64, user_id: i32,
) -> Result<Collage> { ) -> Result<Collage> {
let created_collage = sqlx::query_as!( let created_collage = sqlx::query_as!(
Collage, Collage,
@@ -42,7 +42,7 @@ impl ConnectionPool {
pub async fn create_collage_entries( pub async fn create_collage_entries(
&self, &self,
collage_entries: &[UserCreatedCollageEntry], collage_entries: &[UserCreatedCollageEntry],
user_id: i64, user_id: i32,
) -> Result<Vec<CollageEntry>> { ) -> Result<Vec<CollageEntry>> {
let mut created_entries = Vec::with_capacity(collage_entries.len()); let mut created_entries = Vec::with_capacity(collage_entries.len());

View File

@@ -12,7 +12,7 @@ impl ConnectionPool {
pub async fn create_conversation( pub async fn create_conversation(
&self, &self,
conversation: &mut UserCreatedConversation, conversation: &mut UserCreatedConversation,
current_user_id: i64, current_user_id: i32,
) -> Result<Conversation> { ) -> Result<Conversation> {
//TODO: make transactional //TODO: make transactional
let created_conversation = sqlx::query_as!( let created_conversation = sqlx::query_as!(
@@ -40,7 +40,7 @@ impl ConnectionPool {
pub async fn create_conversation_message( pub async fn create_conversation_message(
&self, &self,
message: &UserCreatedConversationMessage, message: &UserCreatedConversationMessage,
current_user_id: i64, current_user_id: i32,
) -> Result<ConversationMessage> { ) -> Result<ConversationMessage> {
let message = sqlx::query_as!( let message = sqlx::query_as!(
ConversationMessage, ConversationMessage,
@@ -60,7 +60,7 @@ impl ConnectionPool {
Ok(message) Ok(message)
} }
pub async fn find_user_conversations(&self, user_id: i64) -> Result<Value> { pub async fn find_user_conversations(&self, user_id: i32) -> Result<Value> {
let conversations = sqlx::query!( let conversations = sqlx::query!(
r#" r#"
SELECT SELECT
@@ -127,7 +127,7 @@ impl ConnectionPool {
pub async fn find_conversation( pub async fn find_conversation(
&self, &self,
conversation_id: i64, conversation_id: i64,
current_user_id: i64, current_user_id: i32,
update_last_seen_at: bool, update_last_seen_at: bool,
) -> Result<Value> { ) -> Result<Value> {
let conversation_with_messages = sqlx::query!( let conversation_with_messages = sqlx::query!(
@@ -215,7 +215,7 @@ impl ConnectionPool {
Ok(conversation_with_messages.conversation_details.unwrap()) Ok(conversation_with_messages.conversation_details.unwrap())
} }
pub async fn find_unread_conversations_amount(&self, user_id: i64) -> Result<u32> { pub async fn find_unread_conversations_amount(&self, user_id: i32) -> Result<u32> {
let amount = sqlx::query_scalar!( let amount = sqlx::query_scalar!(
r#" r#"
SELECT SELECT

View File

@@ -9,10 +9,10 @@ impl ConnectionPool {
pub async fn create_edition_group( pub async fn create_edition_group(
&self, &self,
edition_group_form: &UserCreatedEditionGroup, edition_group_form: &UserCreatedEditionGroup,
current_user_id: i64, current_user_id: i32,
) -> Result<EditionGroup> { ) -> Result<EditionGroup> {
const CREATE_EDITION_GROUPS_QUERY: &str = r#" const CREATE_EDITION_GROUPS_QUERY: &str = r#"
INSERT INTO edition_groups (title_group_id, name, release_date, created_by_id, description, distributor, covers, external_links, source, additional_information) INSERT INTO edition_groups (title_group_id, name, release_date, created_by_id, description, distributor, covers, external_links, source, additional_information)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9::source_enum, $10) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9::source_enum, $10)
RETURNING *; RETURNING *;
"#; "#;

View File

@@ -14,7 +14,7 @@ impl ConnectionPool {
pub async fn create_forum_post( pub async fn create_forum_post(
&self, &self,
forum_post: &UserCreatedForumPost, forum_post: &UserCreatedForumPost,
current_user_id: i64, current_user_id: i32,
) -> Result<ForumPost> { ) -> Result<ForumPost> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()
@@ -67,7 +67,7 @@ impl ConnectionPool {
pub async fn create_forum_thread( pub async fn create_forum_thread(
&self, &self,
forum_thread: &mut UserCreatedForumThread, forum_thread: &mut UserCreatedForumThread,
current_user_id: i64, current_user_id: i32,
) -> Result<ForumThread> { ) -> Result<ForumThread> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()

View File

@@ -7,7 +7,7 @@ use sqlx::{PgPool, Postgres, Transaction};
use std::borrow::Borrow; use std::borrow::Borrow;
impl ConnectionPool { impl ConnectionPool {
pub async fn create_gift(&self, gift: &UserCreatedGift, current_user_id: i64) -> Result<Gift> { pub async fn create_gift(&self, gift: &UserCreatedGift, current_user_id: i32) -> Result<Gift> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()
.await?; .await?;
@@ -44,7 +44,7 @@ impl ConnectionPool {
pub async fn decrement_bonus_points_and_freeleech_tokens( pub async fn decrement_bonus_points_and_freeleech_tokens(
tx: &mut Transaction<'_, Postgres>, tx: &mut Transaction<'_, Postgres>,
current_user_id: i64, current_user_id: i32,
bonus_points: i64, bonus_points: i64,
freeleech_tokens: i32, freeleech_tokens: i32,
) -> Result<()> { ) -> Result<()> {

View File

@@ -14,7 +14,7 @@ impl ConnectionPool {
pub async fn create_invitation( pub async fn create_invitation(
&self, &self,
invitation: &SentInvitation, invitation: &SentInvitation,
current_user_id: i64, current_user_id: i32,
) -> Result<Invitation> { ) -> Result<Invitation> {
// TODO: retry if invitation_key already exists // TODO: retry if invitation_key already exists
let invitation_key: String = Alphanumeric.sample_string(&mut rng(), 50); let invitation_key: String = Alphanumeric.sample_string(&mut rng(), 50);
@@ -85,7 +85,7 @@ impl ConnectionPool {
pub async fn decrement_invitations_available( pub async fn decrement_invitations_available(
tx: &mut Transaction<'_, Postgres>, tx: &mut Transaction<'_, Postgres>,
current_user_id: i64, current_user_id: i32,
) -> Result<()> { ) -> Result<()> {
sqlx::query!( sqlx::query!(
r#" r#"

View File

@@ -9,7 +9,7 @@ impl ConnectionPool {
pub async fn create_master_group( pub async fn create_master_group(
&self, &self,
master_group_form: &UserCreatedMasterGroup, master_group_form: &UserCreatedMasterGroup,
current_user_id: i64, current_user_id: i32,
) -> Result<MasterGroup> { ) -> Result<MasterGroup> {
let created_master_group = sqlx::query_as!( let created_master_group = sqlx::query_as!(
MasterGroup, MasterGroup,

View File

@@ -20,6 +20,7 @@ pub mod torrent_report_repository;
pub mod torrent_repository; pub mod torrent_repository;
pub mod torrent_request_repository; pub mod torrent_request_repository;
pub mod torrent_request_vote_repository; pub mod torrent_request_vote_repository;
pub mod tracker_repository;
pub mod user_application_repository; pub mod user_application_repository;
pub mod user_repository; pub mod user_repository;
pub mod wiki_repository; pub mod wiki_repository;

View File

@@ -79,11 +79,11 @@ impl ConnectionPool {
pub async fn find_unread_notifications_amount( pub async fn find_unread_notifications_amount(
&self, &self,
user_id: i64, user_id: i32,
) -> Result<HashMap<NotificationReason, i64>> { ) -> Result<HashMap<NotificationReason, i64>> {
let rows = sqlx::query!( let rows = sqlx::query!(
r#" r#"
SELECT reason as "reason: NotificationReason", SELECT reason as "reason: NotificationReason",
COUNT(*) as "count!" COUNT(*) as "count!"
FROM notifications FROM notifications
WHERE receiver_id = $1 AND read_status = FALSE WHERE receiver_id = $1 AND read_status = FALSE

View File

@@ -8,7 +8,7 @@ use std::borrow::Borrow;
use crate::models; use crate::models;
impl ConnectionPool { impl ConnectionPool {
pub async fn get_user_peers(&self, user_id: i64) -> Vec<models::peer::Peer> { pub async fn get_user_peers(&self, user_id: i32) -> Vec<models::peer::Peer> {
sqlx::query_as!( sqlx::query_as!(
models::peer::Peer, models::peer::Peer,
r#" r#"
@@ -59,7 +59,7 @@ impl ConnectionPool {
&self, &self,
torrent_id: &i64, torrent_id: &i64,
ip: &IpNetwork, ip: &IpNetwork,
user_id: &i64, user_id: &i32,
ann: &Announce, ann: &Announce,
user_agent: Option<&str>, user_agent: Option<&str>,
) -> (i64, i64) { ) -> (i64, i64) {
@@ -115,7 +115,7 @@ impl ConnectionPool {
.unwrap_or((0, 0)) .unwrap_or((0, 0))
} }
pub async fn find_torrent_peers(&self, torrent_id: &i64, user_id: &i64) -> Vec<Peer> { pub async fn find_torrent_peers(&self, torrent_id: &i64, user_id: &i32) -> Vec<Peer> {
let peers = sqlx::query!( let peers = sqlx::query!(
r#" r#"
SELECT peers.ip AS ip, peers.port AS port SELECT peers.ip AS ip, peers.port AS port

View File

@@ -10,7 +10,7 @@ use sqlx::{query_as_unchecked, query_scalar};
use std::borrow::Borrow; use std::borrow::Borrow;
impl ConnectionPool { impl ConnectionPool {
pub async fn create_series(&self, series: &UserCreatedSeries, user_id: i64) -> Result<Series> { pub async fn create_series(&self, series: &UserCreatedSeries, user_id: i32) -> Result<Series> {
let created_series = sqlx::query_as!( let created_series = sqlx::query_as!(
Series, Series,
r#" r#"

View File

@@ -10,7 +10,7 @@ impl ConnectionPool {
pub async fn create_staff_pm( pub async fn create_staff_pm(
&self, &self,
conversation: &mut UserCreatedStaffPm, conversation: &mut UserCreatedStaffPm,
current_user_id: i64, current_user_id: i32,
) -> Result<StaffPm> { ) -> Result<StaffPm> {
let created_conversation = sqlx::query_as!( let created_conversation = sqlx::query_as!(
StaffPm, StaffPm,
@@ -36,7 +36,7 @@ impl ConnectionPool {
pub async fn create_staff_pm_message( pub async fn create_staff_pm_message(
&self, &self,
message: &UserCreatedStaffPmMessage, message: &UserCreatedStaffPmMessage,
current_user_id: i64, current_user_id: i32,
) -> Result<StaffPmMessage> { ) -> Result<StaffPmMessage> {
let message = sqlx::query_as!( let message = sqlx::query_as!(
StaffPmMessage, StaffPmMessage,
@@ -59,7 +59,7 @@ impl ConnectionPool {
pub async fn resolve_staff_pm( pub async fn resolve_staff_pm(
&self, &self,
staff_pm_id: i64, staff_pm_id: i64,
_current_user_id: i64, _current_user_id: i32,
) -> Result<StaffPm> { ) -> Result<StaffPm> {
let updated = sqlx::query_as!( let updated = sqlx::query_as!(
StaffPm, StaffPm,
@@ -78,7 +78,7 @@ impl ConnectionPool {
Ok(updated) Ok(updated)
} }
pub async fn list_staff_pms(&self, current_user_id: i64, is_staff: bool) -> Result<Value> { pub async fn list_staff_pms(&self, current_user_id: i32, is_staff: bool) -> Result<Value> {
let row = sqlx::query_unchecked!( let row = sqlx::query_unchecked!(
r#" r#"
SELECT SELECT
@@ -134,7 +134,7 @@ impl ConnectionPool {
pub async fn get_staff_pm( pub async fn get_staff_pm(
&self, &self,
staff_pm_id: i64, staff_pm_id: i64,
current_user_id: i64, current_user_id: i32,
is_staff: bool, is_staff: bool,
) -> Result<Value> { ) -> Result<Value> {
let row = sqlx::query_unchecked!( let row = sqlx::query_unchecked!(

View File

@@ -7,7 +7,7 @@ impl ConnectionPool {
&self, &self,
item_id: i64, item_id: i64,
item: &str, // TODO: should only be one of the existing columns of the table item: &str, // TODO: should only be one of the existing columns of the table
current_user_id: i64, current_user_id: i32,
) -> Result<()> { ) -> Result<()> {
sqlx::query(&format!( sqlx::query(&format!(
" "
@@ -28,7 +28,7 @@ impl ConnectionPool {
&self, &self,
item_id: i64, item_id: i64,
item: &str, item: &str,
current_user_id: i64, current_user_id: i32,
) -> Result<()> { ) -> Result<()> {
let _ = sqlx::query(&format!( let _ = sqlx::query(&format!(
" "

View File

@@ -9,7 +9,7 @@ impl ConnectionPool {
pub async fn create_title_group_comment( pub async fn create_title_group_comment(
&self, &self,
title_group_comment: &UserCreatedTitleGroupComment, title_group_comment: &UserCreatedTitleGroupComment,
user_id: i64, user_id: i32,
) -> Result<TitleGroupComment> { ) -> Result<TitleGroupComment> {
let created_title_group_comment = sqlx::query_as!( let created_title_group_comment = sqlx::query_as!(
TitleGroupComment, TitleGroupComment,

View File

@@ -25,7 +25,7 @@ impl ConnectionPool {
&self, &self,
title_group_form: &UserCreatedTitleGroup, title_group_form: &UserCreatedTitleGroup,
public_ratings: &Vec<PublicRating>, public_ratings: &Vec<PublicRating>,
user_id: i64, user_id: i32,
) -> Result<TitleGroup> { ) -> Result<TitleGroup> {
let create_title_group_query = r#" let create_title_group_query = r#"
INSERT INTO title_groups (master_group_id,name,name_aliases,created_by_id,description,original_language,country_from,covers,external_links,embedded_links,category,content_type,original_release_date,tags,tagline,platform,screenshots,public_ratings) INSERT INTO title_groups (master_group_id,name,name_aliases,created_by_id,description,original_language,country_from,covers,external_links,embedded_links,category,content_type,original_release_date,tags,tagline,platform,screenshots,public_ratings)
@@ -65,7 +65,7 @@ impl ConnectionPool {
pub async fn find_title_group_hierarchy( pub async fn find_title_group_hierarchy(
&self, &self,
title_group_id: i64, title_group_id: i64,
user_id: i64, user_id: i32,
) -> Result<Value> { ) -> Result<Value> {
let title_group = sqlx::query!(r#"WITH torrent_data AS ( let title_group = sqlx::query!(r#"WITH torrent_data AS (
SELECT SELECT

View File

@@ -9,7 +9,7 @@ impl ConnectionPool {
pub async fn report_torrent( pub async fn report_torrent(
&self, &self,
form: &UserCreatedTorrentReport, form: &UserCreatedTorrentReport,
user_id: i64, user_id: i32,
) -> Result<TorrentReport> { ) -> Result<TorrentReport> {
let torrent_report = sqlx::query_as!( let torrent_report = sqlx::query_as!(
TorrentReport, TorrentReport,

View File

@@ -34,7 +34,7 @@ impl ConnectionPool {
pub async fn create_torrent( pub async fn create_torrent(
&self, &self,
torrent_form: &UploadedTorrent, torrent_form: &UploadedTorrent,
user_id: i64, user_id: i32,
) -> Result<Torrent> { ) -> Result<Torrent> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()
@@ -317,7 +317,7 @@ impl ConnectionPool {
pub async fn get_torrent( pub async fn get_torrent(
&self, &self,
user_id: i64, user_id: i32,
torrent_id: i64, torrent_id: i64,
tracker_name: &str, tracker_name: &str,
frontend_url: &str, frontend_url: &str,
@@ -346,7 +346,7 @@ impl ConnectionPool {
let info = Info::from_bytes(torrent.info_dict).map_err(|_| Error::TorrentFileInvalid)?; let info = Info::from_bytes(torrent.info_dict).map_err(|_| Error::TorrentFileInvalid)?;
let user = self.find_user_with_id(user_id).await?; let user = self.find_user_with_id(user_id).await?;
let announce_url = get_announce_url(user.passkey_upper, user.passkey_lower, tracker_url); let announce_url = get_announce_url(user.passkey, tracker_url);
let frontend_url = format!("{frontend_url}torrent/{torrent_id}"); let frontend_url = format!("{frontend_url}torrent/{torrent_id}");
@@ -384,7 +384,7 @@ impl ConnectionPool {
pub async fn search_torrents( pub async fn search_torrents(
&self, &self,
torrent_search: &TorrentSearch, torrent_search: &TorrentSearch,
requesting_user_id: Option<i64>, requesting_user_id: Option<i32>,
) -> Result<Value> { ) -> Result<Value> {
let input = torrent_search.title_group.name.trim(); let input = torrent_search.title_group.name.trim();
@@ -468,7 +468,7 @@ impl ConnectionPool {
pub async fn remove_torrent( pub async fn remove_torrent(
&self, &self,
torrent_to_delete: &TorrentToDelete, torrent_to_delete: &TorrentToDelete,
current_user_id: i64, current_user_id: i32,
) -> Result<()> { ) -> Result<()> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
.begin() .begin()

View File

@@ -11,7 +11,7 @@ impl ConnectionPool {
pub async fn create_torrent_request( pub async fn create_torrent_request(
&self, &self,
torrent_request: &mut UserCreatedTorrentRequest, torrent_request: &mut UserCreatedTorrentRequest,
user_id: i64, user_id: i32,
) -> Result<TorrentRequest> { ) -> Result<TorrentRequest> {
//TODO: make those requests transactional //TODO: make those requests transactional
let create_torrent_request_query = r#" let create_torrent_request_query = r#"
@@ -60,7 +60,7 @@ impl ConnectionPool {
&self, &self,
torrent_id: i64, torrent_id: i64,
torrent_request_id: i64, torrent_request_id: i64,
current_user_id: i64, current_user_id: i32,
) -> Result<()> { ) -> Result<()> {
let is_torrent_in_requested_title_group = sqlx::query_scalar!( let is_torrent_in_requested_title_group = sqlx::query_scalar!(
r#" r#"
@@ -125,7 +125,7 @@ impl ConnectionPool {
let upload_share = (bounty_summary.total_upload as f32 / 2.0).round() as i32; let upload_share = (bounty_summary.total_upload as f32 / 2.0).round() as i32;
let bonus_share = (bounty_summary.total_bonus as f32 / 2.0).round() as i32; let bonus_share = (bounty_summary.total_bonus as f32 / 2.0).round() as i32;
let torrent_uploader_id: i64 = query_scalar!( let torrent_uploader_id: i32 = query_scalar!(
r#" r#"
SELECT created_by_id FROM torrents WHERE id = $1 SELECT created_by_id FROM torrents WHERE id = $1
"#, "#,

View File

@@ -9,7 +9,7 @@ impl ConnectionPool {
pub async fn create_torrent_request_vote( pub async fn create_torrent_request_vote(
&self, &self,
torrent_request_vote: &UserCreatedTorrentRequestVote, torrent_request_vote: &UserCreatedTorrentRequestVote,
user_id: i64, user_id: i32,
) -> Result<TorrentRequestVote> { ) -> Result<TorrentRequestVote> {
let current_user = self.find_user_with_id(user_id).await?; let current_user = self.find_user_with_id(user_id).await?;
if current_user.bonus_points - torrent_request_vote.bounty_bonus_points < 0 { if current_user.bonus_points - torrent_request_vote.bounty_bonus_points < 0 {

View File

@@ -0,0 +1,45 @@
use crate::connection_pool::ConnectionPool;
use arcadia_common::error::Result;
use arcadia_shared::tracker::models::user::{Passkey, User};
use std::borrow::Borrow;
// This file contains functions used by Arcadia's tracker,
// though they are not all tracker-specific.
impl ConnectionPool {
pub async fn find_users(&self) -> Result<Vec<User>> {
// TODO: fix this
// query_as!() doesn't work here: it needs the FromStr impl on Passkey,
// which exists, yet the macro still reports an error
let rows = sqlx::query!(
r#"
SELECT
id,
passkey,
TRUE AS "can_download!",
0::int4 AS "num_seeding!",
0::int4 AS "num_leeching!"
FROM users
"#
)
.fetch_all(self.borrow())
.await
.expect("could not get users");
let users = rows
.into_iter()
.map(|r| User {
id: r.id as u32,
passkey: r
.passkey
.parse::<Passkey>()
.expect("invalid passkey in database"),
can_download: r.can_download,
num_seeding: r.num_seeding as u32,
num_leeching: r.num_leeching as u32,
})
.collect();
Ok(users)
}
}
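
The `.expect(...)` calls above turn a single malformed passkey row into a startup panic for the whole tracker. A hedged variant of the same mapping that drops (or could log) bad rows instead, using the same `rows` as above:

let users: Vec<User> = rows
    .into_iter()
    .filter_map(|r| {
        // skip rows whose passkey is not exactly 32 bytes
        let passkey = r.passkey.parse::<Passkey>().ok()?;
        Some(User {
            id: r.id as u32,
            passkey,
            can_download: r.can_download,
            num_seeding: r.num_seeding as u32,
            num_leeching: r.num_leeching as u32,
        })
    })
    .collect();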

View File

@@ -9,7 +9,7 @@ use sqlx::PgPool;
use std::borrow::Borrow; use std::borrow::Borrow;
impl ConnectionPool { impl ConnectionPool {
pub async fn find_user_profile(&self, id: &i64) -> Result<PublicUser> { pub async fn find_user_profile(&self, id: &i32) -> Result<PublicUser> {
sqlx::query_as!( sqlx::query_as!(
PublicUser, PublicUser,
r#" r#"
@@ -55,7 +55,7 @@ impl ConnectionPool {
.map_err(|_| Error::UserWithIdNotFound(*id)) .map_err(|_| Error::UserWithIdNotFound(*id))
} }
pub async fn update_last_seen(&self, id: i64) -> Result<()> { pub async fn update_last_seen(&self, id: i32) -> Result<()> {
let _ = sqlx::query!( let _ = sqlx::query!(
r#" r#"
UPDATE users UPDATE users
@@ -70,7 +70,7 @@ impl ConnectionPool {
Ok(()) Ok(())
} }
pub async fn update_user(&self, user_id: i64, edited_user: &EditedUser) -> Result<()> { pub async fn update_user(&self, user_id: i32, edited_user: &EditedUser) -> Result<()> {
let _ = sqlx::query!( let _ = sqlx::query!(
r#" r#"
UPDATE users UPDATE users
@@ -90,7 +90,7 @@ impl ConnectionPool {
pub async fn create_user_warning( pub async fn create_user_warning(
&self, &self,
current_user_id: i64, current_user_id: i32,
user_warning: &UserCreatedUserWarning, user_warning: &UserCreatedUserWarning,
) -> Result<UserWarning> { ) -> Result<UserWarning> {
let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self) let mut tx = <ConnectionPool as Borrow<PgPool>>::borrow(self)
@@ -135,7 +135,7 @@ impl ConnectionPool {
Ok(user_warning) Ok(user_warning)
} }
pub async fn find_user_warnings(&self, user_id: i64) -> Vec<UserWarning> { pub async fn find_user_warnings(&self, user_id: i32) -> Vec<UserWarning> {
sqlx::query_as!( sqlx::query_as!(
UserWarning, UserWarning,
r#" r#"
@@ -149,7 +149,7 @@ impl ConnectionPool {
.expect("failed to get user warnings") .expect("failed to get user warnings")
} }
pub async fn is_user_banned(&self, user_id: i64) -> Result<bool> { pub async fn is_user_banned(&self, user_id: i32) -> Result<bool> {
let result = sqlx::query_scalar!("SELECT banned FROM users WHERE id = $1", user_id) let result = sqlx::query_scalar!("SELECT banned FROM users WHERE id = $1", user_id)
.fetch_optional(self.borrow()) .fetch_optional(self.borrow())
.await?; .await?;
@@ -165,7 +165,7 @@ impl ConnectionPool {
let users = sqlx::query_as!( let users = sqlx::query_as!(
UserMinimal, UserMinimal,
r#" r#"
SELECT id, passkey_upper, passkey_lower FROM users SELECT id, passkey FROM users
"# "#
) )
.fetch_all(self.borrow()) .fetch_all(self.borrow())

View File

@@ -10,7 +10,7 @@ impl ConnectionPool {
pub async fn create_wiki_article( pub async fn create_wiki_article(
&self, &self,
article: &UserCreatedWikiArticle, article: &UserCreatedWikiArticle,
current_user_id: i64, current_user_id: i32,
) -> Result<WikiArticle> { ) -> Result<WikiArticle> {
let created_article = sqlx::query_as!( let created_article = sqlx::query_as!(
WikiArticle, WikiArticle,

View File

@@ -20,8 +20,7 @@ const initialState: User = {
invited: 0, invited: 0,
last_seen: '', last_seen: '',
leeching: 0, leeching: 0,
passkey_lower: 0, passkey: 'aaaaaaaaaaaa',
passkey_upper: 0,
password_hash: '', password_hash: '',
ratio: 0.0, ratio: 0.0,
real_downloaded: 0, real_downloaded: 0,

shared/Cargo.toml (new file)
View File

@@ -0,0 +1,10 @@
[package]
name = "arcadia-shared"
version = "0.1.0"
edition = "2024"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
indexmap = { version = "2.11.0", default-features = false, features = ["std", "serde"] }
anyhow = { version = "1.0.99", default-features = true, features = ["std"] }
sqlx = { version = "0.8", features = [ "runtime-tokio", "tls-native-tls", "postgres", "chrono", "ipnetwork" ] }

shared/src/lib.rs (new file)
View File

@@ -0,0 +1 @@
pub mod tracker;

View File

@@ -0,0 +1 @@
pub mod models;

View File

@@ -0,0 +1 @@
pub mod user;

View File

@@ -0,0 +1,63 @@
use anyhow::bail;
use serde::{Deserialize, Serialize, Serializer};
use sqlx::{Database, Decode};
use std::{fmt::Display, str::FromStr};
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq)]
pub struct Passkey(pub [u8; 32]);
#[derive(Clone, Deserialize, Serialize)]
pub struct User {
pub id: u32,
pub passkey: Passkey,
pub can_download: bool,
pub num_seeding: u32,
pub num_leeching: u32,
}
impl FromStr for Passkey {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = s.bytes();
if bytes.len() != 32 {
bail!("Invalid passkey length.");
}
let array = [(); 32].map(|_| bytes.next().unwrap());
Ok(Passkey(array))
}
}
impl<'r, DB: Database> Decode<'r, DB> for Passkey
where
&'r str: Decode<'r, DB>,
{
fn decode(
value: <DB as Database>::ValueRef<'r>,
) -> Result<Passkey, Box<dyn std::error::Error + 'static + Send + Sync>> {
let value = <&str as Decode<DB>>::decode(value)?;
let mut bytes = value.bytes();
let array = [(); 32].map(|_| bytes.next().expect("Invalid passkey length."));
Ok(Passkey(array))
}
}
impl Display for Passkey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&String::from_utf8_lossy(&self.0))
}
}
impl Serialize for Passkey {
fn serialize<S>(&self, serializer: S) -> std::prelude::v1::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
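
Note that FromStr counts bytes, not chars, so the length-32 check assumes ASCII input, which the alphanumeric passkeys satisfy. A round-trip sketch (serde_json used purely for illustration; it is not a dependency of this crate):

fn passkey_roundtrip() -> anyhow::Result<()> {
    let s = "abcdefghijklmnopqrstuvwxyz012345"; // exactly 32 ASCII bytes
    let pk: Passkey = s.parse()?;
    assert_eq!(pk.to_string(), s);
    // Serialize goes through Display, so the wire form is the same string:
    assert_eq!(serde_json::to_string(&pk)?, format!("\"{s}\""));
    Ok(())
}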

View File

@@ -15,3 +15,11 @@ env_logger = "0.11.8"
thiserror = "2.0.12" thiserror = "2.0.12"
actix-web-httpauth = "0.8.2" actix-web-httpauth = "0.8.2"
futures = "0.3" futures = "0.3"
serde = { version = "1.0", features = ["derive"] }
strum = "0.27"
log = "0.4"
serde_bencode = "0.2.4"
indexmap = { version = "2.11.0", default-features = false, features = ["std", "serde"] }
anyhow = { version = "1.0.99", default-features = true, features = ["std"] }
arcadia-shared = { path = "../../shared" }
parking_lot = "0.12.4"

View File

@@ -0,0 +1,43 @@
use serde::Serialize;
use crate::announce::HttpResponseBuilderExt;
pub type Result<T> = std::result::Result<T, AnnounceError>;
#[derive(Debug, thiserror::Error)]
pub enum AnnounceError {
#[error("invalid passkey")]
InvalidPassKey,
#[error("invalid info_hash")]
InvalidInfoHash,
#[error("invalid user id")]
InvalidUserId,
#[error("invalid user id or torrent id")]
InvalidUserIdOrTorrentId,
#[error("torrent client not in whitelist")]
TorrentClientNotInWhitelist,
}
impl actix_web::ResponseError for AnnounceError {
#[inline]
fn status_code(&self) -> actix_web::http::StatusCode {
actix_web::http::StatusCode::BAD_REQUEST
}
fn error_response(&self) -> actix_web::HttpResponse {
log::error!("The request generated this error: {self}");
#[derive(Debug, Serialize)]
struct WrappedError {
#[serde(rename = "failure reason")]
failure_reason: String,
}
actix_web::HttpResponse::build(self.status_code()).bencode(WrappedError {
failure_reason: self.to_string(),
})
}
}
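
What a client actually receives for, say, InvalidPassKey is a one-key bencoded dict (the WrappedError shape above, pulled out here for illustration). Many trackers deliberately answer failures with HTTP 200 because some clients discard non-200 bodies, so BAD_REQUEST is a debatable but workable choice:

let body = serde_bencode::to_bytes(&WrappedError {
    failure_reason: "invalid passkey".into(),
})
.unwrap();
assert_eq!(body, b"d14:failure reason15:invalid passkeye".to_vec());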

View File

@@ -0,0 +1,21 @@
use actix_web::{
web::{Data, Path},
HttpResponse,
};
use crate::{
announce::error::{AnnounceError, Result},
Tracker,
};
#[utoipa::path(
post,
operation_id = "Announce",
tag = "Announce",
path = "/{passkey}/announce",
responses(
(status = 200, description = "Announce"),
)
)]
pub async fn exec(_arc: Data<Tracker>, passkey: Path<String>) -> Result<HttpResponse> {
// Skeleton handler: the passkey is parsed for validity but not used yet.
// (The app-state struct in this crate is `Tracker`, defined in lib.rs.)
let _passkey = u128::from_str_radix(&passkey, 16).map_err(|_| AnnounceError::InvalidPassKey)?;
Ok(HttpResponse::Ok().finish())
}
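
Two things worth flagging in this skeleton: BitTorrent clients announce over GET (the `post` in the utoipa annotation above is presumably a placeholder), and the hex u128 parse matches the retired split-passkey scheme rather than the new 32-byte alphanumeric keys. A hedged sketch of where the parse is likely headed:

use arcadia_shared::tracker::models::user::Passkey;

// Reject anything that is not a well-formed 32-byte passkey:
let passkey: Passkey = passkey
    .parse()
    .map_err(|_| AnnounceError::InvalidPassKey)?;
// then: look the user up in `arc.users` (currently keyed by user id,
// so a passkey -> id index is still needed)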

View File

@@ -0,0 +1 @@
pub mod announce;

View File

@@ -0,0 +1,19 @@
use actix_web::{HttpResponse, HttpResponseBuilder};
use serde::Serialize;
pub mod error;
pub mod handlers;
pub mod models;
pub trait HttpResponseBuilderExt {
fn bencode(&mut self, val: impl Serialize) -> HttpResponse;
}
impl HttpResponseBuilderExt for HttpResponseBuilder {
fn bencode(&mut self, val: impl Serialize) -> HttpResponse {
match serde_bencode::to_bytes(&val) {
Ok(data) => self.body(data),
Err(_) => HttpResponse::BadRequest().body("Failed to bencode"),
}
}
}
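
Any Serialize type can go through this trait; it only needs to be in scope at the call site. An illustrative announce reply (not a type from this commit):

use actix_web::HttpResponse;
use serde::Serialize;

#[derive(Serialize)]
struct AnnounceReply {
    interval: u32,
    peers: String, // real trackers use the compact peer format here
}

fn ok_reply() -> HttpResponse {
    // HttpResponseBuilderExt must be imported for .bencode() to resolve
    HttpResponse::Ok().bencode(AnnounceReply {
        interval: 1800,
        peers: String::new(),
    })
}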

View File

@@ -0,0 +1 @@
pub mod torrent;

View File

@@ -0,0 +1,34 @@
use serde::Deserialize;
use strum::{Display, EnumString};
#[derive(Clone, Copy, Deserialize, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub struct InfoHash(pub [u8; 20]);
#[derive(Clone, Copy, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub struct PeerId(pub [u8; 20]);
#[derive(Clone, Copy, PartialEq, Display, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum Event {
#[strum(to_string = "completed")]
Completed,
#[strum(to_string = "")]
Empty,
#[strum(to_string = "started")]
Started,
#[strum(to_string = "stopped")]
Stopped,
}
pub struct Announce {
info_hash: InfoHash,
peer_id: PeerId,
port: u16,
uploaded: u64,
downloaded: u64,
left: u64,
event: Event,
numwant: usize,
corrupt: Option<u64>,
key: Option<String>,
}
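
The strum attributes give Event a string round trip matching the raw `event=` values clients send, including the empty string (strum's from_str matches to_string values as well as variant names):

use std::str::FromStr;

fn event_round_trip() {
    assert!(matches!(Event::from_str("started"), Ok(Event::Started)));
    assert!(matches!(Event::from_str(""), Ok(Event::Empty)));
    assert_eq!(Event::Completed.to_string(), "completed");
}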

View File

@@ -0,0 +1 @@
pub mod models;

View File

@@ -0,0 +1 @@
pub mod user;

View File

@@ -0,0 +1,7 @@
use indexmap::IndexMap;
use serde::Serialize;
pub use arcadia_shared::tracker::models::user::{Passkey, User};
// Default is required by the Map::default() call in Tracker::new (lib.rs)
#[derive(Default, Serialize)]
pub struct Map(IndexMap<u32, User>);
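
The newtype keeps the IndexMap field private, so Tracker code will need accessors; Default (added above) lets startup begin with an empty map. A hedged sketch of the first accessor this will likely need:

impl Map {
    pub fn get(&self, user_id: &u32) -> Option<&User> {
        self.0.get(user_id)
    }
}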

View File

@@ -1,13 +1,19 @@
use crate::env::Env; use parking_lot::RwLock;
use std::ops::Deref;
use crate::env::Env;
use std::{io::Write, ops::Deref};
pub mod announce;
pub mod api_doc; pub mod api_doc;
pub mod common;
pub mod env; pub mod env;
pub mod middleware; pub mod middleware;
pub mod routes; pub mod routes;
pub struct Tracker { pub struct Tracker {
env: Env, env: Env,
pub users: RwLock<common::models::user::Map>,
} }
impl Deref for Tracker { impl Deref for Tracker {
@@ -20,6 +26,14 @@ impl Deref for Tracker {
impl Tracker { impl Tracker {
pub fn new(env: Env) -> Self { pub fn new(env: Env) -> Self {
Self { env } print!("Getting users...");
std::io::stdout().flush().unwrap();
// TODO: fetch the user list from the backend here. `new` is synchronous,
// so nothing can be awaited yet and the map starts empty, e.g.
// `let users = fetch_users().await?;` once an async constructor exists.
Self {
env,
users: RwLock::new(common::models::user::Map::default()),
}
} }
} }
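
Since `new` is synchronous, the fetch cannot be awaited in place. One common shape is an async constructor awaited from main before the server starts; `fetch_users` below is hypothetical, standing in for the backend call this commit has not added yet:

impl Tracker {
    pub async fn with_users(env: Env) -> anyhow::Result<Self> {
        print!("Getting users...");
        std::io::stdout().flush().unwrap();
        // hypothetical: returns a populated common::models::user::Map
        let users = fetch_users(&env).await?;
        println!("[Finished]");
        Ok(Self {
            env,
            users: RwLock::new(users),
        })
    }
}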

View File

@@ -1,39 +1,35 @@
use actix_web::{dev::ServiceRequest, error::ErrorUnauthorized, web::Data}; // use actix_web::{dev::ServiceRequest, error::ErrorUnauthorized, web::Data};
use actix_web::{Error, FromRequest, HttpRequest}; // use actix_web::{Error, FromRequest, HttpRequest};
use futures::future::{ready, Ready}; // use futures::future::{ready, Ready};
pub struct Passkey(pub String); // pub struct Passkey(pub String);
impl FromRequest for Passkey { // impl FromRequest for Passkey {
type Error = Error; // type Error = Error;
type Future = Ready<Result<Self, Self::Error>>; // type Future = Ready<Result<Self, Self::Error>>;
fn from_request(req: &HttpRequest, _payload: &mut actix_web::dev::Payload) -> Self::Future { // fn from_request(req: &HttpRequest, _payload: &mut actix_web::dev::Payload) -> Self::Future {
let passkey = req // let passkey = req.path().into_inner();
.headers()
.get("api-key")
.and_then(|value| value.to_str().ok())
.map(|s| s.to_owned());
match passkey { // match passkey {
Some(key) => ready(Ok(Passkey(key))), // Some(key) => ready(Ok(Passkey(key))),
None => ready(Err(actix_web::error::ErrorUnauthorized( // None => ready(Err(actix_web::error::ErrorUnauthorized(
"authentication error: missing passkey", // "authentication error: missing passkey",
))), // ))),
} // }
} // }
} // }
pub async fn authenticate_user( // pub async fn authenticate_user(
req: ServiceRequest, // req: ServiceRequest,
passkey: Passkey, // passkey: Passkey,
) -> std::result::Result<ServiceRequest, (actix_web::Error, ServiceRequest)> { // ) -> std::result::Result<ServiceRequest, (actix_web::Error, ServiceRequest)> {
// if passkey.0 != arc.env.passkey { // // if passkey.0 != arc.env.passkey {
// Err(( // // Err((
// ErrorUnauthorized("authentication error: invalid API key"), // // ErrorUnauthorized("authentication error: invalid API key"),
// req, // // req,
// )) // // ))
// } else { // // } else {
Ok(req) // Ok(req)
// } // // }
} // }

View File

@@ -1,10 +1,10 @@
use actix_web::web::{self, scope}; use actix_web::web::{self, scope};
use actix_web_httpauth::middleware::HttpAuthentication; // use actix_web_httpauth::middleware::HttpAuthentication;
use crate::middleware::authenticate_user; // use crate::middleware::authenticate_user;
pub fn init(cfg: &mut web::ServiceConfig) { pub fn init(cfg: &mut web::ServiceConfig) {
// cfg.service( cfg.service(
// web::scope("/{passkey}").wrap(HttpAuthentication::with_fn(authenticate_user(req, passkey))), web::scope("/{passkey}"), //.wrap(HttpAuthentication::with_fn(authenticate_user(req, passkey))),
// ); );
} }
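
The scope above registers no routes yet. Wiring in the announce handler (and eventually re-enabling the commented-out auth middleware) would look roughly like this; web::get() because clients announce over GET:

pub fn init(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/{passkey}")
            .route("/announce", web::get().to(crate::announce::handlers::announce::exec)),
    );
}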