From e1790f6991d693aaf4fe72d1c13cd9c10cd488d3 Mon Sep 17 00:00:00 2001 From: Jose Celano Date: Fri, 11 Nov 2022 18:28:31 +0000 Subject: [PATCH] refactor: [#56] extract mods in upgrader --- .../from_v1_0_0_to_v2_0_0/databases/mod.rs | 27 ++ src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs | 3 +- .../transferrers/category_transferrer.rs | 39 ++ .../from_v1_0_0_to_v2_0_0/transferrers/mod.rs | 4 + .../transferrers/torrent_transferrer.rs | 198 +++++++++ .../transferrers/tracker_key_transferrer.rs | 45 ++ .../transferrers/user_transferrer.rs | 80 ++++ .../from_v1_0_0_to_v2_0_0/upgrader.rs | 392 +----------------- .../testers/torrent_tester.rs | 2 +- 9 files changed, 408 insertions(+), 382 deletions(-) create mode 100644 src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs create mode 100644 src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/mod.rs create mode 100644 src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs create mode 100644 src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs create mode 100644 src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs index fa37d81b..0cc2e300 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs @@ -1,2 +1,29 @@ +use self::sqlite_v1_0_0::SqliteDatabaseV1_0_0; +use self::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +use std::sync::Arc; + pub mod sqlite_v1_0_0; pub mod sqlite_v2_0_0; + +pub async fn current_db(db_filename: &str) -> Arc { + let source_database_connect_url = format!("sqlite://{}?mode=ro", db_filename); + Arc::new(SqliteDatabaseV1_0_0::new(&source_database_connect_url).await) +} + +pub async fn new_db(db_filename: &str) -> Arc { + let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename); + Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await) 
+} + +pub async fn migrate_destiny_database(dest_database: Arc) { + println!("Running migrations in destiny database..."); + dest_database.migrate().await; +} + +pub async fn reset_destiny_database(dest_database: Arc) { + println!("Truncating all tables in destiny database ..."); + dest_database + .delete_all_database_rows() + .await + .expect("Can't reset destiny database."); +} diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs index ef4843d0..afb35f90 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs @@ -1,2 +1,3 @@ +pub mod databases; +pub mod transferrers; pub mod upgrader; -pub mod databases; \ No newline at end of file diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs new file mode 100644 index 00000000..b8e20515 --- /dev/null +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs @@ -0,0 +1,39 @@ +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +use std::sync::Arc; + +pub async fn transfer_categories( + source_database: Arc, + dest_database: Arc, +) { + println!("Transferring categories ..."); + + let source_categories = source_database.get_categories_order_by_id().await.unwrap(); + println!("[v1] categories: {:?}", &source_categories); + + let result = dest_database.reset_categories_sequence().await.unwrap(); + println!("[v2] reset categories sequence result {:?}", result); + + for cat in &source_categories { + println!( + "[v2] adding category {:?} with id {:?} ...", + &cat.name, &cat.category_id + ); + let id = dest_database + .insert_category_and_get_id(&cat.name) + .await + .unwrap(); + + if id != cat.category_id { + panic!( + "Error copying category {:?} from source DB to destiny DB", + 
&cat.category_id + ); + } + + println!("[v2] category: {:?} {:?} added.", id, &cat.name); + } + + let dest_categories = dest_database.get_categories().await.unwrap(); + println!("[v2] categories: {:?}", &dest_categories); +} diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/mod.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/mod.rs new file mode 100644 index 00000000..94eaac75 --- /dev/null +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/mod.rs @@ -0,0 +1,4 @@ +pub mod category_transferrer; +pub mod torrent_transferrer; +pub mod tracker_key_transferrer; +pub mod user_transferrer; diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs new file mode 100644 index 00000000..bcb096b0 --- /dev/null +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs @@ -0,0 +1,198 @@ +use crate::models::torrent_file::Torrent; +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::TorrentRecordV2; +use crate::utils::parse_torrent::decode_torrent; +use std::sync::Arc; +use std::{error, fs}; + +pub async fn transfer_torrents( + source_database: Arc, + dest_database: Arc, + upload_path: &str, +) { + println!("Transferring torrents ..."); + + // Transfer table `torrust_torrents_files` + + // Although the table `torrust_torrents_files` existed in version v1.0.0 + it was not used. 
+ + // Transfer table `torrust_torrents` + + let torrents = source_database.get_torrents().await.unwrap(); + + for torrent in &torrents { + // [v2] table torrust_torrents + + println!( + "[v2][torrust_torrents] adding the torrent: {:?} ...", + &torrent.torrent_id + ); + + let uploader = source_database + .get_user_by_username(&torrent.uploader) + .await + .unwrap(); + + if uploader.username != torrent.uploader { + panic!( + "Error copying torrent with id {:?}. + Username (`uploader`) in `torrust_torrents` table does not match `username` in `torrust_users` table", + &torrent.torrent_id + ); + } + + let filepath = format!("{}/{}.torrent", upload_path, &torrent.torrent_id); + + let torrent_from_file_result = read_torrent_from_file(&filepath); + + if torrent_from_file_result.is_err() { + panic!("Error torrent file not found: {:?}", &filepath); + } + + let torrent_from_file = torrent_from_file_result.unwrap(); + + let id = dest_database + .insert_torrent(&TorrentRecordV2::from_v1_data( + torrent, + &torrent_from_file.info, + &uploader, + )) + .await + .unwrap(); + + if id != torrent.torrent_id { + panic!( + "Error copying torrent {:?} from source DB to destiny DB", + &torrent.torrent_id + ); + } + + println!( + "[v2][torrust_torrents] torrent with id {:?} added.", + &torrent.torrent_id + ); + + // [v2] table torrust_torrent_files + + println!("[v2][torrust_torrent_files] adding torrent files"); + + if torrent_from_file.is_a_single_file_torrent() { + // The torrent contains only one file then: + // - "path" is NULL + // - "md5sum" can be NULL + + println!( + "[v2][torrust_torrent_files][single-file-torrent] adding torrent file {:?} with length {:?} ...", + &torrent_from_file.info.name, &torrent_from_file.info.length, + ); + + let file_id = dest_database + .insert_torrent_file_for_torrent_with_one_file( + torrent.torrent_id, + // TODO: it seems md5sum can be None. Why? When? 
+ &torrent_from_file.info.md5sum.clone(), + torrent_from_file.info.length.unwrap(), + ) + .await; + + println!( + "[v2][torrust_torrent_files][single-file-torrent] torrent file insert result: {:?}", + &file_id + ); + } else { + // Multiple files are being shared + let files = torrent_from_file.info.files.as_ref().unwrap(); + + for file in files.iter() { + println!( + "[v2][torrust_torrent_files][multiple-file-torrent] adding torrent file: {:?} ...", + &file + ); + + let file_id = dest_database + .insert_torrent_file_for_torrent_with_multiple_files(torrent, file) + .await; + + println!( + "[v2][torrust_torrent_files][multiple-file-torrent] torrent file insert result: {:?}", + &file_id + ); + } + } + + // [v2] table torrust_torrent_info + + println!( + "[v2][torrust_torrent_info] adding the torrent info for torrent id {:?} ...", + &torrent.torrent_id + ); + + let id = dest_database.insert_torrent_info(torrent).await; + + println!( + "[v2][torrust_torrents] torrent info insert result: {:?}.", + &id + ); + + // [v2] table torrust_torrent_announce_urls + + println!( + "[v2][torrust_torrent_announce_urls] adding the torrent announce url for torrent id {:?} ...", + &torrent.torrent_id + ); + + if torrent_from_file.announce_list.is_some() { + // BEP-0012. Multiple trackers. 
+ + println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); + + // flatten the nested vec (this will however remove the tier grouping of the announce URLs) + let announce_urls = torrent_from_file + .announce_list + .clone() + .unwrap() + .into_iter() + .flatten() + .collect::>(); + + for tracker_url in announce_urls.iter() { + println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); + + let announce_url_id = dest_database + .insert_torrent_announce_url(torrent.torrent_id, tracker_url) + .await; + + println!("[v2][torrust_torrent_announce_urls][announce-list] torrent announce url insert result {:?} ...", &announce_url_id); + } + } else if torrent_from_file.announce.is_some() { + println!("[v2][torrust_torrent_announce_urls][announce] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); + + let announce_url_id = dest_database + .insert_torrent_announce_url( + torrent.torrent_id, + &torrent_from_file.announce.unwrap(), + ) + .await; + + println!( + "[v2][torrust_torrent_announce_urls][announce] torrent announce url insert result {:?} ...", + &announce_url_id + ); + } + } + println!("Torrents transferred"); +} + +pub fn read_torrent_from_file(path: &str) -> Result> { + let contents = match fs::read(path) { + Ok(contents) => contents, + Err(e) => return Err(e.into()), + }; + + match decode_torrent(&contents) { + Ok(torrent) => Ok(torrent), + Err(e) => Err(e), + } +} diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs new file mode 100644 index 00000000..e639739a --- /dev/null +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs @@ -0,0 +1,45 @@ +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; +use 
crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +use std::sync::Arc; + +pub async fn transfer_tracker_keys( + source_database: Arc, + dest_database: Arc, +) { + println!("Transferring tracker keys ..."); + + // Transfer table `torrust_tracker_keys` + + let tracker_keys = source_database.get_tracker_keys().await.unwrap(); + + for tracker_key in &tracker_keys { + // [v2] table torrust_tracker_keys + + println!( + "[v2][torrust_users] adding the tracker key with id {:?} ...", + &tracker_key.key_id + ); + + let id = dest_database + .insert_tracker_key( + tracker_key.key_id, + tracker_key.user_id, + &tracker_key.key, + tracker_key.valid_until, + ) + .await + .unwrap(); + + if id != tracker_key.key_id { + panic!( + "Error copying tracker key {:?} from source DB to destiny DB", + &tracker_key.key_id + ); + } + + println!( + "[v2][torrust_tracker_keys] tracker key with id {:?} added.", + &tracker_key.key_id + ); + } +} diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs new file mode 100644 index 00000000..18d8d680 --- /dev/null +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs @@ -0,0 +1,80 @@ +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0; +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0; +use std::sync::Arc; + +pub async fn transfer_users( + source_database: Arc, + dest_database: Arc, + date_imported: &str, +) { + println!("Transferring users ..."); + + // Transfer table `torrust_users` + + let users = source_database.get_users().await.unwrap(); + + for user in &users { + // [v2] table torrust_users + + println!( + "[v2][torrust_users] adding user with username {:?} and id {:?} ...", + &user.username, &user.user_id + ); + + let id = dest_database + .insert_imported_user(user.user_id, date_imported, user.administrator) + 
.await + .unwrap(); + + if id != user.user_id { + panic!( + "Error copying user {:?} from source DB to destiny DB", + &user.user_id + ); + } + + println!( + "[v2][torrust_users] user: {:?} {:?} added.", + &user.user_id, &user.username + ); + + // [v2] table torrust_user_profiles + + println!( + "[v2][torrust_user_profiles] adding user profile for user with username {:?} and id {:?} ...", + &user.username, &user.user_id + ); + + dest_database + .insert_user_profile( + user.user_id, + &user.username, + &user.email, + user.email_verified, + ) + .await + .unwrap(); + + println!( + "[v2][torrust_user_profiles] user profile added for user with username {:?} and id {:?}.", + &user.username, &user.user_id + ); + + // [v2] table torrust_user_authentication + + println!( + "[v2][torrust_user_authentication] adding password hash ({:?}) for user id ({:?}) ...", + &user.password, &user.user_id + ); + + dest_database + .insert_user_password_hash(user.user_id, &user.password) + .await + .unwrap(); + + println!( + "[v2][torrust_user_authentication] password hash ({:?}) added for user id ({:?}).", + &user.password, &user.user_id + ); + } +} diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs index 91e42931..e2c32c52 100644 --- a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs +++ b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs @@ -3,7 +3,7 @@ //! NOTES for `torrust_users` table transfer: //! //! - In v2, the table `torrust_user` contains a field `date_registered` non existing in v1. -//! We changed that columns to allow NULL. WE also added the new column `date_imported` with +//! We changed that columns to allow NULL. We also added the new column `date_imported` with //! the datetime when the upgrader was executed. //! //! NOTES for `torrust_user_profiles` table transfer: @@ -11,18 +11,18 @@ //! - In v2, the table `torrust_user_profiles` contains two new fields: `bio` and `avatar`. //! Empty string is used as default value. 
-use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::{ - SqliteDatabaseV2_0_0, TorrentRecordV2, -}; -use crate::utils::parse_torrent::decode_torrent; -use crate::{ - models::torrent_file::Torrent, - upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0, +use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::{ + current_db, migrate_destiny_database, new_db, reset_destiny_database, }; +use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::category_transferrer::transfer_categories; +use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::torrent_transferrer::transfer_torrents; +use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::tracker_key_transferrer::transfer_tracker_keys; +use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::user_transferrer::transfer_users; + use chrono::prelude::{DateTime, Utc}; -use std::{env, error, fs}; -use std::{sync::Arc, time::SystemTime}; +use std::env; +use std::time::SystemTime; use text_colorizer::*; @@ -86,8 +86,9 @@ pub async fn upgrade(args: &Arguments, date_imported: &str) { migrate_destiny_database(dest_database.clone()).await; reset_destiny_database(dest_database.clone()).await; + transfer_categories(source_database.clone(), dest_database.clone()).await; - transfer_user_data( + transfer_users( source_database.clone(), dest_database.clone(), date_imported, @@ -102,378 +103,9 @@ pub async fn upgrade(args: &Arguments, date_imported: &str) { .await; } -async fn current_db(db_filename: &str) -> Arc { - let source_database_connect_url = format!("sqlite://{}?mode=ro", db_filename); - Arc::new(SqliteDatabaseV1_0_0::new(&source_database_connect_url).await) -} - -async fn new_db(db_filename: &str) -> Arc { - let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename); - Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await) -} - -async fn migrate_destiny_database(dest_database: Arc) { - println!("Running migrations in destiny database..."); - 
dest_database.migrate().await; -} - -async fn reset_destiny_database(dest_database: Arc) { - println!("Truncating all tables in destiny database ..."); - dest_database - .delete_all_database_rows() - .await - .expect("Can't reset destiny database."); -} - -async fn transfer_categories( - source_database: Arc, - dest_database: Arc, -) { - println!("Transferring categories ..."); - - let source_categories = source_database.get_categories_order_by_id().await.unwrap(); - println!("[v1] categories: {:?}", &source_categories); - - let result = dest_database.reset_categories_sequence().await.unwrap(); - println!("[v2] reset categories sequence result {:?}", result); - - for cat in &source_categories { - println!( - "[v2] adding category {:?} with id {:?} ...", - &cat.name, &cat.category_id - ); - let id = dest_database - .insert_category_and_get_id(&cat.name) - .await - .unwrap(); - - if id != cat.category_id { - panic!( - "Error copying category {:?} from source DB to destiny DB", - &cat.category_id - ); - } - - println!("[v2] category: {:?} {:?} added.", id, &cat.name); - } - - let dest_categories = dest_database.get_categories().await.unwrap(); - println!("[v2] categories: {:?}", &dest_categories); -} - -async fn transfer_user_data( - source_database: Arc, - dest_database: Arc, - date_imported: &str, -) { - println!("Transferring users ..."); - - // Transfer table `torrust_users` - - let users = source_database.get_users().await.unwrap(); - - for user in &users { - // [v2] table torrust_users - - println!( - "[v2][torrust_users] adding user with username {:?} and id {:?} ...", - &user.username, &user.user_id - ); - - let id = dest_database - .insert_imported_user(user.user_id, date_imported, user.administrator) - .await - .unwrap(); - - if id != user.user_id { - panic!( - "Error copying user {:?} from source DB to destiny DB", - &user.user_id - ); - } - - println!( - "[v2][torrust_users] user: {:?} {:?} added.", - &user.user_id, &user.username - ); - - // [v2] table 
torrust_user_profiles - - println!( - "[v2][torrust_user_profiles] adding user profile for user with username {:?} and id {:?} ...", - &user.username, &user.user_id - ); - - dest_database - .insert_user_profile( - user.user_id, - &user.username, - &user.email, - user.email_verified, - ) - .await - .unwrap(); - - println!( - "[v2][torrust_user_profiles] user profile added for user with username {:?} and id {:?}.", - &user.username, &user.user_id - ); - - // [v2] table torrust_user_authentication - - println!( - "[v2][torrust_user_authentication] adding password hash ({:?}) for user id ({:?}) ...", - &user.password, &user.user_id - ); - - dest_database - .insert_user_password_hash(user.user_id, &user.password) - .await - .unwrap(); - - println!( - "[v2][torrust_user_authentication] password hash ({:?}) added for user id ({:?}).", - &user.password, &user.user_id - ); - } -} - /// Current datetime in ISO8601 without time zone. /// For example: 2022-11-10 10:35:15 pub fn datetime_iso_8601() -> String { let dt: DateTime = SystemTime::now().into(); format!("{}", dt.format("%Y-%m-%d %H:%M:%S")) } - -async fn transfer_tracker_keys( - source_database: Arc, - dest_database: Arc, -) { - println!("Transferring tracker keys ..."); - - // Transfer table `torrust_tracker_keys` - - let tracker_keys = source_database.get_tracker_keys().await.unwrap(); - - for tracker_key in &tracker_keys { - // [v2] table torrust_tracker_keys - - println!( - "[v2][torrust_users] adding the tracker key with id {:?} ...", - &tracker_key.key_id - ); - - let id = dest_database - .insert_tracker_key( - tracker_key.key_id, - tracker_key.user_id, - &tracker_key.key, - tracker_key.valid_until, - ) - .await - .unwrap(); - - if id != tracker_key.key_id { - panic!( - "Error copying tracker key {:?} from source DB to destiny DB", - &tracker_key.key_id - ); - } - - println!( - "[v2][torrust_tracker_keys] tracker key with id {:?} added.", - &tracker_key.key_id - ); - } -} - -async fn transfer_torrents( - 
source_database: Arc, - dest_database: Arc, - upload_path: &str, -) { - println!("Transferring torrents ..."); - - // Transfer table `torrust_torrents_files` - - // Although the The table `torrust_torrents_files` existed in version v1.0.0 - // it was was not used. - - // Transfer table `torrust_torrents` - - let torrents = source_database.get_torrents().await.unwrap(); - - for torrent in &torrents { - // [v2] table torrust_torrents - - println!( - "[v2][torrust_torrents] adding the torrent: {:?} ...", - &torrent.torrent_id - ); - - let uploader = source_database - .get_user_by_username(&torrent.uploader) - .await - .unwrap(); - - if uploader.username != torrent.uploader { - panic!( - "Error copying torrent with id {:?}. - Username (`uploader`) in `torrust_torrents` table does not match `username` in `torrust_users` table", - &torrent.torrent_id - ); - } - - let filepath = format!("{}/{}.torrent", upload_path, &torrent.torrent_id); - - let torrent_from_file_result = read_torrent_from_file(&filepath); - - if torrent_from_file_result.is_err() { - panic!("Error torrent file not found: {:?}", &filepath); - } - - let torrent_from_file = torrent_from_file_result.unwrap(); - - let id = dest_database - .insert_torrent(&TorrentRecordV2::from_v1_data( - torrent, - &torrent_from_file.info, - &uploader, - )) - .await - .unwrap(); - - if id != torrent.torrent_id { - panic!( - "Error copying torrent {:?} from source DB to destiny DB", - &torrent.torrent_id - ); - } - - println!( - "[v2][torrust_torrents] torrent with id {:?} added.", - &torrent.torrent_id - ); - - // [v2] table torrust_torrent_files - - println!("[v2][torrust_torrent_files] adding torrent files"); - - if torrent_from_file.is_a_single_file_torrent() { - // The torrent contains only one file then: - // - "path" is NULL - // - "md5sum" can be NULL - - println!( - "[v2][torrust_torrent_files][single-file-torrent] adding torrent file {:?} with length {:?} ...", - &torrent_from_file.info.name, 
&torrent_from_file.info.length, - ); - - let file_id = dest_database - .insert_torrent_file_for_torrent_with_one_file( - torrent.torrent_id, - // TODO: it seems med5sum can be None. Why? When? - &torrent_from_file.info.md5sum.clone(), - torrent_from_file.info.length.unwrap(), - ) - .await; - - println!( - "[v2][torrust_torrent_files][single-file-torrent] torrent file insert result: {:?}", - &file_id - ); - } else { - // Multiple files are being shared - let files = torrent_from_file.info.files.as_ref().unwrap(); - - for file in files.iter() { - println!( - "[v2][torrust_torrent_files][multiple-file-torrent] adding torrent file: {:?} ...", - &file - ); - - let file_id = dest_database - .insert_torrent_file_for_torrent_with_multiple_files(torrent, file) - .await; - - println!( - "[v2][torrust_torrent_files][multiple-file-torrent] torrent file insert result: {:?}", - &file_id - ); - } - } - - // [v2] table torrust_torrent_info - - println!( - "[v2][torrust_torrent_info] adding the torrent info for torrent id {:?} ...", - &torrent.torrent_id - ); - - let id = dest_database.insert_torrent_info(torrent).await; - - println!( - "[v2][torrust_torrents] torrent info insert result: {:?}.", - &id - ); - - // [v2] table torrust_torrent_announce_urls - - println!( - "[v2][torrust_torrent_announce_urls] adding the torrent announce url for torrent id {:?} ...", - &torrent.torrent_id - ); - - if torrent_from_file.announce_list.is_some() { - // BEP-0012. Multiple trackers. 
- - println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); - - // flatten the nested vec (this will however remove the) - let announce_urls = torrent_from_file - .announce_list - .clone() - .unwrap() - .into_iter() - .flatten() - .collect::>(); - - for tracker_url in announce_urls.iter() { - println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); - - let announce_url_id = dest_database - .insert_torrent_announce_url(torrent.torrent_id, tracker_url) - .await; - - println!("[v2][torrust_torrent_announce_urls][announce-list] torrent announce url insert result {:?} ...", &announce_url_id); - } - } else if torrent_from_file.announce.is_some() { - println!("[v2][torrust_torrent_announce_urls][announce] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id); - - let announce_url_id = dest_database - .insert_torrent_announce_url( - torrent.torrent_id, - &torrent_from_file.announce.unwrap(), - ) - .await; - - println!( - "[v2][torrust_torrent_announce_urls][announce] torrent announce url insert result {:?} ...", - &announce_url_id - ); - } - } - println!("Torrents transferred"); -} - -pub fn read_torrent_from_file(path: &str) -> Result> { - let contents = match fs::read(path) { - Ok(contents) => contents, - Err(e) => return Err(e.into()), - }; - - match decode_torrent(&contents) { - Ok(torrent) => Ok(torrent), - Err(e) => Err(e), - } -} diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/testers/torrent_tester.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/testers/torrent_tester.rs index d7ec1e39..79256e86 100644 --- a/tests/upgrades/from_v1_0_0_to_v2_0_0/testers/torrent_tester.rs +++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/testers/torrent_tester.rs @@ -6,7 +6,7 @@ use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1 TorrentRecordV1, UserRecordV1, }; use 
torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::convert_timestamp_to_datetime; -use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::upgrader::read_torrent_from_file; +use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::transferrers::torrent_transferrer::read_torrent_from_file; pub struct TorrentTester { source_database: Arc,