diff --git a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs
index 55a8821d..bf3754fe 100644
--- a/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs
+++ b/src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs
@@ -71,10 +71,11 @@ fn parse_args() -> Arguments {
 }
 
 pub async fn run_upgrader() {
-    upgrade(&parse_args()).await
+    let now = datetime_iso_8601();
+    upgrade(&parse_args(), &now).await;
 }
 
-pub async fn upgrade(args: &Arguments) {
+pub async fn upgrade(args: &Arguments, date_imported: &str) {
     // Get connection to source database (current DB in settings)
     let source_database = current_db(&args.source_database_file).await;
 
@@ -86,7 +87,12 @@
     migrate_destiny_database(dest_database.clone()).await;
     reset_destiny_database(dest_database.clone()).await;
     transfer_categories(source_database.clone(), dest_database.clone()).await;
-    transfer_user_data(source_database.clone(), dest_database.clone()).await;
+    transfer_user_data(
+        source_database.clone(),
+        dest_database.clone(),
+        date_imported,
+    )
+    .await;
     transfer_tracker_keys(source_database.clone(), dest_database.clone()).await;
     transfer_torrents(
         source_database.clone(),
@@ -158,6 +164,7 @@ async fn transfer_categories(
 async fn transfer_user_data(
     source_database: Arc<SqliteDatabaseV1_0_0>,
     dest_database: Arc<SqliteDatabaseV2_0_0>,
+    date_imported: &str,
 ) {
     println!("Transferring users ...");
 
@@ -173,8 +180,6 @@ async fn transfer_user_data(
             &user.username, &user.user_id
         );
 
-        let date_imported = today_iso8601();
-
         let id = dest_database
             .insert_imported_user(user.user_id, &date_imported, user.administrator)
             .await
@@ -238,7 +243,9 @@
     }
 }
 
-fn today_iso8601() -> String {
+/// Current datetime in ISO8601 without time zone.
+/// For example: 2022-11-10 10:35:15
+pub fn datetime_iso_8601() -> String {
     let dt: DateTime<Utc> = SystemTime::now().into();
     format!("{}", dt.format("%Y-%m-%d %H:%M:%S"))
 }
diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/mod.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/mod.rs
index bb1d6613..0a1f301b 100644
--- a/tests/upgrades/from_v1_0_0_to_v2_0_0/mod.rs
+++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/mod.rs
@@ -1,2 +1,3 @@
 pub mod sqlite_v1_0_0;
+pub mod sqlite_v2_0_0;
 pub mod tests;
diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs
index 1904df6c..6da98170 100644
--- a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs
+++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs
@@ -1,15 +1,16 @@
 use sqlx::sqlite::SqlitePoolOptions;
-use sqlx::SqlitePool;
+use sqlx::{query, SqlitePool};
 use std::fs;
+use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::UserRecordV1;
 
 pub struct SqliteDatabaseV1_0_0 {
     pub pool: SqlitePool,
 }
 
 impl SqliteDatabaseV1_0_0 {
-    pub async fn db_connection(source_database_file: &str) -> Self {
-        let source_database_connect_url = format!("sqlite://{}?mode=rwc", source_database_file);
-        SqliteDatabaseV1_0_0::new(&source_database_connect_url).await
+    pub async fn db_connection(database_file: &str) -> Self {
+        let connect_url = format!("sqlite://{}?mode=rwc", database_file);
+        Self::new(&connect_url).await
     }
 
     pub async fn new(database_url: &str) -> Self {
@@ -24,6 +25,7 @@
     pub async fn migrate(&self, fixtures_dir: &str) {
         let migrations_dir = format!("{}database/v1.0.0/migrations/", fixtures_dir);
 
+        // TODO: read files from dir
         let migrations = vec![
             "20210831113004_torrust_users.sql",
             "20210904135524_torrust_tracker_keys.sql",
@@ -50,4 +52,17 @@
 
         println!("Migration result {:?}", res);
     }
+
+    pub async fn insert_user(&self, user: &UserRecordV1) -> Result<i64, sqlx::Error> {
+        query("INSERT INTO torrust_users (user_id, username, email, email_verified, password, administrator) VALUES (?, ?, ?, ?, ?, ?)")
+            .bind(user.user_id)
+            .bind(user.username.clone())
+            .bind(user.email.clone())
+            .bind(user.email_verified)
+            .bind(user.password.clone())
+            .bind(user.administrator)
+            .execute(&self.pool)
+            .await
+            .map(|v| v.last_insert_rowid())
+    }
 }
diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs
new file mode 100644
index 00000000..ba6f4831
--- /dev/null
+++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs
@@ -0,0 +1,37 @@
+use serde::{Deserialize, Serialize};
+use sqlx::sqlite::SqlitePoolOptions;
+use sqlx::{query_as, SqlitePool};
+
+#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
+pub struct UserRecordV2 {
+    pub user_id: i64,
+    pub date_registered: Option<String>,
+    pub date_imported: Option<String>,
+    pub administrator: bool,
+}
+
+pub struct SqliteDatabaseV2_0_0 {
+    pub pool: SqlitePool,
+}
+
+impl SqliteDatabaseV2_0_0 {
+    pub async fn db_connection(database_file: &str) -> Self {
+        let connect_url = format!("sqlite://{}?mode=rwc", database_file);
+        Self::new(&connect_url).await
+    }
+
+    pub async fn new(database_url: &str) -> Self {
+        let db = SqlitePoolOptions::new()
+            .connect(database_url)
+            .await
+            .expect("Unable to create database pool.");
+        Self { pool: db }
+    }
+
+    pub async fn get_user(&self, user_id: i64) -> Result<UserRecordV2, sqlx::Error> {
+        query_as::<_, UserRecordV2>("SELECT * FROM torrust_users WHERE user_id = ?")
+            .bind(user_id)
+            .fetch_one(&self.pool)
+            .await
+    }
+}
diff --git a/tests/upgrades/from_v1_0_0_to_v2_0_0/tests.rs b/tests/upgrades/from_v1_0_0_to_v2_0_0/tests.rs
index 79cfc866..e0f5f3bc 100644
--- a/tests/upgrades/from_v1_0_0_to_v2_0_0/tests.rs
+++ b/tests/upgrades/from_v1_0_0_to_v2_0_0/tests.rs
@@ -4,39 +4,137 @@
 //! cargo test upgrade_data_from_version_v1_0_0_to_v2_0_0 -- --nocapture
 //! ```
 use crate::upgrades::from_v1_0_0_to_v2_0_0::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
+use crate::upgrades::from_v1_0_0_to_v2_0_0::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
+use argon2::password_hash::SaltString;
+use argon2::{Argon2, PasswordHasher};
+use rand_core::OsRng;
 use std::fs;
 use std::sync::Arc;
-use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::upgrader::{upgrade, Arguments};
+use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::UserRecordV1;
+use torrust_index_backend::upgrades::from_v1_0_0_to_v2_0_0::upgrader::{
+    datetime_iso_8601, upgrade, Arguments,
+};
 
 #[tokio::test]
 async fn upgrade_data_from_version_v1_0_0_to_v2_0_0() {
-    /* TODO:
-     * - Insert data: user, tracker key and torrent
-     * - Assertions
-     */
+    // Directories
     let fixtures_dir = "./tests/upgrades/from_v1_0_0_to_v2_0_0/fixtures/".to_string();
-    let debug_output_dir = "./tests/upgrades/from_v1_0_0_to_v2_0_0/output/".to_string();
+    let output_dir = "./tests/upgrades/from_v1_0_0_to_v2_0_0/output/".to_string();
 
-    let source_database_file = format!("{}source.db", debug_output_dir);
-    let destiny_database_file = format!("{}destiny.db", debug_output_dir);
-
-    // TODO: use a unique temporary dir
-    fs::remove_file(&source_database_file).expect("Can't remove source DB file.");
-    fs::remove_file(&destiny_database_file).expect("Can't remove destiny DB file.");
+    // Files
+    let source_database_file = format!("{}source.db", output_dir);
+    let destiny_database_file = format!("{}destiny.db", output_dir);
 
+    // Set up clean database
+    reset_databases(&source_database_file, &destiny_database_file);
     let source_database = source_db_connection(&source_database_file).await;
-
     source_database.migrate(&fixtures_dir).await;
 
+    // Load data into database v1
+
+    // `torrust_users` table
+
+    let user = UserRecordV1 {
+        user_id: 1,
+        username: "user01".to_string(),
+        email: "user01@torrust.com".to_string(),
+        email_verified: true,
+        password: hashed_valid_password(),
+        administrator: true,
+    };
+    let user_id = source_database.insert_user(&user).await.unwrap();
+
+    // `torrust_tracker_keys` table
+
+    // TODO
+
+    // `torrust_torrents` table
+
+    // TODO
+
+    // Run the upgrader
     let args = Arguments {
-        source_database_file,
-        destiny_database_file,
+        source_database_file: source_database_file.clone(),
+        destiny_database_file: destiny_database_file.clone(),
         upload_path: format!("{}uploads/", fixtures_dir),
     };
+    let now = datetime_iso_8601();
+    upgrade(&args, &now).await;
+
+    // Assertions in database v2
+
+    let destiny_database = destiny_db_connection(&destiny_database_file).await;
+
+    // `torrust_users` table
+
+    let imported_user = destiny_database.get_user(user_id).await.unwrap();
+
+    assert_eq!(imported_user.user_id, user.user_id);
+    assert!(imported_user.date_registered.is_none());
+    assert_eq!(imported_user.date_imported.unwrap(), now);
+    assert_eq!(imported_user.administrator, user.administrator);
+
+    // `torrust_user_authentication` table
+
+    // TODO
+
+    // `torrust_user_profiles` table
+
+    // TODO
+
+    // `torrust_tracker_keys` table
 
-    upgrade(&args).await;
+    // TODO
+
+    // `torrust_torrents` table
+
+    // TODO
+
+    // `torrust_torrent_files` table
+
+    // TODO
+
+    // `torrust_torrent_info` table
+
+    // TODO
+
+    // `torrust_torrent_announce_urls` table
+
+    // TODO
 }
 
 async fn source_db_connection(source_database_file: &str) -> Arc<SqliteDatabaseV1_0_0> {
     Arc::new(SqliteDatabaseV1_0_0::db_connection(&source_database_file).await)
 }
+
+async fn destiny_db_connection(destiny_database_file: &str) -> Arc<SqliteDatabaseV2_0_0> {
+    Arc::new(SqliteDatabaseV2_0_0::db_connection(&destiny_database_file).await)
+}
+
+/// Reset databases from previous executions
+fn reset_databases(source_database_file: &str, destiny_database_file: &str) {
+    // TODO: use a unique temporary dir
+    fs::remove_file(&source_database_file).expect("Can't remove source DB file.");
+    fs::remove_file(&destiny_database_file).expect("Can't remove destiny DB file.");
+}
+
+fn hashed_valid_password() -> String {
+    hash_password(&valid_password())
+}
+
+fn valid_password() -> String {
+    "123456".to_string()
+}
+
+fn hash_password(plain_password: &str) -> String {
+    let salt = SaltString::generate(&mut OsRng);
+
+    // Argon2 with default params (Argon2id v19)
+    let argon2 = Argon2::default();
+
+    // Hash password to PHC string ($argon2id$v=19$...)
+    argon2
+        .hash_password(plain_password.as_bytes(), &salt)
+        .unwrap()
+        .to_string()
+}