From b92fb0834cea34e783494081373c19167ceb5dd0 Mon Sep 17 00:00:00 2001
From: Jose Celano
Date: Thu, 27 Oct 2022 19:04:35 +0100
Subject: [PATCH] feat: [#56] transfer categories from db v1.0.0 to v2.0.0

First action for the command that upgrades the data: it transfers the
categories from the current DB schema (v1.0.0) to the new DB schema (v2.0.0).
---
 src/bin/db_migrate.rs          |  91 +++++++++++++++++----------
 src/databases/database.rs      |  16 -----
 src/databases/mod.rs           |   2 +
 src/databases/sqlite.rs        |   8 ---
 src/databases/sqlite_v1_0_0.rs |  30 +++++++++
 src/databases/sqlite_v2_0_0.rs | 109 +++++++++++++++++++++++++++++++++
 6 files changed, 200 insertions(+), 56 deletions(-)
 create mode 100644 src/databases/sqlite_v1_0_0.rs
 create mode 100644 src/databases/sqlite_v2_0_0.rs

diff --git a/src/bin/db_migrate.rs b/src/bin/db_migrate.rs
index 693ed5e8..fcfb7eae 100644
--- a/src/bin/db_migrate.rs
+++ b/src/bin/db_migrate.rs
@@ -3,14 +3,11 @@ use std::sync::Arc;
 
 use torrust_index_backend::config::Configuration;
-use torrust_index_backend::databases::database::{
-    connect_database, connect_database_without_running_migrations,
-};
-
-#[actix_web::main]
-async fn main() {
-    let dest_database_connect_url = "sqlite://data_v2.db?mode=rwc";
+use torrust_index_backend::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
+use torrust_index_backend::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
 
+async fn current_db() -> Arc<SqliteDatabaseV1_0_0> {
+    // Connect to the old v1.0.0 DB
     let cfg = match Configuration::load_from_file().await {
         Ok(config) => Arc::new(config),
         Err(error) => {
@@ -20,36 +17,66 @@ async fn main() {
 
     let settings = cfg.settings.read().await;
 
-    // Connect to the current v1.0.0 DB
-    let source_database = Arc::new(
-        connect_database_without_running_migrations(&settings.database.connect_url)
-            .await
-            .expect("Can't connect to source DB."),
-    );
+    Arc::new(SqliteDatabaseV1_0_0::new(&settings.database.connect_url).await)
+}
+
+async fn new_db(db_filename: String) -> Arc<SqliteDatabaseV2_0_0> {
+    let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
+    Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await)
+}
+
+async fn reset_destination_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
+    println!("Truncating all tables in the destination database ...");
+    dest_database
+        .delete_all_database_rows()
+        .await
+        .expect("Can't reset the destination database.");
+}
 
-    // Connect to the new v2.0.0 DB (running migrations)
-    let dest_database = Arc::new(
-        connect_database(&dest_database_connect_url)
+async fn transfer_categories(
+    source_database: Arc<SqliteDatabaseV1_0_0>,
+    dest_database: Arc<SqliteDatabaseV2_0_0>,
+) {
+    let source_categories = source_database.get_categories_order_by_id().await.unwrap();
+    println!("[v1] categories: {:?}", &source_categories);
+
+    let result = dest_database.reset_categories_sequence().await.unwrap();
+    println!("result {:?}", result);
+
+    for cat in &source_categories {
+        println!(
+            "[v2] adding category: {:?} {:?} ...",
+            &cat.category_id, &cat.name
+        );
+        let id = dest_database
+            .insert_category_and_get_id(&cat.name)
             .await
-            .expect("Can't connect to dest DB."),
-    );
+            .unwrap();
+
+        if id != cat.category_id {
+            panic!(
+                "Error copying category {:?} from the source DB to the destination DB",
+                &cat.category_id
+            );
+        }
 
-    println!("Upgrading database from v1.0.0 to v2.0.0 ...");
+        println!("[v2] category: {:?} {:?} added.", id, &cat.name);
+    }
 
-    // It's just a test for the source connection.
-    // Print categories in current DB
-    let categories = source_database.get_categories().await;
-    println!("[v1] categories: {:?}", &categories);
+    let dest_categories = dest_database.get_categories().await.unwrap();
+    println!("[v2] categories: {:?}", &dest_categories);
+}
+
+#[actix_web::main]
+async fn main() {
+    // Get connections to the source and destination databases
+    let source_database = current_db().await;
+    let dest_database = new_db("data_v2.db".to_string()).await;
 
-    // It's just a test for the dest connection.
-    // Print categories in new DB
-    let categories = dest_database.get_categories().await;
-    println!("[v2] categories: {:?}", &categories);
+    println!("Upgrading data from v1.0.0 to v2.0.0 ...");
 
-    // Transfer categories
+    reset_destination_database(dest_database.clone()).await;
+    transfer_categories(source_database.clone(), dest_database.clone()).await;
 
-    /* TODO:
-       - Transfer categories: remove categories from seeding, reset sequence for IDs, copy categories in the right order to keep the same ids.
-       - ...
-    */
+    // TODO: WIP. We still have to transfer the data from the 5 tables in v1 and the torrent files in the `uploads` folder.
 }
diff --git a/src/databases/database.rs b/src/databases/database.rs
index 27adde76..0f06f702 100644
--- a/src/databases/database.rs
+++ b/src/databases/database.rs
@@ -77,22 +77,6 @@ pub async fn connect_database(db_path: &str) -> Result<Box<dyn Database>, DatabaseError> {
     }
 }
 
-/// Connect to a database without running migrations
-pub async fn connect_database_without_running_migrations(db_path: &str) -> Result<Box<dyn Database>, DatabaseError> {
-    match &db_path.chars().collect::<Vec<char>>() as &[char] {
-        ['s', 'q', 'l', 'i', 't', 'e', ..] => {
-            let db = SqliteDatabase::new_without_running_migrations(db_path).await;
-            Ok(Box::new(db))
-        }
-        ['m', 'y', 's', 'q', 'l', ..] => {
-            todo!()
-        }
-        _ => {
-            Err(DatabaseError::UnrecognizedDatabaseDriver)
-        }
-    }
-}
-
 /// Trait for database implementations.
 #[async_trait]
 pub trait Database: Sync + Send {
diff --git a/src/databases/mod.rs b/src/databases/mod.rs
index 169d99f4..c15a2b72 100644
--- a/src/databases/mod.rs
+++ b/src/databases/mod.rs
@@ -1,3 +1,5 @@
 pub mod database;
 pub mod mysql;
 pub mod sqlite;
+pub mod sqlite_v1_0_0;
+pub mod sqlite_v2_0_0;
diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs
index 88a904ab..62b197d1 100644
--- a/src/databases/sqlite.rs
+++ b/src/databases/sqlite.rs
@@ -30,14 +30,6 @@ impl SqliteDatabase {
         Self { pool: db }
     }
-
-    pub async fn new_without_running_migrations(database_url: &str) -> Self {
-        let db = SqlitePoolOptions::new()
-            .connect(database_url)
-            .await
-            .expect("Unable to create database pool.");
-        Self { pool: db }
-    }
 }
 
 #[async_trait]
diff --git a/src/databases/sqlite_v1_0_0.rs b/src/databases/sqlite_v1_0_0.rs
new file mode 100644
index 00000000..10420128
--- /dev/null
+++ b/src/databases/sqlite_v1_0_0.rs
@@ -0,0 +1,30 @@
+use super::database::DatabaseError;
+use serde::{Deserialize, Serialize};
+use sqlx::sqlite::SqlitePoolOptions;
+use sqlx::{query_as, SqlitePool};
+
+#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
+pub struct Category {
+    pub category_id: i64,
+    pub name: String,
+}
+pub struct SqliteDatabaseV1_0_0 {
+    pub pool: SqlitePool,
+}
+
+impl SqliteDatabaseV1_0_0 {
+    pub async fn new(database_url: &str) -> Self {
+        let db = SqlitePoolOptions::new()
+            .connect(database_url)
+            .await
+            .expect("Unable to create database pool.");
+        Self { pool: db }
+    }
+
+    pub async fn get_categories_order_by_id(&self) -> Result<Vec<Category>, DatabaseError> {
+        query_as::<_, Category>("SELECT category_id, name FROM torrust_categories ORDER BY category_id ASC")
+            .fetch_all(&self.pool)
+            .await
+            .map_err(|_| DatabaseError::Error)
+    }
+}
diff --git a/src/databases/sqlite_v2_0_0.rs b/src/databases/sqlite_v2_0_0.rs
new file mode 100644
index 00000000..0a1efe33
--- /dev/null
+++ b/src/databases/sqlite_v2_0_0.rs
@@ -0,0 +1,109 @@
+use super::database::DatabaseError;
+use serde::{Deserialize, Serialize};
+use sqlx::sqlite::{SqlitePoolOptions, SqliteQueryResult};
+use sqlx::{query, query_as, SqlitePool};
+
+#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
+pub struct Category {
+    pub category_id: i64,
+    pub name: String,
+}
+pub struct SqliteDatabaseV2_0_0 {
+    pub pool: SqlitePool,
+}
+
+impl SqliteDatabaseV2_0_0 {
+    pub async fn new(database_url: &str) -> Self {
+        let db = SqlitePoolOptions::new()
+            .connect(database_url)
+            .await
+            .expect("Unable to create database pool.");
+        Self { pool: db }
+    }
+
+    pub async fn reset_categories_sequence(&self) -> Result<SqliteQueryResult, DatabaseError> {
+        query("DELETE FROM `sqlite_sequence` WHERE `name` = 'torrust_categories'")
+            .execute(&self.pool)
+            .await
+            .map_err(|_| DatabaseError::Error)
+    }
+
+    pub async fn get_categories(&self) -> Result<Vec<Category>, DatabaseError> {
+        query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name")
+            .fetch_all(&self.pool)
+            .await
+            .map_err(|_| DatabaseError::Error)
+    }
+
+    pub async fn insert_category_and_get_id(&self, category_name: &str) -> Result<i64, DatabaseError> {
+        query("INSERT INTO torrust_categories (name) VALUES (?)")
+            .bind(category_name)
+            .execute(&self.pool)
+            .await
+            .map(|v| v.last_insert_rowid())
+            .map_err(|e| match e {
+                sqlx::Error::Database(err) => {
+                    if err.message().contains("UNIQUE") {
+                        DatabaseError::CategoryAlreadyExists
+                    } else {
+                        DatabaseError::Error
+                    }
+                }
+                _ => DatabaseError::Error,
+            })
+    }
+
+    pub async fn delete_all_database_rows(&self) -> Result<(), DatabaseError> {
+        query("DELETE FROM torrust_categories;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_torrents;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_tracker_keys;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_users;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_user_authentication;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_user_bans;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_user_invitations;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_user_profiles;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_torrents;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        query("DELETE FROM torrust_user_public_keys;")
+            .execute(&self.pool)
+            .await
+            .unwrap();
+
+        Ok(())
+    }
+}
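
One way to sanity-check the transfer after running `db_migrate` is to compare the categories in both databases. The sketch below is not part of the patch: it reuses `SqliteDatabaseV1_0_0` and `SqliteDatabaseV2_0_0` from the new modules, and the connection URLs (`data.db`, `data_v2.db`) are assumptions; in the tool itself the source URL comes from the configuration file, as in `current_db()`.

    // Hypothetical check (not included in this commit): every v1 category
    // should exist in the v2 database with the same id and name.
    use std::sync::Arc;

    use torrust_index_backend::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
    use torrust_index_backend::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

    #[actix_web::main]
    async fn main() {
        // Assumed file names; the source DB normally comes from the config file.
        let source = Arc::new(SqliteDatabaseV1_0_0::new("sqlite://data.db").await);
        let dest = Arc::new(SqliteDatabaseV2_0_0::new("sqlite://data_v2.db").await);

        let v1 = source.get_categories_order_by_id().await.unwrap();
        let v2 = dest.get_categories().await.unwrap();

        assert_eq!(v1.len(), v2.len(), "category count differs after the transfer");
        for cat in &v1 {
            assert!(
                v2.iter().any(|c| c.category_id == cat.category_id && c.name == cat.name),
                "category {} ({}) was not preserved",
                cat.category_id,
                cat.name
            );
        }
        println!("All {} categories kept their original ids.", v1.len());
    }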