refactor: [#56] extract mods in upgrader
josecelano committed Nov 30, 2022
1 parent e23d948 commit e1790f6
Showing 9 changed files with 408 additions and 382 deletions.
27 changes: 27 additions & 0 deletions src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs
@@ -1,2 +1,29 @@
use self::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use self::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use std::sync::Arc;

pub mod sqlite_v1_0_0;
pub mod sqlite_v2_0_0;

pub async fn current_db(db_filename: &str) -> Arc<SqliteDatabaseV1_0_0> {
    let source_database_connect_url = format!("sqlite://{}?mode=ro", db_filename);
    Arc::new(SqliteDatabaseV1_0_0::new(&source_database_connect_url).await)
}

pub async fn new_db(db_filename: &str) -> Arc<SqliteDatabaseV2_0_0> {
    let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
    Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await)
}

pub async fn migrate_destiny_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
    println!("Running migrations in destiny database...");
    dest_database.migrate().await;
}

pub async fn reset_destiny_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
    println!("Truncating all tables in destiny database ...");
    dest_database
        .delete_all_database_rows()
        .await
        .expect("Can't reset destiny database.");
}
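
These helpers are meant to be called from upgrader.rs (one of the 9 changed files, not shown on this page). A minimal sketch of how they could be wired together, assuming a hypothetical prepare_databases helper and caller-supplied file names; note that the source database is opened read-only (mode=ro) while the destination is opened read-write-create (mode=rwc):

use std::sync::Arc;

use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::{
    current_db, migrate_destiny_database, new_db, reset_destiny_database,
};

// Hypothetical helper (not part of this diff): open both databases and prepare
// the destination schema before any data is transferred.
pub async fn prepare_databases(
    source_filename: &str,
    dest_filename: &str,
) -> (Arc<SqliteDatabaseV1_0_0>, Arc<SqliteDatabaseV2_0_0>) {
    let source_database = current_db(source_filename).await; // read-only (mode=ro)
    let dest_database = new_db(dest_filename).await; // created if missing (mode=rwc)

    migrate_destiny_database(dest_database.clone()).await;
    reset_destiny_database(dest_database.clone()).await;

    (source_database, dest_database)
}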
3 changes: 2 additions & 1 deletion src/upgrades/from_v1_0_0_to_v2_0_0/mod.rs
@@ -1,2 +1,3 @@
pub mod databases;
pub mod transferrers;
pub mod upgrader;
39 changes: 39 additions & 0 deletions src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs
@@ -0,0 +1,39 @@
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use std::sync::Arc;

pub async fn transfer_categories(
    source_database: Arc<SqliteDatabaseV1_0_0>,
    dest_database: Arc<SqliteDatabaseV2_0_0>,
) {
    println!("Transferring categories ...");

    let source_categories = source_database.get_categories_order_by_id().await.unwrap();
    println!("[v1] categories: {:?}", &source_categories);

    let result = dest_database.reset_categories_sequence().await.unwrap();
    println!("[v2] reset categories sequence result {:?}", result);

    for cat in &source_categories {
        println!(
            "[v2] adding category {:?} with id {:?} ...",
            &cat.name, &cat.category_id
        );
        let id = dest_database
            .insert_category_and_get_id(&cat.name)
            .await
            .unwrap();

        if id != cat.category_id {
            panic!(
                "Error copying category {:?} from source DB to destiny DB",
                &cat.category_id
            );
        }

        println!("[v2] category: {:?} {:?} added.", id, &cat.name);
    }

    let dest_categories = dest_database.get_categories().await.unwrap();
    println!("[v2] categories: {:?}", &dest_categories);
}
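
The id-equality check above only holds because reset_categories_sequence clears the destination's AUTOINCREMENT counter before the ordered inserts. That method is not part of this diff; a hedged sketch of what such a reset typically looks like with sqlx and SQLite follows (the table name torrust_categories is an assumption):

use sqlx::sqlite::SqlitePool;

// Sketch only: SQLite keeps the last AUTOINCREMENT value per table in the
// internal `sqlite_sequence` table; deleting that row makes the next insert
// start again from id 1, so ids assigned in v1 order line up with the v1 ids.
pub async fn reset_categories_sequence(pool: &SqlitePool) -> Result<(), sqlx::Error> {
    sqlx::query("DELETE FROM sqlite_sequence WHERE name = 'torrust_categories'")
        .execute(pool)
        .await?;
    Ok(())
}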
4 changes: 4 additions & 0 deletions src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/mod.rs
@@ -0,0 +1,4 @@
pub mod category_transferrer;
pub mod torrent_transferrer;
pub mod tracker_key_transferrer;
pub mod user_transferrer;
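
The upgrader presumably chains these modules in dependency order; a hypothetical composition sketch follows. The three transfer functions shown in this diff are used with their actual signatures; transfer_users from user_transferrer is not shown here, so it is only referenced in a comment rather than called.

use std::sync::Arc;

use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::category_transferrer::transfer_categories;
use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::torrent_transferrer::transfer_torrents;
use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::tracker_key_transferrer::transfer_tracker_keys;

// Hypothetical composition: categories and users first, since torrents and
// tracker keys reference them.
pub async fn transfer_all(
    source_database: Arc<SqliteDatabaseV1_0_0>,
    dest_database: Arc<SqliteDatabaseV2_0_0>,
    upload_path: &str,
) {
    transfer_categories(source_database.clone(), dest_database.clone()).await;
    // user_transferrer::transfer_users(...) would run here; its signature is
    // not shown in this diff, so it is omitted from the sketch.
    transfer_tracker_keys(source_database.clone(), dest_database.clone()).await;
    transfer_torrents(source_database.clone(), dest_database.clone(), upload_path).await;
}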
198 changes: 198 additions & 0 deletions src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs
@@ -0,0 +1,198 @@
use crate::models::torrent_file::Torrent;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::TorrentRecordV2;
use crate::utils::parse_torrent::decode_torrent;
use std::sync::Arc;
use std::{error, fs};

pub async fn transfer_torrents(
    source_database: Arc<SqliteDatabaseV1_0_0>,
    dest_database: Arc<SqliteDatabaseV2_0_0>,
    upload_path: &str,
) {
    println!("Transferring torrents ...");

    // Transfer table `torrust_torrents_files`

    // Although the table `torrust_torrents_files` existed in version v1.0.0,
    // it was not used.

    // Transfer table `torrust_torrents`

    let torrents = source_database.get_torrents().await.unwrap();

    for torrent in &torrents {
        // [v2] table torrust_torrents

        println!(
            "[v2][torrust_torrents] adding the torrent: {:?} ...",
            &torrent.torrent_id
        );

        let uploader = source_database
            .get_user_by_username(&torrent.uploader)
            .await
            .unwrap();

        if uploader.username != torrent.uploader {
            panic!(
                "Error copying torrent with id {:?}.
Username (`uploader`) in `torrust_torrents` table does not match `username` in `torrust_users` table",
                &torrent.torrent_id
            );
        }

        let filepath = format!("{}/{}.torrent", upload_path, &torrent.torrent_id);

        let torrent_from_file_result = read_torrent_from_file(&filepath);

        if torrent_from_file_result.is_err() {
            panic!("Error torrent file not found: {:?}", &filepath);
        }

        let torrent_from_file = torrent_from_file_result.unwrap();

        let id = dest_database
            .insert_torrent(&TorrentRecordV2::from_v1_data(
                torrent,
                &torrent_from_file.info,
                &uploader,
            ))
            .await
            .unwrap();

        if id != torrent.torrent_id {
            panic!(
                "Error copying torrent {:?} from source DB to destiny DB",
                &torrent.torrent_id
            );
        }

        println!(
            "[v2][torrust_torrents] torrent with id {:?} added.",
            &torrent.torrent_id
        );

        // [v2] table torrust_torrent_files

        println!("[v2][torrust_torrent_files] adding torrent files");

        if torrent_from_file.is_a_single_file_torrent() {
            // The torrent contains only one file, so:
            // - "path" is NULL
            // - "md5sum" can be NULL

            println!(
                "[v2][torrust_torrent_files][single-file-torrent] adding torrent file {:?} with length {:?} ...",
                &torrent_from_file.info.name, &torrent_from_file.info.length,
            );

            let file_id = dest_database
                .insert_torrent_file_for_torrent_with_one_file(
                    torrent.torrent_id,
                    // TODO: it seems md5sum can be None. Why? When?
                    &torrent_from_file.info.md5sum.clone(),
                    torrent_from_file.info.length.unwrap(),
                )
                .await;

            println!(
                "[v2][torrust_torrent_files][single-file-torrent] torrent file insert result: {:?}",
                &file_id
            );
        } else {
            // Multiple files are being shared
            let files = torrent_from_file.info.files.as_ref().unwrap();

            for file in files.iter() {
                println!(
                    "[v2][torrust_torrent_files][multiple-file-torrent] adding torrent file: {:?} ...",
                    &file
                );

                let file_id = dest_database
                    .insert_torrent_file_for_torrent_with_multiple_files(torrent, file)
                    .await;

                println!(
                    "[v2][torrust_torrent_files][multiple-file-torrent] torrent file insert result: {:?}",
                    &file_id
                );
            }
        }

        // [v2] table torrust_torrent_info

        println!(
            "[v2][torrust_torrent_info] adding the torrent info for torrent id {:?} ...",
            &torrent.torrent_id
        );

        let id = dest_database.insert_torrent_info(torrent).await;

        println!(
            "[v2][torrust_torrent_info] torrent info insert result: {:?}.",
            &id
        );

        // [v2] table torrust_torrent_announce_urls

        println!(
            "[v2][torrust_torrent_announce_urls] adding the torrent announce url for torrent id {:?} ...",
            &torrent.torrent_id
        );

        if torrent_from_file.announce_list.is_some() {
            // BEP-0012. Multiple trackers.

            println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id);

            // Flatten the nested vec. This keeps every tracker URL but discards
            // the tier grouping defined by BEP-0012.
            let announce_urls = torrent_from_file
                .announce_list
                .clone()
                .unwrap()
                .into_iter()
                .flatten()
                .collect::<Vec<String>>();

            for tracker_url in announce_urls.iter() {
                println!("[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id);

                let announce_url_id = dest_database
                    .insert_torrent_announce_url(torrent.torrent_id, tracker_url)
                    .await;

                println!("[v2][torrust_torrent_announce_urls][announce-list] torrent announce url insert result {:?} ...", &announce_url_id);
            }
        } else if torrent_from_file.announce.is_some() {
            println!("[v2][torrust_torrent_announce_urls][announce] adding the torrent announce url for torrent id {:?} ...", &torrent.torrent_id);

            let announce_url_id = dest_database
                .insert_torrent_announce_url(
                    torrent.torrent_id,
                    &torrent_from_file.announce.unwrap(),
                )
                .await;

            println!(
                "[v2][torrust_torrent_announce_urls][announce] torrent announce url insert result {:?} ...",
                &announce_url_id
            );
        }
    }
    println!("Torrents transferred");
}

pub fn read_torrent_from_file(path: &str) -> Result<Torrent, Box<dyn error::Error>> {
    let contents = match fs::read(path) {
        Ok(contents) => contents,
        Err(e) => return Err(e.into()),
    };

    match decode_torrent(&contents) {
        Ok(torrent) => Ok(torrent),
        Err(e) => Err(e),
    }
}
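
For reference, a small self-contained illustration (not project code; the tracker URLs are made up) of what the flatten() call in the announce-list branch does: BEP-0012 groups trackers into tiers as a nested list, and flattening keeps every URL while discarding the tier boundaries.

fn main() {
    // `announce-list` from a torrent file: one inner vec per tier.
    let announce_list: Vec<Vec<String>> = vec![
        vec!["udp://tracker-a.example.com:6969/announce".to_string()], // tier 1
        vec![
            "udp://tracker-b.example.com:6969/announce".to_string(), // tier 2
            "udp://tracker-c.example.com:6969/announce".to_string(),
        ],
    ];

    let flat: Vec<String> = announce_list.into_iter().flatten().collect();

    // All three URLs survive, but the fact that b and c formed one tier is lost.
    assert_eq!(flat.len(), 3);
    println!("{:?}", flat);
}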
45 changes: 45 additions & 0 deletions src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs
@@ -0,0 +1,45 @@
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;
use std::sync::Arc;

pub async fn transfer_tracker_keys(
    source_database: Arc<SqliteDatabaseV1_0_0>,
    dest_database: Arc<SqliteDatabaseV2_0_0>,
) {
    println!("Transferring tracker keys ...");

    // Transfer table `torrust_tracker_keys`

    let tracker_keys = source_database.get_tracker_keys().await.unwrap();

    for tracker_key in &tracker_keys {
        // [v2] table torrust_tracker_keys

        println!(
            "[v2][torrust_tracker_keys] adding the tracker key with id {:?} ...",
            &tracker_key.key_id
        );

        let id = dest_database
            .insert_tracker_key(
                tracker_key.key_id,
                tracker_key.user_id,
                &tracker_key.key,
                tracker_key.valid_until,
            )
            .await
            .unwrap();

        if id != tracker_key.key_id {
            panic!(
                "Error copying tracker key {:?} from source DB to destiny DB",
                &tracker_key.key_id
            );
        }

        println!(
            "[v2][torrust_tracker_keys] tracker key with id {:?} added.",
            &tracker_key.key_id
        );
    }
}