Skip to content

Commit

Permalink
dev: apply clippy auto-fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
da2ce7 committed May 10, 2023
1 parent f1270ff commit 4a70ee0
Show file tree
Hide file tree
Showing 23 changed files with 106 additions and 74 deletions.
4 changes: 4 additions & 0 deletions src/cache/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ pub struct BytesCache {
}

impl BytesCache {
#[must_use]
pub fn new() -> Self {
Self {
bytes_table: IndexMap::new(),
Expand All @@ -36,6 +37,7 @@ impl BytesCache {
}

// With a total capacity in bytes.
#[must_use]
pub fn with_capacity(capacity: usize) -> Self {
let mut new = Self::new();

Expand All @@ -45,6 +47,7 @@ impl BytesCache {
}

// With a limit for individual entry sizes.
#[must_use]
pub fn with_entry_size_limit(entry_size_limit: usize) -> Self {
let mut new = Self::new();

Expand Down Expand Up @@ -77,6 +80,7 @@ impl BytesCache {
}

// Size of all the entry bytes combined.
#[must_use]
pub fn total_size(&self) -> usize {
let mut size: usize = 0;

Expand Down
3 changes: 3 additions & 0 deletions src/cache/image/manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ pub enum Error {

type UserQuotas = HashMap<i64, ImageCacheQuota>;

#[must_use]
pub fn now_in_secs() -> u64 {
match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
Ok(n) => n.as_secs(),
Expand All @@ -36,6 +37,7 @@ pub struct ImageCacheQuota {
}

impl ImageCacheQuota {
#[must_use]
pub fn new(user_id: i64, max_usage: usize, period_secs: u64) -> Self {
Self {
user_id,
Expand Down Expand Up @@ -66,6 +68,7 @@ impl ImageCacheQuota {
self.date_start_secs = now_in_secs();
}

/// `true` once this user's recorded usage has met or exceeded the
/// quota's `max_usage` limit.
#[must_use]
pub fn is_reached(&self) -> bool {
    self.max_usage <= self.usage
}
Expand Down
2 changes: 1 addition & 1 deletion src/console/commands/import_tracker_statistics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use std::env;
use std::sync::Arc;

use derive_more::{Display, Error};
use text_colorizer::*;
use text_colorizer::Colorize;

use crate::bootstrap::config::init_configuration;
use crate::bootstrap::logging;
Expand Down
11 changes: 8 additions & 3 deletions src/databases/mysql.rs
Original file line number Diff line number Diff line change
Expand Up @@ -467,7 +467,7 @@ impl Database for MysqlDatabase {
// flatten the nested vec (this will, however, remove the original grouping of the announce urls)
let announce_urls = announce_urls.iter().flatten().collect::<Vec<&String>>();

for tracker_url in announce_urls.iter() {
for tracker_url in &announce_urls {
let _ = query("INSERT INTO torrust_torrent_announce_urls (torrent_id, tracker_url) VALUES (?, ?)")
.bind(torrent_id)
.bind(tracker_url)
Expand Down Expand Up @@ -520,7 +520,7 @@ impl Database for MysqlDatabase {
match insert_torrent_info_result {
Ok(_) => {
let _ = tx.commit().await;
Ok(torrent_id as i64)
Ok(torrent_id)
}
Err(e) => {
let _ = tx.rollback().await;
Expand Down Expand Up @@ -560,7 +560,12 @@ impl Database for MysqlDatabase {
let torrent_files: Vec<TorrentFile> = db_torrent_files
.into_iter()
.map(|tf| TorrentFile {
path: tf.path.unwrap_or_default().split('/').map(|v| v.to_string()).collect(),
path: tf
.path
.unwrap_or_default()
.split('/')
.map(std::string::ToString::to_string)
.collect(),
length: tf.length,
md5sum: tf.md5sum,
})
Expand Down
15 changes: 10 additions & 5 deletions src/databases/sqlite.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ impl Database for SqliteDatabase {
match insert_user_profile_result {
Ok(_) => {
let _ = tx.commit().await;
Ok(user_id as i64)
Ok(user_id)
}
Err(e) => {
let _ = tx.rollback().await;
Expand Down Expand Up @@ -410,7 +410,7 @@ impl Database for SqliteDatabase {
.bind(root_hash)
.execute(&self.pool)
.await
.map(|v| v.last_insert_rowid() as i64)
.map(|v| v.last_insert_rowid())
.map_err(|e| match e {
sqlx::Error::Database(err) => {
if err.message().contains("info_hash") {
Expand Down Expand Up @@ -462,7 +462,7 @@ impl Database for SqliteDatabase {
// flatten the nested vec (this will, however, remove the original grouping of the announce urls)
let announce_urls = announce_urls.iter().flatten().collect::<Vec<&String>>();

for tracker_url in announce_urls.iter() {
for tracker_url in &announce_urls {
let _ = query("INSERT INTO torrust_torrent_announce_urls (torrent_id, tracker_url) VALUES (?, ?)")
.bind(torrent_id)
.bind(tracker_url)
Expand Down Expand Up @@ -515,7 +515,7 @@ impl Database for SqliteDatabase {
match insert_torrent_info_result {
Ok(_) => {
let _ = tx.commit().await;
Ok(torrent_id as i64)
Ok(torrent_id)
}
Err(e) => {
let _ = tx.rollback().await;
Expand Down Expand Up @@ -555,7 +555,12 @@ impl Database for SqliteDatabase {
let torrent_files: Vec<TorrentFile> = db_torrent_files
.into_iter()
.map(|tf| TorrentFile {
path: tf.path.unwrap_or_default().split('/').map(|v| v.to_string()).collect(),
path: tf
.path
.unwrap_or_default()
.split('/')
.map(std::string::ToString::to_string)
.collect(),
length: tf.length,
md5sum: tf.md5sum,
})
Expand Down
10 changes: 5 additions & 5 deletions src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ impl ResponseError for ServiceError {

impl From<sqlx::Error> for ServiceError {
fn from(e: sqlx::Error) -> Self {
eprintln!("{:?}", e);
eprintln!("{e:?}");

if let Some(err) = e.as_database_error() {
return if err.code() == Some(Cow::from("2067")) {
Expand Down Expand Up @@ -229,28 +229,28 @@ impl From<DatabaseError> for ServiceError {

impl From<argon2::password_hash::Error> for ServiceError {
    fn from(e: argon2::password_hash::Error) -> Self {
        // Log the underlying password-hashing error to stderr; callers only
        // ever see an opaque internal-server-error, never the detail.
        // NOTE: diff residue left both the old `eprintln!("{}", e)` and the
        // new inlined-args form here; only the post-commit line is kept.
        eprintln!("{e}");
        ServiceError::InternalServerError
    }
}

impl From<std::io::Error> for ServiceError {
    fn from(e: std::io::Error) -> Self {
        // Log the I/O failure detail to stderr and surface a generic 500.
        // Diff residue had both the old and new `eprintln!` lines; the
        // post-commit inlined-args form is the one kept.
        eprintln!("{e}");
        ServiceError::InternalServerError
    }
}

impl From<Box<dyn error::Error>> for ServiceError {
    fn from(e: Box<dyn error::Error>) -> Self {
        // Catch-all conversion for boxed dynamic errors: log the detail,
        // return an opaque internal-server-error. Diff residue duplicated
        // the old/new `eprintln!` lines; only the post-commit form is kept.
        eprintln!("{e}");
        ServiceError::InternalServerError
    }
}

impl From<serde_json::Error> for ServiceError {
    fn from(e: serde_json::Error) -> Self {
        // JSON (de)serialization failures are logged to stderr and mapped to
        // an opaque internal-server-error. Diff residue duplicated the
        // old/new `eprintln!` lines; only the post-commit form is kept.
        eprintln!("{e}");
        ServiceError::InternalServerError
    }
}
5 changes: 3 additions & 2 deletions src/models/response.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ pub struct TorrentResponse {
}

impl TorrentResponse {
#[must_use]
pub fn from_listing(torrent_listing: TorrentListing) -> TorrentResponse {
TorrentResponse {
torrent_id: torrent_listing.torrent_id,
Expand All @@ -57,7 +58,7 @@ impl TorrentResponse {
description: torrent_listing.description,
category: Category {
category_id: 0,
name: "".to_string(),
name: String::new(),
num_torrents: 0,
},
upload_date: torrent_listing.date_uploaded,
Expand All @@ -66,7 +67,7 @@ impl TorrentResponse {
leechers: torrent_listing.leechers,
files: vec![],
trackers: vec![],
magnet_link: "".to_string(),
magnet_link: String::new(),
}
}
}
Expand Down
13 changes: 12 additions & 1 deletion src/models/torrent_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,24 +42,28 @@ pub struct TorrentInfo {
impl TorrentInfo {
/// torrent file can only hold a pieces key or a root hash key:
/// http://www.bittorrent.org/beps/bep_0030.html
#[must_use]
pub fn get_pieces_as_string(&self) -> String {
    // Hex-encode the raw `pieces` bytes; an absent key yields "".
    // NOTE: diff residue left both the old `"".to_string()` arm and the
    // new `String::new()` arm here; only the post-commit arm is kept.
    match &self.pieces {
        None => String::new(),
        Some(byte_buf) => bytes_to_hex(byte_buf.as_ref()),
    }
}

/// Parses `root_hash` as a decimal `i64`, defaulting to `0` when the
/// key is absent.
///
/// # Panics
///
/// Panics if `root_hash` is present but is not a valid `i64` string.
#[must_use]
pub fn get_root_hash_as_i64(&self) -> i64 {
    self.root_hash
        .as_ref()
        .map_or(0_i64, |hash| hash.parse::<i64>().unwrap())
}

/// `true` when this torrent describes exactly one file: single-file
/// torrents carry a top-level `length` key instead of a `files` list.
#[must_use]
pub fn is_a_single_file_torrent(&self) -> bool {
    self.length.is_some()
}

/// `true` when this torrent lists its contents under the `files` key,
/// i.e. it is a multi-file torrent.
#[must_use]
pub fn is_a_multiple_file_torrent(&self) -> bool {
    self.files.is_some()
}
Expand Down Expand Up @@ -90,6 +94,7 @@ pub struct Torrent {
}

impl Torrent {
#[must_use]
pub fn from_db_info_files_and_announce_urls(
torrent_info: DbTorrentInfo,
torrent_files: Vec<TorrentFile>,
Expand Down Expand Up @@ -170,6 +175,7 @@ impl Torrent {
}
}

#[must_use]
pub fn calculate_info_hash_as_bytes(&self) -> [u8; 20] {
let info_bencoded = ser::to_bytes(&self.info).unwrap();
let mut hasher = Sha1::new();
Expand All @@ -180,10 +186,12 @@ impl Torrent {
sum_bytes
}

/// The torrent's info-hash, hex-encoded via `bytes_to_hex` from the
/// 20-byte SHA-1 digest of the bencoded `info` dictionary.
#[must_use]
pub fn info_hash(&self) -> String {
    bytes_to_hex(&self.calculate_info_hash_as_bytes())
}

#[must_use]
pub fn file_size(&self) -> i64 {
if self.info.length.is_some() {
self.info.length.unwrap()
Expand All @@ -201,6 +209,7 @@ impl Torrent {
}
}

#[must_use]
pub fn announce_urls(&self) -> Vec<String> {
if self.announce_list.is_none() {
return vec![self.announce.clone().unwrap()];
Expand All @@ -214,10 +223,12 @@ impl Torrent {
.collect::<Vec<String>>()
}

/// Convenience delegate to the inner `TorrentInfo`'s single-file check.
#[must_use]
pub fn is_a_single_file_torrent(&self) -> bool {
    self.info.is_a_single_file_torrent()
}

/// Convenience delegate to the inner `TorrentInfo`'s multi-file check.
#[must_use]
pub fn is_a_multiple_file_torrent(&self) -> bool {
    self.info.is_a_multiple_file_torrent()
}
Expand Down
6 changes: 3 additions & 3 deletions src/routes/torrent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -370,9 +370,9 @@ async fn get_torrent_request_from_payload(mut payload: Multipart) -> Result<Torr
let torrent_buffer = vec![0u8];
let mut torrent_cursor = Cursor::new(torrent_buffer);

let mut title = "".to_string();
let mut description = "".to_string();
let mut category = "".to_string();
let mut title = String::new();
let mut description = String::new();
let mut category = String::new();

while let Ok(Some(mut field)) = payload.try_next().await {
match field.content_disposition().get_name().unwrap() {
Expand Down
2 changes: 1 addition & 1 deletion src/routes/user.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ pub async fn register(req: HttpRequest, mut payload: web::Json<Register>, app_da
return Err(ServiceError::UsernameInvalid);
}

let email = payload.email.as_ref().unwrap_or(&"".to_string()).to_string();
let email = payload.email.as_ref().unwrap_or(&String::new()).to_string();

let user_id = app_data
.database
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,11 @@ pub async fn transfer_categories(source_database: Arc<SqliteDatabaseV1_0_0>, tar
.await
.unwrap();

if id != cat.category_id {
panic!(
"Error copying category {:?} from source DB to the target DB",
&cat.category_id
);
}
assert!(
id == cat.category_id,
"Error copying category {:?} from source DB to the target DB",
&cat.category_id
);

println!("[v2] category: {:?} {:?} added.", id, &cat.name);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,21 +29,22 @@ pub async fn transfer_torrents(

let uploader = source_database.get_user_by_username(&torrent.uploader).await.unwrap();

if uploader.username != torrent.uploader {
panic!(
"Error copying torrent with id {:?}.
assert!(
uploader.username == torrent.uploader,
"Error copying torrent with id {:?}.
Username (`uploader`) in `torrust_torrents` table does not match `username` in `torrust_users` table",
&torrent.torrent_id
);
}
&torrent.torrent_id
);

let filepath = format!("{}/{}.torrent", upload_path, &torrent.torrent_id);

let torrent_from_file_result = read_torrent_from_file(&filepath);

if torrent_from_file_result.is_err() {
panic!("Error torrent file not found: {:?}", &filepath);
}
assert!(
torrent_from_file_result.is_ok(),
"Error torrent file not found: {:?}",
&filepath
);

let torrent_from_file = torrent_from_file_result.unwrap();

Expand All @@ -52,12 +53,11 @@ pub async fn transfer_torrents(
.await
.unwrap();

if id != torrent.torrent_id {
panic!(
"Error copying torrent {:?} from source DB to the target DB",
&torrent.torrent_id
);
}
assert!(
id == torrent.torrent_id,
"Error copying torrent {:?} from source DB to the target DB",
&torrent.torrent_id
);

println!("[v2][torrust_torrents] torrent with id {:?} added.", &torrent.torrent_id);

Expand Down Expand Up @@ -144,7 +144,7 @@ pub async fn transfer_torrents(
.flatten()
.collect::<Vec<String>>();

for tracker_url in announce_urls.iter() {
for tracker_url in &announce_urls {
println!(
"[v2][torrust_torrent_announce_urls][announce-list] adding the torrent announce url for torrent id {:?} ...",
&torrent.torrent_id
Expand Down
Loading

0 comments on commit 4a70ee0

Please sign in to comment.