Skip to content

Commit

Permalink
feat: [#725] API. Add scrape filter to torrents endpoint
Browse files Browse the repository at this point in the history
The torrents endpoint allows getting a list of torrents by providing the
infohashes:

http://127.0.0.1:1212/api/v1/torrents?token=MyAccessToken&info_hash=9c38422213e30bff212b30c360d26f9a02136422&info_hash=2b66980093bc11806fab50cb3cb41835b95a0362

It's like the tracker "scrape" request. The response JSON is the same as
the normal torrent list:

```json
[
  {
    "info_hash": "9c38422213e30bff212b30c360d26f9a02136422",
    "seeders": 1,
    "completed": 0,
    "leechers": 0
  },
  {
    "info_hash": "2b66980093bc11806fab50cb3cb41835b95a0362",
    "seeders": 1,
    "completed": 0,
    "leechers": 0
  }
]
```
  • Loading branch information
josecelano committed Mar 11, 2024
1 parent 4b24256 commit d39bfc2
Show file tree
Hide file tree
Showing 3 changed files with 163 additions and 29 deletions.
36 changes: 29 additions & 7 deletions src/core/services/torrent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ pub async fn get_torrent_info(tracker: Arc<Tracker>, info_hash: &InfoHash) -> Op
}

/// It returns all the information the tracker has about multiple torrents in a [`BasicInfo`] struct, excluding the peer list.
pub async fn get_torrents(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec<BasicInfo> {
pub async fn get_torrents_page(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec<BasicInfo> {
let db = tracker.torrents.get_torrents().await;

let mut basic_infos: Vec<BasicInfo> = vec![];
Expand All @@ -134,6 +134,28 @@ pub async fn get_torrents(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec
basic_infos
}

/// It returns the information the tracker has about each of the requested
/// torrents as a [`BasicInfo`] struct, excluding the peer list.
///
/// Infohashes that the tracker does not know about are silently skipped, so
/// the result may contain fewer entries than `info_hashes`.
pub async fn get_torrents(tracker: Arc<Tracker>, info_hashes: &[InfoHash]) -> Vec<BasicInfo> {
    let db = tracker.torrents.get_torrents().await;

    info_hashes
        .iter()
        .filter_map(|info_hash| {
            db.get(info_hash).map(|entry| {
                let (seeders, completed, leechers) = entry.get_stats();
                BasicInfo {
                    info_hash: *info_hash,
                    seeders: u64::from(seeders),
                    completed: u64::from(completed),
                    leechers: u64::from(leechers),
                }
            })
        })
        .collect()
}

#[cfg(test)]
mod tests {
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
Expand Down Expand Up @@ -219,7 +241,7 @@ mod tests {
use torrust_tracker_test_helpers::configuration;

use crate::core::services::torrent::tests::sample_peer;
use crate::core::services::torrent::{get_torrents, BasicInfo, Pagination};
use crate::core::services::torrent::{get_torrents_page, BasicInfo, Pagination};
use crate::core::services::tracker_factory;
use crate::shared::bit_torrent::info_hash::InfoHash;

Expand All @@ -231,7 +253,7 @@ mod tests {
async fn should_return_an_empty_result_if_the_tracker_does_not_have_any_torrent() {
let tracker = Arc::new(tracker_factory(&tracker_configuration()));

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(torrents, vec![]);
}
Expand All @@ -247,7 +269,7 @@ mod tests {
.update_torrent_with_peer_and_get_stats(&info_hash, &sample_peer())
.await;

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
Expand Down Expand Up @@ -279,7 +301,7 @@ mod tests {
let offset = 0;
let limit = 1;

let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
}
Expand All @@ -303,7 +325,7 @@ mod tests {
let offset = 1;
let limit = 4000;

let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
assert_eq!(
Expand Down Expand Up @@ -333,7 +355,7 @@ mod tests {
.update_torrent_with_peer_and_get_stats(&info_hash2, &sample_peer())
.await;

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
Expand Down
91 changes: 70 additions & 21 deletions src/servers/apis/v1/context/torrent/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,15 @@ use std::fmt;
use std::str::FromStr;
use std::sync::Arc;

use axum::extract::{Path, Query, State};
use axum::response::{IntoResponse, Json, Response};
use axum::extract::{Path, State};
use axum::response::{IntoResponse, Response};
use axum_extra::extract::Query;
use log::debug;
use serde::{de, Deserialize, Deserializer};
use thiserror::Error;

use super::resources::torrent::ListItem;
use super::responses::{torrent_info_response, torrent_list_response, torrent_not_known_response};
use crate::core::services::torrent::{get_torrent_info, get_torrents, Pagination};
use crate::core::services::torrent::{get_torrent_info, get_torrents, get_torrents_page, Pagination};
use crate::core::Tracker;
use crate::servers::apis::v1::responses::invalid_info_hash_param_response;
use crate::servers::apis::InfoHashParam;
Expand All @@ -36,39 +37,87 @@ pub async fn get_torrent_handler(State(tracker): State<Arc<Tracker>>, Path(info_
}
}

/// A container for the optional URL query pagination parameters:
/// `offset` and `limit`.
/// A container for the URL query parameters.
///
/// Pagination: `offset` and `limit`.
/// Array of infohashes: `info_hash`.
///
/// You can either get all torrents with pagination or get a list of torrents
/// providing a list of infohashes. For example:
///
/// First page of torrents:
///
/// <http://127.0.0.1:1212/api/v1/torrents?token=MyAccessToken>
///
///
/// Only two torrents:
///
/// <http://127.0.0.1:1212/api/v1/torrents?token=MyAccessToken&info_hash=9c38422213e30bff212b30c360d26f9a02136422&info_hash=2b66980093bc11806fab50cb3cb41835b95a0362>
///
///
/// NOTICE: Pagination is ignored if array of infohashes is provided.
#[derive(Deserialize, Debug)]
pub struct PaginationParams {
pub struct QueryParams {
/// The offset of the first page to return. Starts at 0.
#[serde(default, deserialize_with = "empty_string_as_none")]
pub offset: Option<u32>,
/// The maximum number of items to return per page
/// The maximum number of items to return per page.
#[serde(default, deserialize_with = "empty_string_as_none")]
pub limit: Option<u32>,
/// A list of infohashes to retrieve.
#[serde(default, rename = "info_hash")]
pub info_hashes: Vec<String>,

Check warning on line 69 in src/servers/apis/v1/context/torrent/handlers.rs

View check run for this annotation

Codecov / codecov/patch

src/servers/apis/v1/context/torrent/handlers.rs#L69

Added line #L69 was not covered by tests
}

/// It handles the request to get a list of torrents.
///
/// It returns a `200` response with a json array with
/// [`ListItem`]
/// resources.
/// It returns a `200` response with a json array with [`crate::servers::apis::v1::context::torrent::resources::torrent::ListItem`] resources.
///
/// Refer to the [API endpoint documentation](crate::servers::apis::v1::context::torrent#list-torrents)
/// for more information about this endpoint.
pub async fn get_torrents_handler(
State(tracker): State<Arc<Tracker>>,
pagination: Query<PaginationParams>,
) -> Json<Vec<ListItem>> {
pub async fn get_torrents_handler(State(tracker): State<Arc<Tracker>>, pagination: Query<QueryParams>) -> Response {
debug!("pagination: {:?}", pagination);

torrent_list_response(
&get_torrents(
tracker.clone(),
&Pagination::new_with_options(pagination.0.offset, pagination.0.limit),
if pagination.0.info_hashes.is_empty() {
torrent_list_response(
&get_torrents_page(
tracker.clone(),
&Pagination::new_with_options(pagination.0.offset, pagination.0.limit),
)
.await,
)
.await,
)
.into_response()
} else {
match parse_info_hashes(pagination.0.info_hashes) {
Ok(info_hashes) => torrent_list_response(&get_torrents(tracker.clone(), &info_hashes).await).into_response(),
Err(err) => match err {
QueryParamError::InvalidInfoHash { info_hash } => invalid_info_hash_param_response(&info_hash),
},
}
}
}

#[derive(Error, Debug)]

Check warning on line 100 in src/servers/apis/v1/context/torrent/handlers.rs

View check run for this annotation

Codecov / codecov/patch

src/servers/apis/v1/context/torrent/handlers.rs#L100

Added line #L100 was not covered by tests
pub enum QueryParamError {
#[error("invalid infohash {info_hash}")]
InvalidInfoHash { info_hash: String },

Check warning on line 103 in src/servers/apis/v1/context/torrent/handlers.rs

View check run for this annotation

Codecov / codecov/patch

src/servers/apis/v1/context/torrent/handlers.rs#L102-L103

Added lines #L102 - L103 were not covered by tests
}

/// Parses each string in `info_hashes_str` into an [`InfoHash`].
///
/// Returns a [`QueryParamError::InvalidInfoHash`] carrying the offending
/// string as soon as one value fails to parse (short-circuits).
fn parse_info_hashes(info_hashes_str: Vec<String>) -> Result<Vec<InfoHash>, QueryParamError> {
    info_hashes_str
        .into_iter()
        .map(|info_hash_str| {
            InfoHash::from_str(&info_hash_str).map_err(|_| QueryParamError::InvalidInfoHash {
                info_hash: info_hash_str,
            })
        })
        .collect()
}

/// Serde deserialization decorator to map empty Strings to None,
Expand Down
65 changes: 64 additions & 1 deletion tests/servers/api/v1/contract/context/torrent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ use crate::servers::api::v1::contract::fixtures::{
use crate::servers::api::Started;

#[tokio::test]
async fn should_allow_getting_torrents() {
async fn should_allow_getting_all_torrents() {
let env = Started::new(&configuration::ephemeral().into()).await;

let info_hash = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap();
Expand Down Expand Up @@ -100,6 +100,48 @@ async fn should_allow_the_torrents_result_pagination() {
env.stop().await;
}

#[tokio::test]
async fn should_allow_getting_a_list_of_torrents_providing_infohashes() {
    let env = Started::new(&configuration::ephemeral().into()).await;

    let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); // DevSkim: ignore DS173237
    let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap(); // DevSkim: ignore DS173237

    env.add_torrent_peer(&info_hash_1, &PeerBuilder::default().into()).await;
    env.add_torrent_peer(&info_hash_2, &PeerBuilder::default().into()).await;

    // Request BOTH seeded torrents. The original test sent the first infohash
    // twice, so the second torrent was never exercised.
    let response = Client::new(env.get_connection_info())
        .get_torrents(Query::params(
            [
                QueryParam::new("info_hash", "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d"), // DevSkim: ignore DS173237
                QueryParam::new("info_hash", "0b3aea4adc213ce32295be85d3883a63bca25446"), // DevSkim: ignore DS173237
            ]
            .to_vec(),
        ))
        .await;

    assert_torrent_list(
        response,
        vec![
            torrent::ListItem {
                info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), // DevSkim: ignore DS173237
                seeders: 1,
                completed: 0,
                leechers: 0,
            },
            torrent::ListItem {
                info_hash: "0b3aea4adc213ce32295be85d3883a63bca25446".to_string(), // DevSkim: ignore DS173237
                seeders: 1,
                completed: 0,
                leechers: 0,
            },
        ],
    )
    .await;

    env.stop().await;
}

#[tokio::test]
async fn should_fail_getting_torrents_when_the_offset_query_parameter_cannot_be_parsed() {
let env = Started::new(&configuration::ephemeral().into()).await;
Expand Down Expand Up @@ -134,6 +176,27 @@ async fn should_fail_getting_torrents_when_the_limit_query_parameter_cannot_be_p
env.stop().await;
}

#[tokio::test]
async fn should_fail_getting_torrents_when_the_info_hash_parameter_is_invalid() {
    let env = Started::new(&configuration::ephemeral().into()).await;

    // Each of these values must be rejected with a 400 Bad Request.
    for invalid_info_hash in [" ", "-1", "1.1", "INVALID INFO_HASH"] {
        let response = Client::new(env.get_connection_info())
            .get_torrents(Query::params([QueryParam::new("info_hash", invalid_info_hash)].to_vec()))
            .await;

        assert_bad_request(
            response,
            &format!("Invalid URL: invalid infohash param: string \"{invalid_info_hash}\", expected a 40 character long string"),
        )
        .await;
    }

    env.stop().await;
}

#[tokio::test]
async fn should_not_allow_getting_torrents_for_unauthenticated_users() {
let env = Started::new(&configuration::ephemeral().into()).await;
Expand Down

0 comments on commit d39bfc2

Please sign in to comment.