feat!: [torrust#114] remove the torrents response min page size and add max page size

Changes the pagination of the GET torrents response.

The minimum page size for GET torrents results was removed.

A default page size for GET torrents results was added (10).

BREAKING CHANGE: a maximum page size was added (30). If you request more
than 30 torrents per page, the response will contain at most 30 torrents.
josecelano committed May 8, 2023
1 parent fc15671 commit 41b6000
Showing 3 changed files with 128 additions and 12 deletions.
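
To make the new paging rule concrete before the diff, here is a minimal runnable sketch of how the effective page size resolves after this change. It is illustrative only: `effective_page_size` is a hypothetical name, while 10 and 30 are the default and maximum from the commit message (the handler in the diff expresses the same rule with `default_page_size()` and `max_torrent_page_size()` helpers).

fn effective_page_size(requested: Option<u8>) -> u8 {
    // No page_size given -> default of 10; anything above 30 is capped.
    requested.unwrap_or(10).min(30)
}

fn main() {
    assert_eq!(effective_page_size(None), 10); // default applied
    assert_eq!(effective_page_size(Some(5)), 5); // the old minimum of 10 no longer applies
    assert_eq!(effective_page_size(Some(100)), 30); // BREAKING CHANGE: capped at 30
}
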
33 changes: 24 additions & 9 deletions src/routes/torrent.rs
@@ -29,7 +29,7 @@ pub fn init_routes(cfg: &mut web::ServiceConfig) {
                 .route(web::delete().to(delete_torrent_handler)),
         ),
     );
-    cfg.service(web::scope("/torrents").service(web::resource("").route(web::get().to(get_torrents))));
+    cfg.service(web::scope("/torrents").service(web::resource("").route(web::get().to(get_torrents_handler))));
 }

 #[derive(FromRow)]
@@ -321,30 +321,45 @@ pub async fn delete_torrent_handler(req: HttpRequest, app_data: WebAppData) -> S
     }))
 }

-// eg: /torrents?categories=music,other,movie&search=bunny&sort=size_DESC
-pub async fn get_torrents(params: Query<TorrentSearch>, app_data: WebAppData) -> ServiceResult<impl Responder> {
+/// It returns a list of torrents matching the search criteria.
+/// Eg: `/torrents?categories=music,other,movie&search=bunny&sort=size_DESC`
+///
+/// # Errors
+///
+/// Returns a `ServiceError::DatabaseError` if the database query fails.
+pub async fn get_torrents_handler(params: Query<TorrentSearch>, app_data: WebAppData) -> ServiceResult<impl Responder> {
     let sort = params.sort.unwrap_or(Sorting::UploadedDesc);

     let page = params.page.unwrap_or(0);

-    // make sure the min page size = 10
-    let page_size = match params.page_size.unwrap_or(30) {
-        0..=9 => 10,
-        v => v,
+    let page_size = params.page_size.unwrap_or(default_page_size());
+
+    let page_size = if page_size > max_torrent_page_size() {
+        max_torrent_page_size()
+    } else {
+        page_size
     };

-    let offset = (page * page_size as u32) as u64;
+    let offset = u64::from(page * u32::from(page_size));

     let categories = params.categories.as_csv::<String>().unwrap_or(None);

     let torrents_response = app_data
         .database
-        .get_torrents_search_sorted_paginated(&params.search, &categories, &sort, offset, page_size as u8)
+        .get_torrents_search_sorted_paginated(&params.search, &categories, &sort, offset, page_size)
         .await?;

     Ok(HttpResponse::Ok().json(OkResponse { data: torrents_response }))
 }

+fn max_torrent_page_size() -> u8 {
+    30
+}
+
+fn default_page_size() -> u8 {
+    10
+}
+
 fn get_torrent_infohash_from_request(req: &HttpRequest) -> Result<InfoHash, ServiceError> {
     match req.match_info().get("info_hash") {
         None => Err(ServiceError::BadRequest),
4 changes: 2 additions & 2 deletions tests/common/client.rs
@@ -89,8 +89,8 @@ impl Client {

     // Context: torrent

-    pub async fn get_torrents(&self) -> TextResponse {
-        self.http_client.get("torrents", Query::empty()).await
+    pub async fn get_torrents(&self, params: Query) -> TextResponse {
+        self.http_client.get("torrents", params).await
     }

     pub async fn get_torrent(&self, infohash: &InfoHash) -> TextResponse {
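Call sites of the test client change accordingly: the old zero-argument `get_torrents()` now takes an explicit `Query`. A hypothetical before/after sketch, using the `Query`, `QueryParam::new`, `Query::empty`, and `Query::with_params` helpers that appear in the e2e tests below:

// Before this commit:
// let response = client.get_torrents().await;

// After this commit, an empty query keeps the old behaviour:
let response = client.get_torrents(Query::empty()).await;

// ...and pagination parameters can now be passed through:
let paged = client
    .get_torrents(Query::with_params([QueryParam::new("page_size", "1")].to_vec()))
    .await;
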
103 changes: 102 additions & 1 deletion tests/e2e/contexts/torrent/contract.rs
@@ -24,6 +24,7 @@ mod for_guests {
     use crate::common::contexts::torrent::responses::{
         Category, File, TorrentDetails, TorrentDetailsResponse, TorrentListResponse,
     };
+    use crate::common::http::{Query, QueryParam};
     use crate::e2e::contexts::torrent::asserts::expected_torrent;
     use crate::e2e::contexts::torrent::steps::upload_random_torrent_to_index;
     use crate::e2e::contexts::user::steps::new_logged_in_user;
@@ -44,7 +45,7 @@
         let uploader = new_logged_in_user(&env).await;
         let (_test_torrent, indexed_torrent) = upload_random_torrent_to_index(&uploader, &env).await;

-        let response = client.get_torrents().await;
+        let response = client.get_torrents(Query::empty()).await;

         let torrent_list_response: TorrentListResponse = serde_json::from_str(&response.body).unwrap();

@@ -53,6 +54,106 @@
         assert!(response.is_json_and_ok());
     }

+    #[tokio::test]
+    async fn it_should_allow_to_get_torrents_with_pagination() {
+        let mut env = TestEnv::new();
+        env.start().await;
+
+        if !env.provides_a_tracker() {
+            println!("test skipped. It requires a tracker to be running.");
+            return;
+        }
+
+        let uploader = new_logged_in_user(&env).await;
+
+        // Given we insert two torrents
+        let (_test_torrent, _indexed_torrent) = upload_random_torrent_to_index(&uploader, &env).await;
+        let (_test_torrent, _indexed_torrent) = upload_random_torrent_to_index(&uploader, &env).await;
+
+        let client = Client::unauthenticated(&env.server_socket_addr().unwrap());
+
+        // When we request only one torrent per page
+        let response = client
+            .get_torrents(Query::with_params([QueryParam::new("page_size", "1")].to_vec()))
+            .await;
+
+        let torrent_list_response: TorrentListResponse = serde_json::from_str(&response.body).unwrap();
+
+        // Then we should have only one torrent per page
+        assert_eq!(torrent_list_response.data.results.len(), 1);
+        assert!(response.is_json_and_ok());
+    }
+
+    #[tokio::test]
+    async fn it_should_allow_to_limit_the_number_of_torrents_per_request() {
+        let mut env = TestEnv::new();
+        env.start().await;
+
+        if !env.provides_a_tracker() {
+            println!("test skipped. It requires a tracker to be running.");
+            return;
+        }
+
+        let uploader = new_logged_in_user(&env).await;
+
+        let max_torrent_page_size = 30;
+
+        // Given we insert as many torrents as the page size limit
+        for _ in 0..max_torrent_page_size {
+            let (_test_torrent, _indexed_torrent) = upload_random_torrent_to_index(&uploader, &env).await;
+        }
+
+        let client = Client::unauthenticated(&env.server_socket_addr().unwrap());
+
+        // When we request more torrents than the page size limit
+        let response = client
+            .get_torrents(Query::with_params(
+                [QueryParam::new(
+                    "page_size",
+                    &format!("{}", (max_torrent_page_size + 1).to_string()),
+                )]
+                .to_vec(),
+            ))
+            .await;
+
+        let torrent_list_response: TorrentListResponse = serde_json::from_str(&response.body).unwrap();
+
+        // Then we should get only the page size limit
+        assert_eq!(torrent_list_response.data.results.len(), max_torrent_page_size);
+        assert!(response.is_json_and_ok());
+    }
+
+    #[tokio::test]
+    async fn it_should_return_a_default_amount_of_torrents_per_request_if_no_page_size_is_provided() {
+        let mut env = TestEnv::new();
+        env.start().await;
+
+        if !env.provides_a_tracker() {
+            println!("test skipped. It requires a tracker to be running.");
+            return;
+        }
+
+        let uploader = new_logged_in_user(&env).await;
+
+        let default_torrent_page_size = 10;
+
+        // Given we insert as many torrents as the default page size
+        for _ in 0..default_torrent_page_size {
+            let (_test_torrent, _indexed_torrent) = upload_random_torrent_to_index(&uploader, &env).await;
+        }
+
+        let client = Client::unauthenticated(&env.server_socket_addr().unwrap());
+
+        // When we request torrents without specifying a page size
+        let response = client.get_torrents(Query::empty()).await;
+
+        let torrent_list_response: TorrentListResponse = serde_json::from_str(&response.body).unwrap();
+
+        // Then we should get only the default number of torrents per page
+        assert_eq!(torrent_list_response.data.results.len(), default_torrent_page_size);
+        assert!(response.is_json_and_ok());
+    }
+
     #[tokio::test]
     async fn it_should_allow_guests_to_get_torrent_details_searching_by_infohash() {
         let mut env = TestEnv::new();
