feat(http): [#192] scrape request for Axum HTTP tracker
josecelano committed Feb 27, 2023
1 parent ae1a076 commit 86ce93c
Showing 7 changed files with 162 additions and 64 deletions.
13 changes: 2 additions & 11 deletions src/http/axum_implementation/handlers/scrape.rs
@@ -1,14 +1,13 @@
use std::sync::Arc;

use axum::extract::State;
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use log::debug;

use crate::http::axum_implementation::extractors::peer_ip;
use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
use crate::http::axum_implementation::extractors::scrape_request::ExtractRequest;
use crate::http::axum_implementation::services;
use crate::http::axum_implementation::{responses, services};
use crate::tracker::Tracker;

#[allow(clippy::unused_async)]
@@ -19,20 +18,12 @@ pub async fn handle(
) -> Response {
debug!("http scrape request: {:#?}", &scrape_request);

/*
todo:
- [x] Add the service that sends the event for statistics.
- [ ] Build the HTTP bencoded response.
*/

let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip) {
Ok(peer_ip) => peer_ip,
Err(err) => return err,
};

let scrape_data = services::scrape::invoke(tracker.clone(), &scrape_request.info_hashes, &peer_ip).await;

debug!("scrape data: {:#?}", &scrape_data);

(StatusCode::OK, "todo").into_response()
responses::scrape::Bencoded::from(scrape_data).into_response()
}
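
With the placeholder (StatusCode::OK, "todo") response gone, the handler resolves the peer IP, invokes the scrape service, and returns the bencoded result. As a quick, hypothetical client-side check (not part of this commit), a scrape call could look like the sketch below; it assumes a tracker listening on 127.0.0.1:7070 and the reqwest and tokio crates:

    // Hypothetical client call (not from this commit). Assumes a public
    // tracker on 127.0.0.1:7070; the `info_hash` value is the raw 20-byte
    // hash, percent-encoded, as in the tests below.
    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let url = "http://127.0.0.1:7070/scrape?info_hash=%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0";
        let body = reqwest::get(url).await?.bytes().await?;
        // The body is a bencoded dictionary; for an unknown torrent the
        // tracker returns zeroed values, e.g.:
        // d5:filesd20:<raw bytes>d8:completei0e10:downloadedi0e10:incompletei0eeee
        println!("{}", String::from_utf8_lossy(&body));
        Ok(())
    }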
28 changes: 9 additions & 19 deletions src/http/axum_implementation/requests/scrape.rs
@@ -10,8 +10,8 @@ use crate::protocol::info_hash::{ConversionError, InfoHash};

pub type NumberOfBytes = i64;

// Query param name
const INFO_HASH_SCRAPE_PARAM: &str = "info_hash";
// Query param names
const INFO_HASH: &str = "info_hash";

#[derive(Debug, PartialEq)]
pub struct Scrape {
@@ -27,12 +27,6 @@ pub enum ParseScrapeQueryError {
location: &'static Location<'static>,
param_name: String,
},
#[error("invalid param value {param_value} for {param_name} in {location}")]
InvalidParam {
param_name: String,
param_value: String,
location: &'static Location<'static>,
},
#[error("invalid param value {param_value} for {param_name} in {source}")]
InvalidInfoHashParam {
param_name: String,
@@ -60,14 +54,14 @@ impl TryFrom<Query> for Scrape {
}

fn extract_info_hashes(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryError> {
match query.get_param_vec(INFO_HASH_SCRAPE_PARAM) {
match query.get_param_vec(INFO_HASH) {
Some(raw_params) => {
let mut info_hashes = vec![];

for raw_param in raw_params {
let info_hash =
percent_decode_info_hash(&raw_param).map_err(|err| ParseScrapeQueryError::InvalidInfoHashParam {
param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
param_name: INFO_HASH.to_owned(),
param_value: raw_param.clone(),
source: Located(err).into(),
})?;
@@ -80,7 +74,7 @@ fn extract_info_hashes(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryError> {
None => {
return Err(ParseScrapeQueryError::MissingParam {
location: Location::caller(),
param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
param_name: INFO_HASH.to_owned(),
})
}
}
@@ -92,16 +86,12 @@ mod tests {
mod scrape_request {

use crate::http::axum_implementation::query::Query;
use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};
use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH};
use crate::protocol::info_hash::InfoHash;

#[test]
fn should_be_instantiated_from_the_url_query_with_only_one_infohash() {
let raw_query = Query::from(vec![(
INFO_HASH_SCRAPE_PARAM,
"%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0",
)])
.to_string();
let raw_query = Query::from(vec![(INFO_HASH, "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0")]).to_string();

let query = raw_query.parse::<Query>().unwrap();

@@ -118,7 +108,7 @@ mod tests {
mod when_it_is_instantiated_from_the_url_query_params {

use crate::http::axum_implementation::query::Query;
use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};
use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH};

#[test]
fn it_should_fail_if_the_query_does_not_include_the_info_hash_param() {
@@ -129,7 +119,7 @@

#[test]
fn it_should_fail_if_the_info_hash_param_is_invalid() {
let raw_query = Query::from(vec![(INFO_HASH_SCRAPE_PARAM, "INVALID_INFO_HASH_VALUE")]).to_string();
let raw_query = Query::from(vec![(INFO_HASH, "INVALID_INFO_HASH_VALUE")]).to_string();

assert!(Scrape::try_from(raw_query.parse::<Query>().unwrap()).is_err());
}
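
BEP 48 allows the scrape URL query to repeat the info_hash param, one entry per torrent, which is why extract_info_hashes collects the values with get_param_vec. A minimal sketch of the multi-torrent case, reusing the crate types exercised in the tests above (the second hash value is made up for illustration, and it is assumed that Query::from preserves repeated keys, as get_param_vec suggests):

    use crate::http::axum_implementation::query::Query;
    use crate::http::axum_implementation::requests::scrape::Scrape;

    // Two torrents in one scrape query; each value is a percent-encoded
    // 20-byte info hash (the second one is a made-up example).
    let raw_query = Query::from(vec![
        ("info_hash", "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0"),
        ("info_hash", "%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA%AA"),
    ])
    .to_string();

    let scrape = Scrape::try_from(raw_query.parse::<Query>().unwrap()).unwrap();
    assert_eq!(scrape.info_hashes.len(), 2);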
1 change: 1 addition & 0 deletions src/http/axum_implementation/responses/mod.rs
@@ -1,3 +1,4 @@
pub mod announce;
pub mod error;
pub mod ok;
pub mod scrape;
106 changes: 106 additions & 0 deletions src/http/axum_implementation/responses/scrape.rs
@@ -0,0 +1,106 @@
use std::borrow::Cow;

use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use bip_bencode::{ben_int, ben_map, BMutAccess};

use crate::tracker::ScrapeData;

#[derive(Debug, PartialEq, Default)]
pub struct Bencoded {
scrape_data: ScrapeData,
}

impl Bencoded {
/// # Panics
///
/// Will panic if it can't access the bencode as a mutable `BDictAccess`.
#[must_use]
pub fn body(&self) -> Vec<u8> {
let mut scrape_list = ben_map!();

let scrape_list_mut = scrape_list.dict_mut().unwrap();

for (info_hash, value) in &self.scrape_data.files {
scrape_list_mut.insert(
Cow::from(info_hash.bytes().to_vec()),
ben_map! {
"complete" => ben_int!(i64::from(value.complete)),
"downloaded" => ben_int!(i64::from(value.downloaded)),
"incomplete" => ben_int!(i64::from(value.incomplete))
},
);
}

(ben_map! {
"files" => scrape_list
})
.encode()
}
}

impl From<ScrapeData> for Bencoded {
fn from(scrape_data: ScrapeData) -> Self {
Self { scrape_data }
}
}

impl IntoResponse for Bencoded {
fn into_response(self) -> Response {
(StatusCode::OK, self.body()).into_response()
}
}

#[cfg(test)]
mod tests {

mod scrape_response {
use crate::http::axum_implementation::responses::scrape::Bencoded;
use crate::protocol::info_hash::InfoHash;
use crate::tracker::torrent::SwarmMetadata;
use crate::tracker::ScrapeData;

fn sample_scrape_data() -> ScrapeData {
let info_hash = InfoHash([0x69; 20]);
let mut scrape_data = ScrapeData::empty();
scrape_data.add_file(
&info_hash,
SwarmMetadata {
complete: 1,
downloaded: 2,
incomplete: 3,
},
);
scrape_data
}

#[test]
fn should_be_converted_from_scrape_data() {
let response = Bencoded::from(sample_scrape_data());

assert_eq!(
response,
Bencoded {
scrape_data: sample_scrape_data()
}
);
}

#[test]
fn should_be_bencoded() {
let response = Bencoded {
scrape_data: sample_scrape_data(),
};

let bytes = response.body();
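
// The sample info hash is [0x69; 20] and 0x69 is ASCII 'i', which is why
// the dictionary key in the expected body shows twenty 'i' characters.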

// cspell:disable-next-line
let expected_bytes = b"d5:filesd20:iiiiiiiiiiiiiiiiiiiid8:completei1e10:downloadedi2e10:incompletei3eeee";

assert_eq!(
String::from_utf8(bytes).unwrap(),
String::from_utf8(expected_bytes.to_vec()).unwrap()
);
}
}
}
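
For reference, the expected body in the test above decodes as follows (whitespace added only for readability; real bencode has none):

    d
      5:files
      d
        20:iiiiiiiiiiiiiiiiiiii      <- the 20 raw info-hash bytes
        d
          8:complete    i1e
          10:downloaded i2e
          10:incomplete i3e
        e
      e
    e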
2 changes: 1 addition & 1 deletion src/tracker/mod.rs
@@ -52,7 +52,7 @@ pub struct AnnounceData {

#[derive(Debug, PartialEq, Default)]
pub struct ScrapeData {
files: HashMap<InfoHash, SwarmMetadata>,
pub files: HashMap<InfoHash, SwarmMetadata>,
}

impl ScrapeData {
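
The files map becomes public because the new responses::scrape::Bencoded::body iterates it directly (for (info_hash, value) in &self.scrape_data.files) when serializing the response.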
46 changes: 30 additions & 16 deletions tests/http/asserts.rs
@@ -78,6 +78,36 @@ pub async fn assert_is_announce_response(response: Response) {

// Error responses

// Specific errors for announce request

pub async fn assert_missing_query_params_for_announce_request_error_response(response: Response) {
assert_eq!(response.status(), 200);

assert_bencoded_error(
&response.text().await.unwrap(),
"missing query params for announce request",
Location::caller(),
);
}

pub async fn assert_bad_announce_request_error_response(response: Response, failure: &str) {
assert_cannot_parse_query_params_error_response(response, &format!(" for announce request: {failure}")).await;
}

// Specific errors for scrape request

pub async fn assert_missing_query_params_for_scrape_request_error_response(response: Response) {
assert_eq!(response.status(), 200);

assert_bencoded_error(
&response.text().await.unwrap(),
"missing query params for scrape request",
Location::caller(),
);
}

// Other errors

pub async fn assert_internal_server_error_response(response: Response) {
assert_eq!(response.status(), 200);

@@ -156,22 +186,6 @@ pub async fn assert_invalid_remote_address_on_xff_header_error_response(response: Response) {
);
}

// Specific errors for announce request

pub async fn assert_missing_query_params_for_announce_request_error_response(response: Response) {
assert_eq!(response.status(), 200);

assert_bencoded_error(
&response.text().await.unwrap(),
"missing query params for announce request",
Location::caller(),
);
}

pub async fn assert_bad_announce_request_error_response(response: Response, failure: &str) {
assert_cannot_parse_query_params_error_response(response, &format!(" for announce request: {failure}")).await;
}

pub async fn assert_cannot_parse_query_param_error_response(response: Response, failure: &str) {
assert_cannot_parse_query_params_error_response(response, &format!(": {failure}")).await;
}
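
Note that every error assertion above expects a 200 status: following BitTorrent tracker convention, failures are reported in the bencoded body rather than via the HTTP status code. The body conventionally has the shape below (a sketch; the exact message text comes from the tracker's error responses):

    d14:failure reason<len>:<message>e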
30 changes: 13 additions & 17 deletions tests/http_tracker.rs
@@ -2198,24 +2198,25 @@ mod axum_http_tracker_server {
use torrust_tracker::tracker::peer;

use crate::common::fixtures::{invalid_info_hashes, PeerBuilder};
use crate::http::asserts::{assert_internal_server_error_response, assert_scrape_response};
use crate::http::asserts::{
assert_cannot_parse_query_params_error_response, assert_missing_query_params_for_scrape_request_error_response,
assert_scrape_response,
};
use crate::http::client::Client;
use crate::http::requests;
use crate::http::requests::scrape::QueryBuilder;
use crate::http::responses::scrape::{self, File, ResponseBuilder};
use crate::http::server::{start_ipv6_http_tracker, start_public_http_tracker};

//#[tokio::test]
#[allow(dead_code)]
async fn should_fail_when_the_request_is_empty() {
#[tokio::test]
async fn should_fail_when_the_url_query_component_is_empty() {
let http_tracker_server = start_public_http_tracker(Version::Axum).await;
let response = Client::new(http_tracker_server.get_connection_info()).get("scrape").await;

assert_internal_server_error_response(response).await;
assert_missing_query_params_for_scrape_request_error_response(response).await;
}

//#[tokio::test]
#[allow(dead_code)]
#[tokio::test]
async fn should_fail_when_the_info_hash_param_is_invalid() {
let http_tracker_server = start_public_http_tracker(Version::Axum).await;

@@ -2228,13 +2229,11 @@
.get(&format!("announce?{params}"))
.await;

// code-review: it's not returning the invalid info hash error
assert_internal_server_error_response(response).await;
assert_cannot_parse_query_params_error_response(response, "").await;
}
}

//#[tokio::test]
#[allow(dead_code)]
#[tokio::test]
async fn should_return_the_file_with_the_incomplete_peer_when_there_is_one_peer_with_bytes_pending_to_download() {
let http_tracker = start_public_http_tracker(Version::Axum).await;

@@ -2272,8 +2271,7 @@
assert_scrape_response(response, &expected_scrape_response).await;
}

//#[tokio::test]
#[allow(dead_code)]
#[tokio::test]
async fn should_return_the_file_with_the_complete_peer_when_there_is_one_peer_with_no_bytes_pending_to_download() {
let http_tracker = start_public_http_tracker(Version::Axum).await;

@@ -2311,8 +2309,7 @@
assert_scrape_response(response, &expected_scrape_response).await;
}

//#[tokio::test]
#[allow(dead_code)]
#[tokio::test]
async fn should_return_a_file_with_zeroed_values_when_there_are_no_peers() {
let http_tracker = start_public_http_tracker(Version::Axum).await;

@@ -2329,8 +2326,7 @@
assert_scrape_response(response, &scrape::Response::with_one_file(info_hash.bytes(), File::zeroed())).await;
}

//#[tokio::test]
#[allow(dead_code)]
#[tokio::test]
async fn should_accept_multiple_infohashes() {
let http_tracker = start_public_http_tracker(Version::Axum).await;

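
With the bencoded scrape response in place, the previously disabled scrape tests come back to life: each commented-out //#[tokio::test] plus #[allow(dead_code)] pair becomes a live #[tokio::test], and the temporary assert_internal_server_error_response checks are replaced by the precise scrape-error assertions introduced in tests/http/asserts.rs.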
