Skip to content

Commit

Permalink
Merge #192: Axum HTTP tracker: scrape response in public mode
Browse files Browse the repository at this point in the history
4b3f979 refactor(udp): [#192] use new tracker::scrape method in UDP tracker (Jose Celano)
86ce93c feat(http): [#192] scrape request for Axum HTTP tracker (Jose Celano)
ae1a076 feat(http): [#191] add scrape app service (Jose Celano)
c4bee79 feat(http): [#191] add Tracker::scrape function (Jose Celano)
0c7735a fix(http): typo in comment (Jose Celano)
2de8265 feat(http): [#191] parse scrape req with multiple infohashes (Jose Celano)
30cf3b9 feat(http): [#192] Query struct allows multiple values for the same param (Jose Celano)
0cab696 feat(http): [#191] add cargo dependency: multimap (Jose Celano)
ea8d4d8 feat(http): [#191] add route and extractor for scrape req in Axum HTTP tracker (Jose Celano)

Pull request description:

  `scrape` request in the new Axum HTTP tracker implementation. Only for `public` mode.

Top commit has no ACKs.

Tree-SHA512: f1500c46b8e2e2eeb834ce5206a7f547b28de04dc2d20383d1c9b459fdf38fac89ec24420cc0e6447f5fdfa7d385231b10f3a28ed3fd986e44bb1f3e644294fc
  • Loading branch information
josecelano committed Feb 27, 2023
2 parents b826f59 + 4b3f979 commit 12a42b7
Show file tree
Hide file tree
Showing 26 changed files with 856 additions and 240 deletions.
10 changes: 10 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ axum = "0.6.1"
axum-server = { version = "0.4.4", features = ["tls-rustls"] }
axum-client-ip = "0.4.0"
bip_bencode = "0.4.4"
multimap = "0.8.3"


[dev-dependencies]
Expand Down
1 change: 1 addition & 0 deletions cSpell.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
"Lphant",
"middlewares",
"mockall",
"multimap",
"myacicontext",
"nanos",
"nextest",
Expand Down
1 change: 1 addition & 0 deletions src/http/axum_implementation/extractors/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
//! Axum extractors for the HTTP tracker: they build typed request values
//! from the incoming HTTP request parts.
pub mod announce_request;
// Resolves the peer IP address to use (direct connection vs. reverse proxy).
pub mod peer_ip;
pub mod remote_client_ip;
// Extractor for the `scrape` request (`ExtractRequest`).
pub mod scrape_request;
2 changes: 1 addition & 1 deletion src/http/axum_implementation/extractors/peer_ip.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ impl From<ResolutionError> for responses::error::Error {
///
/// Will return an error if the peer IP cannot be obtained according to the configuration.
/// For example, if the IP is extracted from an HTTP header which is missing in the request.
pub fn assign_ip_address_to_peer(on_reverse_proxy: bool, remote_client_ip: &RemoteClientIp) -> Result<IpAddr, Response> {
pub fn resolve(on_reverse_proxy: bool, remote_client_ip: &RemoteClientIp) -> Result<IpAddr, Response> {
if on_reverse_proxy {
if let Some(ip) = remote_client_ip.right_most_x_forwarded_for {
Ok(ip)
Expand Down
45 changes: 45 additions & 0 deletions src/http/axum_implementation/extractors/scrape_request.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
use std::panic::Location;

use axum::async_trait;
use axum::extract::FromRequestParts;
use axum::http::request::Parts;
use axum::response::{IntoResponse, Response};

use crate::http::axum_implementation::query::Query;
use crate::http::axum_implementation::requests::scrape::{ParseScrapeQueryError, Scrape};
use crate::http::axum_implementation::responses;

pub struct ExtractRequest(pub Scrape);

#[async_trait]
impl<S> FromRequestParts<S> for ExtractRequest
where
S: Send + Sync,
{
type Rejection = Response;

async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
let raw_query = parts.uri.query();

if raw_query.is_none() {
return Err(responses::error::Error::from(ParseScrapeQueryError::MissingParams {
location: Location::caller(),
})
.into_response());
}

let query = raw_query.unwrap().parse::<Query>();

if let Err(error) = query {
return Err(responses::error::Error::from(error).into_response());
}

let scrape_request = Scrape::try_from(query.unwrap());

if let Err(error) = scrape_request {
return Err(responses::error::Error::from(error).into_response());
}

Ok(ExtractRequest(scrape_request.unwrap()))
}
}
4 changes: 2 additions & 2 deletions src/http/axum_implementation/handlers/announce.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use axum::response::{IntoResponse, Response};
use log::debug;

use crate::http::axum_implementation::extractors::announce_request::ExtractRequest;
use crate::http::axum_implementation::extractors::peer_ip::assign_ip_address_to_peer;
use crate::http::axum_implementation::extractors::peer_ip;
use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
use crate::http::axum_implementation::requests::announce::{Announce, Compact, Event};
use crate::http::axum_implementation::responses::announce;
Expand All @@ -24,7 +24,7 @@ pub async fn handle(
) -> Response {
debug!("http announce request: {:#?}", announce_request);

let peer_ip = match assign_ip_address_to_peer(tracker.config.on_reverse_proxy, &remote_client_ip) {
let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip) {
Ok(peer_ip) => peer_ip,
Err(err) => return err,
};
Expand Down
1 change: 1 addition & 0 deletions src/http/axum_implementation/handlers/mod.rs
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
//! Axum route handlers for the HTTP tracker.
pub mod announce;
pub mod scrape;
pub mod status;
29 changes: 29 additions & 0 deletions src/http/axum_implementation/handlers/scrape.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use std::sync::Arc;

use axum::extract::State;
use axum::response::{IntoResponse, Response};
use log::debug;

use crate::http::axum_implementation::extractors::peer_ip;
use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
use crate::http::axum_implementation::extractors::scrape_request::ExtractRequest;
use crate::http::axum_implementation::{responses, services};
use crate::tracker::Tracker;

/// Axum handler for the HTTP tracker `scrape` request.
///
/// Resolves the peer IP (honouring the reverse-proxy configuration), invokes
/// the scrape service for the requested infohashes, and returns the result as
/// a bencoded response. IP-resolution failures are returned to the client
/// unchanged.
#[allow(clippy::unused_async)]
pub async fn handle(
    State(tracker): State<Arc<Tracker>>,
    ExtractRequest(scrape_request): ExtractRequest,
    remote_client_ip: RemoteClientIp,
) -> Response {
    debug!("http scrape request: {:#?}", &scrape_request);

    let resolved_ip = peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip);

    let peer_ip = match resolved_ip {
        Err(error_response) => return error_response,
        Ok(ip) => ip,
    };

    let scrape_data = services::scrape::invoke(tracker.clone(), &scrape_request.info_hashes, &peer_ip).await;

    responses::scrape::Bencoded::from(scrape_data).into_response()
}
2 changes: 1 addition & 1 deletion src/http/axum_implementation/handlers/status.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ use crate::http::axum_implementation::resources::ok::Ok;
use crate::http::axum_implementation::responses::ok;

#[allow(clippy::unused_async)]
pub async fn get_status_handler(remote_client_ip: RemoteClientIp) -> Json<Ok> {
/// Axum handler for the tracker status endpoint: responds with a JSON `Ok`
/// resource built from the resolved remote client IP.
pub async fn handle(remote_client_ip: RemoteClientIp) -> Json<Ok> {
    ok::response(&remote_client_ip)
}
Loading

0 comments on commit 12a42b7

Please sign in to comment.