dev: some basic cleanup for the http client
da2ce7 committed Mar 28, 2024
1 parent 2e1893a commit d67bae0
Showing 15 changed files with 370 additions and 272 deletions.
55 changes: 33 additions & 22 deletions packages/primitives/src/info_hash.rs
@@ -3,29 +3,18 @@ use std::panic::Location;

use thiserror::Error;

pub type ByteArray20 = [u8; 20];

/// `BitTorrent` Info Hash v1
#[derive(PartialEq, Eq, Hash, Clone, Copy, Default, Debug)]
pub struct InfoHash(pub [u8; 20]);
pub struct InfoHash(pub ByteArray20);

pub const INFO_HASH_BYTES_LEN: usize = 20;

impl InfoHash {
/// Create a new `InfoHash` from a byte slice.
///
/// # Panics
///
/// Will panic if byte slice does not contains the exact amount of bytes need for the `InfoHash`.
#[must_use]
pub fn from_bytes(bytes: &[u8]) -> Self {
assert_eq!(bytes.len(), INFO_HASH_BYTES_LEN);
let mut ret = Self([0u8; INFO_HASH_BYTES_LEN]);
ret.0.clone_from_slice(bytes);
ret
}

/// Returns the `InfoHash` internal byte array.
#[must_use]
pub fn bytes(&self) -> [u8; 20] {
pub fn bytes(&self) -> ByteArray20 {
self.0
}

@@ -57,14 +46,30 @@ impl std::fmt::Display for InfoHash {
}

impl std::str::FromStr for InfoHash {
type Err = binascii::ConvertError;
type Err = ConversionError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
const INFO_HASH_CHAR_LEN: usize = INFO_HASH_BYTES_LEN * 2;
let mut i = Self([0u8; 20]);
if s.len() != 40 {
return Err(binascii::ConvertError::InvalidInputLength);

if s.len() < INFO_HASH_CHAR_LEN {
return Err(ConversionError::NotEnoughBytes {
location: Location::caller(),
message: format! {"got {} string, expected {}", s.len(), INFO_HASH_CHAR_LEN},
});
}
binascii::hex2bin(s.as_bytes(), &mut i.0)?;
if s.len() > INFO_HASH_CHAR_LEN {
return Err(ConversionError::TooManyBytes {
location: Location::caller(),
message: format! {"got {} string, expected {}", s.len(), INFO_HASH_CHAR_LEN},
});
}

binascii::hex2bin(s.as_bytes(), &mut i.0).map_err(|e| ConversionError::HexToBinError {
location: Location::caller(),
message: format! {"got {e:?} error"},
})?;

Ok(i)
}
}
@@ -96,8 +101,8 @@ impl std::convert::From<&i32> for InfoHash {
}
}

impl std::convert::From<[u8; 20]> for InfoHash {
fn from(val: [u8; 20]) -> Self {
impl std::convert::From<ByteArray20> for InfoHash {
fn from(val: ByteArray20) -> Self {
InfoHash(val)
}
}
@@ -117,6 +122,12 @@ pub enum ConversionError {
location: &'static Location<'static>,
message: String,
},

#[error("hex to bin didn't parse: {message} {location}")]
HexToBinError {
location: &'static Location<'static>,
message: String,
},
}

impl TryFrom<Vec<u8>> for InfoHash {
Expand All @@ -135,7 +146,7 @@ impl TryFrom<Vec<u8>> for InfoHash {
message: format! {"got {} bytes, expected {}", bytes.len(), INFO_HASH_BYTES_LEN},
});
}
Ok(Self::from_bytes(&bytes))
Ok((*bytes).into())
}
}

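For orientation, a minimal sketch of how the revised `FromStr` implementation behaves (hypothetical usage written for this review, not part of the commit): a 40-character hex string parses into an `InfoHash`, while input of any other length now yields a descriptive `ConversionError` instead of the previous `binascii::ConvertError`.

use std::str::FromStr as _;

use torrust_tracker_primitives::info_hash::InfoHash;

fn parse_examples() {
    // A 40-character hex string (20 bytes) parses successfully.
    let ok = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422");
    assert!(ok.is_ok());

    // A shorter input now reports `ConversionError::NotEnoughBytes`, including the caller location.
    let err = InfoHash::from_str("9c3842");
    assert!(err.is_err());
}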
66 changes: 10 additions & 56 deletions src/console/clients/checker/checks/http.rs
@@ -1,41 +1,16 @@
use std::str::FromStr;
use std::sync::Arc;
use std::str::FromStr as _;
use std::time::Duration;

use anyhow::Result;
use colored::Colorize;
use thiserror::Error;
use torrust_tracker_primitives::info_hash::InfoHash;
use url::Url;

use crate::console::clients;
use crate::console::clients::checker::console::Console;
use crate::console::clients::checker::printer::Printer;
use crate::console::clients::checker::service::{CheckError, CheckResult};
use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

#[derive(Debug, Clone, Error)]
pub enum Error {
#[error("Http request did not receive a response within the timeout: {err:?}")]
HttpClientError {
err: crate::shared::bit_torrent::tracker::http::client::Error,
},
#[error("Http failed to get a response at all: {err:?}")]
ResponseError { err: Arc<reqwest::Error> },
#[error("Failed to deserialize the serde bencoded response data with the error: \"{err:?}\"")]
ParseSerdeBencodeError {
data: hyper::body::Bytes,
err: Arc<serde_bencode::Error>,
},

#[error("Failed to deserialize the bencoded response data with the error: \"{err:?}\"")]
ParseScrapeBencodeError {
data: hyper::body::Bytes,
err: Arc<scrape::BencodeParseError>,
},
}
use crate::console::clients::http::Error;
use crate::shared::bit_torrent::tracker::http::client::responses;

pub async fn run(http_trackers: Vec<Url>, timeout: Duration, console: Console) -> Vec<CheckResult> {
let mut check_results = Vec::default();
@@ -71,37 +46,16 @@ pub async fn run(http_trackers: Vec<Url>, timeout: Duration, console: Console) -
check_results
}

async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> {
async fn check_http_announce(url: &Url, timeout: Duration) -> Result<responses::Announce, Error> {
let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");

let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

let response = client
.announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
.await
.map_err(|err| Error::HttpClientError { err })?;

let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;

serde_bencode::from_bytes::<Announce>(&body).map_err(|e| Error::ParseSerdeBencodeError {
data: body,
err: e.into(),
})
clients::http::check_http_announce(url, timeout, info_hash).await
}

async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> {
let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");

let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?;

let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;
async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<responses::Scrape, Error> {
let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
let info_hashes = vec![InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required")]; // # DevSkim: ignore DS173237

scrape::Response::try_from_bencoded(&body).map_err(|e| Error::ParseScrapeBencodeError {
data: body,
err: e.into(),
})
clients::http::check_http_scrape(url, timeout, &info_hashes).await
}
3 changes: 2 additions & 1 deletion src/console/clients/checker/service.rs
@@ -10,6 +10,7 @@ use super::checks;
use super::config::Configuration;
use super::console::Console;
use crate::console::clients::checker::printer::Printer;
use crate::console::clients::http;

pub struct Service {
pub(crate) config: Arc<Configuration>,
@@ -23,7 +24,7 @@ pub enum CheckError {
#[error("Error In Udp: socket: {socket_addr:?}")]
UdpError { socket_addr: SocketAddr },
#[error("Error In Http: url: {url:?}")]
HttpCheckError { url: Url, err: checks::http::Error },
HttpCheckError { url: Url, err: http::Error },
#[error("Error In HeathCheck: url: {url:?}")]
HealthCheckError { url: Url, err: checks::health::Error },
}
46 changes: 17 additions & 29 deletions src/console/clients/http/app.rs
@@ -16,15 +16,12 @@
use std::str::FromStr;
use std::time::Duration;

use anyhow::Context;
use anyhow::{bail, Context};
use clap::{Parser, Subcommand};
use reqwest::Url;
use torrust_tracker_primitives::info_hash::InfoHash;

use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};
use crate::console::clients::http::{check_http_announce, check_http_scrape};

#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
@@ -64,43 +61,34 @@ pub async fn run() -> anyhow::Result<()> {
}

async fn announce_command(tracker_url: String, timeout: Duration, info_hash: String) -> anyhow::Result<()> {
let base_url = Url::parse(&tracker_url).context("failed to parse HTTP tracker base URL")?;
let info_hash =
InfoHash::from_str(&info_hash).expect("Invalid infohash. Example infohash: `9c38422213e30bff212b30c360d26f9a02136422`");
let url = Url::parse(&tracker_url).context("failed to parse HTTP tracker base URL")?;
let info_hash = InfoHash::from_str(&info_hash).context("Unable to parse info_hash provided as a string")?;

let response = Client::new(base_url, timeout)?
.announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
.await?;
let response = check_http_announce(&url, timeout, info_hash)
.await
.context("it should get a announce response")?;

let body = response.bytes().await.context("it should get back a valid response")?;

let announce_response: Announce = serde_bencode::from_bytes(&body).context(format!(
"response body should be a valid announce response, got: \"{:#?}\"",
&body
))?;

let json = serde_json::to_string(&announce_response).context("failed to serialize scrape response into JSON")?;
let json = serde_json::to_string(&response).context("failed to serialize scrape response into JSON")?;

println!("{json}");

Ok(())
}

async fn scrape_command(tracker_url: &str, timeout: Duration, info_hashes: &[String]) -> anyhow::Result<()> {
let base_url = Url::parse(tracker_url).context("failed to parse HTTP tracker base URL")?;
let i = info_hashes.iter().map(|s| InfoHash::from_str(s));

let query = requests::scrape::Query::try_from(info_hashes).context("failed to parse infohashes")?;

let response = Client::new(base_url, timeout)?.scrape(&query).await?;
if i.clone().any(|i| i.is_err()) {
bail!("supplied bad infohash: {:?}", i);
}

let body = response.bytes().await.context("it should get back a valid response")?;
let url = Url::parse(tracker_url).context("failed to parse HTTP tracker base URL")?;

let scrape_response = scrape::Response::try_from_bencoded(&body).context(format!(
"response body should be a valid scrape response, got: \"{:#?}\"",
&body
))?;
let response = check_http_scrape(&url, timeout, &i.flatten().collect::<Vec<_>>())
.await
.context("it should get the scrape result")?;

let json = serde_json::to_string(&scrape_response).context("failed to serialize scrape response into JSON")?;
let json = serde_json::to_string(&response).context("failed to serialize scrape response into JSON")?;

println!("{json}");

69 changes: 69 additions & 0 deletions src/console/clients/http/mod.rs
@@ -1 +1,70 @@
use std::sync::Arc;
use std::time::Duration;

use thiserror::Error;
use torrust_tracker_primitives::info_hash::InfoHash;
use url::Url;

use crate::shared::bit_torrent::tracker::http::client::requests::{announce, scrape};
use crate::shared::bit_torrent::tracker::http::client::{responses, Client};

pub mod app;

#[derive(Debug, Clone, Error)]
pub enum Error {
#[error("Http request did not receive a response within the timeout: {err:?}")]
HttpClientError {
err: crate::shared::bit_torrent::tracker::http::client::Error,
},
#[error("Http failed to get a response at all: {err:?}")]
ResponseError { err: Arc<reqwest::Error> },

#[error("Failed to deserialize the bencoded response data with the error: \"{err:?}\"")]
ParseBencodeError {
data: hyper::body::Bytes,
err: responses::BencodeParseError,
},
}

/// Sends an announce request to the given HTTP tracker and parses the bencoded response.
///
/// # Errors
///
/// This function will return an error if the HTTP client cannot be built, the request
/// fails or times out, the response body cannot be read, or the body is not a valid
/// bencoded announce response.
pub async fn check_http_announce(url: &Url, timeout: Duration, info_hash: InfoHash) -> Result<responses::Announce, Error> {
let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

let response = client
.announce(
&announce::QueryBuilder::with_default_values()
.with_info_hash(&info_hash)
.build(),
)
.await
.map_err(|err| Error::HttpClientError { err })?;

let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;

responses::announce::ResponseBuilder::try_from(&body)
.map_err(|err| Error::ParseBencodeError { data: body, err })
.map(responses::announce::ResponseBuilder::build)
}

/// Sends a scrape request for the given info-hashes to the HTTP tracker and parses the bencoded response.
///
/// # Errors
///
/// This function will return an error if the HTTP client cannot be built, the request
/// fails or times out, the response body cannot be read, or the body is not a valid
/// bencoded scrape response.
pub async fn check_http_scrape(url: &Url, timeout: Duration, info_hashes: &[InfoHash]) -> Result<responses::Scrape, Error> {
let query = info_hashes.iter().copied().collect::<scrape::QueryBuilder>().build();

let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?;

let body = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;

responses::scrape::ResponseBuilder::try_from(&body)
.map_err(|err| Error::ParseBencodeError { data: body, err })
.map(responses::scrape::ResponseBuilder::build)
}
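As a rough usage sketch of the two new shared helpers (assuming an async runtime such as tokio is available; the tracker URL, timeout, and info-hash below are illustrative values, not taken from the commit):

use std::str::FromStr as _;
use std::time::Duration;

use torrust_tracker_primitives::info_hash::InfoHash;
use url::Url;

use crate::console::clients::http::{check_http_announce, check_http_scrape, Error};

async fn example_checks() -> Result<(), Error> {
    let url = Url::parse("http://127.0.0.1:7070").expect("hard-coded URL is valid");
    let timeout = Duration::from_secs(5);
    let info_hash = InfoHash::from_str("9c38422213e30bff212b30c360d26f9a02136422").expect("a valid info-hash is required");

    // Announce first, then scrape the same info-hash; both return the parsed response types.
    let _announce = check_http_announce(&url, timeout, info_hash).await?;
    let _scrape = check_http_scrape(&url, timeout, &[info_hash]).await?;

    Ok(())
}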
2 changes: 1 addition & 1 deletion src/servers/udp/logging.rs
@@ -24,7 +24,7 @@ pub fn log_request(request: &Request, request_id: &RequestId, server_socket_addr
let transaction_id = announce_request.transaction_id;
let transaction_id_str = transaction_id.0.to_string();
let connection_id_str = announce_request.connection_id.0.to_string();
let info_hash_str = InfoHash::from_bytes(&announce_request.info_hash.0).to_hex_string();
let info_hash_str = InfoHash::from(announce_request.info_hash.0).to_hex_string();

tracing::span!(
target: "UDP TRACKER",