dev: refactor torrent entry defs
da2ce7 committed Mar 22, 2024
1 parent d40f9c7 commit 90f7102
Showing 9 changed files with 111 additions and 107 deletions.
23 changes: 11 additions & 12 deletions packages/torrent-repository/src/entry.rs
@@ -8,6 +8,8 @@ use torrust_tracker_primitives::peer::ReadInfo as _;
use torrust_tracker_primitives::swarm_metadata::SwarmMetadata;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch};

use crate::{EntryMutexStd, EntryMutexTokio, EntrySingle};

/// A data structure containing all the information about a torrent in the tracker.
///
/// This is the tracker entry for a given torrent and contains the swarm data,
@@ -21,9 +23,6 @@ pub struct Entry {
/// The number of peers that have ever completed downloading the torrent associated to this entry
pub(crate) completed: u32,
}
pub type Single = Entry;
pub type MutexStd = Arc<std::sync::Mutex<Entry>>;
pub type MutexTokio = Arc<tokio::sync::Mutex<Entry>>;

pub trait ReadInfo {
/// It returns the swarm metadata (statistics) as a struct:
@@ -115,7 +114,7 @@ pub trait UpdateAsync {
fn remove_inactive_peers(self, current_cutoff: DurationSinceUnixEpoch) -> impl std::future::Future<Output = ()> + Send;
}

impl ReadInfo for Single {
impl ReadInfo for EntrySingle {
#[allow(clippy::cast_possible_truncation)]
fn get_stats(&self) -> SwarmMetadata {
let complete: u32 = self.peers.values().filter(|peer| peer.is_seeder()).count() as u32;
@@ -149,7 +148,7 @@ impl ReadInfo for Single {
}
}

impl ReadInfo for MutexStd {
impl ReadInfo for EntryMutexStd {
fn get_stats(&self) -> SwarmMetadata {
self.lock().expect("it should get a lock").get_stats()
}
@@ -167,7 +166,7 @@ impl ReadInfo for MutexStd {
}
}

impl ReadInfoAsync for MutexTokio {
impl ReadInfoAsync for EntryMutexTokio {
async fn get_stats(self) -> SwarmMetadata {
self.lock().await.get_stats()
}
@@ -185,7 +184,7 @@ impl ReadInfoAsync for MutexTokio {
}
}

impl ReadPeers for Single {
impl ReadPeers for EntrySingle {
fn get_peers(&self, limit: Option<usize>) -> Vec<Arc<peer::Peer>> {
match limit {
Some(limit) => self.peers.values().take(limit).cloned().collect(),
@@ -215,7 +214,7 @@ impl ReadPeers for Single {
}
}

impl ReadPeers for MutexStd {
impl ReadPeers for EntryMutexStd {
fn get_peers(&self, limit: Option<usize>) -> Vec<Arc<peer::Peer>> {
self.lock().expect("it should get lock").get_peers(limit)
}
@@ -225,7 +224,7 @@ impl ReadPeers for MutexStd {
}
}

impl ReadPeersAsync for MutexTokio {
impl ReadPeersAsync for EntryMutexTokio {
async fn get_peers(self, limit: Option<usize>) -> Vec<Arc<peer::Peer>> {
self.lock().await.get_peers(limit)
}
@@ -235,7 +234,7 @@ impl ReadPeersAsync for MutexTokio {
}
}

impl Update for Single {
impl Update for EntrySingle {
fn insert_or_update_peer(&mut self, peer: &peer::Peer) -> bool {
let mut did_torrent_stats_change: bool = false;

@@ -270,7 +269,7 @@ impl Update for Single {
}
}

impl UpdateSync for MutexStd {
impl UpdateSync for EntryMutexStd {
fn insert_or_update_peer(&self, peer: &peer::Peer) -> bool {
self.lock().expect("it should lock the entry").insert_or_update_peer(peer)
}
@@ -288,7 +287,7 @@ impl UpdateSync for MutexStd {
}
}

impl UpdateAsync for MutexTokio {
impl UpdateAsync for EntryMutexTokio {
async fn insert_or_update_peer(self, peer: &peer::Peer) -> bool {
self.lock().await.insert_or_update_peer(peer)
}
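The pattern exercised in this file can be sketched in isolation: one read trait implemented both for the plain entry and for its `Arc<std::sync::Mutex<_>>` wrapper, with the wrapper delegating through the lock. The `Entry` struct and `completed()` method below are simplified stand-ins for illustration, not the crate's real API.

```rust
use std::sync::{Arc, Mutex};

#[derive(Default)]
struct Entry {
    completed: u32,
}

trait ReadInfo {
    fn completed(&self) -> u32;
}

impl ReadInfo for Entry {
    fn completed(&self) -> u32 {
        self.completed
    }
}

impl ReadInfo for Arc<Mutex<Entry>> {
    fn completed(&self) -> u32 {
        // Delegate to the inner entry through the lock, as the EntryMutexStd impl does.
        self.lock().expect("it should get a lock").completed()
    }
}

fn main() {
    let single = Entry { completed: 3 };
    let shared: Arc<Mutex<Entry>> = Arc::new(Mutex::new(Entry { completed: 5 }));
    assert_eq!(single.completed(), 3);
    assert_eq!(shared.completed(), 5);
}
```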
18 changes: 12 additions & 6 deletions packages/torrent-repository/src/lib.rs
@@ -1,9 +1,15 @@
use std::sync::Arc;

pub mod entry;
pub mod repository;

pub type TorrentsRwLockStd = repository::RwLockStd<entry::Single>;
pub type TorrentsRwLockStdMutexStd = repository::RwLockStd<entry::MutexStd>;
pub type TorrentsRwLockStdMutexTokio = repository::RwLockStd<entry::MutexTokio>;
pub type TorrentsRwLockTokio = repository::RwLockTokio<entry::Single>;
pub type TorrentsRwLockTokioMutexStd = repository::RwLockTokio<entry::MutexStd>;
pub type TorrentsRwLockTokioMutexTokio = repository::RwLockTokio<entry::MutexTokio>;
pub type EntrySingle = entry::Entry;
pub type EntryMutexStd = Arc<std::sync::Mutex<entry::Entry>>;
pub type EntryMutexTokio = Arc<tokio::sync::Mutex<entry::Entry>>;

pub type TorrentsRwLockStd = repository::RwLockStd<EntrySingle>;
pub type TorrentsRwLockStdMutexStd = repository::RwLockStd<EntryMutexStd>;
pub type TorrentsRwLockStdMutexTokio = repository::RwLockStd<EntryMutexTokio>;
pub type TorrentsRwLockTokio = repository::RwLockTokio<EntrySingle>;
pub type TorrentsRwLockTokioMutexStd = repository::RwLockTokio<EntryMutexStd>;
pub type TorrentsRwLockTokioMutexTokio = repository::RwLockTokio<EntryMutexTokio>;
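The effect on call sites can be shown with a self-contained sketch: the concrete type stays in an `entry` module, while the crate root exposes the aliases that callers now import directly (the module contents here are placeholders, not the real entry type).

```rust
use std::sync::{Arc, Mutex};

pub mod entry {
    /// Simplified stand-in for the tracker's torrent entry.
    #[derive(Default, Debug)]
    pub struct Entry {
        pub completed: u32,
    }
}

// Crate-root aliases mirroring the new lib.rs layout.
pub type EntrySingle = entry::Entry;
pub type EntryMutexStd = Arc<Mutex<entry::Entry>>;

fn main() {
    // Callers name the alias from the crate root instead of `entry::MutexStd`.
    let shared: EntryMutexStd = Arc::new(Mutex::new(EntrySingle::default()));
    shared.lock().expect("it should get a lock").completed += 1;
    assert_eq!(shared.lock().expect("it should get a lock").completed, 1);
}
```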
24 changes: 12 additions & 12 deletions packages/torrent-repository/src/repository/rw_lock_std.rs
@@ -9,20 +9,20 @@ use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};

use super::{Repository, UpdateTorrentSync};
use crate::entry::{self, ReadInfo, Update};
use crate::TorrentsRwLockStd;
use crate::entry::{ReadInfo, Update};
use crate::{EntrySingle, TorrentsRwLockStd};

impl TorrentsRwLockStd {
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, entry::Single>>
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, EntrySingle>>
where
std::collections::BTreeMap<InfoHash, entry::Single>: 'a,
std::collections::BTreeMap<InfoHash, EntrySingle>: 'a,
{
self.torrents.read().expect("it should get the read lock")
}

fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, entry::Single>>
fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, EntrySingle>>
where
std::collections::BTreeMap<InfoHash, entry::Single>: 'a,
std::collections::BTreeMap<InfoHash, EntrySingle>: 'a,
{
self.torrents.write().expect("it should get the write lock")
}
@@ -32,7 +32,7 @@ impl UpdateTorrentSync for TorrentsRwLockStd {
fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> (bool, SwarmMetadata) {
let mut db = self.get_torrents_mut();

let entry = db.entry(*info_hash).or_insert(entry::Single::default());
let entry = db.entry(*info_hash).or_insert(EntrySingle::default());

entry.insert_or_update_peer_and_get_stats(peer)
}
@@ -44,8 +44,8 @@ impl UpdateTorrentSync for Arc<TorrentsRwLockStd> {
}
}

impl Repository<entry::Single> for TorrentsRwLockStd {
async fn get(&self, key: &InfoHash) -> Option<entry::Single> {
impl Repository<EntrySingle> for TorrentsRwLockStd {
async fn get(&self, key: &InfoHash) -> Option<EntrySingle> {
let db = self.get_torrents();
db.get(key).cloned()
}
@@ -64,7 +64,7 @@ impl Repository<entry::Single> for TorrentsRwLockStd {
metrics
}

async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, entry::Single)> {
async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, EntrySingle)> {
let db = self.get_torrents();

match pagination {
@@ -87,7 +87,7 @@ impl Repository<entry::Single> for TorrentsRwLockStd {
continue;
}

let entry = entry::Single {
let entry = EntrySingle {
peers: BTreeMap::default(),
completed: *completed,
};
@@ -96,7 +96,7 @@ impl Repository<entry::Single> for TorrentsRwLockStd {
}
}

async fn remove(&self, key: &InfoHash) -> Option<entry::Single> {
async fn remove(&self, key: &InfoHash) -> Option<EntrySingle> {
let mut db = self.get_torrents_mut();
db.remove(key)
}
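The repository above boils down to a std `RwLock` around a `BTreeMap`, with `entry(...).or_insert(...)` on update. A minimal stand-alone sketch follows; the key and entry types are placeholders, not the real `InfoHash` or `EntrySingle`.

```rust
use std::collections::BTreeMap;
use std::sync::RwLock;

// Placeholder key; the real repository is keyed by InfoHash.
type Key = [u8; 20];

#[derive(Default, Clone)]
struct Entry {
    completed: u32,
}

struct Repo {
    torrents: RwLock<BTreeMap<Key, Entry>>,
}

impl Repo {
    fn bump_completed(&self, key: Key) -> u32 {
        let mut db = self.torrents.write().expect("it should get the write lock");
        let entry = db.entry(key).or_insert_with(Entry::default);
        entry.completed += 1;
        entry.completed
    }

    fn get(&self, key: &Key) -> Option<Entry> {
        let db = self.torrents.read().expect("it should get the read lock");
        db.get(key).cloned()
    }
}

fn main() {
    let repo = Repo { torrents: RwLock::new(BTreeMap::new()) };
    let key = [0u8; 20];
    assert_eq!(repo.bump_completed(key), 1);
    assert_eq!(repo.bump_completed(key), 2);
    assert_eq!(repo.get(&key).map(|e| e.completed), Some(2));
}
```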
24 changes: 12 additions & 12 deletions packages/torrent-repository/src/repository/rw_lock_std_mutex_std.rs
@@ -9,20 +9,20 @@ use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};

use super::{Repository, UpdateTorrentSync};
use crate::entry::{self, ReadInfo, UpdateSync};
use crate::TorrentsRwLockStdMutexStd;
use crate::entry::{ReadInfo, UpdateSync};
use crate::{EntryMutexStd, EntrySingle, TorrentsRwLockStdMutexStd};

impl TorrentsRwLockStdMutexStd {
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, entry::MutexStd>>
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, EntryMutexStd>>
where
std::collections::BTreeMap<InfoHash, entry::MutexStd>: 'a,
std::collections::BTreeMap<InfoHash, crate::EntryMutexStd>: 'a,
{
self.torrents.read().expect("unable to get torrent list")
}

fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, entry::MutexStd>>
fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, EntryMutexStd>>
where
std::collections::BTreeMap<InfoHash, entry::MutexStd>: 'a,
std::collections::BTreeMap<InfoHash, EntryMutexStd>: 'a,
{
self.torrents.write().expect("unable to get writable torrent list")
}
@@ -50,8 +50,8 @@ impl UpdateTorrentSync for Arc<TorrentsRwLockStdMutexStd> {
}
}

impl Repository<entry::MutexStd> for TorrentsRwLockStdMutexStd {
async fn get(&self, key: &InfoHash) -> Option<entry::MutexStd> {
impl Repository<EntryMutexStd> for TorrentsRwLockStdMutexStd {
async fn get(&self, key: &InfoHash) -> Option<EntryMutexStd> {
let db = self.get_torrents();
db.get(key).cloned()
}
@@ -70,7 +70,7 @@ impl Repository<entry::MutexStd> for TorrentsRwLockStdMutexStd {
metrics
}

async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, entry::MutexStd)> {
async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, EntryMutexStd)> {
let db = self.get_torrents();

match pagination {
@@ -93,8 +93,8 @@ impl Repository<entry::MutexStd> for TorrentsRwLockStdMutexStd {
continue;
}

let entry = entry::MutexStd::new(
entry::Single {
let entry = EntryMutexStd::new(
EntrySingle {
peers: BTreeMap::default(),
completed: *completed,
}
@@ -105,7 +105,7 @@ impl Repository<entry::MutexStd> for TorrentsRwLockStdMutexStd {
}
}

async fn remove(&self, key: &InfoHash) -> Option<entry::MutexStd> {
async fn remove(&self, key: &InfoHash) -> Option<EntryMutexStd> {
let mut db = self.get_torrents_mut();
db.remove(key)
}
packages/torrent-repository/src/repository/rw_lock_std_mutex_tokio.rs
@@ -13,19 +13,19 @@ use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrent

use super::{Repository, UpdateTorrentAsync};
use crate::entry::{self, ReadInfo, UpdateAsync};
use crate::TorrentsRwLockStdMutexTokio;
use crate::{EntryMutexTokio, EntrySingle, TorrentsRwLockStdMutexTokio};

impl TorrentsRwLockStdMutexTokio {
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, entry::MutexTokio>>
fn get_torrents<'a>(&'a self) -> std::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, EntryMutexTokio>>
where
std::collections::BTreeMap<InfoHash, entry::MutexTokio>: 'a,
std::collections::BTreeMap<InfoHash, EntryMutexTokio>: 'a,
{
self.torrents.read().expect("unable to get torrent list")
}

fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, entry::MutexTokio>>
fn get_torrents_mut<'a>(&'a self) -> std::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, EntryMutexTokio>>
where
std::collections::BTreeMap<InfoHash, entry::MutexTokio>: 'a,
std::collections::BTreeMap<InfoHash, EntryMutexTokio>: 'a,
{
self.torrents.write().expect("unable to get writable torrent list")
}
@@ -53,13 +53,13 @@ impl UpdateTorrentAsync for Arc<TorrentsRwLockStdMutexTokio> {
}
}

impl Repository<entry::MutexTokio> for TorrentsRwLockStdMutexTokio {
async fn get(&self, key: &InfoHash) -> Option<entry::MutexTokio> {
impl Repository<EntryMutexTokio> for TorrentsRwLockStdMutexTokio {
async fn get(&self, key: &InfoHash) -> Option<EntryMutexTokio> {
let db = self.get_torrents();
db.get(key).cloned()
}

async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, entry::MutexTokio)> {
async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, EntryMutexTokio)> {
let db = self.get_torrents();

match pagination {
@@ -98,8 +98,8 @@ impl Repository<entry::MutexTokio> for TorrentsRwLockStdMutexTokio {
continue;
}

let entry = entry::MutexTokio::new(
entry::Single {
let entry = EntryMutexTokio::new(
EntrySingle {
peers: BTreeMap::default(),
completed: *completed,
}
@@ -110,7 +110,7 @@ impl Repository<entry::MutexTokio> for TorrentsRwLockStdMutexTokio {
}
}

async fn remove(&self, key: &InfoHash) -> Option<entry::MutexTokio> {
async fn remove(&self, key: &InfoHash) -> Option<EntryMutexTokio> {
let mut db = self.get_torrents_mut();
db.remove(key)
}
24 changes: 12 additions & 12 deletions packages/torrent-repository/src/repository/rw_lock_tokio.rs
@@ -9,22 +9,22 @@ use torrust_tracker_primitives::torrent_metrics::TorrentsMetrics;
use torrust_tracker_primitives::{peer, DurationSinceUnixEpoch, PersistentTorrents};

use super::{Repository, UpdateTorrentAsync};
use crate::entry::{self, ReadInfo, Update};
use crate::TorrentsRwLockTokio;
use crate::entry::{ReadInfo, Update};
use crate::{EntrySingle, TorrentsRwLockTokio};

impl TorrentsRwLockTokio {
async fn get_torrents<'a>(&'a self) -> tokio::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, entry::Single>>
async fn get_torrents<'a>(&'a self) -> tokio::sync::RwLockReadGuard<'a, std::collections::BTreeMap<InfoHash, EntrySingle>>
where
std::collections::BTreeMap<InfoHash, entry::Single>: 'a,
std::collections::BTreeMap<InfoHash, EntrySingle>: 'a,
{
self.torrents.read().await
}

async fn get_torrents_mut<'a>(
&'a self,
) -> tokio::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, entry::Single>>
) -> tokio::sync::RwLockWriteGuard<'a, std::collections::BTreeMap<InfoHash, EntrySingle>>
where
std::collections::BTreeMap<InfoHash, entry::Single>: 'a,
std::collections::BTreeMap<InfoHash, EntrySingle>: 'a,
{
self.torrents.write().await
}
@@ -34,7 +34,7 @@ impl UpdateTorrentAsync for TorrentsRwLockTokio {
async fn update_torrent_with_peer_and_get_stats(&self, info_hash: &InfoHash, peer: &peer::Peer) -> (bool, SwarmMetadata) {
let mut db = self.get_torrents_mut().await;

let entry = db.entry(*info_hash).or_insert(entry::Single::default());
let entry = db.entry(*info_hash).or_insert(EntrySingle::default());

entry.insert_or_update_peer_and_get_stats(peer)
}
@@ -46,13 +46,13 @@ impl UpdateTorrentAsync for Arc<TorrentsRwLockTokio> {
}
}

impl Repository<entry::Single> for TorrentsRwLockTokio {
async fn get(&self, key: &InfoHash) -> Option<entry::Single> {
impl Repository<EntrySingle> for TorrentsRwLockTokio {
async fn get(&self, key: &InfoHash) -> Option<EntrySingle> {
let db = self.get_torrents().await;
db.get(key).cloned()
}

async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, entry::Single)> {
async fn get_paginated(&self, pagination: Option<&Pagination>) -> Vec<(InfoHash, EntrySingle)> {
let db = self.get_torrents().await;

match pagination {
@@ -89,7 +89,7 @@ impl Repository<entry::Single> for TorrentsRwLockTokio {
continue;
}

let entry = entry::Single {
let entry = EntrySingle {
peers: BTreeMap::default(),
completed: *completed,
};
@@ -98,7 +98,7 @@ impl Repository<entry::Single> for TorrentsRwLockTokio {
}
}

async fn remove(&self, key: &InfoHash) -> Option<entry::Single> {
async fn remove(&self, key: &InfoHash) -> Option<EntrySingle> {
let mut db = self.get_torrents_mut().await;
db.remove(key)
}
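For the tokio-backed variants, the same delegation happens through an async lock. A rough sketch, assuming the tokio crate with the `sync`, `rt`, and `macros` features (or `full`); the `Entry` type and `bump` helper are illustrative, not part of the crate.

```rust
use std::sync::Arc;

#[derive(Default)]
struct Entry {
    completed: u32,
}

// Mirrors the EntryMutexTokio alias: the entry behind tokio's async Mutex.
type EntryMutexTokio = Arc<tokio::sync::Mutex<Entry>>;

async fn bump(entry: &EntryMutexTokio) -> u32 {
    // Await the lock instead of blocking, as the async impls above do.
    let mut guard = entry.lock().await;
    guard.completed += 1;
    guard.completed
}

#[tokio::main]
async fn main() {
    let entry: EntryMutexTokio = Arc::new(tokio::sync::Mutex::new(Entry::default()));
    assert_eq!(bump(&entry).await, 1);
    assert_eq!(bump(&entry).await, 2);
}
```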