Skip to content

Commit

Permalink
WIP: Cuprate#178, write banned peers to disk
Browse files Browse the repository at this point in the history
  • Loading branch information
willco-1 committed Sep 10, 2024
1 parent 1c32ce5 commit 3297777
Show file tree
Hide file tree
Showing 5 changed files with 92 additions and 30 deletions.
3 changes: 2 additions & 1 deletion p2p/address-book/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ indexmap = { workspace = true, features = ["std"] }

rand = { workspace = true, features = ["std", "std_rng"] }

borsh = { workspace = true, features = ["derive", "std"]}
borsh = { git = "https://github.com/willco-1/borsh-rs.git", features = ["derive", "std"], optional = true }


[dev-dependencies]
cuprate-test-utils = {path = "../../test-utils"}
Expand Down
4 changes: 4 additions & 0 deletions p2p/address-book/src/book.rs
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,9 @@ impl<Z: BorshNetworkZone> AddressBook<Z> {
}
}




fn poll_save_to_disk(&mut self, cx: &mut Context<'_>) {
if let Some(handle) = &mut self.peer_save_task_handle {
// if we have already spawned a task to save the peer list wait for that to complete.
Expand Down Expand Up @@ -130,6 +133,7 @@ impl<Z: BorshNetworkZone> AddressBook<Z> {
&self.cfg,
&self.white_list,
&self.gray_list,
&self.banned_peers,
));
}

Expand Down
29 changes: 29 additions & 0 deletions p2p/address-book/src/peer_list/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,35 @@ fn make_fake_peer_list_with_random_pruning_seeds(
}
PeerList::new(peer_list)
}
/// Builds a list of `numb_o_peers` fake peers, banning roughly 20% of them
/// for a random duration between one hour and one day.
///
/// Returns the peer list together with a map from ban ID to the `Instant`
/// at which that ban expires.
pub fn make_fake_peer_list_with_bans<Z: BorshNetworkZone>(
    numb_o_peers: u32,
) -> (PeerList<Z>, HashMap<Z::BanID, Instant>)
where
    Z::Addr: NetZoneAddress,
{
    let mut rng = rand::thread_rng();
    let mut peers = Vec::with_capacity(numb_o_peers as usize);
    let mut bans = HashMap::new();

    for idx in 0..numb_o_peers {
        let peer = make_fake_peer(idx, None);

        // Ban ~1 in 5 peers, expiring 1h..24h from now.
        if rng.gen_bool(0.2) {
            let expires_at = Instant::now() + Duration::from_secs(rng.gen_range(3600..86400));
            bans.insert(peer.addr.ban_id(), expires_at);
        }

        peers.push(peer);
    }

    (PeerList::new(peers), bans)
}

#[test]
fn peer_list_reduce_length() {
Expand Down
83 changes: 56 additions & 27 deletions p2p/address-book/src/store.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
use std::{
    collections::HashMap,
    fs,
    time::Duration,
};

use borsh::{from_slice, to_vec, BorshDeserialize, BorshSerialize};
use tokio::task::{spawn_blocking, JoinHandle};
use tokio::time::Instant;

use cuprate_p2p_core::{services::ZoneSpecificPeerListEntryBase, NetZoneAddress};

Expand All @@ -13,76 +15,95 @@ use crate::{peer_list::PeerList, AddressBookConfig, BorshNetworkZone};
struct SerPeerDataV1<'a, A: NetZoneAddress> {
white_list: Vec<&'a ZoneSpecificPeerListEntryBase<A>>,
gray_list: Vec<&'a ZoneSpecificPeerListEntryBase<A>>,
banned_peers: &'a HashMap<<A as NetZoneAddress>::BanID, Instant>,
banned_peers: HashMap<<A as NetZoneAddress>::BanID, u64>,
}


/// Owned deserialization counterpart of `SerPeerDataV1`, read back when the
/// address book is loaded from disk.
#[derive(BorshDeserialize)]
struct DeserPeerDataV1<A: NetZoneAddress> {
    white_list: Vec<ZoneSpecificPeerListEntryBase<A>>,
    gray_list: Vec<ZoneSpecificPeerListEntryBase<A>>,
    // Milliseconds-remaining per ban ID at save time; rebased onto
    // `Instant::now()` when loaded (see `read_peers_from_disk`).
    banned_peers: HashMap<<A as NetZoneAddress>::BanID, u64>,
}



/// Serializes the white list, gray list, and banned-peer map and spawns a
/// blocking task that writes the bytes to `cfg.peer_store_file`.
///
/// Returns the `JoinHandle` of the write task; the caller is responsible
/// for awaiting/polling it.
pub fn save_peers_to_disk<Z: BorshNetworkZone>(
    cfg: &AddressBookConfig,
    white_list: &PeerList<Z>,
    gray_list: &PeerList<Z>,
    banned_peers: &HashMap<<Z::Addr as NetZoneAddress>::BanID, Instant>,
) -> JoinHandle<std::io::Result<()>> {
    // Serialize on this thread: saves happen so infrequently that moving the
    // work off-thread (which would require cloning the data) isn't worth it.
    let data = to_vec(&SerPeerDataV1 {
        white_list: white_list.peers.values().collect(),
        gray_list: gray_list.peers.values().collect(),
        // `Instant` cannot be serialized directly, so store each ban as the
        // milliseconds remaining until it expires. Already-expired bans
        // saturate to 0 (tokio's `duration_since` never panics).
        banned_peers: banned_peers
            .iter()
            .map(|(&ban_id, &until)| {
                (ban_id, until.duration_since(Instant::now()).as_millis() as u64)
            })
            .collect(),
    })
    .unwrap();

    let file = cfg.peer_store_file.clone();
    spawn_blocking(move || fs::write(&file, &data))
}

pub async fn read_peers_from_disk<Z: BorshNetworkZone>(
cfg: &AddressBookConfig,
) -> Result<
(
Vec<ZoneSpecificPeerListEntryBase<Z::Addr>>,
Vec<ZoneSpecificPeerListEntryBase<Z::Addr>>,
),
std::io::Error,
> {
) -> Result<(
Vec<ZoneSpecificPeerListEntryBase<Z::Addr>>,
Vec<ZoneSpecificPeerListEntryBase<Z::Addr>>,
HashMap<<Z::Addr as NetZoneAddress>::BanID, Instant>,
), std::io::Error> {
let file = cfg.peer_store_file.clone();
let data = spawn_blocking(move || fs::read(file)).await.unwrap()?;
let mut file = File::open(&file).await?;
let deser_peer_data: DeserPeerDataV1<Z::Addr> = from_reader(&mut file)?;

let de_ser: DeserPeerDataV1<Z::Addr> = from_slice(&data)?;
Ok((de_ser.white_list, de_ser.gray_list))
}
let banned_peers = deser_peer_data.banned_peers.into_iter()
.map(|(ban_id, timestamp)| (ban_id, Instant::now() + Duration::from_millis(timestamp)))
.collect();

Ok((deser_peer_data.white_list, deser_peer_data.gray_list, banned_peers))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::peer_list::{tests::make_fake_peer_list, PeerList};

use cuprate_test_utils::test_netzone::{TestNetZone, TestNetZoneAddr};
use std::time::{Instant, Duration};

#[test]
fn ser_deser_peer_list() {
let white_list = make_fake_peer_list(0, 50);
let gray_list = make_fake_peer_list(50, 100);
let (white_list, mut banned_peers) = make_fake_peer_list_with_bans(50);
let gray_list = make_fake_peer_list(50, 50);

// Add some more bans for testing
for _ in 0..10 {
let peer = make_fake_peer(banned_peers.peers.len() as u32, None);
banned_peers.peers.insert(peer.adr.clone(), peer);
banned_peers.ban_ids.insert(peer.adr.ban_id(), vec![peer.adr.clone()]);
banned_peers.insert(peer.adr.ban_id(), Instant::now() + Duration::from_secs(3600));
}

let data = to_vec(&SerPeerDataV1 {
white_list: white_list.peers.values().collect::<Vec<_>>(),
gray_list: gray_list.peers.values().collect::<Vec<_>>(),
banned_peers,
})
.unwrap();
banned_peers: banned_peers.iter().map(|(ban_id, &instant)| {
(*ban_id, instant.duration_since(Instant::now()).as_secs() as u64)
}).collect(),
}).unwrap();

let de_ser: DeserPeerDataV1<TestNetZoneAddr> = from_slice(&data).unwrap();

let white_list_2: PeerList<TestNetZone<true, true, true>> =
PeerList::new(de_ser.white_list);
let white_list_2: PeerList<TestNetZone<true, true, true>> = PeerList::new(de_ser.white_list);
let gray_list_2: PeerList<TestNetZone<true, true, true>> = PeerList::new(de_ser.gray_list);
let mut banned_peers_2 = HashMap::new();

for (ban_id, timestamp) in de_ser.banned_peers {
banned_peers_2.insert(ban_id, Instant::now() + Duration::from_secs(timestamp));
}

// Test white and gray lists
assert_eq!(white_list.peers.len(), white_list_2.peers.len());
assert_eq!(gray_list.peers.len(), gray_list_2.peers.len());

Expand All @@ -93,5 +114,13 @@ mod tests {
for addr in gray_list.peers.keys() {
assert!(gray_list_2.contains_peer(addr));
}

// Test banned peers
assert_eq!(banned_peers.len(), banned_peers_2.len());

for (ban_id, ban_until) in banned_peers.iter() {
let ban_until_2 = banned_peers_2.get(ban_id).unwrap();
assert!(ban_until.duration_since(*ban_until_2).abs() < Duration::from_secs(1));
}
}
}
3 changes: 1 addition & 2 deletions p2p/p2p-core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,7 @@ thiserror = { workspace = true }
tracing = { workspace = true, features = ["std", "attributes"] }
hex-literal = { workspace = true }

borsh = { workspace = true, features = ["derive", "std"], optional = true }

borsh = { git = "https://github.com/willco-1/borsh-rs.git", features = ["derive", "std"], optional = true }
[dev-dependencies]
cuprate-test-utils = {path = "../../test-utils"}

Expand Down

0 comments on commit 3297777

Please sign in to comment.