chore: replace std locks with parking_lot locks in simulations (#141)
* replace std locks with parking_lot locks
parent 2d60ce9921
commit d864fecd07
@@ -13,6 +13,7 @@ crc32fast = "1.3"
 crossbeam = { version = "0.8.2", features = ["crossbeam-channel"] }
 fixed-slice-deque = "0.1.0-beta2"
 nomos-core = { path = "../nomos-core" }
+parking_lot = "0.12"
 polars = { version = "0.27", features = ["serde", "object", "json", "csv-file", "parquet", "dtype-struct"] }
 rand = { version = "0.8", features = ["small_rng"] }
 rayon = "1.7"
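The swap is mechanical throughout the crate: std::sync::RwLock and Mutex return a LockResult that has to be unwrapped because of lock poisoning, while the parking_lot equivalents hand the guard back directly and have no poisoning. A minimal sketch of the API difference, assuming only the parking_lot dependency added above; the Vec<u32> standing in for the shared nodes collection is illustrative only.

// Compares lock acquisition between std and parking_lot.
use std::sync::Arc;

fn main() {
    // std: every acquisition goes through a LockResult, hence .unwrap()/.expect().
    let std_nodes = Arc::new(std::sync::RwLock::new(vec![0u32; 3]));
    std_nodes.write().unwrap().push(4);
    assert_eq!(std_nodes.read().unwrap().len(), 4);

    // parking_lot: the guard is returned directly; no Result, no poisoning.
    let pl_nodes = Arc::new(parking_lot::RwLock::new(vec![0u32; 3]));
    pl_nodes.write().push(4);
    assert_eq!(pl_nodes.read().len(), 4);
}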
@@ -7,11 +7,12 @@ use simulations::streaming::polars::PolarsSubscriber;
 use std::collections::BTreeMap;
 use std::fs::File;
 use std::path::{Path, PathBuf};
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 // crates
 use clap::Parser;
 use crossbeam::channel;
+use parking_lot::RwLock;
 use rand::rngs::SmallRng;
 use rand::seq::SliceRandom;
 use rand::{Rng, SeedableRng};
@@ -419,11 +419,12 @@ fn get_roles(
 mod tests {
     use std::{
         collections::{BTreeMap, BTreeSet, HashMap},
-        sync::{Arc, RwLock},
+        sync::Arc,
         time::{Duration, SystemTime, UNIX_EPOCH},
     };

     use crossbeam::channel;
+    use parking_lot::RwLock;
     use rand::{
         rngs::{mock::StepRng, SmallRng},
         Rng, SeedableRng,
@@ -803,13 +804,13 @@ mod tests {
         let nodes = Arc::new(RwLock::new(nodes));
         for _ in 0..9 {
             network.dispatch_after(Duration::from_millis(100));
-            nodes.write().unwrap().par_iter_mut().for_each(|(_, node)| {
+            nodes.write().par_iter_mut().for_each(|(_, node)| {
                 node.step();
             });
             network.collect_messages();
         }

-        for (_, node) in nodes.read().unwrap().iter() {
+        for (_, node) in nodes.read().iter() {
             assert_eq!(node.current_view(), 2);
         }
     }
@@ -8,10 +8,11 @@ pub mod dummy_streaming;
 use std::{
     collections::BTreeMap,
     ops::{Deref, DerefMut},
-    sync::{Arc, RwLock},
+    sync::Arc,
     time::Duration,
 };
 // crates
+use parking_lot::RwLock;
 use serde::{Deserialize, Serialize};
 // internal
 use crate::overlay::{Layout, OverlaySettings, SimulationOverlay};
@@ -155,12 +156,12 @@ pub trait OverlayGetter {

 impl OverlayGetter for SharedState<OverlayState> {
     fn get_view(&self, index: usize) -> Option<ViewOverlay> {
-        let overlay_state = self.read().unwrap();
+        let overlay_state = self.read();
        overlay_state.overlays.get(&index).cloned()
     }

     fn get_all_nodes(&self) -> Vec<NodeId> {
-        let overlay_state = self.read().unwrap();
+        let overlay_state = self.read();
         overlay_state.all_nodes.clone()
     }
 }
@@ -22,17 +22,9 @@ where
     type Error = anyhow::Error;

     fn try_from(state: &crate::warding::SimulationState<N>) -> Result<Self, Self::Error> {
-        serde_json::to_value(
-            state
-                .nodes
-                .read()
-                .expect("simulations: SimulationState panic when requiring a read lock")
-                .iter()
-                .map(N::state)
-                .collect::<Vec<_>>(),
-        )
-        .map(OutData::new)
-        .map_err(From::from)
+        serde_json::to_value(state.nodes.read().iter().map(N::state).collect::<Vec<_>>())
+            .map(OutData::new)
+            .map_err(From::from)
     }
 }

@@ -27,13 +27,7 @@ where
         nodes: Arc::clone(&runner.nodes),
     };

-    let mut node_ids: Vec<NodeId> = runner
-        .nodes
-        .read()
-        .expect("Read access to nodes vector")
-        .iter()
-        .map(N::id)
-        .collect();
+    let mut node_ids: Vec<NodeId> = runner.nodes.read().iter().map(N::id).collect();

     let inner_runner = runner.inner.clone();
     let nodes = runner.nodes;
@@ -47,13 +41,12 @@ where
                 return Ok(());
             }
             default => {
-                let mut inner_runner = inner_runner.write().expect("Write access to inner in async runner");
+                let mut inner_runner = inner_runner.write();
                 node_ids.shuffle(&mut inner_runner.rng);
                 for ids_chunk in node_ids.chunks(chunk_size) {
                     let ids: HashSet<NodeId> = ids_chunk.iter().copied().collect();
                     nodes
                         .write()
-                        .expect("Write access to nodes vector")
                         .par_iter_mut()
                         .filter(|n| ids.contains(&n.id()))
                         .for_each(N::step);
@@ -31,17 +31,14 @@ where

     let inner_runner = runner.inner.clone();
     let nodes = runner.nodes;
-    let nodes_remaining: BTreeSet<NodeId> =
-        (0..nodes.read().expect("Read access to nodes vector").len())
-            .map(From::from)
-            .collect();
+    let nodes_remaining: BTreeSet<NodeId> = (0..nodes.read().len()).map(From::from).collect();
     let iterations: Vec<_> = (0..maximum_iterations).collect();
     let (stop_tx, stop_rx) = bounded(1);
     let p = runner.producer.clone();
     let p1 = runner.producer;
     let handle = std::thread::spawn(move || {
-        let mut inner_runner: std::sync::RwLockWriteGuard<super::SimulationRunnerInner<M>> =
-            inner_runner.write().expect("Locking runner");
+        let mut inner_runner: parking_lot::RwLockWriteGuard<super::SimulationRunnerInner<M>> =
+            inner_runner.write();

         'main: for chunk in iterations.chunks(update_rate) {
             select! {
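Where the code spells out the guard type, as in the hunk above, the annotation moves from std::sync::RwLockWriteGuard to parking_lot::RwLockWriteGuard and the expect(...) disappears. A small sketch under assumed names; Inner is hypothetical and only stands in for the runner's inner state.

// Returning a parking_lot write guard from a helper: the type is named
// explicitly, and no Result has to be unwrapped to obtain it.
use parking_lot::{RwLock, RwLockWriteGuard};
use std::sync::Arc;

struct Inner {
    rng_seed: u64,
}

fn lock_inner(inner: &Arc<RwLock<Inner>>) -> RwLockWriteGuard<'_, Inner> {
    // Previously: a std::sync::RwLockWriteGuard via .write().expect("Locking runner").
    // With parking_lot the guard comes back directly.
    inner.write()
}

fn main() {
    let inner = Arc::new(RwLock::new(Inner { rng_seed: 42 }));
    let mut guard = lock_inner(&inner);
    guard.rng_seed += 1;
    assert_eq!(guard.rng_seed, 43);
}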
@@ -57,7 +54,7 @@ where
                     );

                     {
-                        let mut shared_nodes = nodes.write().expect("Write access to nodes vector");
+                        let mut shared_nodes = nodes.write();
                         let node: &mut N = shared_nodes
                             .get_mut(node_id.inner())
                             .expect("Node should be present");
@@ -81,7 +81,7 @@ where
                     break;
                 }
                 default => {
-                    let mut inner_runner = inner_runner.write().expect("Lock runner");
+                    let mut inner_runner = inner_runner.write();
                     let (group_index, node_id) =
                         choose_random_layer_and_node_id(&mut inner_runner.rng, &distribution, &layers, &mut deque);

@@ -89,7 +89,7 @@ where
                     deque.get_mut(group_index).unwrap().remove(&node_id);

                     {
-                        let mut shared_nodes = nodes.write().expect("Write access to nodes vector");
+                        let mut shared_nodes = nodes.write();
                         let node: &mut N = shared_nodes
                             .get_mut(node_id.inner())
                             .expect("Node should be present");
@@ -168,13 +168,7 @@ where
     // add a +1 so we always have
     let mut deque = FixedSliceDeque::new(gap + 1);
     // push first layer
-    let node_ids: BTreeSet<NodeId> = runner
-        .nodes
-        .write()
-        .expect("Single access to runner nodes")
-        .iter()
-        .map(|node| node.id())
-        .collect();
+    let node_ids: BTreeSet<NodeId> = runner.nodes.write().iter().map(|node| node.id()).collect();

     deque.push_back(node_ids);
     // allocate default sets
@@ -4,12 +4,13 @@ mod layered_runner;
 mod sync_runner;

 // std
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
 use std::time::Duration;

 // crates
 use crate::streaming::{StreamProducer, Subscriber, SubscriberHandle};
 use crossbeam::channel::Sender;
+use parking_lot::RwLock;
 use rand::rngs::SmallRng;
 use rand::{RngCore, SeedableRng};
 use rayon::prelude::*;
@@ -35,13 +35,13 @@ where
                 return Ok(());
             }
             default => {
-                let mut inner_runner = inner_runner.write().expect("Write access to inner simulation state");
+                let mut inner_runner = inner_runner.write();

                 // we must use a code block to make sure once the step call is finished then the write lock will be released, because in Record::try_from(&state),
                 // we need to call the read lock, if we do not release the write lock,
                 // then dead lock will occur
                 {
-                    let mut nodes = nodes.write().expect("Write access to nodes vector");
+                    let mut nodes = nodes.write();
                     inner_runner.step(&mut nodes);
                 }

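The scoping comment in the hunk above matters even more after this change: parking_lot's RwLock is not reentrant, so holding the write guard on the nodes collection while Record::try_from takes a read lock on the same collection would block forever. A minimal sketch of the pattern with hypothetical names; step_and_snapshot and the Vec<u32> payload are illustrative, not the runner's real types.

// Confine the write guard to an inner block so it is released before a
// subsequent read lock on the same RwLock is taken on the same thread.
use parking_lot::RwLock;
use std::sync::Arc;

fn step_and_snapshot(nodes: &Arc<RwLock<Vec<u32>>>) -> Vec<u32> {
    {
        // Hold the write lock only for the duration of the step.
        let mut guard = nodes.write();
        guard.iter_mut().for_each(|n| *n += 1);
    } // write guard dropped here, releasing the lock

    // Taking this read lock while the write guard above was still alive
    // would deadlock; after the block it is safe.
    nodes.read().clone()
}

fn main() {
    let nodes = Arc::new(RwLock::new(vec![1, 2, 3]));
    assert_eq!(step_and_snapshot(&nodes), vec![2, 3, 4]);
}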
@@ -80,10 +80,11 @@ mod tests {
         streaming::StreamProducer,
     };
     use crossbeam::channel;
+    use parking_lot::RwLock;
     use rand::rngs::mock::StepRng;
     use std::{
         collections::{BTreeMap, HashMap},
-        sync::{Arc, RwLock},
+        sync::Arc,
         time::Duration,
     };

@@ -142,11 +143,11 @@ mod tests {
         let producer = StreamProducer::default();
         let runner: SimulationRunner<DummyMessage, DummyNode, OutData> =
             SimulationRunner::new(network, nodes, producer, settings);
-        let mut nodes = runner.nodes.write().unwrap();
-        runner.inner.write().unwrap().step(&mut nodes);
+        let mut nodes = runner.nodes.write();
+        runner.inner.write().step(&mut nodes);
         drop(nodes);

-        let nodes = runner.nodes.read().unwrap();
+        let nodes = runner.nodes.read();
         for node in nodes.iter() {
             assert_eq!(node.current_view(), 0);
         }
@@ -189,11 +190,11 @@ mod tests {
         let runner: SimulationRunner<DummyMessage, DummyNode, OutData> =
             SimulationRunner::new(network, nodes, Default::default(), settings);

-        let mut nodes = runner.nodes.write().unwrap();
-        runner.inner.write().unwrap().step(&mut nodes);
+        let mut nodes = runner.nodes.write();
+        runner.inner.write().step(&mut nodes);
         drop(nodes);

-        let nodes = runner.nodes.read().unwrap();
+        let nodes = runner.nodes.read();
         let state = nodes[1].state();
         assert_eq!(state.message_count, 10);
     }
@@ -1,10 +1,7 @@
-use std::{
-    any::Any,
-    io::stdout,
-    sync::{Arc, Mutex},
-};
+use std::{any::Any, io::stdout, sync::Arc};

 use super::{Receivers, StreamSettings, Subscriber};
+use parking_lot::Mutex;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, Default, Deserialize)]
@@ -98,13 +95,7 @@ where
     }

     fn sink(&self, state: Arc<Self::Record>) -> anyhow::Result<()> {
-        serde_json::to_writer(
-            &mut *self
-                .writer
-                .lock()
-                .expect("fail to lock writer in io subscriber"),
-            &state,
-        )?;
+        serde_json::to_writer(&mut *self.writer.lock(), &state)?;
         Ok(())
     }
 }
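The same simplification applies to Mutex in the subscribers: parking_lot::Mutex::lock() hands back the guard directly, so the serializer can write through it in one expression. A small sketch under assumed names; sink_record and the stdout writer are illustrative, not the subscriber's real setup.

// Write one JSON record through a shared, lockable writer.
use std::io::{stdout, Write};
use std::sync::Arc;

use parking_lot::Mutex;

fn sink_record<W: Write>(writer: &Arc<Mutex<W>>, record: &serde_json::Value) -> anyhow::Result<()> {
    // lock() returns the guard; &mut * turns it into &mut W for to_writer.
    serde_json::to_writer(&mut *writer.lock(), record)?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    let writer = Arc::new(Mutex::new(stdout()));
    sink_record(&writer, &serde_json::json!({ "view": 2 }))?;
    Ok(())
}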
@@ -135,7 +126,7 @@ mod tests {
         type Error = anyhow::Error;

         fn try_from(value: &SimulationState<DummyStreamingNode<()>>) -> Result<Self, Self::Error> {
-            let nodes = value.nodes.read().expect("failed to read nodes");
+            let nodes = value.nodes.read();
             Ok(Self {
                 states: nodes
                     .iter()
@@ -1,13 +1,13 @@
+use super::{Receivers, StreamSettings, Subscriber};
+use parking_lot::Mutex;
+use serde::{Deserialize, Serialize};
 use std::{
     fs::{File, OpenOptions},
     io::Write,
     path::PathBuf,
-    sync::{Arc, Mutex},
+    sync::Arc,
 };

-use super::{Receivers, StreamSettings, Subscriber};
-use serde::{Deserialize, Serialize};
-
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct NaiveSettings {
     pub path: PathBuf,
@@ -94,7 +94,7 @@ where
     }

     fn sink(&self, state: Arc<Self::Record>) -> anyhow::Result<()> {
-        let mut file = self.file.lock().expect("failed to lock file");
+        let mut file = self.file.lock();
         serde_json::to_writer(&mut *file, &state)?;
         file.write_all(b",\n")?;
         Ok(())
@@ -131,7 +131,6 @@ mod tests {
                 states: value
                     .nodes
                     .read()
-                    .expect("failed to read nodes")
                     .iter()
                     .map(|node| (node.id(), node.current_view()))
                     .collect(),
@@ -1,3 +1,4 @@
+use parking_lot::Mutex;
 use polars::prelude::*;
 use serde::{Deserialize, Serialize};
 use std::{
@@ -5,7 +6,6 @@ use std::{
     io::Cursor,
     path::{Path, PathBuf},
     str::FromStr,
-    sync::Mutex,
 };

 use super::{Receivers, StreamSettings};
@@ -72,10 +72,7 @@ where
     R: Serialize,
 {
     fn persist(&self) -> anyhow::Result<()> {
-        let data = self
-            .data
-            .lock()
-            .expect("failed to lock data in PolarsSubscriber pesist");
+        let data = self.data.lock();
         let mut cursor = Cursor::new(Vec::new());
         serde_json::to_writer(&mut cursor, &*data).expect("Dump data to json ");
         let mut data = JsonReader::new(cursor)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sink(&self, state: Arc<Self::Record>) -> anyhow::Result<()> {
|
fn sink(&self, state: Arc<Self::Record>) -> anyhow::Result<()> {
|
||||||
self.data
|
self.data.lock().push(state);
|
||||||
.lock()
|
|
||||||
.expect("failed to lock data in PolarsSubscriber")
|
|
||||||
.push(state);
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -14,10 +14,7 @@ impl<N: Node> SimulationWard<N> for MinMaxViewWard {
     fn analyze(&mut self, state: &Self::SimulationState) -> bool {
         let mut min = usize::MAX;
         let mut max = 0;
-        let nodes = state
-            .nodes
-            .read()
-            .expect("simulations: MinMaxViewWard panic when requiring a read lock");
+        let nodes = state.nodes.read();
         for node in nodes.iter() {
             let view = node.current_view();
             min = min.min(view);
@@ -31,7 +28,8 @@ impl<N: Node> SimulationWard<N> for MinMaxViewWard {
 mod test {
     use crate::warding::minmax::MinMaxViewWard;
     use crate::warding::{SimulationState, SimulationWard};
-    use std::sync::{Arc, RwLock};
+    use parking_lot::RwLock;
+    use std::sync::Arc;

     #[test]
     fn rebase_threshold() {
@@ -43,7 +41,7 @@ mod test {
         assert!(!minmax.analyze(&state));

         // push a new node with 10
-        state.nodes.write().unwrap().push(20);
+        state.nodes.write().push(20);
         // we now have two nodes and the max - min is 10 > max_gap 5, so true
         assert!(minmax.analyze(&state));
     }
@@ -1,6 +1,7 @@
 // std
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
 // crates
+use parking_lot::RwLock;
 use serde::Deserialize;
 // internal
 use crate::node::Node;
@@ -34,10 +34,7 @@ impl StalledViewWard {
 impl<N: Node> SimulationWard<N> for StalledViewWard {
     type SimulationState = SimulationState<N>;
     fn analyze(&mut self, state: &Self::SimulationState) -> bool {
-        let nodes = state
-            .nodes
-            .read()
-            .expect("simulations: StalledViewWard panic when requiring a read lock");
+        let nodes = state.nodes.read();
         self.update_state(checksum(nodes.as_slice()));
         self.criterion >= self.threshold
     }
@@ -57,7 +54,7 @@ fn checksum<N: Node>(nodes: &[N]) -> u32 {
 #[cfg(test)]
 mod test {
     use super::*;
-    use std::sync::{Arc, RwLock};
+    use parking_lot::RwLock;
+    use std::sync::Arc;

     #[test]
     fn rebase_threshold() {
@@ -78,7 +76,7 @@ mod test {
         assert!(stalled.analyze(&state));

         // push a new one, so the criterion is reset to 0
-        state.nodes.write().unwrap().push(20);
+        state.nodes.write().push(20);
         assert!(!stalled.analyze(&state));

         // increase the criterion, 2
@@ -15,7 +15,6 @@ impl<N: Node> SimulationWard<N> for MaxViewWard {
         state
             .nodes
             .read()
-            .expect("simulations: MaxViewWard panic when requiring a read lock")
             .iter()
             .all(|n| n.current_view() >= self.max_view)
     }
@@ -25,7 +24,8 @@ impl<N: Node> SimulationWard<N> for MaxViewWard {
 mod test {
     use crate::warding::ttf::MaxViewWard;
     use crate::warding::{SimulationState, SimulationWard};
-    use std::sync::{Arc, RwLock};
+    use parking_lot::RwLock;
+    use std::sync::Arc;

     #[test]
     fn rebase_threshold() {
@@ -37,7 +37,7 @@ mod test {
         };
         assert!(ttf.analyze(&state));

-        state.nodes.write().unwrap().push(9);
+        state.nodes.write().push(9);
         assert!(!ttf.analyze(&state));
     }
 }