state-res: Fix clippy warnings
parent e31b4d5c98
commit 01515aea41
@@ -8,7 +8,7 @@
 #![allow(clippy::exhaustive_structs)]
 
 use std::{
-    collections::{BTreeMap, BTreeSet},
+    collections::{HashMap, HashSet},
     convert::{TryFrom, TryInto},
     sync::{
         atomic::{AtomicU64, Ordering::SeqCst},
@@ -19,7 +19,7 @@ use std::{
 use criterion::{criterion_group, criterion_main, Criterion};
 use event::StateEvent;
 use js_int::uint;
-use maplit::{btreemap, btreeset};
+use maplit::{btreemap, hashmap, hashset};
 use ruma_common::MilliSecondsSinceUnixEpoch;
 use ruma_events::{
     pdu::{EventHash, Pdu, RoomV3Pdu},
@@ -37,12 +37,12 @@ static SERVER_TIMESTAMP: AtomicU64 = AtomicU64::new(0);
 
 fn lexico_topo_sort(c: &mut Criterion) {
     c.bench_function("lexicographical topological sort", |b| {
-        let graph = btreemap! {
-            event_id("l") => btreeset![event_id("o")],
-            event_id("m") => btreeset![event_id("n"), event_id("o")],
-            event_id("n") => btreeset![event_id("o")],
-            event_id("o") => btreeset![], // "o" has zero outgoing edges but 4 incoming edges
-            event_id("p") => btreeset![event_id("o")],
+        let graph = hashmap! {
+            event_id("l") => hashset![event_id("o")],
+            event_id("m") => hashset![event_id("n"), event_id("o")],
+            event_id("n") => hashset![event_id("o")],
+            event_id("o") => hashset![], // "o" has zero outgoing edges but 4 incoming edges
+            event_id("p") => hashset![event_id("o")],
         };
         b.iter(|| {
             let _ = StateResolution::lexicographical_topological_sort(&graph, |id| {
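
For context: the benchmark in the hunk above drives StateResolution::lexicographical_topological_sort over a small DAG. The standalone sketch below is not the crate's implementation (the function name, the plain-string graph, and the key closure are made up for illustration), but it shows the technique being measured: Kahn's algorithm in which ties between ready nodes are broken by a caller-supplied sort key.

use std::cmp::Reverse;
use std::collections::{BinaryHeap, HashMap, HashSet};

// Kahn's algorithm with deterministic tie-breaking: whenever several nodes
// are ready (everything they point at has already been emitted), the one
// with the smallest caller-supplied key is emitted first.
fn topo_sort_by_key<K: Ord>(
    graph: &HashMap<String, HashSet<String>>,
    key: impl Fn(&str) -> K,
) -> Vec<String> {
    // Remaining outgoing edges per node, plus the reverse adjacency list.
    let mut pending: HashMap<&str, usize> = HashMap::new();
    let mut incoming: HashMap<&str, Vec<&str>> = HashMap::new();
    for (node, edges) in graph {
        pending.entry(node.as_str()).or_insert(0);
        for target in edges {
            *pending.entry(node.as_str()).or_insert(0) += 1;
            pending.entry(target.as_str()).or_insert(0);
            incoming.entry(target.as_str()).or_default().push(node.as_str());
        }
    }

    // Min-heap of ready nodes, ordered by the key.
    let mut ready: BinaryHeap<Reverse<(K, &str)>> = BinaryHeap::new();
    for (&node, &outgoing) in &pending {
        if outgoing == 0 {
            ready.push(Reverse((key(node), node)));
        }
    }

    let mut sorted = Vec::with_capacity(pending.len());
    while let Some(Reverse((_, node))) = ready.pop() {
        sorted.push(node.to_owned());
        if let Some(dependents) = incoming.get(node) {
            for &dependent in dependents {
                let outgoing = pending.get_mut(dependent).expect("counted above");
                *outgoing -= 1;
                if *outgoing == 0 {
                    ready.push(Reverse((key(dependent), dependent)));
                }
            }
        }
    }
    sorted
}

fn main() {
    // Same shape as the benchmark graph above, with plain strings.
    let graph: HashMap<String, HashSet<String>> = [
        ("l", vec!["o"]),
        ("m", vec!["n", "o"]),
        ("n", vec!["o"]),
        ("o", vec![]),
        ("p", vec!["o"]),
    ]
    .into_iter()
    .map(|(k, v)| (k.to_owned(), v.into_iter().map(str::to_owned).collect()))
    .collect();

    // With the node name itself as the key this prints ["o", "l", "n", "m", "p"].
    println!("{:?}", topo_sort_by_key(&graph, |id| id.to_owned()));
}
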
@@ -54,7 +54,7 @@ fn lexico_topo_sort(c: &mut Criterion) {
 
 fn resolution_shallow_auth_chain(c: &mut Criterion) {
     c.bench_function("resolve state of 5 events one fork", |b| {
-        let mut store = TestStore(btreemap! {});
+        let mut store = TestStore(hashmap! {});
 
         // build up the DAG
        let (state_at_bob, state_at_charlie, _) = store.set_up();
@@ -154,7 +154,7 @@ criterion_main!(benches);
 // IMPLEMENTATION DETAILS AHEAD
 //
 /////////////////////////////////////////////////////////////////////*/
-pub struct TestStore<E: Event>(pub BTreeMap<EventId, Arc<E>>);
+pub struct TestStore<E: Event>(pub HashMap<EventId, Arc<E>>);
 
 #[allow(unused)]
 impl<E: Event> TestStore<E> {
@@ -179,8 +179,8 @@ impl<E: Event> TestStore<E> {
         &self,
         room_id: &RoomId,
         event_ids: &[EventId],
-    ) -> Result<BTreeSet<EventId>> {
-        let mut result = BTreeSet::new();
+    ) -> Result<HashSet<EventId>> {
+        let mut result = HashSet::new();
         let mut stack = event_ids.to_vec();
 
         // DFS for auth event chain
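
The method whose return type changes in this hunk walks the auth DAG with an explicit stack (the `// DFS for auth event chain` comment). A minimal standalone version of that traversal, with plain strings instead of EventId and a HashMap standing in for the event store (both made up for illustration), could look like this:

use std::collections::{HashMap, HashSet};

// `auth_events` maps an event id to the ids listed in its auth_events field.
// The returned set contains the starting ids themselves, matching the TODO
// note in the next hunk about `auth_event_ids` including "self".
fn auth_event_ids(
    auth_events: &HashMap<String, Vec<String>>,
    event_ids: &[String],
) -> HashSet<String> {
    let mut result = HashSet::new();
    let mut stack = event_ids.to_vec();

    // DFS for the auth event chain: pop an id, record it, and push its auth
    // events; `result` doubles as the visited set, so shared ancestors are
    // only expanded once.
    while let Some(id) = stack.pop() {
        if !result.insert(id.clone()) {
            continue;
        }
        if let Some(parents) = auth_events.get(&id) {
            stack.extend(parents.iter().cloned());
        }
    }
    result
}
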
@@ -206,26 +206,21 @@ impl<E: Event> TestStore<E> {
         room_id: &RoomId,
         event_ids: Vec<Vec<EventId>>,
     ) -> Result<Vec<EventId>> {
-        let mut chains = vec![];
+        let mut auth_chain_sets = vec![];
         for ids in event_ids {
             // TODO state store `auth_event_ids` returns self in the event ids list
             // when an event returns `auth_event_ids` self is not contained
-            let chain = self.auth_event_ids(room_id, &ids)?.into_iter().collect::<BTreeSet<_>>();
-            chains.push(chain);
+            let chain = self.auth_event_ids(room_id, &ids)?.into_iter().collect::<HashSet<_>>();
+            auth_chain_sets.push(chain);
         }
 
-        if let Some(chain) = chains.first() {
-            let rest = chains.iter().skip(1).flatten().cloned().collect();
-            let common = chain.intersection(&rest).collect::<Vec<_>>();
-
-            Ok(chains
-                .iter()
-                .flatten()
-                .filter(|id| !common.contains(id))
-                .cloned()
-                .collect::<BTreeSet<_>>()
-                .into_iter()
-                .collect())
+        if let Some(first) = auth_chain_sets.first().cloned() {
+            let common = auth_chain_sets
+                .iter()
+                .skip(1)
+                .fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<EventId>>());
+
+            Ok(auth_chain_sets.into_iter().flatten().filter(|id| !common.contains(id)).collect())
         } else {
             Ok(vec![])
         }
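
The rewritten branch above folds the intersection of all auth chains and then keeps every event that is not in that intersection, mirroring the change inside impl StateResolution further down. A small self-contained illustration of the pattern, with made-up event ids:

use std::collections::HashSet;

fn main() {
    // Three made-up auth chains; "a" is the only event common to all of them.
    let auth_chain_sets: Vec<HashSet<&str>> = vec![
        HashSet::from(["a", "b", "c"]),
        HashSet::from(["a", "b", "d"]),
        HashSet::from(["a", "e"]),
    ];

    if let Some(first) = auth_chain_sets.first().cloned() {
        // Intersection of every chain...
        let common = auth_chain_sets
            .iter()
            .skip(1)
            .fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<_>>());

        // ...and everything that is not in it: the auth chain difference.
        let diff: HashSet<&str> = auth_chain_sets
            .into_iter()
            .flatten()
            .filter(|id| !common.contains(id))
            .collect();

        assert_eq!(common, HashSet::from(["a"]));
        assert_eq!(diff, HashSet::from(["b", "c", "d", "e"]));
    }
}
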
@@ -387,7 +382,7 @@ where
 
 // all graphs start with these input events
 #[allow(non_snake_case)]
-fn INITIAL_EVENTS() -> BTreeMap<EventId, Arc<StateEvent>> {
+fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
     vec![
         to_pdu_event::<EventId>(
             "CREATE",
@@ -469,7 +464,7 @@ fn INITIAL_EVENTS() -> BTreeMap<EventId, Arc<StateEvent>> {
 
 // all graphs start with these input events
 #[allow(non_snake_case)]
-fn BAN_STATE_SET() -> BTreeMap<EventId, Arc<StateEvent>> {
+fn BAN_STATE_SET() -> HashMap<EventId, Arc<StateEvent>> {
     vec![
         to_pdu_event(
             "PA",
@@ -105,7 +105,7 @@ impl StateResolution {
         let mut auth_diff = StateResolution::get_auth_chain_diff(room_id, auth_chain_sets)?;
 
         // Add the auth_diff to conflicting now we have a full set of conflicting events
-        auth_diff.extend(conflicting.values().cloned().flatten().filter_map(|o| o));
+        auth_diff.extend(conflicting.values().cloned().flatten().flatten());
 
         debug!("auth diff: {}", auth_diff.len());
         trace!("{:?}", auth_diff);
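
The only change in this hunk is the identity filter_map: over an iterator of Options it does exactly what another flatten() already does, which is what clippy points out (the exact lint name depends on the clippy version). A tiny demonstration with made-up values standing in for the real conflicting map:

fn main() {
    let conflicting: Vec<Vec<Option<u32>>> = vec![vec![Some(1), None], vec![Some(2)]];

    // Old shape: flatten the outer collection, then strip the `None`s with an
    // identity `filter_map`, the pattern clippy warns about.
    let before: Vec<u32> = conflicting.iter().cloned().flatten().filter_map(|o| o).collect();

    // New shape: a second `flatten()` strips the `None`s directly.
    let after: Vec<u32> = conflicting.iter().cloned().flatten().flatten().collect();

    assert_eq!(before, after);
    assert_eq!(after, vec![1, 2]);
}
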
@@ -230,9 +230,9 @@ impl StateResolution {
             let common = auth_chain_sets
                 .iter()
                 .skip(1)
-                .fold(first, |a, b| a.intersection(&b).cloned().collect::<HashSet<EventId>>());
+                .fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<EventId>>());
 
-            Ok(auth_chain_sets.into_iter().flatten().filter(|id| !common.contains(&id)).collect())
+            Ok(auth_chain_sets.into_iter().flatten().filter(|id| !common.contains(id)).collect())
         } else {
             Ok(hashset![])
         }
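
The two changes here drop redundant borrows: inside the fold, b is already a &HashSet<EventId> because it comes from .iter(), and inside filter the closure argument id is already a reference, so the extra & only adds another layer of indirection. A small illustration of the fold case with plain integers:

use std::collections::HashSet;

fn main() {
    let sets = vec![HashSet::from([1u32, 2, 3]), HashSet::from([2, 3, 4])];
    let first = sets[0].clone();

    // `b` is already a `&HashSet<u32>` here, so `intersection(b)` is enough;
    // `intersection(&b)` would hand over a `&&HashSet<u32>` that gets
    // dereferenced right away, which is the borrow clippy flags.
    let common = sets
        .iter()
        .skip(1)
        .fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<u32>>());

    assert_eq!(common, HashSet::from([2, 3]));
}
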