Add a test for conduit's event sorting logic
This could possibly be turned into another exported public function: one that takes a list of ruma::Pdu (the events) and another list of ruma::Pdu (the auth_events) and returns the sorted list of events.
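A rough sketch of what that exported helper could look like (the name `sort_events`, the parameter types, and the error type are hypothetical and not part of this commit; it only mirrors the pipeline the new test exercises):

use ruma::identifiers::{EventId, RoomId};
use state_res::{StateEvent, StateStore};

/// Hypothetical public entry point: sort `events`, consulting `auth_events`,
/// and return the event ids in their resolved order.
#[allow(unused_variables)]
pub fn sort_events(
    room_id: &RoomId,
    events: Vec<StateEvent>,      // the ruma PDUs to sort
    auth_events: Vec<StateEvent>, // their auth events
    store: &impl StateStore,
) -> Result<Vec<EventId>, String> {
    // 1. reverse_topological_power_sort over the power events
    // 2. iterative_auth_check on that sorted list
    // 3. mainline_sort of the remaining events against the resolved power level
    unimplemented!()
}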
parent 63be0b550f
commit e8acae05ff
@@ -34,6 +34,7 @@ features = ["client-api", "federation-api", "appservice-api"]
 
 [dev-dependencies]
 criterion = "0.3.3"
+rand = "0.7.3"
 
 [[bench]]
 name = "state_res_bench"
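The new `rand` dev-dependency backs the `shuffle` helper in the test added below, which randomizes the order of event ids before each sorting run.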
@@ -378,10 +378,14 @@ impl StateResolution {
         // https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm
 
         // TODO make the BTreeSet conversion cleaner ??
+        // outdegree_map is an event referring to the events before it, the
+        // more outdegrees, the more recent the event.
         let mut outdegree_map: BTreeMap<EventId, BTreeSet<EventId>> = graph
             .iter()
             .map(|(k, v)| (k.clone(), v.iter().cloned().collect()))
             .collect();
+
+        // The number of events that depend on the given event (the eventId key)
         let mut reverse_graph = BTreeMap::new();
 
         // Vec of nodes that have zero out degree, least recent events.
@@ -389,7 +393,7 @@ impl StateResolution {
 
         for (node, edges) in graph.iter() {
             if edges.is_empty() {
-                // the `Reverse` is because rusts bin heap sorts largest -> smallest we need
+                // the `Reverse` is because Rust's `BinaryHeap` sorts largest -> smallest we need
                 // smallest -> largest
                 zero_outdegree.push(Reverse((key_fn(node), node)));
             }
@@ -407,7 +411,7 @@ impl StateResolution {
 
         // we remove the oldest node (most incoming edges) and check against all other
         let mut sorted = vec![];
-        // match out the `Reverse` and take the smallest `node` each time
+        // destructure the `Reverse` and take the smallest `node` each time
         while let Some(Reverse((_, node))) = heap.pop() {
             let node: &EventId = node;
             for parent in reverse_graph.get(node).unwrap() {
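As background for the comments above: `std::collections::BinaryHeap` is a max-heap, so wrapping the sort key in `std::cmp::Reverse` makes `pop()` yield the smallest key first, which is what Kahn's algorithm needs here. A minimal standalone illustration (not code from this crate):

use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // BinaryHeap pops the largest item first; Reverse flips the ordering,
    // turning it into a min-heap.
    let mut heap = BinaryHeap::new();
    for key in vec![3, 1, 2] {
        heap.push(Reverse(key));
    }

    let mut sorted = vec![];
    while let Some(Reverse(key)) = heap.pop() {
        sorted.push(key);
    }
    assert_eq!(sorted, vec![1, 2, 3]);
}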
tests/event_sorting.rs (new file, 324 lines)
@@ -0,0 +1,324 @@
use std::{cell::RefCell, collections::BTreeMap, convert::TryFrom};

use ruma::{
    events::{
        pdu::EventHash,
        room::{
            join_rules::JoinRule,
            member::{MemberEventContent, MembershipState},
        },
        EventType,
    },
    identifiers::{EventId, RoomId, RoomVersionId, UserId},
};
use serde_json::{json, Value as JsonValue};
use state_res::{StateEvent, StateMap, StateStore};
use tracing_subscriber as tracer;

use std::sync::Once;

static LOGGER: Once = Once::new();

static mut SERVER_TIMESTAMP: i32 = 0;

fn event_id(id: &str) -> EventId {
    if id.contains('$') {
        return EventId::try_from(id).unwrap();
    }
    EventId::try_from(format!("${}:foo", id)).unwrap()
}

fn alice() -> UserId {
    UserId::try_from("@alice:foo").unwrap()
}
fn bob() -> UserId {
    UserId::try_from("@bob:foo").unwrap()
}
fn charlie() -> UserId {
    UserId::try_from("@charlie:foo").unwrap()
}

fn room_id() -> RoomId {
    RoomId::try_from("!test:foo").unwrap()
}

fn member_content_join() -> JsonValue {
    serde_json::to_value(MemberEventContent {
        membership: MembershipState::Join,
        displayname: None,
        avatar_url: None,
        is_direct: None,
        third_party_invite: None,
    })
    .unwrap()
}

pub struct TestStore(RefCell<BTreeMap<EventId, StateEvent>>);

#[allow(unused)]
impl StateStore for TestStore {
    fn get_event(&self, room_id: &RoomId, event_id: &EventId) -> Result<StateEvent, String> {
        self.0
            .borrow()
            .get(event_id)
            .cloned()
            .ok_or(format!("{} not found", event_id.to_string()))
    }
}

fn to_pdu_event<S>(
    id: &str,
    sender: UserId,
    ev_type: EventType,
    state_key: Option<&str>,
    content: JsonValue,
    auth_events: &[S],
    prev_events: &[S],
) -> StateEvent
where
    S: AsRef<str>,
{
    let ts = unsafe {
        let ts = SERVER_TIMESTAMP;
        // increment the "origin_server_ts" value
        SERVER_TIMESTAMP += 1;
        ts
    };
    let id = if id.contains('$') {
        id.to_string()
    } else {
        format!("${}:foo", id)
    };
    let auth_events = auth_events
        .iter()
        .map(AsRef::as_ref)
        .map(event_id)
        .map(|id| {
            (
                id,
                EventHash {
                    sha256: "hello".into(),
                },
            )
        })
        .collect::<Vec<_>>();
    let prev_events = prev_events
        .iter()
        .map(AsRef::as_ref)
        .map(event_id)
        .map(|id| {
            (
                id,
                EventHash {
                    sha256: "hello".into(),
                },
            )
        })
        .collect::<Vec<_>>();

    let json = if let Some(state_key) = state_key {
        json!({
            "auth_events": auth_events,
            "prev_events": prev_events,
            "event_id": id,
            "sender": sender,
            "type": ev_type,
            "state_key": state_key,
            "content": content,
            "origin_server_ts": ts,
            "room_id": room_id(),
            "origin": "foo",
            "depth": 0,
            "hashes": { "sha256": "hello" },
            "signatures": {},
        })
    } else {
        json!({
            "auth_events": auth_events,
            "prev_events": prev_events,
            "event_id": id,
            "sender": sender,
            "type": ev_type,
            "content": content,
            "origin_server_ts": ts,
            "room_id": room_id(),
            "origin": "foo",
            "depth": 0,
            "hashes": { "sha256": "hello" },
            "signatures": {},
        })
    };
    serde_json::from_value(json).unwrap()
}

// all graphs start with these input events
#[allow(non_snake_case)]
fn INITIAL_EVENTS() -> BTreeMap<EventId, StateEvent> {
    // this is always called so we can init the logger here
    let _ = LOGGER.call_once(|| {
        tracer::fmt()
            .with_env_filter(tracer::EnvFilter::from_default_env())
            .init()
    });

    vec![
        to_pdu_event::<EventId>(
            "CREATE",
            alice(),
            EventType::RoomCreate,
            Some(""),
            json!({ "creator": alice() }),
            &[],
            &[],
        ),
        to_pdu_event(
            "IMA",
            alice(),
            EventType::RoomMember,
            Some(alice().to_string().as_str()),
            member_content_join(),
            &["CREATE"],
            &["CREATE"],
        ),
        to_pdu_event(
            "IPOWER",
            alice(),
            EventType::RoomPowerLevels,
            Some(""),
            json!({"users": {alice().to_string(): 100}}),
            &["CREATE", "IMA"],
            &["IMA"],
        ),
        to_pdu_event(
            "IJR",
            alice(),
            EventType::RoomJoinRules,
            Some(""),
            json!({ "join_rule": JoinRule::Public }),
            &["CREATE", "IMA", "IPOWER"],
            &["IPOWER"],
        ),
        to_pdu_event(
            "IMB",
            bob(),
            EventType::RoomMember,
            Some(bob().to_string().as_str()),
            member_content_join(),
            &["CREATE", "IJR", "IPOWER"],
            &["IJR"],
        ),
        to_pdu_event(
            "IMC",
            charlie(),
            EventType::RoomMember,
            Some(charlie().to_string().as_str()),
            member_content_join(),
            &["CREATE", "IJR", "IPOWER"],
            &["IMB"],
        ),
        to_pdu_event::<EventId>(
            "END",
            charlie(),
            EventType::RoomMessage,
            None,
            json!({}),
            &[],
            &[],
        ),
    ]
    .into_iter()
    .map(|ev| (ev.event_id(), ev))
    .collect()
}

fn shuffle(list: &mut [EventId]) {
    use rand::Rng;

    let mut rng = rand::thread_rng();
    for i in 1..list.len() {
        let j = rng.gen_range(0, list.len());
        list.swap(i, j);
    }
}

fn test_event_sort() {
    let mut events = INITIAL_EVENTS();

    let store = TestStore(RefCell::new(events.clone()));

    let event_map = events
        .values()
        .map(|ev| ((ev.kind(), ev.state_key()), ev.clone()))
        .collect::<StateMap<_>>();

    let auth_chain = &[] as &[_];

    let power_events = event_map
        .values()
        .filter(|pdu| pdu.is_power_event())
        .map(|pdu| pdu.event_id())
        .collect::<Vec<_>>();

    // This is a TODO in conduit
    // TODO these events are not guaranteed to be sorted but they are resolved, do
    // we need the auth_chain
    let sorted_power_events = state_res::StateResolution::reverse_topological_power_sort(
        &room_id(),
        &power_events,
        &mut events,
        &store,
        &auth_chain,
    );

    // This is a TODO in conduit
    // TODO we may be able to skip this since they are resolved according to spec
    let resolved_power = state_res::StateResolution::iterative_auth_check(
        &room_id(),
        &RoomVersionId::Version6,
        &sorted_power_events,
        &BTreeMap::new(), // unconflicted events
        &mut events,
        &store,
    )
    .expect("iterative auth check failed on resolved events");

    // don't remove any events so we know it sorts them all correctly
    let mut events_to_sort = events.keys().cloned().collect::<Vec<_>>();

    shuffle(&mut events_to_sort);

    let power_level = resolved_power.get(&(EventType::RoomPowerLevels, Some("".into())));

    let sorted_event_ids = state_res::StateResolution::mainline_sort(
        &room_id(),
        &events_to_sort,
        power_level,
        &mut events,
        &store,
    );

    assert_eq!(
        vec![
            "$CREATE:foo",
            "$IMA:foo",
            "$IPOWER:foo",
            "$IJR:foo",
            "$IMB:foo",
            "$IMC:foo",
            "$END:foo"
        ],
        sorted_event_ids
            .iter()
            .map(|id| id.to_string())
            .collect::<Vec<_>>()
    )
}

#[test]
fn test_sort() {
    for _ in 0..20 {
        // since we shuffle the eventIds before we sort them introducing randomness
        // seems like we should test this a few times
        test_event_sort()
    }
}
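In short, the test mirrors conduit's sorting pipeline: it builds the seven initial events, runs reverse_topological_power_sort and iterative_auth_check over the power events, shuffles every event id, and asserts that mainline_sort restores the order $CREATE, $IMA, $IPOWER, $IJR, $IMB, $IMC, $END; test_sort repeats this 20 times because of the shuffling. Assuming the file lands at tests/event_sorting.rs as shown, it can be run on its own with `cargo test --test event_sorting`.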
@@ -1,12 +1,6 @@
 #![allow(clippy::or_fun_call, clippy::expect_fun_call)]
 
-use std::{
-    cell::RefCell,
-    collections::{BTreeMap, BTreeSet},
-    convert::TryFrom,
-    sync::Once,
-    time::UNIX_EPOCH,
-};
+use std::{cell::RefCell, collections::BTreeMap, convert::TryFrom, sync::Once, time::UNIX_EPOCH};
 
 use ruma::{
     events::{
@@ -153,7 +147,7 @@ fn do_check(events: &[StateEvent], edges: Vec<Vec<EventId>>, expected_state_ids:
         }
 
         // TODO The event is just remade, adding the auth_events and prev_events here
-        // UPDATE: the `to_pdu_event` was split into `init` and the fn below, could be better
+        // the `to_pdu_event` was split into `init` and the fn below, could be better
        let e = fake_event;
        let ev_id = e.event_id();
        let event = to_pdu_event(
@@ -165,12 +159,9 @@
             &auth_events,
             prev_events,
         );
 
         // we have to update our store, an actual user of this lib would
         // be giving us state from a DB.
-        //
-        // TODO
-        // TODO we need to convert the `StateResolution::resolve` to use the event_map
-        // because the user of this crate cannot update their DB's state.
         *store.0.borrow_mut().get_mut(&ev_id).unwrap() = event.clone();
 
         state_at_event.insert(node, state_after);
@@ -209,17 +200,6 @@ pub struct TestStore(RefCell<BTreeMap<EventId, StateEvent>>);
 
 #[allow(unused)]
 impl StateStore for TestStore {
-    fn get_events(&self, room_id: &RoomId, events: &[EventId]) -> Result<Vec<StateEvent>, String> {
-        Ok(self
-            .0
-            .borrow()
-            .iter()
-            .filter(|e| events.contains(e.0))
-            .map(|(_, s)| s)
-            .cloned()
-            .collect())
-    }
-
     fn get_event(&self, room_id: &RoomId, event_id: &EventId) -> Result<StateEvent, String> {
         self.0
             .borrow()
@@ -227,65 +207,6 @@ impl StateStore for TestStore {
             .cloned()
             .ok_or(format!("{} not found", event_id.to_string()))
     }
-
-    fn auth_event_ids(
-        &self,
-        room_id: &RoomId,
-        event_ids: &[EventId],
-    ) -> Result<Vec<EventId>, String> {
-        let mut result = vec![];
-        let mut stack = event_ids.to_vec();
-
-        // DFS for auth event chain
-        while !stack.is_empty() {
-            let ev_id = stack.pop().unwrap();
-            if result.contains(&ev_id) {
-                continue;
-            }
-
-            result.push(ev_id.clone());
-
-            let event = self.get_event(room_id, &ev_id).unwrap();
-            stack.extend(event.auth_events());
-        }
-
-        Ok(result)
-    }
-
-    fn auth_chain_diff(
-        &self,
-        room_id: &RoomId,
-        event_ids: Vec<Vec<EventId>>,
-    ) -> Result<Vec<EventId>, String> {
-        use itertools::Itertools;
-
-        let mut chains = vec![];
-        for ids in event_ids {
-            // TODO state store `auth_event_ids` returns self in the event ids list
-            // when an event returns `auth_event_ids` self is not contained
-            let chain = self
-                .auth_event_ids(room_id, &ids)?
-                .into_iter()
-                .collect::<BTreeSet<_>>();
-            chains.push(chain);
-        }
-
-        if let Some(chain) = chains.first() {
-            let rest = chains.iter().skip(1).flatten().cloned().collect();
-            let common = chain.intersection(&rest).collect::<Vec<_>>();
-
-            Ok(chains
-                .iter()
-                .flatten()
-                .filter(|id| !common.contains(&id))
-                .cloned()
-                .collect::<BTreeSet<_>>()
-                .into_iter()
-                .collect())
-        } else {
-            Ok(vec![])
-        }
-    }
 }
 
 fn event_id(id: &str) -> EventId {
@@ -1,5 +1,3 @@
-#![allow(clippy::or_fun_call, clippy::expect_fun_call)]
-
 use std::{
     cell::RefCell,
     collections::{BTreeMap, BTreeSet},
@@ -317,14 +315,20 @@ fn do_check(events: &[StateEvent], edges: Vec<Vec<EventId>>, expected_state_ids:
 
     for pair in INITIAL_EDGES().windows(2) {
         if let [a, b] = &pair {
-            graph.entry(a.clone()).or_insert(vec![]).push(b.clone());
+            graph
+                .entry(a.clone())
+                .or_insert_with(Vec::new)
+                .push(b.clone());
         }
     }
 
     for edge_list in edges {
         for pair in edge_list.windows(2) {
             if let [a, b] = &pair {
-                graph.entry(a.clone()).or_insert(vec![]).push(b.clone());
+                graph
+                    .entry(a.clone())
+                    .or_insert_with(Vec::new)
+                    .push(b.clone());
             }
         }
     }
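The switch from `or_insert(vec![])` to `or_insert_with(Vec::new)` matters because `or_insert` evaluates its argument even when the entry already exists, while `or_insert_with` only calls the closure for a vacant entry; this is the pattern the `clippy::or_fun_call` lint flags, which lines up with the `#![allow(clippy::or_fun_call, clippy::expect_fun_call)]` attribute being dropped at the top of this file. A small standalone example:

use std::collections::BTreeMap;

fn main() {
    let mut graph: BTreeMap<&str, Vec<&str>> = BTreeMap::new();

    // or_insert(vec![]) would construct the empty Vec on every call, even
    // when the entry is already occupied; or_insert_with(Vec::new) only
    // runs Vec::new for a vacant entry.
    graph.entry("a").or_insert_with(Vec::new).push("b");
    graph.entry("a").or_insert_with(Vec::new).push("c");

    assert_eq!(graph["a"], vec!["b", "c"]);
}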
@@ -439,14 +443,16 @@ fn do_check(events: &[StateEvent], edges: Vec<Vec<EventId>>, expected_state_ids:
 
     let mut expected_state = StateMap::new();
     for node in expected_state_ids {
-        let ev = event_map.get(&node).expect(&format!(
-            "{} not found in {:?}",
-            node.to_string(),
-            event_map
-                .keys()
-                .map(ToString::to_string)
-                .collect::<Vec<_>>(),
-        ));
+        let ev = event_map.get(&node).unwrap_or_else(|| {
+            panic!(
+                "{} not found in {:?}",
+                node.to_string(),
+                event_map
+                    .keys()
+                    .map(ToString::to_string)
+                    .collect::<Vec<_>>(),
+            )
+        });
 
         let key = (ev.kind(), ev.state_key());
 
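Similarly, `.expect(&format!(...))` builds the panic message eagerly on every lookup, even the successful ones, which is what `clippy::expect_fun_call` warns about; `unwrap_or_else(|| panic!(...))` defers the formatting to the failure path. A small standalone example:

fn main() {
    let found: Option<&str> = Some("$CREATE:foo");

    // The closure (and the panic! formatting inside it) only runs if the
    // Option is None, unlike expect(&format!(...)), which formats the
    // message up front.
    let ev = found.unwrap_or_else(|| panic!("{} not found", "$CREATE:foo"));
    assert_eq!(ev, "$CREATE:foo");
}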