identifiers: Make EventId a DST
@@ -16,7 +16,7 @@ pub trait Event {
pub type StateMap<T> = BTreeMap<(EventType, Option<String>), T>;

/// A mapping of `EventId` to `T`, usually a `StateEvent`.
-pub type EventMap<T> = BTreeMap<EventId, T>;
+pub type EventMap<T> = BTreeMap<Box<EventId>, T>;

struct StateResolution {
// For now the StateResolution struct is empty. If "caching" `event_map`
@@ -30,10 +30,10 @@ impl StateResolution {
pub fn resolve<E: Event>(
room_id: &RoomId,
room_version: &RoomVersionId,
-state_sets: &[StateMap<EventId>],
-auth_events: Vec<Vec<EventId>>,
+state_sets: &[StateMap<Box<EventId>>],
+auth_events: Vec<Vec<Box<EventId>>>,
event_map: &mut EventMap<Arc<E>>,
-) -> Result<StateMap<EventId>> {;
+) -> Result<StateMap<Box<EventId>>> {;
}

```

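The hunks above show the core of the migration: with `EventId` an unsized (dynamically sized) type, the owned form stored in collections becomes `Box<EventId>`, while borrowed use stays `&EventId`. A minimal sketch of that split, using the standard library's unsized `str` and owned `Box<str>` as stand-ins (the `EventMap` alias name mirrors the one above; everything else is illustrative):

```rust
use std::collections::BTreeMap;

// Stand-in for `EventMap<T> = BTreeMap<Box<EventId>, T>`: the unsized type
// (`str` here, `EventId` in ruma) cannot be stored by value, so owned keys
// are boxed while parameters and lookups keep borrowing.
type EventMap<T> = BTreeMap<Box<str>, T>;

fn insert_event(map: &mut EventMap<u64>, id: &str, depth: u64) {
    // `&str -> Box<str>` mirrors `&EventId -> Box<EventId>`.
    map.insert(id.into(), depth);
}

fn main() {
    let mut map = EventMap::new();
    insert_event(&mut map, "$example:foo", 1);
    // Lookup by the borrowed form works through the `Borrow` impl of `Box<_>`.
    assert_eq!(map.get("$example:foo"), Some(&1));
}
```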
@@ -49,7 +49,7 @@ fn lexico_topo_sort(c: &mut Criterion) {
};
b.iter(|| {
let _ = state_res::lexicographical_topological_sort(&graph, |id| {
-Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.clone()))
+Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.to_owned()))
});
})
});
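In the benchmark closure `id` is now `&EventId`, and calling `.clone()` through a reference to an unsized type merely copies the reference, so the owned sort key has to come from `ToOwned`. A rough illustration with `str` (for `str`, `ToOwned` yields `String`; for `EventId` it yields `Box<EventId>`):

```rust
fn main() {
    let id: &str = "$event:example.org";

    // `.clone()` resolves to `Clone for &T`, so the result is still a borrow.
    let still_borrowed: &str = id.clone();

    // `.to_owned()` goes through `ToOwned` and allocates an owned value.
    let owned: String = id.to_owned();

    assert_eq!(still_borrowed, owned);
}
```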
@@ -104,7 +104,7 @@ fn resolve_deeper_event_set(c: &mut Criterion) {
.map(|ev| {
(
(ev.event_type().to_owned(), ev.state_key().unwrap().to_owned()),
-ev.event_id().clone(),
+ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -122,7 +122,7 @@ fn resolve_deeper_event_set(c: &mut Criterion) {
.map(|ev| {
(
(ev.event_type().to_owned(), ev.state_key().unwrap().to_owned()),
-ev.event_id().clone(),
+ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -161,7 +161,7 @@ criterion_main!(benches);
// IMPLEMENTATION DETAILS AHEAD
//
/////////////////////////////////////////////////////////////////////*/
-struct TestStore<E: Event>(HashMap<EventId, Arc<E>>);
+struct TestStore<E: Event>(HashMap<Box<EventId>, Arc<E>>);

#[allow(unused)]
impl<E: Event> TestStore<E> {
@@ -173,7 +173,7 @@ impl<E: Event> TestStore<E> {
}

/// Returns the events that correspond to the `event_ids` sorted in the same order.
-fn get_events(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<Vec<Arc<E>>> {
+fn get_events(&self, room_id: &RoomId, event_ids: &[Box<EventId>]) -> Result<Vec<Arc<E>>> {
let mut events = vec![];
for id in event_ids {
events.push(self.get_event(room_id, id)?);
@@ -185,8 +185,8 @@ impl<E: Event> TestStore<E> {
fn auth_event_ids(
&self,
room_id: &RoomId,
-event_ids: Vec<EventId>,
-) -> Result<HashSet<EventId>> {
+event_ids: Vec<Box<EventId>>,
+) -> Result<HashSet<Box<EventId>>> {
let mut result = HashSet::new();
let mut stack = event_ids;

@@ -201,18 +201,19 @@ impl<E: Event> TestStore<E> {

let event = self.get_event(room_id, &ev_id)?;

-stack.extend(event.auth_events().cloned());
+stack.extend(event.auth_events().map(ToOwned::to_owned));
}

Ok(result)
}

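`Iterator::cloned` requires the pointee to implement `Clone`, which an unsized `EventId` cannot, so iterators over `&EventId` are converted to owned ids with `map(ToOwned::to_owned)` instead. A small sketch of the same constraint using `str`:

```rust
fn main() {
    // Borrowed ids, like the `&EventId`s yielded by `Event::auth_events()`.
    let ids = ["$a:foo", "$b:foo"];
    let borrowed = ids.iter().copied(); // Iterator<Item = &str>

    // `borrowed.cloned()` would not compile: it requires `str: Clone`, and
    // unsized types (`str`, the new `EventId`) cannot implement `Clone`.

    // Going through `ToOwned` allocates owned values to extend a stack with.
    let mut stack: Vec<String> = Vec::new();
    stack.extend(borrowed.map(ToOwned::to_owned));
    assert_eq!(stack.len(), 2);
}
```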
-/// Returns a Vec<EventId> representing the difference in auth chains of the given `events`.
+/// Returns a Vec<Box<EventId>> representing the difference in auth chains of the given
+/// `events`.
fn auth_chain_diff(
&self,
room_id: &RoomId,
-event_ids: Vec<Vec<EventId>>,
-) -> Result<Vec<EventId>> {
+event_ids: Vec<Vec<Box<EventId>>>,
+) -> Result<Vec<Box<EventId>>> {
let mut auth_chain_sets = vec![];
for ids in event_ids {
// TODO state store `auth_event_ids` returns self in the event ids list
@@ -225,7 +226,7 @@ impl<E: Event> TestStore<E> {
let common = auth_chain_sets
.iter()
.skip(1)
-.fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<EventId>>());
+.fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<Box<EventId>>>());

Ok(auth_chain_sets.into_iter().flatten().filter(|id| !common.contains(id)).collect())
} else {
@@ -235,8 +236,11 @@ impl<E: Event> TestStore<E> {
}

impl TestStore<StateEvent> {
-fn set_up(&mut self) -> (StateMap<EventId>, StateMap<EventId>, StateMap<EventId>) {
-let create_event = to_pdu_event::<EventId>(
+#[allow(clippy::type_complexity)]
+fn set_up(
+&mut self,
+) -> (StateMap<Box<EventId>>, StateMap<Box<EventId>>, StateMap<Box<EventId>>) {
+let create_event = to_pdu_event::<&EventId>(
"CREATE",
alice(),
EventType::RoomCreate,
@@ -245,7 +249,7 @@ impl TestStore<StateEvent> {
&[],
&[],
);
-let cre = create_event.event_id().clone();
+let cre = create_event.event_id().to_owned();
self.0.insert(cre.clone(), Arc::clone(&create_event));

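The turbofish changes from `to_pdu_event::<EventId>` to `to_pdu_event::<&EventId>` because generic type parameters carry an implicit `Sized` bound that the unsized `EventId` no longer satisfies. The helper's real signature is not shown in these hunks, so the following is only a hypothetical stand-in demonstrating the bound:

```rust
// Hypothetical helper: the type parameter is a slice element, so it must be
// `Sized`. With `EventId` unsized, callers switch the turbofish to a
// reference type such as `&EventId`.
fn collect_ids<S: AsRef<str>>(ids: &[S]) -> Vec<String> {
    ids.iter().map(|id| id.as_ref().to_owned()).collect()
}

fn main() {
    // `collect_ids::<str>(..)` would not compile: `str` (like the new
    // `EventId`) does not satisfy the implicit `Sized` bound.
    let ids = collect_ids::<&str>(&["$a:foo", "$b:foo"]);
    assert_eq!(ids.len(), 2);
}
```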
let alice_mem = to_pdu_event(
@@ -257,7 +261,7 @@ impl TestStore<StateEvent> {
&[cre.clone()],
&[cre.clone()],
);
-self.0.insert(alice_mem.event_id().clone(), Arc::clone(&alice_mem));
+self.0.insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));

let join_rules = to_pdu_event(
"IJR",
@@ -265,10 +269,10 @@ impl TestStore<StateEvent> {
EventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
-&[cre.clone(), alice_mem.event_id().clone()],
-&[alice_mem.event_id().clone()],
+&[cre.clone(), alice_mem.event_id().to_owned()],
+&[alice_mem.event_id().to_owned()],
);
-self.0.insert(join_rules.event_id().clone(), join_rules.clone());
+self.0.insert(join_rules.event_id().to_owned(), join_rules.clone());

// Bob and Charlie join at the same time, so there is a fork
// this will be represented in the state_sets when we resolve
@@ -278,10 +282,10 @@ impl TestStore<StateEvent> {
EventType::RoomMember,
Some(bob().to_string().as_str()),
member_content_join(),
-&[cre.clone(), join_rules.event_id().clone()],
-&[join_rules.event_id().clone()],
+&[cre.clone(), join_rules.event_id().to_owned()],
+&[join_rules.event_id().to_owned()],
);
-self.0.insert(bob_mem.event_id().clone(), bob_mem.clone());
+self.0.insert(bob_mem.event_id().to_owned(), bob_mem.clone());

let charlie_mem = to_pdu_event(
"IMC",
@@ -289,17 +293,17 @@ impl TestStore<StateEvent> {
EventType::RoomMember,
Some(charlie().to_string().as_str()),
member_content_join(),
-&[cre, join_rules.event_id().clone()],
-&[join_rules.event_id().clone()],
+&[cre, join_rules.event_id().to_owned()],
+&[join_rules.event_id().to_owned()],
);
-self.0.insert(charlie_mem.event_id().clone(), charlie_mem.clone());
+self.0.insert(charlie_mem.event_id().to_owned(), charlie_mem.clone());

let state_at_bob = [&create_event, &alice_mem, &join_rules, &bob_mem]
.iter()
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -309,7 +313,7 @@ impl TestStore<StateEvent> {
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -319,7 +323,7 @@ impl TestStore<StateEvent> {
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -328,11 +332,11 @@ impl TestStore<StateEvent> {
}
}

-fn event_id(id: &str) -> EventId {
+fn event_id(id: &str) -> Box<EventId> {
if id.contains('$') {
-return EventId::try_from(id).unwrap();
+return id.try_into().unwrap();
}
-EventId::try_from(format!("${}:foo", id)).unwrap()
+format!("${}:foo", id).try_into().unwrap()
}

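Construction now goes through `TryInto`, producing the boxed owned form (`&str` or `String` into `Box<EventId>`) instead of `EventId::try_from(..)`. As a std-only sketch of the same parse-into-an-owned-validated-id shape, with a hypothetical `ValidId` wrapper standing in for `Box<EventId>`:

```rust
use std::convert::{TryFrom, TryInto};

// Hypothetical validated-id wrapper standing in for `Box<EventId>`.
struct ValidId(Box<str>);

impl TryFrom<&str> for ValidId {
    type Error = &'static str;

    fn try_from(s: &str) -> Result<Self, Self::Error> {
        // Matrix event ids start with `$`; reject anything else.
        if s.starts_with('$') {
            Ok(ValidId(s.into()))
        } else {
            Err("not an event id")
        }
    }
}

fn main() {
    // Mirrors `let id: Box<EventId> = "$e:foo".try_into().unwrap();`
    let id: ValidId = "$e:foo".try_into().unwrap();
    assert_eq!(&*id.0, "$e:foo");
}
```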
fn alice() -> UserId {
@@ -384,7 +388,7 @@ where

let state_key = state_key.map(ToOwned::to_owned);
Arc::new(StateEvent {
-event_id: EventId::try_from(id).unwrap(),
+event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id(),
sender,
@@ -407,9 +411,9 @@ where

// all graphs start with these input events
#[allow(non_snake_case)]
-fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
+fn INITIAL_EVENTS() -> HashMap<Box<EventId>, Arc<StateEvent>> {
vec![
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"CREATE",
alice(),
EventType::RoomCreate,
@@ -463,7 +467,7 @@ fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
&["CREATE", "IJR", "IPOWER"],
&["IMB"],
),
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"START",
charlie(),
EventType::RoomTopic,
@@ -472,7 +476,7 @@ fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
&[],
&[],
),
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"END",
charlie(),
EventType::RoomTopic,
@@ -483,13 +487,13 @@ fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
),
]
.into_iter()
-.map(|ev| (ev.event_id().clone(), ev))
+.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}

// all graphs start with these input events
#[allow(non_snake_case)]
-fn BAN_STATE_SET() -> HashMap<EventId, Arc<StateEvent>> {
+fn BAN_STATE_SET() -> HashMap<Box<EventId>, Arc<StateEvent>> {
vec![
to_pdu_event(
"PA",
@@ -529,7 +533,7 @@ fn BAN_STATE_SET() -> HashMap<EventId, Arc<StateEvent>> {
),
]
.into_iter()
-.map(|ev| (ev.event_id().clone(), ev))
+.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}

@@ -602,8 +606,8 @@ mod event {

fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &EventId> + '_> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| id)),
-Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter()),
+Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| &**id)),
+Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter().map(|id| &**id)),
#[cfg(not(feature = "unstable-exhaustive-types"))]
_ => unreachable!("new PDU version"),
}
@@ -611,8 +615,8 @@ mod event {

fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &EventId> + '_> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| id)),
-Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter()),
+Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| &**id)),
+Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter().map(|id| &**id)),
#[cfg(not(feature = "unstable-exhaustive-types"))]
_ => unreachable!("new PDU version"),
}
@@ -620,8 +624,8 @@ mod event {

fn redacts(&self) -> Option<&EventId> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => ev.redacts.as_ref(),
-Pdu::RoomV3Pdu(ev) => ev.redacts.as_ref(),
+Pdu::RoomV1Pdu(ev) => ev.redacts.as_deref(),
+Pdu::RoomV3Pdu(ev) => ev.redacts.as_deref(),
#[cfg(not(feature = "unstable-exhaustive-types"))]
_ => unreachable!("new PDU version"),
}
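The `Event` accessors keep returning borrows: iterators over stored `Box<EventId>`s are mapped with `&**id` to yield `&EventId`, and the `Option<Box<EventId>>` behind `redacts()` is exposed with `Option::as_deref`. The same pattern with `Box<str>`:

```rust
fn main() {
    let prev_events: Vec<Box<str>> = vec!["$a:foo".into(), "$b:foo".into()];
    let redacts: Option<Box<str>> = Some("$c:foo".into());

    // `&**id`: `&Box<str> -> Box<str> -> str -> &str`, borrowing the pointee
    // instead of the box, matching `Item = &EventId` in the trait signature.
    let borrowed: Vec<&str> = prev_events.iter().map(|id| &**id).collect();

    // `as_deref` turns `Option<Box<str>>` into `Option<&str>` the same way
    // `Option<Box<EventId>>` becomes `Option<&EventId>` in `redacts()`.
    let redacts_ref: Option<&str> = redacts.as_deref();

    assert_eq!(borrowed, ["$a:foo", "$b:foo"]);
    assert_eq!(redacts_ref, Some("$c:foo"));
}
```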
@@ -630,7 +634,7 @@ mod event {

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct StateEvent {
-pub event_id: EventId,
+pub event_id: Box<EventId>,
#[serde(flatten)]
pub rest: Pdu,
}

@@ -31,7 +31,7 @@ pub use state_event::Event;
pub type StateMap<T> = HashMap<(EventType, String), T>;

/// A mapping of `EventId` to `T`, usually a `ServerPdu`.
-type EventMap<T> = HashMap<EventId, T>;
+type EventMap<T> = HashMap<Box<EventId>, T>;

/// Resolve sets of state events as they come in.
///
@@ -56,12 +56,12 @@ type EventMap<T> = HashMap<EventId, T>;
pub fn resolve<'a, E, SSI>(
room_version: &RoomVersionId,
state_sets: impl IntoIterator<IntoIter = SSI>,
-auth_chain_sets: Vec<HashSet<EventId>>,
+auth_chain_sets: Vec<HashSet<Box<EventId>>>,
fetch_event: impl Fn(&EventId) -> Option<E>,
-) -> Result<StateMap<EventId>>
+) -> Result<StateMap<Box<EventId>>>
where
E: Event + Clone,
-SSI: Iterator<Item = &'a StateMap<EventId>> + Clone,
+SSI: Iterator<Item = &'a StateMap<Box<EventId>>> + Clone,
{
info!("State resolution starting");

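The public `resolve` signature keeps the convention used throughout the commit: parameters and callbacks borrow (`impl Fn(&EventId) -> Option<E>`), owned collections and return values hold `Box<EventId>`, and lookups across that boundary work because boxed identifiers borrow as their unsized form (the tests below rely on this with `|id| events.get(id).map(Arc::clone)`). A minimal sketch with `Box<str>` keys:

```rust
use std::collections::HashMap;
use std::sync::Arc;

fn main() {
    // Owned storage keyed by the boxed id, as in `EventMap<Arc<E>>`.
    let mut events: HashMap<Box<str>, Arc<u64>> = HashMap::new();
    events.insert("$a:foo".into(), Arc::new(1));

    // A `fetch_event`-style closure: takes a borrowed id, returns a cloned
    // Arc. `HashMap<Box<str>, _>` can be queried with `&str` because
    // `Box<str>: Borrow<str>`, which is the shape ruma leans on here.
    let fetch_event = |id: &str| events.get(id).map(Arc::clone);

    assert_eq!(fetch_event("$a:foo").as_deref(), Some(&1));
    assert!(fetch_event("$missing:foo").is_none());
}
```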
@@ -124,7 +124,7 @@ where
// auth
let events_to_resolve = all_conflicted
.iter()
-.filter(|id| !deduped_power_ev.contains(id))
+.filter(|&id| !deduped_power_ev.contains(id))
.cloned()
.collect::<Vec<_>>();

@@ -136,7 +136,8 @@ where

debug!("power event: {:?}", power_event);

-let sorted_left_events = mainline_sort(&events_to_resolve, power_event, &fetch_event)?;
+let sorted_left_events =
+mainline_sort(&events_to_resolve, power_event.map(|id| &**id), &fetch_event)?;

trace!("events left, sorted: {:?}", sorted_left_events);

@@ -161,8 +162,8 @@ where
/// exactly one eventId. This includes missing events, if one state_set includes an event that none
/// of the other have this is a conflicting event.
fn separate<'a>(
-state_sets_iter: impl Iterator<Item = &'a StateMap<EventId>> + Clone,
-) -> (StateMap<EventId>, StateMap<Vec<EventId>>) {
+state_sets_iter: impl Iterator<Item = &'a StateMap<Box<EventId>>> + Clone,
+) -> (StateMap<Box<EventId>>, StateMap<Vec<Box<EventId>>>) {
let mut unconflicted_state = StateMap::new();
let mut conflicted_state = StateMap::new();

@@ -186,10 +187,12 @@ fn separate<'a>(
}

/// Returns a Vec of deduped EventIds that appear in some chains but not others.
-fn get_auth_chain_diff(auth_chain_sets: Vec<HashSet<EventId>>) -> impl Iterator<Item = EventId> {
+fn get_auth_chain_diff(
+auth_chain_sets: Vec<HashSet<Box<EventId>>>,
+) -> impl Iterator<Item = Box<EventId>> {
let num_sets = auth_chain_sets.len();

-let mut id_counts: HashMap<EventId, usize> = HashMap::new();
+let mut id_counts: HashMap<Box<EventId>, usize> = HashMap::new();
for id in auth_chain_sets.into_iter().flatten() {
*id_counts.entry(id).or_default() += 1;
}
@@ -205,10 +208,10 @@ fn get_auth_chain_diff(auth_chain_sets: Vec<HashSet<EventId>>) -> impl Iterator<
/// The power level is negative because a higher power level is equated to an earlier (further back
/// in time) origin server timestamp.
fn reverse_topological_power_sort<E: Event>(
-events_to_sort: Vec<EventId>,
-auth_diff: &HashSet<EventId>,
+events_to_sort: Vec<Box<EventId>>,
+auth_diff: &HashSet<Box<EventId>>,
fetch_event: impl Fn(&EventId) -> Option<E>,
-) -> Result<Vec<EventId>> {
+) -> Result<Vec<Box<EventId>>> {
debug!("reverse topological sort of power events");

let mut graph = HashMap::new();
@@ -242,7 +245,7 @@ fn reverse_topological_power_sort<E: Event>(
// This return value is the key used for sorting events,
// events are then sorted by power level, time,
// and lexically by event_id.
-Ok((-*pl, ev.origin_server_ts(), ev.event_id().clone()))
+Ok((-*pl, ev.origin_server_ts(), ev.event_id().to_owned()))
})
}

@@ -251,11 +254,11 @@ fn reverse_topological_power_sort<E: Event>(
/// `key_fn` is used as a tie breaker. The tie breaker happens based on power level, age, and
/// event_id.
pub fn lexicographical_topological_sort<F>(
-graph: &HashMap<EventId, HashSet<EventId>>,
+graph: &HashMap<Box<EventId>, HashSet<Box<EventId>>>,
key_fn: F,
-) -> Result<Vec<EventId>>
+) -> Result<Vec<Box<EventId>>>
where
-F: Fn(&EventId) -> Result<(Int, MilliSecondsSinceUnixEpoch, EventId)>,
+F: Fn(&EventId) -> Result<(Int, MilliSecondsSinceUnixEpoch, Box<EventId>)>,
{
info!("starting lexicographical topological sort");
// NOTE: an event that has no incoming edges happened most recently,
@@ -271,7 +274,7 @@ where

// The number of events that depend on the given event (the EventId key)
// How many events reference this event in the DAG as a parent
-let mut reverse_graph: HashMap<&EventId, HashSet<&EventId>> = HashMap::new();
+let mut reverse_graph: HashMap<_, HashSet<_>> = HashMap::new();

// Vec of nodes that have zero out degree, least recent events.
let mut zero_outdegree = vec![];
@@ -295,8 +298,7 @@ where
let mut sorted = vec![];
// Destructure the `Reverse` and take the smallest `node` each time
while let Some(Reverse((_, node))) = heap.pop() {
-let node: &EventId = node;
-for parent in reverse_graph.get(node).expect("EventId in heap is also in reverse_graph") {
+for &parent in reverse_graph.get(node).expect("EventId in heap is also in reverse_graph") {
// The number of outgoing edges this node has
let out = outdegree_map
.get_mut(parent)
@@ -310,7 +312,7 @@ where
}

// synapse yields we push then return the vec
-sorted.push(node.clone());
+sorted.push(node.to_owned());
}

Ok(sorted)
@@ -373,16 +375,16 @@ fn get_power_level_for_sender<E: Event>(
/// ## Returns
///
/// The `unconflicted_state` combined with the newly auth'ed events. So any event that fails the
-/// `event_auth::auth_check` will be excluded from the returned `StateMap<EventId>`.
+/// `event_auth::auth_check` will be excluded from the returned `StateMap<Box<EventId>>`.
///
/// For each `events_to_check` event we gather the events needed to auth it from the the
/// `fetch_event` closure and verify each event using the `event_auth::auth_check` function.
fn iterative_auth_check<E: Event + Clone>(
room_version: &RoomVersion,
-events_to_check: &[EventId],
-unconflicted_state: StateMap<EventId>,
+events_to_check: &[Box<EventId>],
+unconflicted_state: StateMap<Box<EventId>>,
fetch_event: impl Fn(&EventId) -> Option<E>,
-) -> Result<StateMap<EventId>> {
+) -> Result<StateMap<Box<EventId>>> {
info!("starting iterative auth check");

debug!("performing auth checks on {:?}", events_to_check);
@@ -476,10 +478,10 @@ fn iterative_auth_check<E: Event + Clone>(
/// the events before (with the first power level as a parent) will be marked as depth 1. depth 1 is
/// "older" than depth 0.
fn mainline_sort<E: Event>(
-to_sort: &[EventId],
+to_sort: &[Box<EventId>],
resolved_power_level: Option<&EventId>,
fetch_event: impl Fn(&EventId) -> Option<E>,
-) -> Result<Vec<EventId>> {
+) -> Result<Vec<Box<EventId>>> {
debug!("mainline sort of events");

// There are no EventId's to sort, bail.
@@ -488,7 +490,7 @@ fn mainline_sort<E: Event>(
}

let mut mainline = vec![];
-let mut pl = resolved_power_level.cloned();
+let mut pl = resolved_power_level.map(ToOwned::to_owned);
while let Some(p) = pl {
mainline.push(p.clone());

@@ -499,7 +501,7 @@ fn mainline_sort<E: Event>(
let ev = fetch_event(aid)
.ok_or_else(|| Error::NotFound(format!("Failed to find {}", aid)))?;
if is_type_and_key(&ev, &EventType::RoomPowerLevels, "") {
-pl = Some(aid.clone());
+pl = Some(aid.to_owned());
break;
}
}
@@ -548,7 +550,7 @@ fn get_mainline_depth<E: Event>(
) -> Result<usize> {
while let Some(sort_ev) = event {
debug!("mainline event_id {}", sort_ev.event_id());
-let id = &sort_ev.event_id();
+let id = sort_ev.event_id();
if let Some(depth) = mainline_map.get(id) {
return Ok(*depth);
}
@@ -568,9 +570,9 @@ fn get_mainline_depth<E: Event>(
}

fn add_event_and_auth_chain_to_graph<E: Event>(
-graph: &mut HashMap<EventId, HashSet<EventId>>,
-event_id: EventId,
-auth_diff: &HashSet<EventId>,
+graph: &mut HashMap<Box<EventId>, HashSet<Box<EventId>>>,
+event_id: Box<EventId>,
+auth_diff: &HashSet<Box<EventId>>,
fetch_event: impl Fn(&EventId) -> Option<E>,
) {
let mut state = vec![event_id];
@@ -580,11 +582,11 @@ fn add_event_and_auth_chain_to_graph<E: Event>(
for aid in fetch_event(&eid).as_ref().map(|ev| ev.auth_events()).into_iter().flatten() {
if auth_diff.contains(aid) {
if !graph.contains_key(aid) {
-state.push(aid.clone());
+state.push(aid.to_owned());
}

// We just inserted this at the start of the while loop
-graph.get_mut(&eid).unwrap().insert(aid.clone());
+graph.get_mut(&eid).unwrap().insert(aid.to_owned());
}
}
}
@@ -690,8 +692,10 @@ mod tests {
let power_level = resolved_power.get(&(EventType::RoomPowerLevels, "".to_owned()));

let sorted_event_ids =
-crate::mainline_sort(&events_to_sort, power_level, |id| events.get(id).map(Arc::clone))
-.unwrap();
+crate::mainline_sort(&events_to_sort, power_level.map(|id| &**id), |id| {
+events.get(id).map(Arc::clone)
+})
+.unwrap();

assert_eq!(
vec![
@@ -1066,7 +1070,7 @@ mod tests {
};

let res = crate::lexicographical_topological_sort(&graph, |id| {
-Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.clone()))
+Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.to_owned()))
})
.unwrap();

@@ -1193,7 +1197,7 @@ mod tests {
}

#[allow(non_snake_case)]
-fn BAN_STATE_SET() -> HashMap<EventId, Arc<StateEvent>> {
+fn BAN_STATE_SET() -> HashMap<Box<EventId>, Arc<StateEvent>> {
vec![
to_pdu_event(
"PA",
@@ -1238,7 +1242,7 @@ mod tests {
}

#[allow(non_snake_case)]
-fn JOIN_RULE() -> HashMap<EventId, Arc<StateEvent>> {
+fn JOIN_RULE() -> HashMap<Box<EventId>, Arc<StateEvent>> {
vec![
to_pdu_event(
"JR",

@@ -1,6 +1,6 @@
use std::{
collections::{BTreeMap, HashMap, HashSet},
-convert::{TryFrom, TryInto},
+convert::TryInto,
sync::{
atomic::{AtomicU64, Ordering::SeqCst},
Arc,
@@ -32,15 +32,19 @@ static SERVER_TIMESTAMP: AtomicU64 = AtomicU64::new(0);

pub fn do_check(
events: &[Arc<StateEvent>],
-edges: Vec<Vec<EventId>>,
-expected_state_ids: Vec<EventId>,
+edges: Vec<Vec<Box<EventId>>>,
+expected_state_ids: Vec<Box<EventId>>,
) {
// To activate logging use `RUST_LOG=debug cargo t`

let init_events = INITIAL_EVENTS();

let mut store = TestStore(
-init_events.values().chain(events).map(|ev| (ev.event_id().clone(), ev.clone())).collect(),
+init_events
+.values()
+.chain(events)
+.map(|ev| (ev.event_id().to_owned(), ev.clone()))
+.collect(),
);

// This will be lexi_topo_sorted for resolution
@@ -51,42 +55,42 @@ pub fn do_check(
// Create the DB of events that led up to this point
// TODO maybe clean up some of these clones it is just tests but...
for ev in init_events.values().chain(events) {
-graph.insert(ev.event_id().clone(), HashSet::new());
-fake_event_map.insert(ev.event_id().clone(), ev.clone());
+graph.insert(ev.event_id().to_owned(), HashSet::new());
+fake_event_map.insert(ev.event_id().to_owned(), ev.clone());
}

for pair in INITIAL_EDGES().windows(2) {
if let [a, b] = &pair {
-graph.entry(a.clone()).or_insert_with(HashSet::new).insert(b.clone());
+graph.entry(a.to_owned()).or_insert_with(HashSet::new).insert(b.clone());
}
}

for edge_list in edges {
for pair in edge_list.windows(2) {
if let [a, b] = &pair {
-graph.entry(a.clone()).or_insert_with(HashSet::new).insert(b.clone());
+graph.entry(a.to_owned()).or_insert_with(HashSet::new).insert(b.clone());
}
}
}

// event_id -> StateEvent
-let mut event_map: HashMap<EventId, Arc<StateEvent>> = HashMap::new();
-// event_id -> StateMap<EventId>
-let mut state_at_event: HashMap<EventId, StateMap<EventId>> = HashMap::new();
+let mut event_map: HashMap<Box<EventId>, Arc<StateEvent>> = HashMap::new();
+// event_id -> StateMap<Box<EventId>>
+let mut state_at_event: HashMap<Box<EventId>, StateMap<Box<EventId>>> = HashMap::new();

// Resolve the current state and add it to the state_at_event map then continue
// on in "time"
for node in crate::lexicographical_topological_sort(&graph, |id| {
-Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.clone()))
+Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0)), id.to_owned()))
})
.unwrap()
{
let fake_event = fake_event_map.get(&node).unwrap();
-let event_id = fake_event.event_id().clone();
+let event_id = fake_event.event_id().to_owned();

let prev_events = graph.get(&node).unwrap();

-let state_before: StateMap<EventId> = if prev_events.is_empty() {
+let state_before: StateMap<Box<EventId>> = if prev_events.is_empty() {
HashMap::new()
} else if prev_events.len() == 1 {
state_at_event.get(prev_events.iter().next().unwrap()).unwrap().clone()
@@ -126,7 +130,7 @@ pub fn do_check(

let ty = fake_event.event_type().to_owned();
let key = fake_event.state_key().unwrap().to_owned();
-state_after.insert((ty, key), event_id.clone());
+state_after.insert((ty, key), event_id.to_owned());

let auth_types = auth_types_for_event(
fake_event.event_type(),
@@ -146,7 +150,7 @@ pub fn do_check(
// TODO The event is just remade, adding the auth_events and prev_events here
// the `to_pdu_event` was split into `init` and the fn below, could be better
let e = fake_event;
-let ev_id = e.event_id().clone();
+let ev_id = e.event_id();
let event = to_pdu_event(
e.event_id().as_str(),
e.sender().clone(),
@@ -159,10 +163,10 @@ pub fn do_check(

// We have to update our store, an actual user of this lib would
// be giving us state from a DB.
-store.0.insert(ev_id.clone(), event.clone());
+store.0.insert(ev_id.to_owned(), event.clone());

state_at_event.insert(node, state_after);
-event_map.insert(event_id.clone(), Arc::clone(store.0.get(&ev_id).unwrap()));
+event_map.insert(event_id.to_owned(), Arc::clone(store.0.get(ev_id).unwrap()));
}

let mut expected_state = StateMap::new();
@@ -180,10 +184,10 @@ pub fn do_check(
expected_state.insert(key, node);
}

-let start_state = state_at_event.get(&event_id!("$START:foo")).unwrap();
+let start_state = state_at_event.get(event_id!("$START:foo")).unwrap();

let end_state = state_at_event
-.get(&event_id!("$END:foo"))
+.get(event_id!("$END:foo"))
.unwrap()
.iter()
.filter(|(k, v)| {
@@ -195,13 +199,13 @@ pub fn do_check(
&& **k != (EventType::RoomMessage, "dummy".to_owned())
})
.map(|(k, v)| (k.clone(), v.clone()))
-.collect::<StateMap<EventId>>();
+.collect::<StateMap<Box<EventId>>>();

assert_eq!(expected_state, end_state);
}

#[allow(clippy::exhaustive_structs)]
-pub struct TestStore<E: Event>(pub HashMap<EventId, Arc<E>>);
+pub struct TestStore<E: Event>(pub HashMap<Box<EventId>, Arc<E>>);

impl<E: Event> TestStore<E> {
pub fn get_event(&self, _: &RoomId, event_id: &EventId) -> Result<Arc<E>> {
@@ -215,8 +219,8 @@ impl<E: Event> TestStore<E> {
pub fn auth_event_ids(
&self,
room_id: &RoomId,
-event_ids: Vec<EventId>,
-) -> Result<HashSet<EventId>> {
+event_ids: Vec<Box<EventId>>,
+) -> Result<HashSet<Box<EventId>>> {
let mut result = HashSet::new();
let mut stack = event_ids;

@@ -230,7 +234,7 @@ impl<E: Event> TestStore<E> {

let event = self.get_event(room_id, &ev_id)?;

-stack.extend(event.auth_events().cloned());
+stack.extend(event.auth_events().map(ToOwned::to_owned));
}

Ok(result)
@@ -238,9 +242,12 @@ impl<E: Event> TestStore<E> {
}

// A StateStore implementation for testing
+#[allow(clippy::type_complexity)]
impl TestStore<StateEvent> {
-pub fn set_up(&mut self) -> (StateMap<EventId>, StateMap<EventId>, StateMap<EventId>) {
-let create_event = to_pdu_event::<EventId>(
+pub fn set_up(
+&mut self,
+) -> (StateMap<Box<EventId>>, StateMap<Box<EventId>>, StateMap<Box<EventId>>) {
+let create_event = to_pdu_event::<&EventId>(
"CREATE",
alice(),
EventType::RoomCreate,
@@ -249,7 +256,7 @@ impl TestStore<StateEvent> {
&[],
&[],
);
-let cre = create_event.event_id().clone();
+let cre = create_event.event_id().to_owned();
self.0.insert(cre.clone(), Arc::clone(&create_event));

let alice_mem = to_pdu_event(
@@ -261,7 +268,7 @@ impl TestStore<StateEvent> {
&[cre.clone()],
&[cre.clone()],
);
-self.0.insert(alice_mem.event_id().clone(), Arc::clone(&alice_mem));
+self.0.insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));

let join_rules = to_pdu_event(
"IJR",
@@ -269,10 +276,10 @@ impl TestStore<StateEvent> {
EventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
-&[cre.clone(), alice_mem.event_id().clone()],
-&[alice_mem.event_id().clone()],
+&[cre.clone(), alice_mem.event_id().to_owned()],
+&[alice_mem.event_id().to_owned()],
);
-self.0.insert(join_rules.event_id().clone(), join_rules.clone());
+self.0.insert(join_rules.event_id().to_owned(), join_rules.clone());

// Bob and Charlie join at the same time, so there is a fork
// this will be represented in the state_sets when we resolve
@@ -282,10 +289,10 @@ impl TestStore<StateEvent> {
EventType::RoomMember,
Some(bob().to_string().as_str()),
member_content_join(),
-&[cre.clone(), join_rules.event_id().clone()],
-&[join_rules.event_id().clone()],
+&[cre.clone(), join_rules.event_id().to_owned()],
+&[join_rules.event_id().to_owned()],
);
-self.0.insert(bob_mem.event_id().clone(), bob_mem.clone());
+self.0.insert(bob_mem.event_id().to_owned(), bob_mem.clone());

let charlie_mem = to_pdu_event(
"IMC",
@@ -293,17 +300,17 @@ impl TestStore<StateEvent> {
EventType::RoomMember,
Some(charlie().to_string().as_str()),
member_content_join(),
-&[cre, join_rules.event_id().clone()],
-&[join_rules.event_id().clone()],
+&[cre, join_rules.event_id().to_owned()],
+&[join_rules.event_id().to_owned()],
);
-self.0.insert(charlie_mem.event_id().clone(), charlie_mem.clone());
+self.0.insert(charlie_mem.event_id().to_owned(), charlie_mem.clone());

let state_at_bob = [&create_event, &alice_mem, &join_rules, &bob_mem]
.iter()
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -313,7 +320,7 @@ impl TestStore<StateEvent> {
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -323,7 +330,7 @@ impl TestStore<StateEvent> {
.map(|e| {
(
(e.event_type().to_owned(), e.state_key().unwrap().to_owned()),
-e.event_id().clone(),
+e.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
@@ -332,11 +339,12 @@ impl TestStore<StateEvent> {
}
}

-pub fn event_id(id: &str) -> EventId {
+pub fn event_id(id: &str) -> Box<EventId> {
if id.contains('$') {
-return EventId::try_from(id).unwrap();
+return id.try_into().unwrap();
}
-EventId::try_from(format!("${}:foo", id)).unwrap()
+
+format!("${}:foo", id).try_into().unwrap()
}

pub fn alice() -> UserId {
@@ -383,7 +391,7 @@ pub fn to_init_pdu_event(

let state_key = state_key.map(ToOwned::to_owned);
Arc::new(StateEvent {
-event_id: EventId::try_from(id).unwrap(),
+event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id(),
sender,
@@ -423,7 +431,7 @@ where

let state_key = state_key.map(ToOwned::to_owned);
Arc::new(StateEvent {
-event_id: EventId::try_from(id).unwrap(),
+event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id(),
sender,
@@ -446,9 +454,9 @@ where

// all graphs start with these input events
#[allow(non_snake_case)]
-pub fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
+pub fn INITIAL_EVENTS() -> HashMap<Box<EventId>, Arc<StateEvent>> {
vec![
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"CREATE",
alice(),
EventType::RoomCreate,
@@ -502,7 +510,7 @@ pub fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
&["CREATE", "IJR", "IPOWER"],
&["IMB"],
),
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"START",
charlie(),
EventType::RoomMessage,
@@ -511,7 +519,7 @@ pub fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
&[],
&[],
),
-to_pdu_event::<EventId>(
+to_pdu_event::<&EventId>(
"END",
charlie(),
EventType::RoomMessage,
@@ -522,12 +530,12 @@ pub fn INITIAL_EVENTS() -> HashMap<EventId, Arc<StateEvent>> {
),
]
.into_iter()
-.map(|ev| (ev.event_id().clone(), ev))
+.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}

#[allow(non_snake_case)]
-pub fn INITIAL_EDGES() -> Vec<EventId> {
+pub fn INITIAL_EDGES() -> Vec<Box<EventId>> {
vec!["START", "IMC", "IMB", "IJR", "IPOWER", "IMA", "CREATE"]
.into_iter()
.map(event_id)
@@ -603,8 +611,8 @@ pub mod event {

fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &EventId> + '_> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| id)),
-Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter()),
+Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| &**id)),
+Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter().map(|id| &**id)),
#[allow(unreachable_patterns)]
_ => unreachable!("new PDU version"),
}
@@ -612,8 +620,8 @@ pub mod event {

fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &EventId> + '_> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| id)),
-Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter()),
+Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| &**id)),
+Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter().map(|id| &**id)),
#[allow(unreachable_patterns)]
_ => unreachable!("new PDU version"),
}
@@ -621,8 +629,8 @@ pub mod event {

fn redacts(&self) -> Option<&EventId> {
match &self.rest {
-Pdu::RoomV1Pdu(ev) => ev.redacts.as_ref(),
-Pdu::RoomV3Pdu(ev) => ev.redacts.as_ref(),
+Pdu::RoomV1Pdu(ev) => ev.redacts.as_deref(),
+Pdu::RoomV3Pdu(ev) => ev.redacts.as_deref(),
#[allow(unreachable_patterns)]
_ => unreachable!("new PDU version"),
}
@@ -632,7 +640,7 @@ pub mod event {
#[derive(Clone, Debug, Deserialize, Serialize)]
#[allow(clippy::exhaustive_structs)]
pub struct StateEvent {
-pub event_id: EventId,
+pub event_id: Box<EventId>,
#[serde(flatten)]
pub rest: Pdu,
}