
Commit a4dc0ce

!start testing the full event cache lazy-load flow

1 parent: 2b048d8

File tree

1 file changed (+257 −2 lines)


crates/matrix-sdk/tests/integration/event_cache.rs (+257 −2)
@@ -15,18 +15,20 @@ use matrix_sdk::{
         paginator::PaginatorState, BackPaginationOutcome, EventCacheError, PaginationToken,
         RoomEventCacheUpdate, TimelineHasBeenResetWhilePaginating,
     },
+    linked_chunk::{ChunkIdentifier, Position, Update},
     test_utils::{
         assert_event_matches_msg,
         mocks::{MatrixMockServer, RoomMessagesResponseTemplate},
     },
 };
+use matrix_sdk_base::event_cache::Gap;
 use matrix_sdk_test::{
-    async_test, event_factory::EventFactory, GlobalAccountDataTestEvent, JoinedRoomBuilder,
+    async_test, event_factory::EventFactory, GlobalAccountDataTestEvent, JoinedRoomBuilder, ALICE,
 };
 use ruma::{
     event_id,
     events::{AnySyncMessageLikeEvent, AnySyncTimelineEvent},
-    room_id, user_id, RoomVersionId,
+    room_id, user_id, EventId, RoomVersionId,
 };
 use serde_json::json;
 use tokio::{spawn, sync::broadcast, time::sleep};
@@ -1608,3 +1610,256 @@ async fn test_apply_redaction_when_redacted_and_redaction_are_in_same_sync() {
     // That's all, folks!
     assert!(subscriber.is_empty());
 }
+
+macro_rules! assert_event_id {
+    ($timeline_event:expr, $event_id:literal) => {
+        assert_eq!($timeline_event.event_id().unwrap().as_str(), $event_id);
+    };
+}
+
+#[async_test]
+async fn test_lazy_loading() {
+    let room_id = room_id!("!foo:bar.baz");
+    let event_factory = EventFactory::new().room(room_id).sender(&ALICE);
+
+    let mock_server = MatrixMockServer::new().await;
+    let client = mock_server.client_builder().build().await;
+
+    // Set up the event cache store.
+    {
+        let event_cache_store = client.event_cache_store().lock().await.unwrap();
+
+        // The event cache contains 4 chunks, as follows (from newest to oldest):
+        // 4. a chunk of 7 items
+        // 3. a chunk of 5 items
+        // 2. a chunk of a gap
+        // 1. a chunk of 6 items
+        event_cache_store
+            .handle_linked_chunk_updates(
+                room_id,
+                vec![
+                    // chunk #1
+                    Update::NewItemsChunk {
+                        previous: None,
+                        new: ChunkIdentifier::new(0),
+                        next: None,
+                    },
+                    // … and its 6 items
+                    Update::PushItems {
+                        at: Position::new(ChunkIdentifier::new(0), 0),
+                        items: (0..6)
+                            .map(|nth| {
+                                event_factory
+                                    .text_msg("foo")
+                                    .event_id(&EventId::parse(format!("$ev0_{nth}")).unwrap())
+                                    .into_event()
+                            })
+                            .collect::<Vec<_>>(),
+                    },
+                    // chunk #2
+                    Update::NewGapChunk {
+                        previous: Some(ChunkIdentifier::new(0)),
+                        new: ChunkIdentifier::new(1),
+                        next: None,
+                        gap: Gap { prev_token: "raclette".to_owned() },
+                    },
+                    // chunk #3
+                    Update::NewItemsChunk {
+                        previous: Some(ChunkIdentifier::new(1)),
+                        new: ChunkIdentifier::new(2),
+                        next: None,
+                    },
+                    // … and its 5 items
+                    Update::PushItems {
+                        at: Position::new(ChunkIdentifier::new(2), 0),
+                        items: (0..5)
+                            .map(|nth| {
+                                event_factory
+                                    .text_msg("foo")
+                                    .event_id(&EventId::parse(format!("$ev2_{nth}")).unwrap())
+                                    .into_event()
+                            })
+                            .collect::<Vec<_>>(),
+                    },
+                    // chunk #4
+                    Update::NewItemsChunk {
+                        previous: Some(ChunkIdentifier::new(2)),
+                        new: ChunkIdentifier::new(3),
+                        next: None,
+                    },
+                    // … and its 7 items
+                    Update::PushItems {
+                        at: Position::new(ChunkIdentifier::new(3), 0),
+                        items: (0..7)
+                            .map(|nth| {
+                                event_factory
+                                    .text_msg("foo")
+                                    .event_id(&EventId::parse(format!("$ev3_{nth}")).unwrap())
+                                    .into_event()
+                            })
+                            .collect::<Vec<_>>(),
+                    },
+                ],
+            )
+            .await
+            .unwrap();
+    }
+
+    // Set up the event cache.
+    let event_cache = client.event_cache();
+    event_cache.subscribe().unwrap();
+    event_cache.enable_storage().unwrap();
+
+    let room = mock_server.sync_joined_room(&client, room_id).await;
+    let (room_event_cache, _room_event_cache_drop_handle) = room.event_cache().await.unwrap();
+
+    let (initial_updates, mut updates_stream) = room_event_cache.subscribe().await;
+
+    // Initial events!
+    //
+    // Only 7 events are loaded! They are from the last chunk.
+    {
+        assert_eq!(initial_updates.len(), 7);
+
+        // Yummy, exactly what we expect!
+        assert_event_id!(initial_updates[0], "$ev3_0");
+        assert_event_id!(initial_updates[1], "$ev3_1");
+        assert_event_id!(initial_updates[2], "$ev3_2");
+        assert_event_id!(initial_updates[3], "$ev3_3");
+        assert_event_id!(initial_updates[4], "$ev3_4");
+        assert_event_id!(initial_updates[5], "$ev3_5");
+        assert_event_id!(initial_updates[6], "$ev3_6");
+
+        // The stream of updates is waiting, patiently.
+        assert!(updates_stream.is_empty());
+    }
+
+    // Let's paginate to load more!
+    //
+    // One more chunk will be loaded from the store. This new chunk contains 5
+    // items. No need to reach the network.
+    {
+        let pagination_outcome =
+            room_event_cache.pagination().run_backwards(10, once).await.unwrap();
+
+        // Oh! 5 events! How classy.
+        assert_eq!(pagination_outcome.events.len(), 5);
+
+        // Hello you. Well… Uoy olleh! Remember, this is a backwards pagination, so
+        // events are returned in reverse order.
+        assert_event_id!(pagination_outcome.events[0], "$ev2_4");
+        assert_event_id!(pagination_outcome.events[1], "$ev2_3");
+        assert_event_id!(pagination_outcome.events[2], "$ev2_2");
+        assert_event_id!(pagination_outcome.events[3], "$ev2_1");
+        assert_event_id!(pagination_outcome.events[4], "$ev2_0");
+
+        // And there is more!
+        assert!(pagination_outcome.reached_start.not());
+
+        // Let's check the stream. It should reflect what the
+        // `pagination_outcome` provides.
+        let update = updates_stream.recv().await.unwrap();
+
+        assert_matches!(update, RoomEventCacheUpdate::UpdateTimelineEvents { diffs, .. } => {
+            // 5 diffs?! *feigns surprise*
+            assert_eq!(diffs.len(), 5);
+
+            // Hello you. Again. This time in the expected order.
+            assert_matches!(&diffs[0], VectorDiff::Insert { index: 0, value: event } => {
+                assert_event_id!(event, "$ev2_0");
+            });
+            assert_matches!(&diffs[1], VectorDiff::Insert { index: 1, value: event } => {
+                assert_event_id!(event, "$ev2_1");
+            });
+            assert_matches!(&diffs[2], VectorDiff::Insert { index: 2, value: event } => {
+                assert_event_id!(event, "$ev2_2");
+            });
+            assert_matches!(&diffs[3], VectorDiff::Insert { index: 3, value: event } => {
+                assert_event_id!(event, "$ev2_3");
+            });
+            assert_matches!(&diffs[4], VectorDiff::Insert { index: 4, value: event } => {
+                assert_event_id!(event, "$ev2_4");
+            });
+        });
+
+        assert!(updates_stream.is_empty());
+    }
+
+    // This is a funny game. We are having fun, aren't we? Let's paginate to load
+    // moaare!
+    //
+    // One more chunk will be loaded from the store. This new chunk contains a
+    // gap. The network will be reached. 4 events will be received and inserted
+    // into the event cache store, forever 🫶.
+    {
+        let _network_pagination = mock_server
+            .mock_room_messages()
+            .from("raclette")
+            .ok(RoomMessagesResponseTemplate::default().end_token("numerobis").events({
+                (0..4)
+                    // Backwards pagination implies events are in “reverse order”.
+                    .rev()
+                    .map(|nth| {
+                        event_factory
+                            .text_msg("foo")
+                            .event_id(&EventId::parse(format!("$ev1_{nth}")).unwrap())
+                    })
+                    .collect::<Vec<_>>()
+            }))
+            .mock_once()
+            .mount_as_scoped()
+            .await;
+
+        let pagination_outcome =
+            room_event_cache.pagination().run_backwards(10, once).await.unwrap();
+
+        // 🙈… 4 events! Of course. We never doubted it.
+        assert_eq!(pagination_outcome.events.len(), 4);
+
+        // Hello you, in reverse order because this is a backwards pagination.
+        assert_event_id!(pagination_outcome.events[0], "$ev1_3");
+        assert_event_id!(pagination_outcome.events[1], "$ev1_2");
+        assert_event_id!(pagination_outcome.events[2], "$ev1_1");
+        assert_event_id!(pagination_outcome.events[3], "$ev1_0");
+
+        // And there is more, because we haven't reached the start of the timeline yet.
+        assert!(pagination_outcome.reached_start.not());
+
+        // Let's check the stream. It should reflect what the
+        // `pagination_outcome` provides.
+        let update = updates_stream.recv().await.unwrap();
+
+        assert_matches!(update, RoomEventCacheUpdate::UpdateTimelineEvents { diffs, .. } => {
+            // 1, 2, 3, 4 diffs!
+            assert_eq!(diffs.len(), 4);
+
+            // Hello you. Again. And again, in the expected order.
+            assert_matches!(&diffs[0], VectorDiff::Insert { index: 0, value: event } => {
+                assert_event_id!(event, "$ev1_0");
+            });
+            assert_matches!(&diffs[1], VectorDiff::Insert { index: 1, value: event } => {
+                assert_event_id!(event, "$ev1_1");
+            });
+            assert_matches!(&diffs[2], VectorDiff::Insert { index: 2, value: event } => {
+                assert_event_id!(event, "$ev1_2");
+            });
+            assert_matches!(&diffs[3], VectorDiff::Insert { index: 3, value: event } => {
+                assert_event_id!(event, "$ev1_3");
+            });
+        });
+
+        assert!(updates_stream.is_empty());
+    }
+
+    // More interesting now. Let's paginate again!
+    //
+    // A new chunk containing a gap has been inserted by the previous network
+    // pagination, because it contained an `end` token. Let's see how the
+    // `EventCache` will reconcile all that. A new chunk will not be loaded from
+    // the store because the last chunk is a gap.
+    //
+    // The new network pagination will return 1 new event and 1 event that is
+    // already known. The tricky part is: how do we detect that an event is known
+    // when not all events are loaded in memory? This is mostly what we test here.
+    {}
+}
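
The final block is deliberately left empty by this commit (the "!start testing" in the commit message hints that it will be filled in later). Purely as a reading aid, here is a rough sketch of how that block could continue, based on the comment above. It reuses only helpers already exercised in this test; the "$ev_new" event ID, the reuse of the "numerobis" token, and the deduplication assertions are assumptions of this sketch, not part of the commit.

    // Hypothetical continuation, not part of this commit.
    {
        // The gap created by the previous pagination carries the "numerobis"
        // token, so the next back-pagination hits the network from it.
        let _network_pagination = mock_server
            .mock_room_messages()
            .from("numerobis")
            .ok(RoomMessagesResponseTemplate::default().events(vec![
                // 1 brand new event ("$ev_new" is a made-up ID for this sketch)…
                event_factory
                    .text_msg("foo")
                    .event_id(&EventId::parse("$ev_new").unwrap()),
                // … and 1 event that is already stored in chunk #1, but not yet
                // loaded in memory.
                event_factory
                    .text_msg("foo")
                    .event_id(&EventId::parse("$ev0_5").unwrap()),
            ]))
            .mock_once()
            .mount_as_scoped()
            .await;

        let pagination_outcome =
            room_event_cache.pagination().run_backwards(10, once).await.unwrap();

        // Assuming the event cache deduplicates against the store, only the new
        // event should surface from this pagination.
        assert_eq!(pagination_outcome.events.len(), 1);
        assert_event_id!(pagination_outcome.events[0], "$ev_new");
    }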
