use std::{collections::hash_map, mem::size_of, sync::Arc};

use ruma::{
    api::client::error::ErrorKind, CanonicalJsonObject, EventId, OwnedUserId, RoomId, UserId,
};
use tracing::error;

use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result};

impl service::rooms::timeline::Data for KeyValueDatabase {
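    // Note on key layout (as used throughout this impl): a pdu id is the room's
    // shortroomid as 8 big-endian bytes followed by the event's count as 8 big-endian
    // bytes, so iterating `pduid_pdu` from the shortroomid prefix walks a room's
    // timeline in insertion order, and the count can be recovered from the last 8 bytes.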
    fn first_pdu_in_room(&self, room_id: &RoomId) -> Result<Option<Arc<PduEvent>>> {
        let prefix = services()
            .rooms
            .short
            .get_shortroomid(room_id)?
            .expect("room exists")
            .to_be_bytes()
            .to_vec();

        // Look for PDUs in that room.
        self.pduid_pdu
            .iter_from(&prefix, false)
            .filter(|(k, _)| k.starts_with(&prefix))
            .map(|(_, pdu)| {
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid first PDU in db."))
                    .map(Arc::new)
            })
            .next()
            .transpose()
}
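
    // The most recent timeline count per room is cached lazily: on a cache miss we walk
    // the room's timeline backwards (via `pdus_until` from `u64::MAX`) and remember the
    // count of the newest valid pdu; `append_pdu` keeps the cache up to date afterwards.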

    fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64> {
        match self
            .lasttimelinecount_cache
            .lock()
            .unwrap()
            .entry(room_id.to_owned())
        {
            hash_map::Entry::Vacant(v) => {
                if let Some(last_count) = self
                    .pdus_until(sender_user, room_id, u64::MAX)?
                    .filter_map(|r| {
                        // Filter out buggy events
                        if r.is_err() {
                            error!("Bad pdu in pdus_until: {:?}", r);
                        }
                        r.ok()
                    })
                    .map(|(pduid, _)| self.pdu_count(&pduid))
                    .next()
                {
                    Ok(*v.insert(last_count?))
                } else {
                    Ok(0)
                }
            }
            hash_map::Entry::Occupied(o) => Ok(*o.get()),
        }
    }

    /// Returns the `count` of this pdu's id.
    fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<u64>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map(|pdu_id| self.pdu_count(&pdu_id))
            .transpose()
    }

    /// Returns the json of a pdu.
    fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map_or_else(
                || self.eventid_outlierpdu.get(event_id.as_bytes()),
                |pduid| {
                    Ok(Some(self.pduid_pdu.get(&pduid)?.ok_or_else(|| {
                        Error::bad_database("Invalid pduid in eventid_pduid.")
                    })?))
                },
            )?
            .map(|pdu| {
                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
            })
            .transpose()
    }

    /// Returns the json of a pdu, excluding outliers.
    fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map(|pduid| {
                self.pduid_pdu
                    .get(&pduid)?
                    .ok_or_else(|| Error::bad_database("Invalid pduid in eventid_pduid."))
            })
            .transpose()?
            .map(|pdu| {
                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
            })
            .transpose()
    }

    /// Returns the pdu's id.
    fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<Vec<u8>>> {
        self.eventid_pduid.get(event_id.as_bytes())
}
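
    // Timeline events are resolved in two steps: `eventid_pduid` maps an event id to its
    // pdu id, and `pduid_pdu` holds the actual pdu. Outliers (events received out of
    // context) live separately in `eventid_outlierpdu`; the `_non_outlier_` variants
    // deliberately skip that tree.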

    /// Returns the pdu.
    ///
    /// This does __NOT__ check the outliers `Tree`.
    fn get_non_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map(|pduid| {
                self.pduid_pdu
                    .get(&pduid)?
                    .ok_or_else(|| Error::bad_database("Invalid pduid in eventid_pduid."))
            })
            .transpose()?
            .map(|pdu| {
                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
            })
            .transpose()
}
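
    // `get_pdu` additionally consults the in-memory `pdu_cache`, falling back to the
    // timeline and then the outlier tree, and populates the cache on a successful lookup.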

    /// Returns the pdu.
    ///
    /// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
    fn get_pdu(&self, event_id: &EventId) -> Result<Option<Arc<PduEvent>>> {
        if let Some(p) = self.pdu_cache.lock().unwrap().get_mut(event_id) {
            return Ok(Some(Arc::clone(p)));
        }

        if let Some(pdu) = self
            .eventid_pduid
            .get(event_id.as_bytes())?
            .map_or_else(
                || self.eventid_outlierpdu.get(event_id.as_bytes()),
                |pduid| {
                    Ok(Some(self.pduid_pdu.get(&pduid)?.ok_or_else(|| {
                        Error::bad_database("Invalid pduid in eventid_pduid.")
                    })?))
                },
            )?
            .map(|pdu| {
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))
                    .map(Arc::new)
            })
            .transpose()?
        {
            self.pdu_cache
                .lock()
                .unwrap()
                .insert(event_id.to_owned(), Arc::clone(&pdu));
            Ok(Some(pdu))
        } else {
            Ok(None)
        }
}
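
    // The `_from_id` variants below take a raw pdu id directly and skip the
    // `eventid_pduid` lookup; they never consult the outlier tree.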

    /// Returns the pdu.
    ///
    /// This does __NOT__ check the outliers `Tree`.
    fn get_pdu_from_id(&self, pdu_id: &[u8]) -> Result<Option<PduEvent>> {
        self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
            Ok(Some(
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
            ))
        })
    }

    /// Returns the pdu as a `BTreeMap<String, CanonicalJsonValue>`.
    fn get_pdu_json_from_id(&self, pdu_id: &[u8]) -> Result<Option<CanonicalJsonObject>> {
        self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
            Ok(Some(
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
            ))
        })
    }

    /// Returns the `count` of this pdu's id.
    fn pdu_count(&self, pdu_id: &[u8]) -> Result<u64> {
        utils::u64_from_bytes(&pdu_id[pdu_id.len() - size_of::<u64>()..])
            .map_err(|_| Error::bad_database("PDU has invalid count bytes."))
}
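
    // `append_pdu` moves an event into the timeline: it stores the canonical JSON under
    // its pdu id, records the new last timeline count for the room, points
    // `eventid_pduid` at the new pdu id, and drops any outlier copy of the event.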

    fn append_pdu(
        &self,
        pdu_id: &[u8],
        pdu: &PduEvent,
        json: &CanonicalJsonObject,
        count: u64,
    ) -> Result<()> {
        self.pduid_pdu.insert(
            pdu_id,
            &serde_json::to_vec(json).expect("CanonicalJsonObject is always valid JSON"),
        )?;

        self.lasttimelinecount_cache
            .lock()
            .unwrap()
            .insert(pdu.room_id.clone(), count);

        self.eventid_pduid
            .insert(pdu.event_id.as_bytes(), pdu_id)?;
        self.eventid_outlierpdu.remove(pdu.event_id.as_bytes())?;

        Ok(())
    }

    /// Removes a pdu and creates a new one with the same id.
    fn replace_pdu(&self, pdu_id: &[u8], pdu: &PduEvent) -> Result<()> {
        if self.pduid_pdu.get(pdu_id)?.is_some() {
            self.pduid_pdu.insert(
                pdu_id,
                &serde_json::to_vec(pdu).expect("PduEvent is always valid JSON"),
            )?;
            Ok(())
        } else {
            Err(Error::BadRequest(
                ErrorKind::NotFound,
                "PDU does not exist.",
            ))
        }
    }

    /// Returns an iterator over all events in a room that happened after the event with token
    /// `since` in chronological order.
    fn pdus_since<'a>(
        &'a self,
        user_id: &UserId,
        room_id: &RoomId,
        since: u64,
    ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>> {
        let prefix = services()
            .rooms
            .short
            .get_shortroomid(room_id)?
            .expect("room exists")
            .to_be_bytes()
            .to_vec();

        // Skip the first pdu if it's exactly at `since`, because we sent that last time
        let mut first_pdu_id = prefix.clone();
        first_pdu_id.extend_from_slice(&(since + 1).to_be_bytes());

        let user_id = user_id.to_owned();

        Ok(Box::new(
            self.pduid_pdu
                .iter_from(&first_pdu_id, false)
                .take_while(move |(k, _)| k.starts_with(&prefix))
                .map(move |(pdu_id, v)| {
                    let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                        .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                    if pdu.sender != user_id {
                        pdu.remove_transaction_id()?;
                    }
                    Ok((pdu_id, pdu))
                }),
        ))
}
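
    // `pdus_until` is the reverse-direction counterpart: it starts the backwards
    // iteration at `until - 1` (saturating at 0) so the event at `until` itself is not
    // returned again.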

    /// Returns an iterator over all events and their tokens in a room that happened before the
    /// event with token `until` in reverse-chronological order.
    fn pdus_until<'a>(
        &'a self,
        user_id: &UserId,
        room_id: &RoomId,
        until: u64,
    ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>> {
        // Create the first part of the full pdu id
        let prefix = services()
            .rooms
            .short
            .get_shortroomid(room_id)?
            .expect("room exists")
            .to_be_bytes()
            .to_vec();

        let mut current = prefix.clone();
        current.extend_from_slice(&(until.saturating_sub(1)).to_be_bytes()); // -1 because we don't want event at `until`

        let current: &[u8] = &current;

        let user_id = user_id.to_owned();

        Ok(Box::new(
            self.pduid_pdu
                .iter_from(current, true)
                .take_while(move |(k, _)| k.starts_with(&prefix))
                .map(move |(pdu_id, v)| {
                    let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                        .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                    if pdu.sender != user_id {
                        pdu.remove_transaction_id()?;
                    }
                    Ok((pdu_id, pdu))
                }),
        ))
    }

    /// Returns an iterator over all events in a room starting after the token `from`,
    /// in chronological order.
    fn pdus_after<'a>(
        &'a self,
        user_id: &UserId,
        room_id: &RoomId,
        from: u64,
    ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>> {
        // Create the first part of the full pdu id
        let prefix = services()
            .rooms
            .short
            .get_shortroomid(room_id)?
            .expect("room exists")
            .to_be_bytes()
            .to_vec();

        let mut current = prefix.clone();
        current.extend_from_slice(&(from + 1).to_be_bytes()); // +1 so we don't send the base event

        let current: &[u8] = &current;

        let user_id = user_id.to_owned();

        Ok(Box::new(
            self.pduid_pdu
                .iter_from(current, false)
                .take_while(move |(k, _)| k.starts_with(&prefix))
                .map(move |(pdu_id, v)| {
                    let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                        .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                    if pdu.sender != user_id {
                        pdu.remove_transaction_id()?;
                    }
                    Ok((pdu_id, pdu))
                }),
        ))
}
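
    // Unread notification and highlight counters are keyed per (user, room) pair as
    // `user_id ++ 0xff ++ room_id`; both batches below are applied with a single
    // `increment_batch` call per tree.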

    fn increment_notification_counts(
        &self,
        room_id: &RoomId,
        notifies: Vec<OwnedUserId>,
        highlights: Vec<OwnedUserId>,
    ) -> Result<()> {
        let mut notifies_batch = Vec::new();
        let mut highlights_batch = Vec::new();
        for user in notifies {
            let mut userroom_id = user.as_bytes().to_vec();
            userroom_id.push(0xff);
            userroom_id.extend_from_slice(room_id.as_bytes());
            notifies_batch.push(userroom_id);
        }
        for user in highlights {
            let mut userroom_id = user.as_bytes().to_vec();
            userroom_id.push(0xff);
            userroom_id.extend_from_slice(room_id.as_bytes());
            highlights_batch.push(userroom_id);
        }

        self.userroomid_notificationcount
            .increment_batch(&mut notifies_batch.into_iter())?;
        self.userroomid_highlightcount
            .increment_batch(&mut highlights_batch.into_iter())?;

        Ok(())
    }
}