Fix lots of clippy lints

Jonas Platte 2021-06-17 20:34:14 +02:00
parent af2ce5803e
commit f3e630c064
No known key found for this signature in database
GPG key ID: CC154DE0E30B7C67
17 changed files with 140 additions and 202 deletions


@@ -97,13 +97,12 @@ pub async fn get_alias_helper(
         .map_or_else(Vec::new, |aliases| {
             aliases
                 .iter()
-                .map(|aliases| {
+                .filter_map(|aliases| {
                     aliases
                         .get("regex")
                         .and_then(|regex| regex.as_str())
                         .and_then(|regex| Regex::new(regex).ok())
                 })
-                .filter_map(|o| o)
                 .collect::<Vec<_>>()
         });
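
The hunk above replaces a .map(..) that produces Options followed by .filter_map(|o| o) with a single .filter_map(..). A minimal, self-contained sketch of that simplification; the data below is made up and is not Conduit code:

    fn main() {
        let raw = ["1", "x", "3"];

        // Before: map to an Option, then strip the Nones in a second adapter.
        let before: Vec<i32> = raw
            .iter()
            .map(|s| s.parse().ok())
            .filter_map(|o| o)
            .collect();

        // After: one filter_map does both steps, which is what clippy suggests here.
        let after: Vec<i32> = raw.iter().filter_map(|s| s.parse().ok()).collect();

        assert_eq!(before, after);
    }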


@@ -135,9 +135,7 @@ pub async fn get_public_rooms_filtered_helper(
     filter: &IncomingFilter,
     _network: &IncomingRoomNetwork,
 ) -> ConduitResult<get_public_rooms_filtered::Response> {
-    if let Some(other_server) = server
-        .clone()
-        .filter(|server| *server != db.globals.server_name().as_str())
+    if let Some(other_server) = server.filter(|server| *server != db.globals.server_name().as_str())
     {
         let response = db
             .sending
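
The change above drops a needless .clone(): Option::filter takes the option by value, so the chain can start directly from it. A rough standalone illustration (the names are invented, not the Conduit API):

    fn main() {
        let server: Option<String> = Some("other.example".to_owned());
        let own_name = "own.example";

        // Filtering consumes the Option; no clone is needed when the original
        // value is not used again afterwards.
        if let Some(other_server) = server.filter(|s| s != own_name) {
            println!("would forward the request to {}", other_server);
        }
    }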


@@ -743,12 +743,10 @@ pub async fn invite_helper(
     let create_event_content = create_event
         .as_ref()
         .map(|create_event| {
-            Ok::<_, Error>(
-                serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
-                    .expect("Raw::from_value always works.")
-                    .deserialize()
-                    .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))?,
-            )
+            serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
+                .expect("Raw::from_value always works.")
+                .deserialize()
+                .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))
         })
         .transpose()?;
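
Several hunks in this commit (here, in the database modules below, and in server_server.rs) remove an Ok::<_, Error>( .. ?) wrapper around an expression that is already a Result. A tiny sketch of that needless_question_mark-style cleanup, using a made-up parse helper:

    use std::num::ParseIntError;

    // Before: unwrap with ? only to rewrap in Ok(..).
    fn parse_before(s: &str) -> Result<i32, ParseIntError> {
        Ok(s.parse::<i32>()?)
    }

    // After: return the Result directly.
    fn parse_after(s: &str) -> Result<i32, ParseIntError> {
        s.parse::<i32>()
    }

    fn main() {
        assert_eq!(parse_before("7"), parse_after("7"));
    }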


@@ -202,10 +202,8 @@ pub async fn logout_all_route(
 ) -> ConduitResult<logout_all::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");

-    for device_id in db.users.all_device_ids(sender_user) {
-        if let Ok(device_id) = device_id {
-            db.users.remove_device(&sender_user, &device_id)?;
-        }
+    for device_id in db.users.all_device_ids(sender_user).flatten() {
+        db.users.remove_device(&sender_user, &device_id)?;
     }

     db.flush().await?;
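
The loop above used to unwrap each Result by hand with if let Ok(..); iterating over the .flatten()-ed iterator yields the Ok payloads directly and skips the errors. A small self-contained sketch of that manual_flatten-style fix; the device-id data is invented:

    fn main() {
        let device_ids: Vec<Result<String, ()>> = vec![Ok("A".into()), Err(()), Ok("B".into())];

        // Before: unwrap every item inside the loop body.
        let mut before = Vec::new();
        for device_id in device_ids.clone() {
            if let Ok(device_id) = device_id {
                before.push(device_id);
            }
        }

        // After: flatten() drops the Err values, so only Ok payloads are seen.
        let after: Vec<String> = device_ids.into_iter().flatten().collect();

        assert_eq!(before, after);
    }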


@@ -146,11 +146,9 @@ pub async fn sync_events_route(
     let since_state = since_shortstatehash
         .as_ref()
         .map(|since_shortstatehash| {
-            Ok::<_, Error>(
-                since_shortstatehash
-                    .map(|since_shortstatehash| db.rooms.state_full(since_shortstatehash))
-                    .transpose()?,
-            )
+            since_shortstatehash
+                .map(|since_shortstatehash| db.rooms.state_full(since_shortstatehash))
+                .transpose()
         })
         .transpose()?;

@@ -255,7 +253,7 @@ pub async fn sync_events_route(
         device_list_updates.extend(
             db.rooms
                 .room_members(&room_id)
-                .filter_map(|user_id| Some(user_id.ok()?))
+                .flatten()
                 .filter(|user_id| {
                     // Don't send key updates from the sender to the sender
                     sender_user != user_id

@@ -313,9 +311,10 @@ pub async fn sync_events_route(
                     Ok(None)
                 }
             })
-            .filter_map(|u| u.ok()) // Filter out buggy users
+            // Filter out buggy users
+            .filter_map(|u| u.ok())
             // Filter for possible heroes
-            .filter_map(|u| u)
+            .flatten()
         {
             if heroes.contains(&hero) || hero == sender_user.as_str() {
                 continue;


@@ -34,29 +34,25 @@ impl Appservice {
             .get(id)
             .map_or_else(
                 || {
-                    Ok(self
-                        .id_appserviceregistrations
+                    self.id_appserviceregistrations
                         .get(id.as_bytes())?
                         .map(|bytes| {
-                            Ok::<_, Error>(serde_yaml::from_slice(&bytes).map_err(|_| {
+                            serde_yaml::from_slice(&bytes).map_err(|_| {
                                 Error::bad_database(
                                     "Invalid registration bytes in id_appserviceregistrations.",
                                 )
-                            })?)
+                            })
                         })
-                        .transpose()?)
+                        .transpose()
                 },
                 |r| Ok(Some(r.clone())),
             )
     }

-    pub fn iter_ids<'a>(
-        &'a self,
-    ) -> Result<impl Iterator<Item = Result<String>> + Send + Sync + 'a> {
+    pub fn iter_ids(&self) -> Result<impl Iterator<Item = Result<String>> + Send + Sync + '_> {
         Ok(self.id_appserviceregistrations.iter().map(|(id, _)| {
-            Ok(utils::string_from_bytes(&id).map_err(|_| {
-                Error::bad_database("Invalid id bytes in id_appserviceregistrations.")
-            })?)
+            utils::string_from_bytes(&id)
+                .map_err(|_| Error::bad_database("Invalid id bytes in id_appserviceregistrations."))
         }))
     }
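
iter_ids above (like iter and public_rooms later in this commit) drops an explicit 'a lifetime in favor of the anonymous '_ in the return type. A minimal sketch of that elision, with an invented Store type standing in for the database handles:

    struct Store {
        items: Vec<String>,
    }

    impl Store {
        // Before: the named lifetime only repeats what elision already expresses.
        fn iter_before<'a>(&'a self) -> impl Iterator<Item = &'a str> + 'a {
            self.items.iter().map(|s| s.as_str())
        }

        // After: the same signature in meaning, written with the anonymous lifetime.
        fn iter_after(&self) -> impl Iterator<Item = &str> + '_ {
            self.items.iter().map(|s| s.as_str())
        }
    }

    fn main() {
        let store = Store {
            items: vec!["a".to_owned(), "b".to_owned()],
        };
        assert!(store.iter_before().eq(store.iter_after()));
    }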


@@ -171,14 +171,14 @@ impl Globals {
     }

     pub fn next_count(&self) -> Result<u64> {
-        Ok(utils::u64_from_bytes(&self.globals.increment(COUNTER)?)
-            .map_err(|_| Error::bad_database("Count has invalid bytes."))?)
+        utils::u64_from_bytes(&self.globals.increment(COUNTER)?)
+            .map_err(|_| Error::bad_database("Count has invalid bytes."))
     }

     pub fn current_count(&self) -> Result<u64> {
         self.globals.get(COUNTER)?.map_or(Ok(0_u64), |bytes| {
-            Ok(utils::u64_from_bytes(&bytes)
-                .map_err(|_| Error::bad_database("Count has invalid bytes."))?)
+            utils::u64_from_bytes(&bytes)
+                .map_err(|_| Error::bad_database("Count has invalid bytes."))
         })
     }


@@ -119,9 +119,8 @@ impl KeyBackups {
         self.backupid_algorithm
             .get(&key)?
             .map_or(Ok(None), |bytes| {
-                Ok(serde_json::from_slice(&bytes).map_err(|_| {
-                    Error::bad_database("Algorithm in backupid_algorithm is invalid.")
-                })?)
+                serde_json::from_slice(&bytes)
+                    .map_err(|_| Error::bad_database("Algorithm in backupid_algorithm is invalid."))
             })
     }


@@ -110,9 +110,9 @@ impl Media {
         let content_type = parts
             .next()
             .map(|bytes| {
-                Ok::<_, Error>(utils::string_from_bytes(bytes).map_err(|_| {
+                utils::string_from_bytes(bytes).map_err(|_| {
                     Error::bad_database("Content type in mediaid_file is invalid unicode.")
-                })?)
+                })
             })
             .transpose()?;

@@ -199,9 +199,9 @@ impl Media {
         let content_type = parts
             .next()
             .map(|bytes| {
-                Ok::<_, Error>(utils::string_from_bytes(bytes).map_err(|_| {
+                utils::string_from_bytes(bytes).map_err(|_| {
                     Error::bad_database("Content type in mediaid_file is invalid unicode.")
-                })?)
+                })
             })
             .transpose()?;

@@ -235,9 +235,9 @@ impl Media {
         let content_type = parts
             .next()
             .map(|bytes| {
-                Ok::<_, Error>(utils::string_from_bytes(bytes).map_err(|_| {
+                utils::string_from_bytes(bytes).map_err(|_| {
                     Error::bad_database("Content type in mediaid_file is invalid unicode.")
-                })?)
+                })
             })
             .transpose()?;


@@ -51,8 +51,8 @@ impl PushData {
         self.senderkey_pusher
             .get(senderkey)?
             .map(|push| {
-                Ok(serde_json::from_slice(&*push)
-                    .map_err(|_| Error::bad_database("Invalid Pusher in db."))?)
+                serde_json::from_slice(&*push)
+                    .map_err(|_| Error::bad_database("Invalid Pusher in db."))
             })
             .transpose()
     }

@@ -64,8 +64,8 @@ impl PushData {
         self.senderkey_pusher
             .scan_prefix(prefix)
             .map(|(_, push)| {
-                Ok(serde_json::from_slice(&*push)
-                    .map_err(|_| Error::bad_database("Invalid Pusher in db."))?)
+                serde_json::from_slice(&*push)
+                    .map_err(|_| Error::bad_database("Invalid Pusher in db."))
             })
             .collect()
     }


@@ -93,14 +93,10 @@ impl Rooms {
             .map(|(_, bytes)| self.shorteventid_eventid.get(&bytes).ok().flatten())
             .flatten()
             .map(|bytes| {
-                Ok::<_, Error>(
-                    EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                        Error::bad_database("EventID in stateid_shorteventid is invalid unicode.")
-                    })?)
-                    .map_err(|_| {
-                        Error::bad_database("EventId in stateid_shorteventid is invalid.")
-                    })?,
-                )
+                EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("EventID in stateid_shorteventid is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("EventId in stateid_shorteventid is invalid."))
             })
             .filter_map(|r| r.ok())
             .collect())

@@ -116,14 +112,10 @@ impl Rooms {
             .map(|(_, bytes)| self.shorteventid_eventid.get(&bytes).ok().flatten())
             .flatten()
             .map(|bytes| {
-                Ok::<_, Error>(
-                    EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                        Error::bad_database("EventID in stateid_shorteventid is invalid unicode.")
-                    })?)
-                    .map_err(|_| {
-                        Error::bad_database("EventId in stateid_shorteventid is invalid.")
-                    })?,
-                )
+                EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("EventID in stateid_shorteventid is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("EventId in stateid_shorteventid is invalid."))
             })
             .filter_map(|r| r.ok())
             .map(|eventid| self.get_pdu(&eventid))

@@ -168,16 +160,10 @@ impl Rooms {
             .map(|bytes| self.shorteventid_eventid.get(&bytes).ok().flatten())
             .flatten()
             .map(|bytes| {
-                Ok::<_, Error>(
-                    EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                        Error::bad_database(
-                            "EventID in stateid_shorteventid is invalid unicode.",
-                        )
-                    })?)
-                    .map_err(|_| {
-                        Error::bad_database("EventId in stateid_shorteventid is invalid.")
-                    })?,
-                )
+                EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("EventID in stateid_shorteventid is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("EventId in stateid_shorteventid is invalid."))
             })
             .map(|r| r.ok())
             .flatten())
@@ -204,16 +190,16 @@ impl Rooms {
         self.eventid_shorteventid
             .get(event_id.as_bytes())?
             .map_or(Ok(None), |shorteventid| {
-                Ok(self
-                    .shorteventid_shortstatehash
-                    .get(&shorteventid)?
-                    .map_or(Ok::<_, Error>(None), |bytes| {
-                        Ok(Some(utils::u64_from_bytes(&bytes).map_err(|_| {
-                            Error::bad_database(
-                                "Invalid shortstatehash bytes in shorteventid_shortstatehash",
-                            )
-                        })?))
-                    })?)
+                self.shorteventid_shortstatehash.get(&shorteventid)?.map_or(
+                    Ok::<_, Error>(None),
+                    |bytes| {
+                        Ok(Some(utils::u64_from_bytes(&bytes).map_err(|_| {
+                            Error::bad_database(
+                                "Invalid shortstatehash bytes in shorteventid_shortstatehash",
+                            )
+                        })?))
+                    },
+                )
             })
     }
@@ -485,7 +471,7 @@ impl Rooms {
         self.eventid_pduid
             .get(event_id.as_bytes())?
             .map_or_else::<Result<_>, _, _>(
-                || Ok(self.eventid_outlierpdu.get(event_id.as_bytes())?),
+                || self.eventid_outlierpdu.get(event_id.as_bytes()),
                 |pduid| {
                     Ok(Some(self.pduid_pdu.get(&pduid)?.ok_or_else(|| {
                         Error::bad_database("Invalid pduid in eventid_pduid.")

@@ -493,8 +479,7 @@ impl Rooms {
                 },
             )?
             .map(|pdu| {
-                Ok(serde_json::from_slice(&pdu)
-                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?)
+                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
             })
             .transpose()
     }

@@ -521,8 +506,7 @@ impl Rooms {
                 },
             )?
             .map(|pdu| {
-                Ok(serde_json::from_slice(&pdu)
-                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?)
+                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
             })
             .transpose()
     }

@@ -534,7 +518,7 @@ impl Rooms {
         self.eventid_pduid
             .get(event_id.as_bytes())?
             .map_or_else::<Result<_>, _, _>(
-                || Ok(self.eventid_outlierpdu.get(event_id.as_bytes())?),
+                || self.eventid_outlierpdu.get(event_id.as_bytes()),
                 |pduid| {
                     Ok(Some(self.pduid_pdu.get(&pduid)?.ok_or_else(|| {
                         Error::bad_database("Invalid pduid in eventid_pduid.")

@@ -542,8 +526,7 @@ impl Rooms {
                 },
             )?
             .map(|pdu| {
-                Ok(serde_json::from_slice(&pdu)
-                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?)
+                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
             })
             .transpose()
     }
@@ -594,12 +577,10 @@ impl Rooms {
         self.roomid_pduleaves
             .scan_prefix(prefix)
             .map(|(_, bytes)| {
-                Ok::<_, Error>(
-                    EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                        Error::bad_database("EventID in roomid_pduleaves is invalid unicode.")
-                    })?)
-                    .map_err(|_| Error::bad_database("EventId in roomid_pduleaves is invalid."))?,
-                )
+                EventId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("EventID in roomid_pduleaves is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("EventId in roomid_pduleaves is invalid."))
             })
             .collect()
     }

@@ -1213,12 +1194,10 @@ impl Rooms {
         let create_event_content = create_event
             .as_ref()
             .map(|create_event| {
-                Ok::<_, Error>(
-                    serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
-                        .expect("Raw::from_value always works.")
-                        .deserialize()
-                        .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))?,
-                )
+                serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
+                    .expect("Raw::from_value always works.")
+                    .deserialize()
+                    .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))
             })
             .transpose()?;
@@ -1382,13 +1361,12 @@ impl Rooms {
             .map_or_else(Vec::new, |users| {
                 users
                     .iter()
-                    .map(|users| {
+                    .filter_map(|users| {
                         users
                             .get("regex")
                             .and_then(|regex| regex.as_str())
                             .and_then(|regex| Regex::new(regex).ok())
                     })
-                    .filter_map(|o| o)
                     .collect::<Vec<_>>()
             });
         let aliases = namespaces

@@ -1397,13 +1375,12 @@ impl Rooms {
             .map_or_else(Vec::new, |aliases| {
                 aliases
                     .iter()
-                    .map(|aliases| {
+                    .filter_map(|aliases| {
                         aliases
                             .get("regex")
                             .and_then(|regex| regex.as_str())
                             .and_then(|regex| Regex::new(regex).ok())
                     })
-                    .filter_map(|o| o)
                     .collect::<Vec<_>>()
             });
         let rooms = namespaces
@@ -2011,10 +1988,10 @@ impl Rooms {
         prefix.push(0xff);

         self.aliasid_alias.scan_prefix(prefix).map(|(_, bytes)| {
-            Ok(utils::string_from_bytes(&bytes)
+            utils::string_from_bytes(&bytes)
                 .map_err(|_| Error::bad_database("Invalid alias bytes in aliasid_alias."))?
                 .try_into()
-                .map_err(|_| Error::bad_database("Invalid alias in aliasid_alias."))?)
+                .map_err(|_| Error::bad_database("Invalid alias in aliasid_alias."))
         })
     }

@@ -2032,14 +2009,14 @@ impl Rooms {
         Ok(self.publicroomids.get(room_id.as_bytes())?.is_some())
     }

-    pub fn public_rooms<'a>(&'a self) -> impl Iterator<Item = Result<RoomId>> + 'a {
+    pub fn public_rooms(&self) -> impl Iterator<Item = Result<RoomId>> + '_ {
         self.publicroomids.iter().map(|(bytes, _)| {
-            Ok(
-                RoomId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                    Error::bad_database("Room ID in publicroomids is invalid unicode.")
-                })?)
-                .map_err(|_| Error::bad_database("Room ID in publicroomids is invalid."))?,
-            )
+            RoomId::try_from(
+                utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("Room ID in publicroomids is invalid unicode.")
+                })?,
+            )
+            .map_err(|_| Error::bad_database("Room ID in publicroomids is invalid."))
         })
     }
@@ -2105,34 +2082,27 @@ impl Rooms {
         &'a self,
         users: Vec<UserId>,
     ) -> Result<impl Iterator<Item = Result<RoomId>> + 'a> {
-        let iterators = users
-            .into_iter()
-            .map(move |user_id| {
-                let mut prefix = user_id.as_bytes().to_vec();
-                prefix.push(0xff);
-
-                Ok::<_, Error>(
-                    self.userroomid_joined
-                        .scan_prefix(prefix)
-                        .map(|(key, _)| {
-                            let roomid_index = key
-                                .iter()
-                                .enumerate()
-                                .find(|(_, &b)| b == 0xff)
-                                .ok_or_else(|| {
-                                    Error::bad_database("Invalid userroomid_joined in db.")
-                                })?
-                                .0
-                                + 1; // +1 because the room id starts AFTER the separator
-
-                            let room_id = key[roomid_index..].to_vec();
-
-                            Ok::<_, Error>(room_id)
-                        })
-                        .filter_map(|r| r.ok()),
-                )
-            })
-            .filter_map(|r| r.ok());
+        let iterators = users.into_iter().map(move |user_id| {
+            let mut prefix = user_id.as_bytes().to_vec();
+            prefix.push(0xff);
+
+            self.userroomid_joined
+                .scan_prefix(prefix)
+                .map(|(key, _)| {
+                    let roomid_index = key
+                        .iter()
+                        .enumerate()
+                        .find(|(_, &b)| b == 0xff)
+                        .ok_or_else(|| Error::bad_database("Invalid userroomid_joined in db."))?
+                        .0
+                        + 1; // +1 because the room id starts AFTER the separator
+
+                    let room_id = key[roomid_index..].to_vec();
+
+                    Ok::<_, Error>(room_id)
+                })
+                .filter_map(|r| r.ok())
+        });

         // We use the default compare function because keys are sorted correctly (not reversed)
         Ok(utils::common_elements(iterators, Ord::cmp)
@@ -2154,7 +2124,7 @@ impl Rooms {
         prefix.push(0xff);

         self.roomserverids.scan_prefix(prefix).map(|(key, _)| {
-            Ok(Box::<ServerName>::try_from(
+            Box::<ServerName>::try_from(
                 utils::string_from_bytes(
                     &key.rsplit(|&b| b == 0xff)
                         .next()

@@ -2164,7 +2134,7 @@ impl Rooms {
                     Error::bad_database("Server name in roomserverids is invalid unicode.")
                 })?,
             )
-            .map_err(|_| Error::bad_database("Server name in roomserverids is invalid."))?)
+            .map_err(|_| Error::bad_database("Server name in roomserverids is invalid."))
         })
     }

@@ -2177,7 +2147,7 @@ impl Rooms {
         prefix.push(0xff);

         self.serverroomids.scan_prefix(prefix).map(|(key, _)| {
-            Ok(RoomId::try_from(
+            RoomId::try_from(
                 utils::string_from_bytes(
                     &key.rsplit(|&b| b == 0xff)
                         .next()

@@ -2185,7 +2155,7 @@ impl Rooms {
                 )
                 .map_err(|_| Error::bad_database("RoomId in serverroomids is invalid unicode."))?,
             )
-            .map_err(|_| Error::bad_database("RoomId in serverroomids is invalid."))?)
+            .map_err(|_| Error::bad_database("RoomId in serverroomids is invalid."))
         })
     }

@@ -2199,7 +2169,7 @@ impl Rooms {
         prefix.push(0xff);

         self.roomuserid_joined.scan_prefix(prefix).map(|(key, _)| {
-            Ok(UserId::try_from(
+            UserId::try_from(
                 utils::string_from_bytes(
                     &key.rsplit(|&b| b == 0xff)
                         .next()

@@ -2209,7 +2179,7 @@ impl Rooms {
                     Error::bad_database("User ID in roomuserid_joined is invalid unicode.")
                 })?,
             )
-            .map_err(|_| Error::bad_database("User ID in roomuserid_joined is invalid."))?)
+            .map_err(|_| Error::bad_database("User ID in roomuserid_joined is invalid."))
         })
     }

@@ -2224,7 +2194,7 @@ impl Rooms {
         self.roomuseroncejoinedids
             .scan_prefix(prefix)
             .map(|(key, _)| {
-                Ok(UserId::try_from(
+                UserId::try_from(
                     utils::string_from_bytes(
                         &key.rsplit(|&b| b == 0xff)
                             .next()

@@ -2234,7 +2204,7 @@ impl Rooms {
                         Error::bad_database("User ID in room_useroncejoined is invalid unicode.")
                     })?,
                 )
-                .map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))?)
+                .map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))
            })
     }

@@ -2250,7 +2220,7 @@ impl Rooms {
         self.roomuserid_invitecount
             .scan_prefix(prefix)
             .map(|(key, _)| {
-                Ok(UserId::try_from(
+                UserId::try_from(
                     utils::string_from_bytes(
                         &key.rsplit(|&b| b == 0xff)
                             .next()

@@ -2260,7 +2230,7 @@ impl Rooms {
                         Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
                     })?,
                 )
-                .map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))?)
+                .map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))
             })
     }

@@ -2303,7 +2273,7 @@ impl Rooms {
         self.userroomid_joined
             .scan_prefix(user_id.as_bytes().to_vec())
             .map(|(key, _)| {
-                Ok(RoomId::try_from(
+                RoomId::try_from(
                     utils::string_from_bytes(
                         &key.rsplit(|&b| b == 0xff)
                             .next()

@@ -2313,7 +2283,7 @@ impl Rooms {
                        Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
                     })?,
                 )
-                .map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))?)
+                .map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))
             })
     }


@@ -306,12 +306,10 @@ impl RoomEdus {
             .typingid_userid
             .scan_prefix(prefix)
             .map(|(_, user_id)| {
-                Ok::<_, Error>(
-                    UserId::try_from(utils::string_from_bytes(&user_id).map_err(|_| {
-                        Error::bad_database("User ID in typingid_userid is invalid unicode.")
-                    })?)
-                    .map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?,
-                )
+                UserId::try_from(utils::string_from_bytes(&user_id).map_err(|_| {
+                    Error::bad_database("User ID in typingid_userid is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))
             })
         {
             user_ids.insert(user_id?);


@@ -41,6 +41,6 @@ impl TransactionIds {
         key.extend_from_slice(txn_id.as_bytes());

         // If there's no entry, this is a new transaction
-        Ok(self.userdevicetxnid_response.get(&key)?)
+        self.userdevicetxnid_response.get(&key)
     }
 }


@@ -57,9 +57,7 @@ impl Uiaa {
         {
             let mut uiaainfo = session
                 .as_ref()
-                .map(|session| {
-                    Ok::<_, Error>(self.get_uiaa_session(&user_id, &device_id, session)?)
-                })
+                .map(|session| self.get_uiaa_session(&user_id, &device_id, session))
                 .unwrap_or_else(|| Ok(uiaainfo.clone()))?;

             if uiaainfo.session.is_none() {


@@ -88,14 +88,12 @@ impl Users {
     }

     /// Returns an iterator over all users on this homeserver.
-    pub fn iter<'a>(&'a self) -> impl Iterator<Item = Result<UserId>> + 'a {
+    pub fn iter(&self) -> impl Iterator<Item = Result<UserId>> + '_ {
         self.userid_password.iter().map(|(bytes, _)| {
-            Ok(
-                UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                    Error::bad_database("User ID in userid_password is invalid unicode.")
-                })?)
-                .map_err(|_| Error::bad_database("User ID in userid_password is invalid."))?,
-            )
+            UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                Error::bad_database("User ID in userid_password is invalid unicode.")
+            })?)
+            .map_err(|_| Error::bad_database("User ID in userid_password is invalid."))
         })
     }

@@ -588,16 +586,10 @@ impl Users {
             .iter_from(&start, false)
             .take_while(move |(k, _)| k.starts_with(&prefix))
             .map(|(_, bytes)| {
-                Ok(
-                    UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
-                        Error::bad_database(
-                            "User ID in devicekeychangeid_userid is invalid unicode.",
-                        )
-                    })?)
-                    .map_err(|_| {
-                        Error::bad_database("User ID in devicekeychangeid_userid is invalid.")
-                    })?,
-                )
+                UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
+                    Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
+                })?)
+                .map_err(|_| Error::bad_database("User ID in devicekeychangeid_userid is invalid."))
             })
     }

@@ -863,9 +855,8 @@ impl Users {
         self.userdeviceid_metadata
            .scan_prefix(key)
             .map(|(_, bytes)| {
-                Ok(serde_json::from_slice::<Device>(&bytes).map_err(|_| {
-                    Error::bad_database("Device in userdeviceid_metadata is invalid.")
-                })?)
+                serde_json::from_slice::<Device>(&bytes)
+                    .map_err(|_| Error::bad_database("Device in userdeviceid_metadata is invalid."))
             })
     }


@@ -1691,13 +1691,12 @@ pub(crate) fn append_incoming_pdu(
         .map_or_else(Vec::new, |users| {
             users
                 .iter()
-                .map(|users| {
+                .filter_map(|users| {
                     users
                         .get("regex")
                         .and_then(|regex| regex.as_str())
                         .and_then(|regex| Regex::new(regex).ok())
                 })
-                .filter_map(|o| o)
                 .collect::<Vec<_>>()
         });
     let aliases = namespaces

@@ -2026,12 +2025,10 @@ pub fn create_join_event_template_route(
     let create_event_content = create_event
         .as_ref()
         .map(|create_event| {
-            Ok::<_, Error>(
-                serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
-                    .expect("Raw::from_value always works.")
-                    .deserialize()
-                    .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))?,
-            )
+            serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone())
+                .expect("Raw::from_value always works.")
+                .deserialize()
+                .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))
         })
         .transpose()?;


@@ -84,22 +84,19 @@ pub fn common_elements(
     let mut other_iterators = iterators.map(|i| i.peekable()).collect::<Vec<_>>();

     Some(first_iterator.filter(move |target| {
-        other_iterators
-            .iter_mut()
-            .map(|it| {
-                while let Some(element) = it.peek() {
-                    match check_order(element, target) {
-                        Ordering::Greater => return false, // We went too far
-                        Ordering::Equal => return true,    // Element is in both iters
-                        Ordering::Less => {
-                            // Keep searching
-                            it.next();
-                        }
-                    }
-                }
-                false
-            })
-            .all(|b| b)
+        other_iterators.iter_mut().all(|it| {
+            while let Some(element) = it.peek() {
+                match check_order(element, target) {
+                    Ordering::Greater => return false, // We went too far
+                    Ordering::Equal => return true,    // Element is in both iters
+                    Ordering::Less => {
+                        // Keep searching
+                        it.next();
+                    }
+                }
+            }
+            false
+        })
     }))
 }
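
This last hunk folds .map(predicate).all(|b| b) into a single .all(predicate). A trivial standalone sketch of the same simplification:

    fn main() {
        let lengths = [2, 4, 6];

        // Before: map each element to a bool, then check that all the bools are true.
        let before = lengths.iter().map(|n| n % 2 == 0).all(|b| b);

        // After: hand the predicate to all() directly.
        let after = lengths.iter().all(|n| n % 2 == 0);

        assert_eq!(before, after);
    }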