Qnn idiomatic refactor (#570)

Firstyear 2021-08-21 14:44:55 +10:00 committed by GitHub
parent a3c0b8ccfe
commit 9456cac15b
13 changed files with 466 additions and 631 deletions

View file

@ -582,7 +582,7 @@ pub trait AccessControlsTransaction<'a> {
// add search_attrs to allowed.
Some(acs.attrs.iter().map(|s| s.as_str()))
} else {
trace!(entry = ?e.get_uuid(), acs = %acs.acp.name, "entry matches acs"); // should this be `security_access`?
trace!(entry = ?e.get_uuid(), acs = %acs.acp.name, "entry DOES NOT match acs"); // should this be `security_access`?
ltrace!(
audit,
"entry {:?} DOES NOT match acs {}",
@ -741,82 +741,73 @@ pub trait AccessControlsTransaction<'a> {
"access::search_filter_entry_attributes<allowed_entries>",
|| {
entries
.into_iter()
.map(|e| {
// Get the set of attributes you can see for this entry
// this is within your related acp scope.
let allowed_attrs: BTreeSet<&str> = related_acp
.iter()
.filter_map(|(acs, f_res)| {
if e.entry_match_no_index(&f_res) {
security_access!(
target = ?e.get_uuid(),
acs = %acs.acp.name,
"target entry matches acs",
);
lsecurity_access!(
audit,
"target entry {:?} matches acs {}",
e.get_uuid(),
acs.acp.name
);
// add search_attrs to allowed iterator
Some(
acs.attrs
.iter()
.map(|s| s.as_str()) // TODO: Refactor in another PR
.filter(|s| {
// could simplify with as_ref, map, and unwrap_or(true)
match &req_attrs {
// We return all as we requested all.
None => true,
Some(r_attrs) => {
// If we have a req_attrs set, we only return
// things that were requested.
r_attrs.contains(s)
}
}
}),
)
} else {
trace!(
target = ?e.get_uuid(),
acs = %acs.acp.name,
"target entry DOES NOT match acs",
);
ltrace!(
audit,
"target entry {:?} DOES NOT match acs {}",
e.get_uuid(),
acs.acp.name
);
None
}
})
.flatten()
.collect();
.into_iter()
.map(|e| {
// Get the set of attributes you can see for this entry
// this is within your related acp scope.
let allowed_attrs: BTreeSet<&str> = related_acp
.iter()
.filter_map(|(acs, f_res)| {
if e.entry_match_no_index(&f_res) {
security_access!(
target = ?e.get_uuid(),
acs = %acs.acp.name,
"target entry matches acs",
);
lsecurity_access!(
audit,
"target entry {:?} matches acs {}",
e.get_uuid(),
acs.acp.name
);
// add search_attrs to allowed iterator
Some(
acs.attrs
.iter()
.map(|s| s.as_str())
.filter(|s| {
req_attrs.as_ref().map(|r_attrs| r_attrs.contains(s)).unwrap_or(true)
}),
)
} else {
trace!(
target = ?e.get_uuid(),
acs = %acs.acp.name,
"target entry DOES NOT match acs",
);
ltrace!(
audit,
"target entry {:?} DOES NOT match acs {}",
e.get_uuid(),
acs.acp.name
);
None
}
})
.flatten()
.collect();
// Remove all others that are present on the entry.
security_access!(
requested = ?req_attrs,
allowed = ?allowed_attrs,
"attributes"
);
lsecurity_access!(
audit,
"requested attributes --> {:?} allowed attributes --> {:?}",
req_attrs,
allowed_attrs
);
// Remove all others that are present on the entry.
security_access!(
requested = ?req_attrs,
allowed = ?allowed_attrs,
"attributes"
);
lsecurity_access!(
audit,
"requested attributes --> {:?} allowed attributes --> {:?}",
req_attrs,
allowed_attrs
);
// Now purge the attrs that are NOT allowed.
spanned!("access::search_filter_entry_attributes<reduce_attributes>", {
lperf_trace_segment!(audit, "access::search_filter_entry_attributes<reduce_attributes>", || {
e.reduce_attributes(&allowed_attrs)
})
// Now purge the attrs that are NOT allowed.
spanned!("access::search_filter_entry_attributes<reduce_attributes>", {
lperf_trace_segment!(audit, "access::search_filter_entry_attributes<reduce_attributes>", || {
e.reduce_attributes(&allowed_attrs)
})
})
.collect()
})
.collect()
}
)
}
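The old code's comment suggested simplifying the request check with `as_ref`, `map`, and `unwrap_or(true)`, which is what the new filter does. A minimal, self-contained sketch of that idiom, using plain `String`/`BTreeSet` stand-ins rather than the crate's real attribute types:

use std::collections::BTreeSet;

// `allowed` stands in for the attrs an ACP grants; `req_attrs` is the optional
// set the client asked for, where `None` means "return everything".
fn visible<'a>(allowed: &'a [String], req_attrs: &Option<BTreeSet<String>>) -> BTreeSet<&'a str> {
    allowed
        .iter()
        .map(|s| s.as_str())
        // None => keep every attr; Some(r) => keep it only if it was requested.
        .filter(|s| req_attrs.as_ref().map(|r| r.contains(*s)).unwrap_or(true))
        .collect()
}

fn main() {
    let allowed = vec!["name".to_string(), "mail".to_string()];
    let req: Option<BTreeSet<String>> = Some(["name".to_string()].into_iter().collect());
    let all: BTreeSet<&str> = ["mail", "name"].into_iter().collect();
    let only_name: BTreeSet<&str> = ["name"].into_iter().collect();
    assert_eq!(visible(&allowed, &None), all);
    assert_eq!(visible(&allowed, &req), only_name);
}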
@ -865,16 +856,11 @@ pub trait AccessControlsTransaction<'a> {
let acp_resolve_filter_cache = self.get_acp_resolve_filter_cache();
// Pre-check if the no-no purge class is present
let disallow = me.modlist.iter().fold(false, |acc, m| {
if acc {
acc
} else {
match m {
Modify::Purged(a) => a == "class",
_ => false,
}
}
});
let disallow = me
.modlist
.iter()
.any(|m| matches!(m, Modify::Purged(a) if a == "class"));
if disallow {
lsecurity_access!(audit, "Disallowing purge class in modification");
return Ok(false);
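This is the `iter().any()` plus `matches!` shape that replaces the `fold(false, ..)` above. A minimal sketch with a simplified `Modify` stand-in (not the crate's real type), just to show the short-circuiting and the pattern guard:

// Simplified stand-in for the real Modify type; fields are illustrative.
#[allow(dead_code)]
enum Modify {
    Present(String, String),
    Removed(String, String),
    Purged(String),
}

// Before: fold(false, |acc, m| if acc { acc } else { match m { .. } }).
// After: `any` stops at the first hit, and `matches!` with a guard replaces
// the inner match expression.
fn contains_class_purge(modlist: &[Modify]) -> bool {
    modlist
        .iter()
        .any(|m| matches!(m, Modify::Purged(a) if a == "class"))
}

fn main() {
    let mods = vec![
        Modify::Present("name".into(), "alice".into()),
        Modify::Purged("class".into()),
    ];
    assert!(contains_class_purge(&mods));
    assert!(!contains_class_purge(&[Modify::Purged("mail".into())]));
}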
@ -899,9 +885,7 @@ pub trait AccessControlsTransaction<'a> {
e
})
.ok()
.map(|f_res|
(acs, f_res)
)
.map(|f_res| (acs, f_res))
} else {
None
}
@ -977,62 +961,53 @@ pub trait AccessControlsTransaction<'a> {
lsecurity_access!(audit, "Requested remove set: {:?}", requested_rem);
lsecurity_access!(audit, "Requested class set: {:?}", requested_classes);
let r = entries.iter().fold(true, |acc, e| {
if !acc {
let r = entries.iter().all(|e| {
// For this entry, find the acp's that apply to it from the
// set that apply to the entry that is performing the operation
let scoped_acp: Vec<&AccessControlModify> = related_acp
.iter()
.filter_map(|(acm, f_res)| {
if e.entry_match_no_index(&f_res) {
Some(*acm)
} else {
None
}
})
.collect();
// Build the sets of classes, pres and rem we are allowed to modify, extend
// or use based on the set of matched acps.
let allowed_pres: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.presattrs.iter().map(|v| v.as_str()))
.collect();
let allowed_rem: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.remattrs.iter().map(|v| v.as_str()))
.collect();
let allowed_classes: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.classes.iter().map(|v| v.as_str()))
.collect();
// Now check all the subsets are true. Remember, purge class
// is already checked above.
if !requested_pres.is_subset(&allowed_pres) {
lsecurity_access!(audit, "requested_pres is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", requested_pres, allowed_pres);
false
} else if !requested_rem.is_subset(&allowed_rem) {
lsecurity_access!(audit, "requested_rem is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", requested_rem, allowed_rem);
false
} else if !requested_classes.is_subset(&allowed_classes) {
lsecurity_access!(audit, "requested_classes is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", requested_classes, allowed_classes);
false
} else {
// For this entry, find the acp's that apply to it from the
// set that apply to the entry that is performing the operation
let scoped_acp: Vec<&AccessControlModify> = related_acp
.iter()
.filter_map(|(acm, f_res)| {
if e.entry_match_no_index(&f_res) {
Some(*acm)
} else {
None
}
})
.collect();
// Build the sets of classes, pres and rem we are allowed to modify, extend
// or use based on the set of matched acps.
let allowed_pres: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.presattrs.iter().map(|v| v.as_str()))
.collect();
let allowed_rem: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.remattrs.iter().map(|v| v.as_str()))
.collect();
let allowed_classes: BTreeSet<&str> = scoped_acp
.iter()
.flat_map(|acp| acp.classes.iter().map(|v| v.as_str()))
.collect();
// Now check all the subsets are true. Remember, purge class
// is already checked above.
if !requested_pres.is_subset(&allowed_pres) {
lsecurity_access!(audit, "requested_pres is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", requested_pres, allowed_pres);
false
} else if !requested_rem.is_subset(&allowed_rem) {
lsecurity_access!(audit, "requested_rem is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", requested_rem, allowed_rem);
false
} else if !requested_classes.is_subset(&allowed_classes) {
lsecurity_access!(audit, "requested_classes is not a subset of allowed");
lsecurity_access!(
audit,
"{:?} !⊆ {:?}",
requested_classes,
allowed_classes
);
false
} else {
lsecurity_access!(audit, "passed pres, rem, classes check.");
true
}
lsecurity_access!(audit, "passed pres, rem, classes check.");
true
} // if acc == false
});
if r {
@ -1070,27 +1045,26 @@ pub trait AccessControlsTransaction<'a> {
let related_acp: Vec<(&AccessControlCreate, _)> = create_state
.iter()
.filter_map(|acs| {
match (&acs.acp.receiver).resolve(&ce.ident, None, Some(acp_resolve_filter_cache)) {
Ok(f_res) => {
if rec_entry.entry_match_no_index(&f_res) {
(&acs.acp.targetscope)
.resolve(&ce.ident, None, Some(acp_resolve_filter_cache))
.map_err(|e| {
ladmin_error!(
audit,
"A internal filter/event was passed for resolution!?!? {:?}",
e
);
e
})
.ok()
.map(|f_res|
(acs, f_res)
)
} else {
None
}
}
match acs
.acp
.receiver
.resolve(&ce.ident, None, Some(acp_resolve_filter_cache))
{
Ok(f_res) if rec_entry.entry_match_no_index(&f_res) => acs
.acp
.targetscope
.resolve(&ce.ident, None, Some(acp_resolve_filter_cache))
.map_err(|e| {
ladmin_error!(
audit,
"A internal filter/event was passed for resolution!?!? {:?}",
e
);
e
})
.ok()
.map(|f_res| (acs, f_res)),
Ok(_) => None,
Err(e) => {
ladmin_error!(
audit,
@ -1106,93 +1080,67 @@ pub trait AccessControlsTransaction<'a> {
// lsecurity_access!(audit, "Related acc -> {:?}", related_acp);
// For each entry
let r = entries.iter().fold(true, |acc, e| {
if !acc {
// We have already failed, move on.
false
} else {
// Build the set of requested classes and attrs here.
let create_attrs: BTreeSet<&str> = e.get_ava_names().collect();
// If this is empty, we make an empty set, which is fine because
// the empty class set despite matching is_subset, will have the
// following effect:
// * there is no class on entry, so schema will fail
// * plugin-base will add object to give a class, but excess
// attrs will cause fail (could this be a weakness?)
// * class is a "may", so this could be empty in the rules, so
// if the accr is empty this would not be a true subset,
// so this would "fail", but any content in the accr would
// have to be validated.
//
// I still think if this is None, we should just fail here ...
// because it shouldn't be possible to match.
let r = entries.iter().all(|e| {
// Build the set of requested classes and attrs here.
let create_attrs: BTreeSet<&str> = e.get_ava_names().collect();
// If this is empty, we make an empty set, which is fine because
// the empty class set despite matching is_subset, will have the
// following effect:
// * there is no class on entry, so schema will fail
// * plugin-base will add object to give a class, but excess
// attrs will cause fail (could this be a weakness?)
// * class is a "may", so this could be empty in the rules, so
// if the accr is empty this would not be a true subset,
// so this would "fail", but any content in the accr would
// have to be validated.
//
// I still think if this is None, we should just fail here ...
// because it shouldn't be possible to match.
let create_classes: BTreeSet<&str> = match e.get_ava_as_str("class") {
Some(s) => s.collect(),
None => {
ladmin_error!(audit, "Class set failed to build - corrupted entry?");
let create_classes: BTreeSet<&str> = match e.get_ava_as_str("class") {
Some(s) => s.collect(),
None => {
ladmin_error!(audit, "Class set failed to build - corrupted entry?");
return false;
}
};
related_acp.iter().any(|(accr, f_res)| {
// Check to see if allowed.
if e.entry_match_no_index(&f_res) {
lsecurity_access!(audit, "entry {:?} matches acs {:?}", e, accr);
// It matches, so now we have to check attrs and classes.
// Remember, we have to match ALL requested attrs
// and classes to pass!
let allowed_attrs: BTreeSet<&str> =
accr.attrs.iter().map(|s| s.as_str()).collect();
let allowed_classes: BTreeSet<&str> =
accr.classes.iter().map(|s| s.as_str()).collect();
if !create_attrs.is_subset(&allowed_attrs) {
lsecurity_access!(audit, "create_attrs is not a subset of allowed");
lsecurity_access!(audit, "{:?} !⊆ {:?}", create_attrs, allowed_attrs);
return false;
}
};
related_acp.iter().fold(false, |r_acc, (accr, f_res)| {
if r_acc {
// Already allowed, continue.
r_acc
} else {
// Check to see if allowed.
if e.entry_match_no_index(&f_res) {
lsecurity_access!(audit, "entry {:?} matches acs {:?}", e, accr);
// It matches, so now we have to check attrs and classes.
// Remember, we have to match ALL requested attrs
// and classes to pass!
let allowed_attrs: BTreeSet<&str> =
accr.attrs.iter().map(|s| s.as_str()).collect();
let allowed_classes: BTreeSet<&str> =
accr.classes.iter().map(|s| s.as_str()).collect();
if !create_attrs.is_subset(&allowed_attrs) {
lsecurity_access!(
audit,
"create_attrs is not a subset of allowed"
);
lsecurity_access!(
audit,
"{:?} !⊆ {:?}",
create_attrs,
allowed_attrs
);
return false;
}
if !create_classes.is_subset(&allowed_classes) {
lsecurity_access!(
audit,
"create_classes is not a subset of allowed"
);
lsecurity_access!(
audit,
"{:?} !⊆ {:?}",
create_classes,
allowed_classes
);
return false;
}
lsecurity_access!(audit, "passed");
true
} else {
ltrace!(
audit,
"entry {:?} DOES NOT match acs {}",
e,
accr.acp.name
);
// Does not match, fail this rule.
false
}
if !create_classes.is_subset(&allowed_classes) {
lsecurity_access!(audit, "create_classes is not a subset of allowed");
lsecurity_access!(
audit,
"{:?} !⊆ {:?}",
create_classes,
allowed_classes
);
return false;
}
})
}
lsecurity_access!(audit, "passed");
true
} else {
ltrace!(audit, "entry {:?} DOES NOT match acs {}", e, accr.acp.name);
// Does not match, fail this rule.
false
}
})
// Find the set of related acps for this entry.
//
// For each "created" entry.
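The receiver/targetscope resolution in this file (for both the create and delete paths) now leans on a guard on the `Ok` arm, so "resolved but did not match" and "failed to resolve" each get their own arm instead of an `if` nested inside `Ok`. A minimal sketch of that shape with a toy `resolve` function; the names here are illustrative only:

fn resolve(input: &str) -> Result<u32, String> {
    input.parse::<u32>().map_err(|e| e.to_string())
}

fn scope_if_even(input: &str) -> Option<u32> {
    match resolve(input) {
        // Resolved AND passes the extra predicate: keep it.
        Ok(n) if n % 2 == 0 => Some(n * 10),
        // Resolved, but the predicate failed: not an error, just no match.
        Ok(_) => None,
        // Failed to resolve: log it and move on.
        Err(e) => {
            eprintln!("resolution failed: {}", e);
            None
        }
    }
}

fn main() {
    assert_eq!(scope_if_even("4"), Some(40));
    assert_eq!(scope_if_even("3"), None);
    assert_eq!(scope_if_even("x"), None);
}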
@ -1237,10 +1185,10 @@ pub trait AccessControlsTransaction<'a> {
let related_acp: Vec<(&AccessControlDelete, _)> = delete_state
.iter()
.filter_map(|acs| {
match (&acs.acp.receiver).resolve(&de.ident, None, Some(acp_resolve_filter_cache)) {
match acs.acp.receiver.resolve(&de.ident, None, Some(acp_resolve_filter_cache)) {
Ok(f_res) => {
if rec_entry.entry_match_no_index(&f_res) {
(&acs.acp.targetscope)
acs.acp.targetscope
.resolve(&de.ident, None, Some(acp_resolve_filter_cache))
.map_err(|e| {
ladmin_error!(
@ -1277,37 +1225,29 @@ pub trait AccessControlsTransaction<'a> {
*/
// For each entry
let r = entries.iter().fold(true, |acc, e| {
if !acc {
// Any false, denies the whole operation.
false
} else {
related_acp.iter().fold(false, |r_acc, (acd, f_res)| {
if r_acc {
// If something allowed us to delete, skip doing silly work.
r_acc
} else if e.entry_match_no_index(&f_res) {
lsecurity_access!(
audit,
"entry {:?} matches acs {}",
e.get_uuid(),
acd.acp.name
);
// It matches, so we can delete this!
lsecurity_access!(audit, "passed");
true
} else {
ltrace!(
audit,
"entry {:?} DOES NOT match acs {}",
e.get_uuid(),
acd.acp.name
);
// Does not match, fail.
false
} // else
}) // fold related_acp
} // if/else
let r = entries.iter().all(|e| {
related_acp.iter().any(|(acd, f_res)| {
if e.entry_match_no_index(&f_res) {
lsecurity_access!(
audit,
"entry {:?} matches acs {}",
e.get_uuid(),
acd.acp.name
);
// It matches, so we can delete this!
lsecurity_access!(audit, "passed");
true
} else {
ltrace!(
audit,
"entry {:?} DOES NOT match acs {}",
e.get_uuid(),
acd.acp.name
);
// Does not match, fail.
false
} // else
}) // any related_acp
});
if r {
lsecurity_access!(audit, "allowed ✅");
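The delete check above keeps the same semantics as the old nested folds: every target entry must be matched by at least one delete ACP, and an empty target set is vacuously allowed. `all` over `any` says exactly that. A tiny sketch with integer ranges standing in for resolved ACP filters:

// Every target must fall inside at least one allowed range.
fn delete_allowed(targets: &[u32], rules: &[(u32, u32)]) -> bool {
    targets
        .iter()
        .all(|t| rules.iter().any(|&(lo, hi)| (lo..=hi).contains(t)))
}

fn main() {
    let rules = [(0, 10), (100, 200)];
    assert!(delete_allowed(&[3, 150], &rules)); // every target matches some rule
    assert!(!delete_allowed(&[3, 50], &rules)); // 50 matches nothing: deny all
    assert!(delete_allowed(&[], &rules)); // vacuously true, same as fold(true, ..)
}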

View file

@ -122,30 +122,21 @@ impl QueryServerReadV1 {
})?;
// Make an event from the request
// TODO: Refactor this in another PR to use `map_err` and `?`
let search = match SearchEvent::from_message(
&mut audit,
ident,
&req,
&idms_prox_read.qs_read,
) {
Ok(s) => s,
Err(e) => {
admin_error!(?e, "Failed to begin search");
ladmin_error!(audit, "Failed to begin search: {:?}", e);
return Err(e);
}
};
let search =
SearchEvent::from_message(&mut audit, ident, &req, &idms_prox_read.qs_read)
.map_err(|e| {
admin_error!(?e, "Failed to begin search");
ladmin_error!(audit, "Failed to begin search: {:?}", e);
e
})?;
trace!(?search, "Begin event");
ltrace!(audit, "Begin event {:?}", search);
// TODO: Refactor this in another PR to use `?`
match idms_prox_read.qs_read.search_ext(&mut audit, &search) {
Ok(entries) => SearchResult::new(&mut audit, &idms_prox_read.qs_read, &entries)
.map(SearchResult::response),
Err(e) => Err(e),
}
let entries = idms_prox_read.qs_read.search_ext(&mut audit, &search)?;
SearchResult::new(&mut audit, &idms_prox_read.qs_read, &entries)
.map(SearchResult::response)
})
});
// At the end of the event we send it for logging.
@ -387,48 +378,26 @@ impl QueryServerReadV1 {
e
})?;
// TODO: Refactor this in another PR to use `?`
let srch = match SearchEvent::from_whoami_request(
&mut audit,
ident,
&idms_prox_read.qs_read,
) {
Ok(s) => s,
Err(e) => {
admin_error!(?e, "Failed to begin whoami");
ladmin_error!(audit, "Failed to begin whoami: {:?}", e);
return Err(e);
}
};
let srch =
SearchEvent::from_whoami_request(&mut audit, ident, &idms_prox_read.qs_read)
.map_err(|e| {
admin_error!(?e, "Failed to begin whoami");
ladmin_error!(audit, "Failed to begin whoami: {:?}", e);
e
})?;
trace!(search = ?srch, "Begin event");
ltrace!(audit, "Begin event {:?}", srch);
// TODO: Refactor this in another PR to use `?`
match idms_prox_read.qs_read.search_ext(&mut audit, &srch) {
Ok(mut entries) => {
// assert there is only one ...
match entries.len() {
0 => Err(OperationError::NoMatchingEntries),
1 => {
#[allow(clippy::expect_used)]
let e = entries.pop().expect("Entry length mismatch!!!");
// Now convert to a response, and return
WhoamiResult::new(&mut audit, &idms_prox_read.qs_read, &e, uat)
.map(|ok_wr| ok_wr.response())
}
// Somehow we matched multiple, which should be impossible.
_ => Err(OperationError::InvalidState),
}
// TODO: Refactor this in another PR use use the following
// entries.pop() {
// Some(e) if entries.is_empty() => // whoami stuff...
// Some(_) => Err(OperationError::InvalidState) // matched multiple
// _ => Err(OperationError::NoMatchingEntries),
// }
let mut entries = idms_prox_read.qs_read.search_ext(&mut audit, &srch)?;
match entries.pop() {
Some(e) if entries.is_empty() => {
WhoamiResult::new(&mut audit, &idms_prox_read.qs_read, &e, uat)
.map(WhoamiResult::response)
}
// Something else went wrong ...
Err(e) => Err(e),
Some(_) => Err(OperationError::InvalidState), // Somehow matched multiple entries...
_ => Err(OperationError::NoMatchingEntries),
}
})
});
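Both handlers in this file use the same conversion: a `match` that only existed to log and early-return on `Err` becomes `map_err` (to log) followed by `?` (to return). A minimal sketch of that shape, with a hypothetical `build_event` standing in for `SearchEvent::from_message`:

#[derive(Debug)]
struct OpError(String);

// Hypothetical fallible constructor standing in for SearchEvent::from_message.
fn build_event(raw: &str) -> Result<Vec<String>, OpError> {
    if raw.is_empty() {
        Err(OpError("empty request".into()))
    } else {
        Ok(raw.split(',').map(str::to_string).collect())
    }
}

fn handle(raw: &str) -> Result<usize, OpError> {
    // Before:
    //   let search = match build_event(raw) {
    //       Ok(s) => s,
    //       Err(e) => { eprintln!("failed: {:?}", e); return Err(e); }
    //   };
    // After: log inside map_err, then let `?` do the early return.
    let search = build_event(raw).map_err(|e| {
        eprintln!("Failed to begin search: {:?}", e);
        e
    })?;
    Ok(search.len())
}

fn main() {
    assert_eq!(handle("a,b").unwrap(), 2);
    assert!(handle("").is_err());
}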

View file

@ -21,11 +21,13 @@ use uuid::Uuid;
const DBV_ID2ENTRY: &str = "id2entry";
const DBV_INDEXV: &str = "indexv";
#[allow(clippy::needless_pass_by_value)] // needs to accept value from `map_err`
fn sqlite_error(e: rusqlite::Error) -> OperationError {
admin_error!(?e, "SQLite Error");
OperationError::SqliteError
}
#[allow(clippy::needless_pass_by_value)] // needs to accept value from `map_err`
fn serde_cbor_error(e: serde_cbor::Error) -> OperationError {
admin_error!(?e, "Serde CBOR Error");
OperationError::SerdeCborError

View file

@ -341,25 +341,22 @@ impl TryFrom<DbCredV1> for Credential {
None => None,
};
let v_webauthn = match webauthn {
Some(dbw) => Some(
dbw.into_iter()
.map(|wc| {
(
wc.label,
WebauthnCredential {
cred_id: wc.id,
cred: wc.cred,
counter: wc.counter,
verified: wc.verified,
registration_policy: wc.registration_policy,
},
)
})
.collect(),
),
None => None,
};
let v_webauthn = webauthn.map(|dbw| {
dbw.into_iter()
.map(|wc| {
(
wc.label,
WebauthnCredential {
cred_id: wc.id,
cred: wc.cred,
counter: wc.counter,
verified: wc.verified,
registration_policy: wc.registration_policy,
},
)
})
.collect()
});
let v_backup_code = match backup_code {
Some(dbb) => Some(BackupCodes::try_from(dbb)?),
@ -544,11 +541,11 @@ impl Credential {
}
CredentialType::PasswordMfa(_, _, map, _) | CredentialType::Webauthn(map) => map
.iter()
.fold(None, |acc, (k, v)| {
if acc.is_none() && &v.cred_id == cid && v.counter < counter {
.find_map(|(k, v)| {
if &v.cred_id == cid && v.counter < counter {
Some(k)
} else {
acc
None
}
})
.map(|label| {
@ -841,13 +838,13 @@ impl Credential {
match &self.type_ {
CredentialType::PasswordMfa(_, _, _, opt_bc) => opt_bc
.as_ref()
.ok_or(OperationError::InvalidAccountState(String::from(
"No backup codes are available for this account",
)))
.and_then(|bc| {
Ok(BackupCodesView {
backup_codes: bc.code_set.clone().into_iter().collect(),
})
.ok_or_else(|| {
OperationError::InvalidAccountState(
"No backup codes are available for this account".to_string(),
)
})
.map(|bc| BackupCodesView {
backup_codes: bc.code_set.clone().into_iter().collect(),
}),
_ => Err(OperationError::InvalidAccountState(
"Non-MFA credential type".to_string(),

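The backup-code accessor above switches from `ok_or` to `ok_or_else`, so the error (and its allocated message) is only built when the codes are actually missing, and the `and_then(|bc| Ok(..))` that never fails becomes a plain `map`. A minimal sketch of that pattern with stand-in types, not the crate's real Credential:

#[derive(Debug)]
enum OperationError {
    InvalidAccountState(String),
}

struct Credential {
    backup_codes: Option<Vec<String>>,
}

impl Credential {
    fn backup_codes_view(&self) -> Result<Vec<String>, OperationError> {
        self.backup_codes
            .as_ref()
            // ok_or would allocate the message on every call, even when the
            // codes exist; ok_or_else only runs the closure on the None path.
            .ok_or_else(|| {
                OperationError::InvalidAccountState(
                    "No backup codes are available for this account".to_string(),
                )
            })
            // and_then(|bc| Ok(..)) never fails, so map says the same thing.
            .map(|codes| codes.clone())
    }
}

fn main() {
    let with = Credential { backup_codes: Some(vec!["abcd-1234".to_string()]) };
    let without = Credential { backup_codes: None };
    assert_eq!(with.backup_codes_view().unwrap().len(), 1);
    assert!(without.backup_codes_view().is_err());
}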
View file

@ -605,7 +605,7 @@ impl<STATE> Entry<EntryInvalid, STATE> {
.map(|s| {
// This should NOT fail - if it does, it means our schema is
// in an invalid state!
Ok(schema_attributes.get(s).ok_or(SchemaError::Corrupted)?)
schema_attributes.get(s).ok_or(SchemaError::Corrupted)
})
.collect();

View file

@ -957,12 +957,7 @@ mod tests {
);
if let AuthState::Choose(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthMech::Anonymous => true,
_ => acc,
})
);
assert!(auth_mechs.iter().any(|x| matches!(x, AuthMech::Anonymous)));
} else {
panic!("Invalid auth state")
}
@ -973,12 +968,9 @@ mod tests {
.expect("Failed to select anonymous mech.");
if let AuthState::Continue(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthAllowed::Anonymous => true,
_ => acc,
})
);
assert!(auth_mechs
.iter()
.any(|x| matches!(x, AuthAllowed::Anonymous)));
} else {
panic!("Invalid auth state")
}
@ -999,12 +991,7 @@ mod tests {
let mut session = session.unwrap();
if let AuthState::Choose(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthMech::Password => true,
_ => acc,
})
);
assert!(auth_mechs.iter().any(|x| matches!(x, AuthMech::Password)));
} else {
panic!();
}
@ -1014,12 +1001,9 @@ mod tests {
.expect("Failed to select anonymous mech.");
if let AuthState::Continue(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthAllowed::Password => true,
_ => acc,
})
);
assert!(auth_mechs
.iter()
.any(|x| matches!(x, AuthAllowed::Password)));
} else {
panic!("Invalid auth state")
}
@ -1147,12 +1131,9 @@ mod tests {
let mut session = session.expect("Session was unable to be created.");
if let AuthState::Choose(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthMech::PasswordMfa => true,
_ => acc,
})
);
assert!(auth_mechs
.iter()
.any(|x| matches!(x, AuthMech::PasswordMfa)))
} else {
panic!();
}
@ -1171,10 +1152,20 @@ mod tests {
rchal = Some(chal.clone());
true
}
// Why does this also return `true`? If we hit this but not
// Webauthn, then we will panic when unwrapping `rchal` later...
AuthAllowed::Totp => true,
_ => acc,
})
);
// I feel like this is what we should be doing
// assuming there will only be one `AuthAllowed::Webauthn`.
// rchal = auth_mechs.iter().find_map(|x| match x {
// AuthAllowed::Webauthn(chal) => Some(chal),
// _ => None,
// });
// assert!(rchal.is_some());
} else {
panic!("Invalid auth state")
}
@ -1441,12 +1432,7 @@ mod tests {
let mut session = session.unwrap();
if let AuthState::Choose(auth_mechs) = state {
assert!(
true == auth_mechs.iter().fold(false, |acc, x| match x {
AuthMech::Webauthn => true,
_ => acc,
})
);
assert!(auth_mechs.iter().any(|x| matches!(x, AuthMech::Webauthn)));
} else {
panic!();
}
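The commented-out suggestion above (pulling the Webauthn challenge out with `find_map` instead of threading `rchal` through a fold) is the same idiom the rest of this commit applies elsewhere. A minimal sketch with a stand-in enum, assuming only one challenge-carrying variant is present:

// Stand-in for AuthAllowed in the test above; the String payload stands in
// for the real challenge type.
enum Allowed {
    Anonymous,
    Password,
    Webauthn(String),
}

fn first_webauthn_challenge(mechs: &[Allowed]) -> Option<&String> {
    // Stops at the first challenge-carrying variant; no mutable Option and no
    // accumulator to carry through a fold.
    mechs.iter().find_map(|m| match m {
        Allowed::Webauthn(chal) => Some(chal),
        _ => None,
    })
}

fn main() {
    let mechs = vec![
        Allowed::Password,
        Allowed::Webauthn("challenge-bytes".to_string()),
    ];
    assert_eq!(
        first_webauthn_challenge(&mechs).map(String::as_str),
        Some("challenge-bytes")
    );
    assert!(first_webauthn_challenge(&[Allowed::Anonymous]).is_none());
}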

View file

@ -22,7 +22,7 @@ macro_rules! try_from_account_e {
($au:expr, $value:expr, $qs:expr) => {{
let name = $value
.get_ava_single_str("name")
.map(|s| s.to_string()) // TODO: Refactor in another PR
.map(str::to_string)
.ok_or_else(|| {
OperationError::InvalidAccountState("Missing attribute: name".to_string())
})?;
@ -31,7 +31,6 @@ macro_rules! try_from_account_e {
let upg = Group { name, uuid };
// TODO: Refactor to use `map` and `unwrap_or_else`
let mut groups: Vec<Group> = match $value.get_ava_as_refuuid("memberof") {
Some(riter) => {
// given a list of uuid, make a filter: even if this is empty, the be will

View file

@ -60,20 +60,17 @@ impl Plugin for Protected {
return Ok(());
}
cand.iter().fold(Ok(()), |acc, cand| match acc {
Err(_) => acc,
Ok(_) => {
if cand.attribute_equality("class", &PVCLASS_SYSTEM)
|| cand.attribute_equality("class", &PVCLASS_DOMAIN_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_CONFIG)
|| cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
acc
}
cand.iter().try_fold((), |(), cand| {
if cand.attribute_equality("class", &PVCLASS_SYSTEM)
|| cand.attribute_equality("class", &PVCLASS_DOMAIN_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_CONFIG)
|| cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})
}
@ -93,55 +90,42 @@ impl Plugin for Protected {
return Ok(());
}
// Prevent adding class: system, domain_info, tombstone, or recycled.
me.modlist.iter().fold(Ok(()), |acc, m| {
if acc.is_err() {
acc
} else {
match m {
Modify::Present(a, v) => {
// TODO: Can we avoid this clone?
if a == "class"
&& (v == &(*VCLASS_SYSTEM)
|| v == &(*VCLASS_DOMAIN_INFO)
|| v == &(*VCLASS_SYSTEM_INFO)
|| v == &(*VCLASS_SYSTEM_CONFIG)
|| v == &(*VCLASS_TOMBSTONE)
|| v == &(*VCLASS_RECYCLED))
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
}
_ => Ok(()),
me.modlist.iter().try_fold((), |(), m| match m {
Modify::Present(a, v) => {
// TODO: Can we avoid this clone?
if a == "class"
&& (v == &(*VCLASS_SYSTEM)
|| v == &(*VCLASS_DOMAIN_INFO)
|| v == &(*VCLASS_SYSTEM_INFO)
|| v == &(*VCLASS_SYSTEM_CONFIG)
|| v == &(*VCLASS_TOMBSTONE)
|| v == &(*VCLASS_RECYCLED))
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
}
_ => Ok(()),
})?;
// HARD block mods on tombstone or recycle. We soft block on the rest as they may
// have some allowed attrs.
cand.iter().fold(Ok(()), |acc, cand| match acc {
Err(_) => acc,
Ok(_) => {
if cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
acc
}
cand.iter().try_fold((), |(), cand| {
if cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})?;
// if class: system, check the mods are "allowed"
let system_pres = cand.iter().fold(false, |acc, c| {
if acc {
acc
} else {
// We don't need to check for domain info here because domain_info has a class
// system also. We just need to block it from being created.
c.attribute_equality("class", &PVCLASS_SYSTEM)
}
let system_pres = cand.iter().any(|c| {
// We don't need to check for domain info here because domain_info has a class
// system also. We just need to block it from being created.
c.attribute_equality("class", &PVCLASS_SYSTEM)
});
ltrace!(au, "class: system -> {}", system_pres);
@ -151,20 +135,14 @@ impl Plugin for Protected {
}
// Something altered is system, check if it's allowed.
me.modlist.iter().fold(Ok(()), |acc, m| {
me.modlist.iter().try_fold((), |(), m| {
// Already hit an error, move on.
if acc.is_err() {
acc
} else {
let a = match m {
Modify::Present(a, _) => a,
Modify::Removed(a, _) => a,
Modify::Purged(a) => a,
};
match ALLOWED_ATTRS.get(a.as_str()) {
Some(_) => Ok(()),
None => Err(OperationError::SystemProtectedObject),
}
let a = match m {
Modify::Present(a, _) | Modify::Removed(a, _) | Modify::Purged(a) => a,
};
match ALLOWED_ATTRS.get(a.as_str()) {
Some(_) => Ok(()),
None => Err(OperationError::SystemProtectedObject),
}
})
}
@ -184,20 +162,17 @@ impl Plugin for Protected {
return Ok(());
}
cand.iter().fold(Ok(()), |acc, cand| match acc {
Err(_) => acc,
Ok(_) => {
if cand.attribute_equality("class", &PVCLASS_SYSTEM)
|| cand.attribute_equality("class", &PVCLASS_DOMAIN_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_CONFIG)
|| cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
acc
}
cand.iter().try_fold((), |(), cand| {
if cand.attribute_equality("class", &PVCLASS_SYSTEM)
|| cand.attribute_equality("class", &PVCLASS_DOMAIN_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_INFO)
|| cand.attribute_equality("class", &PVCLASS_SYSTEM_CONFIG)
|| cand.attribute_equality("class", &PVCLASS_TOMBSTONE)
|| cand.attribute_equality("class", &PVCLASS_RECYCLED)
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})
}
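Each of these checks replaces `fold(Ok(()), |acc, x| match acc { Err(_) => acc, Ok(_) => .. })` with `try_fold` over a unit accumulator, which short-circuits on the first `Err`. A minimal sketch of the shape with toy types (`try_for_each` would be equivalent here):

#[derive(Debug, PartialEq)]
enum OperationError {
    SystemProtectedObject,
}

struct Entry {
    classes: Vec<&'static str>,
}

impl Entry {
    fn has_class(&self, c: &str) -> bool {
        self.classes.iter().any(|x| *x == c)
    }
}

fn check_protected(cands: &[Entry]) -> Result<(), OperationError> {
    // The `()` accumulator carries no state; the Result return type is what
    // makes try_fold stop at the first protected candidate.
    cands.iter().try_fold((), |(), cand| {
        if cand.has_class("system") || cand.has_class("tombstone") {
            Err(OperationError::SystemProtectedObject)
        } else {
            Ok(())
        }
    })
}

fn main() {
    let ok = vec![Entry { classes: vec!["person"] }];
    let bad = vec![
        Entry { classes: vec!["person"] },
        Entry { classes: vec!["system"] },
    ];
    assert_eq!(check_protected(&ok), Ok(()));
    assert_eq!(check_protected(&bad), Err(OperationError::SystemProtectedObject));
}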

View file

@ -139,20 +139,16 @@ impl Plugin for Spn {
// trigger the spn regen ... which is expensive. Future
// TODO #157: will be improvements to modify on large txns.
let domain_name_changed =
cand.iter()
.zip(pre_cand.iter())
.fold(None, |acc, (post, pre)| {
if acc.is_some() {
acc
} else if post.attribute_equality("uuid", &PV_UUID_DOMAIN_INFO)
&& post.get_ava_single("domain_name") != pre.get_ava_single("domain_name")
{
post.get_ava_single("domain_name")
} else {
acc
}
});
let domain_name_changed = cand.iter().zip(pre_cand.iter()).find_map(|(post, pre)| {
let domain_name = post.get_ava_single("domain_name");
if post.attribute_equality("uuid", &PV_UUID_DOMAIN_INFO)
&& domain_name != pre.get_ava_single("domain_name")
{
domain_name
} else {
None
}
});
let domain_name = match domain_name_changed {
Some(s) => s,

View file

@ -451,23 +451,10 @@ pub trait QueryServerTransaction<'a> {
})?;
let se = SearchEvent::new_internal(f_valid);
// TODO: Refactor to use this
// let mut vs = self.search(audit, &se)?;
// match vs.pop() {
// Some(entry) if vs.is_empty() => Ok(entry),
// _ => Err(OperationError::NoMatchingEntries),
// }
let res = self.search(audit, &se);
match res {
Ok(vs) => {
if vs.len() > 1 {
return Err(OperationError::NoMatchingEntries);
}
vs.into_iter()
.next()
.ok_or(OperationError::NoMatchingEntries)
}
Err(e) => Err(e),
let mut vs = self.search(audit, &se)?;
match vs.pop() {
Some(entry) if vs.is_empty() => Ok(entry),
_ => Err(OperationError::NoMatchingEntries),
}
})
})
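The old TODO comments in these search helpers proposed exactly this shape: pop the last element, then demand the remainder is empty, so zero and multiple matches both fall through to the error arm. A minimal generic sketch (the whoami path earlier in the diff distinguishes the cases, turning multiple matches into InvalidState):

#[derive(Debug, PartialEq)]
enum OperationError {
    NoMatchingEntries,
}

// Enforce "exactly one result" on a search result vec.
fn exactly_one<T>(mut vs: Vec<T>) -> Result<T, OperationError> {
    match vs.pop() {
        // One element popped and nothing left behind it.
        Some(entry) if vs.is_empty() => Ok(entry),
        // Empty vec, or more than one element: both are "no single match".
        _ => Err(OperationError::NoMatchingEntries),
    }
}

fn main() {
    assert_eq!(exactly_one(vec![42]), Ok(42));
    assert_eq!(exactly_one(Vec::<i32>::new()), Err(OperationError::NoMatchingEntries));
    assert_eq!(exactly_one(vec![1, 2]), Err(OperationError::NoMatchingEntries));
}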
@ -484,23 +471,11 @@ pub trait QueryServerTransaction<'a> {
lperf_segment!(audit, "server::internal_search_ext_uuid", || {
let filter_intent = filter_all!(f_eq("uuid", PartialValue::new_uuid(*uuid)));
let filter = filter!(f_eq("uuid", PartialValue::new_uuid(*uuid)));
let res = self.impersonate_search_ext(audit, filter, filter_intent, event);
// TODO: Refactor to use this
// let mut vs = self.impersonate_search_ext(audit, filter, filter_intent, event)?;
// match vs.pop() {
// Some(entry) if vs.is_empty() => Ok(entry),
// _ => Err(OperationError::NoMatchingEntries),
// }
match res {
Ok(vs) => {
if vs.len() > 1 {
return Err(OperationError::NoMatchingEntries);
}
vs.into_iter()
.next()
.ok_or(OperationError::NoMatchingEntries)
}
Err(e) => Err(e),
let mut vs = self.impersonate_search_ext(audit, filter, filter_intent, event)?;
match vs.pop() {
Some(entry) if vs.is_empty() => Ok(entry),
_ => Err(OperationError::NoMatchingEntries),
}
})
})
@ -517,23 +492,11 @@ pub trait QueryServerTransaction<'a> {
lperf_segment!(audit, "server::internal_search_uuid", || {
let filter_intent = filter_all!(f_eq("uuid", PartialValue::new_uuid(*uuid)));
let filter = filter!(f_eq("uuid", PartialValue::new_uuid(*uuid)));
let res = self.impersonate_search(audit, filter, filter_intent, event);
// TODO: Refactor to use this
// let mut vs = self.impersonate_search(audit, filter, filter_intent, event)?;
// match vs.pop() {
// Some(entry) if vs.is_empty() => Ok(entry),
// _ => Err(OperationError::NoMatchingEntries),
// }
match res {
Ok(vs) => {
if vs.len() > 1 {
return Err(OperationError::NoMatchingEntries);
}
vs.into_iter()
.next()
.ok_or(OperationError::NoMatchingEntries)
}
Err(e) => Err(e),
let mut vs = self.impersonate_search(audit, filter, filter_intent, event)?;
match vs.pop() {
Some(entry) if vs.is_empty() => Ok(entry),
_ => Err(OperationError::NoMatchingEntries),
}
})
})
@ -767,7 +730,7 @@ pub trait QueryServerTransaction<'a> {
} else if value.is_sshkey() {
value
.get_sshkey()
.map(|s| s.to_string()) // TODO: Refactor in another PR
.map(str::to_string)
.ok_or(OperationError::InvalidValueState)
} else {
// Not? Okay, do the to string.
@ -782,7 +745,7 @@ pub trait QueryServerTransaction<'a> {
self.internal_search_uuid(audit, &UUID_DOMAIN_INFO)
.and_then(|e| {
e.get_ava_single_str("domain_name")
.map(|s| s.to_string()) // TODO: Refactor in another PR
.map(str::to_string)
.ok_or(OperationError::InvalidEntryState)
})
.map_err(|e| {
@ -804,7 +767,7 @@ pub trait QueryServerTransaction<'a> {
let mut badlist_hashset = HashSet::with_capacity(badlist_entry.len());
badlist_entry
.iter()
.filter_map(|e| e.as_string()) // TODO: Refactor in another PR
.filter_map(Value::as_string)
.cloned()
.for_each(|s| {
badlist_hashset.insert(s);
@ -1305,18 +1268,17 @@ impl<'a> QueryServerWriteTransaction<'a> {
}
// Now, delete only what you can see
let pre_candidates = match self.impersonate_search_valid(
audit,
de.filter.clone(),
de.filter_orig.clone(),
&de.ident,
) {
Ok(results) => results,
Err(e) => {
let pre_candidates = self
.impersonate_search_valid(
audit,
de.filter.clone(),
de.filter_orig.clone(),
&de.ident,
)
.map_err(|e| {
ladmin_error!(audit, "delete: error in pre-candidate selection {:?}", e);
return Err(e);
}
};
e
})?;
// Apply access controls to reduce the set if required.
// delete_allow_operation
@ -1367,7 +1329,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
OperationError::SchemaViolation(e)
})
// seal if it worked.
.map(|r| r.seal())
.map(Entry::seal)
})
.collect();
@ -1440,16 +1402,13 @@ impl<'a> QueryServerWriteTransaction<'a> {
ladmin_error!(audit, "Unable to generate search cid {:?}", e);
e
})?;
let ts = match self.internal_search(
let ts = self.internal_search(
audit,
filter_all!(f_and!([
f_eq("class", PVCLASS_TOMBSTONE.clone()),
f_lt("last_modified_cid", PartialValue::new_cid(cid)),
])),
) {
Ok(r) => r,
Err(e) => return Err(e),
};
)?;
if ts.is_empty() {
ladmin_info!(audit, "No Tombstones present - purge operation success");
@ -1477,16 +1436,13 @@ impl<'a> QueryServerWriteTransaction<'a> {
ladmin_error!(audit, "Unable to generate search cid {:?}", e);
e
})?;
let rc = match self.internal_search(
let rc = self.internal_search(
audit,
filter_all!(f_and!([
f_eq("class", PVCLASS_RECYCLED.clone()),
f_lt("last_modified_cid", PartialValue::new_cid(cid)),
])),
) {
Ok(r) => r,
Err(e) => return Err(e),
};
)?;
if rc.is_empty() {
ladmin_info!(audit, "No recycled present - purge operation success");
@ -1504,7 +1460,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
OperationError::SchemaViolation(e)
})
// seal if it worked.
.map(|r| r.seal())
.map(Entry::seal)
})
.collect();
@ -1560,12 +1516,12 @@ impl<'a> QueryServerWriteTransaction<'a> {
let mut dm_mods: HashMap<Uuid, ModifyList<ModifyInvalid>> =
HashMap::with_capacity(revive_cands.len());
revive_cands.into_iter().for_each(|e| {
for e in revive_cands {
// Get this entries uuid.
let u: Uuid = *e.get_uuid();
if let Some(riter) = e.get_ava_as_refuuid("directmemberof") {
riter.for_each(|g_uuid| {
for g_uuid in riter {
dm_mods
.entry(*g_uuid)
.and_modify(|mlist| {
@ -1582,9 +1538,9 @@ impl<'a> QueryServerWriteTransaction<'a> {
);
ModifyList::new_list(vec![m])
});
});
};
});
}
}
}
// Now impersonate the modify
self.impersonate_modify_valid(
@ -1596,13 +1552,13 @@ impl<'a> QueryServerWriteTransaction<'a> {
)?;
// If and only if that succeeds, apply the direct membership modifications
// if possible.
let r: Result<_, _> = dm_mods.into_iter().try_for_each(|(g, mods)| {
for (g, mods) in dm_mods {
// I think the filter/filter_all shouldn't matter here because the only
// valid direct memberships should be still valid/live references.
let f = filter_all!(f_eq("uuid", PartialValue::new_uuid(g)));
self.internal_modify(audit, &f, &mods)
});
r
self.internal_modify(audit, &f, &mods)?;
}
Ok(())
})
}
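The revive path above trades `for_each`/`try_for_each` closures for plain `for` loops, so `?` and early returns work directly in the function body instead of being threaded through a closure. A small sketch of the same trade-off, with a hypothetical `apply_mod` standing in for internal_modify:

use std::collections::HashMap;

#[derive(Debug)]
struct OpError;

// Hypothetical stand-in for internal_modify: fails for one particular group.
fn apply_mod(group: &str, mods: &[&str]) -> Result<(), OpError> {
    if group == "broken" {
        Err(OpError)
    } else {
        println!("applied {:?} to {}", mods, group);
        Ok(())
    }
}

// for_each cannot use `?` at all, and try_for_each pushes the control flow
// into a closure; a plain for loop keeps `?` in the function body.
fn apply_all(dm_mods: HashMap<String, Vec<&str>>) -> Result<(), OpError> {
    for (group, mods) in dm_mods {
        apply_mod(&group, &mods)?;
    }
    Ok(())
}

fn main() {
    let mut good = HashMap::new();
    good.insert("admins".to_string(), vec!["add member"]);
    assert!(apply_all(good).is_ok());

    let mut bad = HashMap::new();
    bad.insert("broken".to_string(), vec!["add member"]);
    assert!(apply_all(bad).is_err());
}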
@ -1640,19 +1596,18 @@ impl<'a> QueryServerWriteTransaction<'a> {
// This is now done in the event transform
// This also checks access controls due to use of the impersonation.
let pre_candidates = match self.impersonate_search_valid(
audit,
me.filter.clone(),
me.filter_orig.clone(),
&me.ident,
) {
Ok(results) => results,
Err(e) => {
let pre_candidates = self
.impersonate_search_valid(
audit,
me.filter.clone(),
me.filter_orig.clone(),
&me.ident,
)
.map_err(|e| {
admin_error!("modify: error in pre-candidate selection {:?}", e);
ladmin_error!(audit, "modify: error in pre-candidate selection {:?}", e);
return Err(e);
}
};
e
})?;
if pre_candidates.is_empty() {
if me.ident.is_internal() {
@ -1905,7 +1860,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
ladmin_error!(audit, "Schema Violation {:?}", e);
OperationError::SchemaViolation(e)
})
.map(|e| e.seal())
.map(Entry::seal)
})
.collect();
@ -2008,12 +1963,12 @@ impl<'a> QueryServerWriteTransaction<'a> {
candidates.iter_mut().for_each(|er| {
let opt_names: Option<BTreeSet<_>> = er.pop_ava("name").map(|vs| {
vs.into_iter()
.filter_map(|v| v.migrate_iutf8_iname())
.filter_map(Value::migrate_iutf8_iname)
.collect()
});
let opt_dnames: Option<BTreeSet<_>> = er.pop_ava("domain_name").map(|vs| {
vs.into_iter()
.filter_map(|v| v.migrate_iutf8_iname())
.filter_map(Value::migrate_iutf8_iname)
.collect()
});
@ -2030,7 +1985,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
// Schema check all.
let res: Result<Vec<Entry<EntrySealed, EntryCommitted>>, SchemaError> = candidates
.into_iter()
.map(|e| e.validate(&self.schema).map(|e| e.seal()))
.map(|e| e.validate(&self.schema).map(Entry::seal))
.collect();
let norm_cand: Vec<Entry<_, _>> = match res {
@ -2227,35 +2182,29 @@ impl<'a> QueryServerWriteTransaction<'a> {
ltrace!(audit, "internal_migrate_or_create search {:?}", filt);
match self.internal_search(audit, filt.clone()) {
Ok(results) => {
if results.is_empty() {
// It does not exist. Create it.
self.internal_create(audit, vec![e])
} else if results.len() == 1 {
// If the thing is subset, pass
match e.gen_modlist_assert(&self.schema) {
Ok(modlist) => {
// Apply to &results[0]
ltrace!(audit, "Generated modlist -> {:?}", modlist);
self.internal_modify(audit, &filt, &modlist)
}
Err(e) => Err(OperationError::SchemaViolation(e)),
}
} else {
ladmin_error!(
audit,
"Invalid Result Set - Expected One Entry for {:?} - {:?}",
filt,
results
);
Err(OperationError::InvalidDbState)
let results = self.internal_search(audit, filt.clone())?;
if results.is_empty() {
// It does not exist. Create it.
self.internal_create(audit, vec![e])
} else if results.len() == 1 {
// If the thing is subset, pass
match e.gen_modlist_assert(&self.schema) {
Ok(modlist) => {
// Apply to &results[0]
ltrace!(audit, "Generated modlist -> {:?}", modlist);
self.internal_modify(audit, &filt, &modlist)
}
Err(e) => Err(OperationError::SchemaViolation(e)),
}
Err(e) => {
// An error occured. pass it back up.
Err(e)
}
} else {
ladmin_error!(
audit,
"Invalid Result Set - Expected One Entry for {:?} - {:?}",
filt,
results
);
Err(OperationError::InvalidDbState)
}
}
@ -2340,6 +2289,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
} else {
ladmin_error!(audit, "initialise_schema_core -> Error {:?}", r);
}
// why do we have error handling if it's always supposed to be `Ok`?
debug_assert!(r.is_ok());
r
}
@ -2380,19 +2330,19 @@ impl<'a> QueryServerWriteTransaction<'a> {
JSON_SCHEMA_ATTR_NSUNIQUEID,
];
let r: Result<Vec<()>, _> = idm_schema
let r = idm_schema
.iter()
// Each item individually logs its result
.map(|e_str| self.internal_migrate_or_create_str(audit, e_str))
.collect();
.try_for_each(|e_str| self.internal_migrate_or_create_str(audit, e_str));
if r.is_ok() {
ladmin_info!(audit, "initialise_schema_idm -> Ok!");
} else {
ladmin_error!(audit, "initialise_schema_idm -> Error {:?}", r);
}
debug_assert!(r.is_ok());
debug_assert!(r.is_ok()); // why return a result if we assert it's `Ok`?
r.map(|_| ())
r
}
// This function is idempotent

View file

@ -111,6 +111,7 @@ fn format_json(processed_logs: &TreeProcessed) -> Vec<u8> {
}
}
#[allow(clippy::expect_used)]
let uuid = span
.uuid
.as_deref()
@ -135,6 +136,7 @@ fn format_json(processed_logs: &TreeProcessed) -> Vec<u8> {
let mut writer = vec![];
let mut spans = vec![];
#[allow(clippy::expect_used)]
fmt_rec(&processed_logs, &mut spans, None, &mut writer).expect("Write failed");
writer
}
@ -195,6 +197,7 @@ fn format_pretty(processed_logs: &TreeProcessed) -> Vec<u8> {
writeln!(writer)
}
TreeProcessed::Span(span) => {
#[allow(clippy::expect_used)]
let uuid = span
.uuid
.as_deref()
@ -287,6 +290,7 @@ fn format_pretty(processed_logs: &TreeProcessed) -> Vec<u8> {
let mut writer = vec![];
let mut indent = vec![];
#[allow(clippy::expect_used)]
fmt_rec(&processed_logs, &mut indent, None, None, &mut writer).expect("Write failed");
writer
}

View file

@ -19,6 +19,7 @@ impl ExportProcessor {
impl Processor for ExportProcessor {
fn process(&self, preprocessed: TreePreProcessed) {
#[allow(clippy::expect_used)]
self.sender
.send(preprocessed)
.expect("Processing channel has been closed, cannot log events.");
@ -27,6 +28,7 @@ impl Processor for ExportProcessor {
impl Processor for TestProcessor {
fn process(&self, preprocessed: TreePreProcessed) {
#[allow(clippy::expect_used)]
preprocessed.process().expect("Failed to write logs");
}
}

View file

@ -105,6 +105,7 @@ impl TreeSubscriber<ExportProcessor> {
let subscriber = TreeSubscriber::new_with(fmt, log_tx);
let logger = async move {
while let Some(processor) = log_rx.recv().await {
#[allow(clippy::expect_used)]
processor.process().expect("Failed to write logs");
}
};
@ -130,6 +131,7 @@ impl<P: Processor> TreeSubscriber<P> {
let current = self.inner.current_span();
// If there's no current span, we short-circuit.
let id = current.id()?;
#[allow(clippy::expect_used)]
let span = self
.inner
.span(id)
@ -138,12 +140,14 @@ impl<P: Processor> TreeSubscriber<P> {
span.scope().into_iter().find_map(|span| {
let extensions = span.extensions();
// If `uuid` is `None`, then we keep searching.
#[allow(clippy::expect_used)]
let uuid = extensions
.get::<TreeSpan>()
.expect("Span buffer not found, this is a bug")
.uuid
.as_ref()?;
// TODO: make spans store UUID's as a u128 or 2 u64's
#[allow(clippy::expect_used)]
Some(Uuid::parse_str(uuid.as_str()).expect("Unable to parse UUID, this is a bug"))
})
}
@ -204,6 +208,7 @@ impl<P: Processor> TreeLayer<P> {
fn log_to_parent(&self, logs: Tree, parent: Option<SpanRef<Registry>>) {
match parent {
// The parent exists- write to them
#[allow(clippy::expect_used)]
Some(span) => span
.extensions_mut()
.get_mut::<TreeSpan>()
@ -220,6 +225,7 @@ impl<P: Processor> TreeLayer<P> {
impl<P: Processor> Layer<Registry> for TreeLayer<P> {
fn new_span(&self, attrs: &Attributes, id: &Id, ctx: Context<Registry>) {
#[allow(clippy::expect_used)]
let span = ctx.span(id).expect("Span not found, this is a bug");
let name = attrs.metadata().name();
@ -261,6 +267,7 @@ impl<P: Processor> Layer<Registry> for TreeLayer<P> {
if immediate {
use super::formatter::format_immediate_event;
let maybe_scope = ctx.event_scope(event).map(Scope::from_root);
#[allow(clippy::expect_used)]
let formatted_event = format_immediate_event(&tree_event, maybe_scope)
.expect("Formatting immediate event failed");
eprintln!("{}", formatted_event);
@ -270,6 +277,7 @@ impl<P: Processor> Layer<Registry> for TreeLayer<P> {
}
fn on_enter(&self, id: &Id, ctx: Context<Registry>) {
#[allow(clippy::expect_used)]
ctx.span(id)
.expect("Span not found, this is a bug")
.extensions_mut()
@ -279,6 +287,7 @@ impl<P: Processor> Layer<Registry> for TreeLayer<P> {
}
fn on_exit(&self, id: &Id, ctx: Context<Registry>) {
#[allow(clippy::expect_used)]
ctx.span(id)
.expect("Span not found, this is a bug")
.extensions_mut()
@ -288,14 +297,17 @@ impl<P: Processor> Layer<Registry> for TreeLayer<P> {
}
fn on_close(&self, id: Id, ctx: Context<Registry>) {
#[allow(clippy::expect_used)]
let span = ctx.span(&id).expect("Span not found, this is a bug");
let mut extensions = span.extensions_mut();
#[allow(clippy::expect_used)]
let span_buf = extensions
.remove::<TreeSpan>()
.expect("Span buffer not found, this is a bug");
#[allow(clippy::expect_used)]
let duration = extensions
.remove::<Timer>()
.expect("Timer not found, this is a bug")
@ -321,9 +333,9 @@ impl TreeEvent {
impl Visit for Visitor {
fn record_u64(&mut self, field: &Field, value: u64) {
if field.name() == "event_tag" {
if field.name() == "event_tag_id" {
let tag = EventTag::try_from(value).unwrap_or_else(|_| {
panic!("Invalid `event_tag`: {}, this is a bug", value)
panic!("Invalid `event_tag_id`: {}, this is a bug", value)
});
self.tag = Some(tag);
} else {
@ -342,6 +354,7 @@ impl TreeEvent {
fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
if field.name() == "message" {
use fmt::Write;
#[allow(clippy::expect_used)]
write!(self.message, "{:?}", value).expect("Write failed");
} else {
self.values.push((field.name(), format!("{:?}", value)));
@ -491,6 +504,7 @@ impl TreeProcessed {
pub fn operation_id() -> Option<Uuid> {
tracing::dispatcher::get_default(|dispatch| {
// Try to find the release subscriber
#[allow(clippy::expect_used)]
dispatch
.downcast_ref::<TreeSubscriber<ExportProcessor>>()
.map(TreeSubscriber::<ExportProcessor>::thread_operation_id)
@ -506,6 +520,7 @@ pub fn operation_id() -> Option<Uuid> {
pub fn main_init() -> JoinHandle<()> {
let (subscriber, logger) = TreeSubscriber::pretty();
#[allow(clippy::expect_used)]
tracing::subscriber::set_global_default(subscriber)
.expect("🚨🚨🚨 Global subscriber already set, this is a bug 🚨🚨🚨");
tokio::spawn(logger)