mirror of https://github.com/kanidm/kanidm.git (synced 2025-06-04 07:13:54 +02:00)

of course I started looking at clippy things and now I can't stop (#2560)

commit 4efdb7208f (parent 68d788a9f7)
@@ -143,9 +143,9 @@ impl Display for KanidmClientBuilder {
#[test]
fn test_kanidmclientbuilder_display() {
- let foo = KanidmClientBuilder::default();
- println!("{}", foo);
- assert!(foo.to_string().contains("verify_ca"));
+ let defaultclient = KanidmClientBuilder::default();
+ println!("{}", defaultclient);
+ assert!(defaultclient.to_string().contains("verify_ca"));

let testclient = KanidmClientBuilder {
address: Some("https://example.com".to_string()),

@@ -156,7 +156,7 @@ fn test_kanidmclientbuilder_display() {
use_system_proxies: true,
token_cache_path: Some(CLIENT_TOKEN_CACHE.to_string()),
};
- println!("foo {}", testclient);
+ println!("testclient {}", testclient);
assert!(testclient.to_string().contains("verify_ca: true"));
assert!(testclient.to_string().contains("verify_hostnames: true"));
@@ -1569,7 +1569,9 @@ mod tests {
match r.verify(password) {
Ok(r) => assert!(r),
- Err(_) => {
+ Err(_) =>
+ {
+ #[allow(clippy::panic)]
if cfg!(openssl3) {
warn!("To run this test, enable the legacy provider.");
} else {

@@ -1592,11 +1594,13 @@ mod tests {
assert!(r.requires_upgrade());
match r.verify(password) {
Ok(r) => assert!(r),
- Err(_) => {
+ Err(_) =>
+ {
+ #[allow(clippy::panic)]
if cfg!(openssl3) {
warn!("To run this test, enable the legacy provider.");
} else {
- assert!(false);
+ panic!("OpenSSL3 feature not enabled")
}
}
}
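Swapping assert!(false) for an explicit panic! (under #[allow(clippy::panic)]) is what clippy's assertions_on_constants lint asks for: an assertion that can only ever fail should say why it fired. A minimal sketch of the same else-branch shape, with a plain bool in place of the project's cfg!(openssl3) flag:

    #[allow(clippy::panic)]
    fn check_legacy_provider(openssl3: bool) {
        if openssl3 {
            eprintln!("To run this test, enable the legacy provider.");
        } else {
            // assert!(false) only ever panics; panic! carries a useful message.
            panic!("OpenSSL3 feature not enabled")
        }
    }

    fn main() {
        check_legacy_provider(true);
    }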
@@ -551,8 +551,8 @@ fn test_ca_loader() {
let ca_config = CAConfig::default();
if let Ok(ca) = build_ca(Some(ca_config)) {
- write_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path(), &ca).unwrap();
- assert!(load_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path()).is_ok());
+ write_ca(ca_key_tempfile.path(), ca_cert_tempfile.path(), &ca).unwrap();
+ assert!(load_ca(ca_key_tempfile.path(), ca_cert_tempfile.path()).is_ok());
};

let good_ca_configs = vec![

@@ -567,8 +567,8 @@ fn test_ca_loader() {
println!("testing good config {:?}", config);
let ca_config = CAConfig::new(config.0, config.1, config.2).unwrap();
let ca = build_ca(Some(ca_config)).unwrap();
- write_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path(), &ca).unwrap();
- let ca_result = load_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path());
+ write_ca(ca_key_tempfile.path(), ca_cert_tempfile.path(), &ca).unwrap();
+ let ca_result = load_ca(ca_key_tempfile.path(), ca_cert_tempfile.path());
println!("result: {:?}", ca_result);
assert!(ca_result.is_ok());
});

@@ -583,8 +583,8 @@ fn test_ca_loader() {
);
let ca_config = CAConfig::new(config.0, config.1, config.2).unwrap();
let ca = build_ca(Some(ca_config)).unwrap();
- write_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path(), &ca).unwrap();
- let ca_result = load_ca(&ca_key_tempfile.path(), &ca_cert_tempfile.path());
+ write_ca(ca_key_tempfile.path(), ca_cert_tempfile.path(), &ca).unwrap();
+ let ca_result = load_ca(ca_key_tempfile.path(), ca_cert_tempfile.path());
println!("result: {:?}", ca_result);
assert!(ca_result.is_err());
});
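The write_ca/load_ca hunks drop redundant & borrows on values that are already usable as references, which clippy reports as needless_borrow. A tiny sketch of the pattern with a hypothetical helper, not the real write_ca signature:

    use std::path::Path;

    // Hypothetical helper: accepts anything that can be viewed as a Path.
    fn show(p: impl AsRef<Path>) {
        println!("{}", p.as_ref().display());
    }

    fn main() {
        let tmp = std::env::temp_dir();
        // Wrapping the argument in an extra `&` (as in `show(&tmp.join("ca.pem"))`)
        // still compiles, but clippy::needless_borrow suggests passing it directly.
        show(tmp.join("ca.pem"));
    }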
@@ -3,7 +3,7 @@
///
/// It's not perfect, but it's a start!
fn figure_out_if_we_have_all_the_routes() {
- use std::collections::HashMap;
+ use std::collections::BTreeMap;

// load this file
let module_filename = format!("{}/src/https/apidocs/mod.rs", env!("CARGO_MANIFEST_DIR"));

@@ -12,7 +12,7 @@ fn figure_out_if_we_have_all_the_routes() {

// find all the lines that start with super::v1:: and end with a comma
let apidocs_function_finder = regex::Regex::new(r#"super::([a-zA-Z0-9_:]+),"#).unwrap();
- let mut apidocs_routes: HashMap<String, Vec<(String, String)>> = HashMap::new();
+ let mut apidocs_routes: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
for line in file.lines() {
if let Some(caps) = apidocs_function_finder.captures(line) {
let route = caps.get(1).unwrap().as_str();

@@ -42,7 +42,7 @@ fn figure_out_if_we_have_all_the_routes() {
regex::Regex::new(r#"(any|delete|get|head|options|patch|post|put|trace)\(([a-z:_]+)\)"#)
.unwrap();
// work our way through the source files in this package looking for routedefs
- let mut found_routes: HashMap<String, Vec<(String, String)>> = HashMap::new();
+ let mut found_routes: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
let walker = walkdir::WalkDir::new(format!("{}/src", env!("CARGO_MANIFEST_DIR")))
.follow_links(false)
.into_iter();

@@ -63,7 +63,7 @@ fn figure_out_if_we_have_all_the_routes() {
let source_module = relative_filename.split("/").last().unwrap();
let source_module = source_module.split(".").next().unwrap();

- let file = std::fs::read_to_string(&entry.path()).unwrap();
+ let file = std::fs::read_to_string(entry.path()).unwrap();
for line in file.lines() {
if line.contains("skip_route_check") {
println!("Skipping this line because it contains skip_route_check");
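Several hunks in this commit swap HashMap for BTreeMap in these route tables. One likely motivation is determinism: a BTreeMap iterates in sorted key order, so the output of this route checker does not shuffle between runs. A small sketch (module and route names are made up):

    use std::collections::BTreeMap;

    fn main() {
        let mut routes: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
        // Hypothetical module/handler pairs, just to show ordering.
        routes.entry("v1".into()).or_default().push(("get".into(), "person_get".into()));
        routes.entry("oauth2".into()).or_default().push(("post".into(), "token".into()));

        // Unlike HashMap, iteration order here is always "oauth2" then "v1".
        for (module, handlers) in &routes {
            println!("{module}: {handlers:?}");
        }
    }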
@@ -631,7 +631,7 @@ pub(crate) trait IdlSqliteTransaction {
Ok(Some(v)) => {
let r: Result<String, _> = v.get(0);
match r {
- Ok(t) if t == "ok" => vec![],
+ Ok(t) if t == "ok" => Vec::new(),
_ => vec![Err(ConsistencyError::SqliteIntegrityFailure)],
}
}
@@ -1707,9 +1707,9 @@ impl<'a> BackendWriteTransaction<'a> {
#[cfg(test)]
pub fn load_test_idl(
&mut self,
- attr: &String,
+ attr: &str,
itype: IndexType,
- idx_key: &String,
+ idx_key: &str,
) -> Result<Option<IDLBitRange>, OperationError> {
self.get_idlayer().get_idl(attr, itype, idx_key)
}

@@ -2135,7 +2135,7 @@ mod tests {

macro_rules! run_test {
($test_fn:expr) => {{
- let _ = sketching::test_init();
+ sketching::test_init();

// This is a demo idxmeta, purely for testing.
let idxmeta = vec![
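Changing attr: &String and idx_key: &String to &str is the fix clippy's ptr_arg lint suggests: a &str parameter accepts borrowed Strings and plain literals alike, without implying an owned allocation. A minimal sketch with a hypothetical lookup function, not the real BackendWriteTransaction API:

    // Hypothetical index lookup, purely illustrative.
    fn get_idl_len(attr: &str, idx_key: &str) -> usize {
        attr.len() + idx_key.len()
    }

    fn main() {
        let owned_attr = String::from("name");
        // A &String parameter would only accept &owned_attr;
        // &str accepts both the borrowed String and a plain literal.
        println!("{}", get_idl_len(&owned_attr, "testperson"));
        println!("{}", get_idl_len("class", "group"));
    }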
@@ -3439,7 +3439,7 @@ mod tests {
) -> CredentialUpdateSessionStatus {
// Start the registration
let c_status = cutxn
- .credential_passkey_init(&cust, ct)
+ .credential_passkey_init(cust, ct)
.expect("Failed to initiate passkey registration");

assert!(c_status.passkeys.is_empty());

@@ -3457,7 +3457,7 @@ mod tests {
// Finish the registration
let label = "softtoken".to_string();
let c_status = cutxn
- .credential_passkey_finish(&cust, ct, label, &passkey_resp)
+ .credential_passkey_finish(cust, ct, label, &passkey_resp)
.expect("Failed to initiate passkey registration");

assert!(matches!(c_status.mfaregstate, MfaRegStateStatus::None));
@@ -3699,7 +3699,7 @@ mod tests {
.internal_search_writeable(&filt)
.expect("Failed to perform internal search writeable");
for (_, entry) in work_set.iter_mut() {
- let _ = entry.force_trim_ava(Attribute::OAuth2Session.into());
+ let _ = entry.force_trim_ava(Attribute::OAuth2Session);
}
assert!(idms_prox_write
.qs_write

@@ -5153,12 +5153,9 @@ mod tests {
OAUTH2_SCOPE_OPENID.to_string()
);

- let consent_token =
- if let AuthoriseResponse::ConsentRequested { consent_token, .. } = consent_request {
- consent_token
- } else {
- unreachable!();
- };
+ let AuthoriseResponse::ConsentRequested { consent_token, .. } = consent_request else {
+ unreachable!();
+ };

// == Manually submit the consent token to the permit for the permit_success
drop(idms_prox_read);
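The consent_token hunk swaps an if-let binding with an unreachable else branch for Rust's let-else syntax (stable since 1.65): the pattern either binds the name or the else block must diverge. A small sketch with a stand-in enum, not the real AuthoriseResponse:

    // Stand-in for the real AuthoriseResponse type.
    #[allow(dead_code)]
    enum Response {
        ConsentRequested { consent_token: String },
        Denied,
    }

    fn take_token(resp: Response) -> String {
        // let-else: bind consent_token, or bail out of the function entirely.
        let Response::ConsentRequested { consent_token } = resp else {
            panic!("expected a consent request");
        };
        consent_token
    }

    fn main() {
        let token = take_token(Response::ConsentRequested {
            consent_token: "abc123".to_string(),
        });
        println!("{token}");
    }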
@@ -360,6 +360,7 @@ impl IdmServerDelayed {
Err(TryRecvError::Disconnected) => {
panic!("Task queue disconnected");
}
+ #[allow(clippy::panic)]
Ok(m) => {
trace!(?m);
panic!("Task queue not empty");

@@ -3665,7 +3666,7 @@ mod tests {
.internal_search_writeable(&filt)
.expect("Failed to perform internal search writeable");
for (_, entry) in work_set.iter_mut() {
- let _ = entry.force_trim_ava(Attribute::UserAuthTokenSession.into());
+ let _ = entry.force_trim_ava(Attribute::UserAuthTokenSession);
}
assert!(idms_prox_write
.qs_write
@@ -1,7 +1,7 @@
#[cfg(test)]
macro_rules! setup_test {
() => {{
- let _ = sketching::test_init();
+ sketching::test_init();

// Create an in memory BE
let schema_outer = Schema::new().expect("Failed to init schema");

@@ -27,7 +27,7 @@ macro_rules! setup_test {
) => {{
use crate::prelude::duration_from_epoch_now;

- let _ = sketching::test_init();
+ sketching::test_init();

// Create an in memory BE
let schema_outer = Schema::new().expect("Failed to init schema");
@@ -252,7 +252,7 @@ mod tests {
);

// Mod the user
- let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession.into(), session);
+ let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession, session);

server_txn
.internal_modify(

@@ -726,7 +726,7 @@ mod tests {
);

// Mod the user
- let modlist = ModifyList::new_append(Attribute::OAuth2Session.into(), session);
+ let modlist = ModifyList::new_append(Attribute::OAuth2Session, session);

server_txn
.internal_modify(

@@ -833,7 +833,7 @@ mod tests {
);

// Mod the user
- let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession.into(), session);
+ let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession, session);

server_txn
.internal_modify(
@@ -219,7 +219,7 @@ impl EntryChangeState {
}

#[cfg(test)]
- pub(crate) fn get_attr_cid(&self, attr: &Attribute) -> Option<&Cid> {
+ pub(crate) fn get_attr_cid(&self, attr: Attribute) -> Option<&Cid> {
match &self.st {
State::Live { at: _, changes } => changes.get(attr.as_ref()),
State::Tombstone { at: _ } => None,
@@ -708,7 +708,7 @@ async fn test_repl_increment_basic_deleted_attr(server_a: &QueryServer, server_b
let e1_cs = e1.get_changestate();
let e2_cs = e2.get_changestate();
assert!(e1_cs == e2_cs);
- assert!(e1_cs.get_attr_cid(&Attribute::Description).is_some());
+ assert!(e1_cs.get_attr_cid(Attribute::Description).is_some());

server_b_txn.commit().expect("Failed to commit");
drop(server_a_txn);

@@ -3267,7 +3267,7 @@ async fn test_repl_increment_session_new(server_a: &QueryServer, server_b: &Quer
},
);

- let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession.into(), session);
+ let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession, session);

server_a_txn
.internal_modify(

@@ -3306,7 +3306,7 @@ async fn test_repl_increment_session_new(server_a: &QueryServer, server_b: &Quer
},
);

- let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession.into(), session);
+ let modlist = ModifyList::new_append(Attribute::UserAuthTokenSession, session);

server_b_txn
.internal_modify(
@@ -2688,10 +2688,7 @@ mod tests {
)
.into_invalid_new();
let res = e_attr_invalid.validate(&schema);
- assert!(match res {
- Err(SchemaError::MissingMustAttribute(_)) => true,
- _ => false,
- });
+ matches!(res, Err(SchemaError::MissingMustAttribute(_)));

let e_attr_invalid_may = entry_init!(
(Attribute::Class, EntryClass::Object.to_value()),
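The removed assert!(match ...) block is the shape clippy's match_like_matches_macro lint targets; matches! expresses the same check as a single boolean expression. A small illustration with a stand-in error type:

    #[allow(dead_code)]
    enum SchemaError {
        MissingMustAttribute(String),
        Corrupted,
    }

    fn main() {
        let res: Result<(), SchemaError> =
            Err(SchemaError::MissingMustAttribute("displayname".to_string()));

        // matches! evaluates to a bool, so it can feed an assert! directly.
        assert!(matches!(res, Err(SchemaError::MissingMustAttribute(_))));
        assert!(!matches!(res, Err(SchemaError::Corrupted)));
    }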
@@ -619,7 +619,9 @@ mod tests {
(Attribute::Name, Value::new_iname(name)),
(
Attribute::Uuid,
- Value::new_uuid_s(uuid).expect(Attribute::Uuid.as_ref())
+ #[allow(clippy::panic)]
+ Value::new_uuid_s(uuid)
+ .unwrap_or_else(|| { panic!("{}", Attribute::Uuid.as_ref().to_string()) })
),
(Attribute::Description, Value::new_utf8s("testperson-entry")),
(Attribute::DisplayName, Value::new_utf8s(name))

@@ -627,13 +629,15 @@ mod tests {
}

fn create_group(name: &str, uuid: &str, members: &[&str]) -> Entry<EntryInit, EntryNew> {
+ #[allow(clippy::panic)]
let mut e1 = entry_init!(
(Attribute::Class, EntryClass::Object.to_value()),
(Attribute::Class, EntryClass::Group.to_value()),
(Attribute::Name, Value::new_iname(name)),
(
Attribute::Uuid,
- Value::new_uuid_s(uuid).expect(Attribute::Uuid.as_ref())
+ Value::new_uuid_s(uuid)
+ .unwrap_or_else(|| { panic!("{}", Attribute::Uuid.as_ref().to_string()) })
),
(Attribute::Description, Value::new_utf8s("testgroup-entry"))
);
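Both entry_init! hunks stop passing a computed message to .expect() and instead build the message inside unwrap_or_else + panic!, so the formatting only happens when the Option really is None (the replacement clippy's expect_fun_call lint proposes). A minimal sketch with a hypothetical parser in place of Value::new_uuid_s:

    // Hypothetical parser standing in for Value::new_uuid_s.
    fn parse_uuid(s: &str) -> Option<String> {
        if s.len() == 36 { Some(s.to_string()) } else { None }
    }

    fn main() {
        let uuid = "d2b496e6-16ca-4ec4-87f1-b769ab821e34";

        // With .expect(computed_message) the message is built even on the
        // success path; the closure form only runs when the value is None.
        #[allow(clippy::panic)]
        let value = parse_uuid(uuid)
            .unwrap_or_else(|| panic!("invalid uuid: {}", uuid));

        println!("{value}");
    }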
@@ -270,12 +270,12 @@ impl ValueSetEmailAddress {
}
}

- pub fn from_repl_v1(primary: &String, data: &[String]) -> Result<ValueSet, OperationError> {
+ pub fn from_repl_v1(primary: &str, data: &[String]) -> Result<ValueSet, OperationError> {
let set: BTreeSet<_> = data.iter().cloned().collect();

if set.contains(primary) {
Ok(Box::new(ValueSetEmailAddress {
- primary: primary.clone(),
+ primary: primary.to_string(),
set,
}))
} else {
@@ -94,7 +94,7 @@ pub fn png_has_trailer(contents: &Vec<u8>) -> Result<bool, ImageValidationError>
// needs to be pub for bench things
pub fn png_lodepng_validate(
contents: &Vec<u8>,
- filename: &String,
+ filename: &str,
) -> Result<(), ImageValidationError> {
match lodepng::decode32(contents) {
Ok(val) => {

@@ -126,14 +126,14 @@ pub fn png_lodepng_validate(
#[test]
/// this tests a variety of input options for `png_consume_chunks_until_iend`
fn test_png_consume_chunks_until_iend() {
- let mut foo = vec![0, 0, 0, 1]; // the length
+ let mut testchunks = vec![0, 0, 0, 1]; // the length

- foo.extend(PNG_CHUNK_END); // ... the type of chunk we're looking at!
- foo.push(1); // the data
- foo.extend([0, 0, 0, 1]); // the 4-byte checksum which we ignore
+ testchunks.extend(PNG_CHUNK_END); // ... the type of chunk we're looking at!
+ testchunks.push(1); // the data
+ testchunks.extend([0, 0, 0, 1]); // the 4-byte checksum which we ignore
let expected: [u8; 0] = [];
- let foo = foo.as_slice();
- let res = png_consume_chunks_until_iend(&foo);
+ let testchunks_slice = testchunks.as_slice();
+ let res = png_consume_chunks_until_iend(&testchunks_slice);

// simple, valid image works
match res {

@@ -149,10 +149,10 @@ fn test_png_consume_chunks_until_iend() {
// let's make sure it works with a bunch of different length inputs
let mut x = 11;
while x > 0 {
- let foo = &foo[0..=x];
- let res = png_consume_chunks_until_iend(&foo);
+ let newslice = &testchunks_slice[0..=x];
+ let res = png_consume_chunks_until_iend(&newslice);
trace!("chunkstatus at size {} {:?}", x, &res);
assert!(res.is_err());
- x = x - 1;
+ x -= 1;
}
}
@@ -3,7 +3,7 @@
//! - @yaleman
//!

- use std::collections::{BTreeSet, HashMap};
+ use std::collections::{BTreeMap, BTreeSet};
// use kanidm_client::KanidmClient;
use kanidmd_lib::constants::entries::Attribute;
use kanidmd_lib::constants::groups::{idm_builtin_admin_groups, idm_builtin_non_admin_groups};

@@ -79,14 +79,15 @@ where
}

/// The uuidmap is a map of uuids to EntryInitNew objects, which we use to get the name of the objects
- fn as_mermaid(&mut self, uuidmap: &HashMap<T, EntryInitNew>) -> String {
+ fn as_mermaid(&mut self, uuidmap: &BTreeMap<T, EntryInitNew>) -> String {
let mut res = format!("graph RL;\n");
for (left, right, _weight) in self.all_edges() {
let left = uuidmap.get(&left).unwrap();
let right = uuidmap.get(&right).unwrap();

- res += &format!(
- " {} --> {}\n",
+ res = format!(
+ "{} {} --> {}\n",
+ res,
EntryType::from(left).as_mermaid_tag(),
EntryType::from(right).as_mermaid_tag(),
);

@@ -96,7 +97,7 @@ where
}

async fn enumerate_default_groups(/*_client: KanidmClient*/) {
- let mut uuidmap: HashMap<Uuid, EntryInitNew> = HashMap::new();
+ let mut uuidmap: BTreeMap<Uuid, EntryInitNew> = BTreeMap::new();

let mut graph = Graph::new();
@@ -12,13 +12,13 @@ async fn check_that_the_swagger_api_loads(rsclient: kanidm_client::KanidmClient)
rsclient.set_token("".into()).await;
info!("Running test: check_that_the_swagger_api_loads");
let url = rsclient.make_url("/docs/v1/openapi.json");
- let foo: OpenAPIResponse = reqwest::get(url.clone())
+ let openapi_response: OpenAPIResponse = reqwest::get(url.clone())
.await
.expect("Failed to get openapi.json")
.json()
.await
.unwrap();
- assert!(foo.openapi != "1.2.3");
+ assert!(openapi_response.openapi != "1.2.3");

// this validates that it's valid JSON schema, but not that it's valid openapi... but it's a start.
let schema: serde_json::Value = reqwest::get(url)
@@ -1,5 +1,5 @@
#![deny(warnings)]
- use std::collections::{BTreeSet, HashMap};
+ use std::collections::{BTreeMap, BTreeSet};
use std::convert::TryFrom;
use std::str::FromStr;

@@ -290,7 +290,7 @@ async fn test_oauth2_openid_basic_flow(rsclient: KanidmClient) {
let redir_url = Url::parse(&redir_str).expect("Url parse failure");

// We should have state and code.
- let pairs: HashMap<_, _> = redir_url.query_pairs().collect();
+ let pairs: BTreeMap<_, _> = redir_url.query_pairs().collect();

let code = pairs.get("code").expect("code not found!");

@@ -669,7 +669,7 @@ async fn test_oauth2_openid_public_flow(rsclient: KanidmClient) {
let redir_url = Url::parse(&redir_str).expect("Url parse failure");

// We should have state and code.
- let pairs: HashMap<_, _> = redir_url.query_pairs().collect();
+ let pairs: BTreeMap<_, _> = redir_url.query_pairs().collect();

let code = pairs.get("code").expect("code not found!");
@@ -504,7 +504,7 @@ async fn test_server_rest_domain_lifecycle(rsclient: KanidmClient) {
dlocal
.attrs
.get(Attribute::DomainDisplayName.as_ref())
- .and_then(|v| v.get(0))
+ .and_then(|v| v.first())
== Some(&"Super Cool Crabz".to_string())
);
}
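v.get(0) and v.first() are equivalent on slices and Vecs, but clippy's get_first lint prefers .first() because it states the intent directly. A one-line illustration:

    fn main() {
        let names = vec!["Super Cool Crabz".to_string()];
        // Both return Option<&String>; .first() reads better than .get(0).
        assert_eq!(names.first(), names.get(0));
        assert_eq!(names.first(), Some(&"Super Cool Crabz".to_string()));
    }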
@@ -1,6 +1,6 @@
use crate::process_ipa_sync_result;
use kanidm_proto::scim_v1::{ScimSyncRequest, ScimSyncState};
- use std::collections::HashMap;
+ use std::collections::BTreeMap;

use ldap3_client::LdapSyncRepl;

@@ -14,7 +14,7 @@ async fn test_ldap_to_scim() {
let expect_scim_request: ScimSyncRequest =
serde_json::from_str(TEST_SCIM_SYNC_REPL_1).expect("failed to parse scim sync");

- let entry_config_map = HashMap::default();
+ let entry_config_map = BTreeMap::default();

let scim_sync_request =
process_ipa_sync_result(ScimSyncState::Refresh, sync_request, &entry_config_map)
@@ -57,7 +57,7 @@ use ldap3_client::{proto, LdapClientBuilder, LdapSyncRepl, LdapSyncReplEntry, Ld

include!("./opt.rs");

- async fn driver_main(opt: Opt) {
+ async fn driver_main(opt: Opt) -> Result<(), ()> {
debug!("Starting kanidm ldap sync driver.");

let mut f = match File::open(&opt.ldap_sync_config) {

@@ -68,7 +68,7 @@ async fn driver_main(opt: Opt) {
&opt.ldap_sync_config.display(),
e
);
- return;
+ return Err(());
}
};

@@ -79,7 +79,7 @@ async fn driver_main(opt: Opt) {
&opt.ldap_sync_config.display(),
e
);
- return;
+ return Err(());
};

let sync_config: Config = match toml::from_str(contents.as_str()) {

@@ -90,7 +90,7 @@ async fn driver_main(opt: Opt) {
&opt.ldap_sync_config.display(),
e
);
- return;
+ return Err(());
}
};

@@ -101,7 +101,7 @@ async fn driver_main(opt: Opt) {
Ok(v) => v,
Err(_) => {
error!("Failed to parse {}", opt.client_config.to_string_lossy());
- return;
+ return Err(());
}
};

@@ -111,7 +111,7 @@ async fn driver_main(opt: Opt) {
Ok(s) => s,
Err(_) => {
error!("Failed to parse cron schedule expression");
- return;
+ return Err(());
}
};

@@ -129,7 +129,7 @@ async fn driver_main(opt: Opt) {
Ok(l) => l,
Err(e) => {
error!(?e, "Failed to bind status socket");
- return;
+ return Err(());
}
};

@@ -248,6 +248,7 @@ async fn driver_main(opt: Opt) {
} else if let Err(e) = run_sync(cb, &sync_config, &opt).await {
error!(?e, "Sync completed with error");
}
+ Ok(())
}

async fn run_sync(

@@ -821,7 +822,9 @@ fn main() {

tracing::debug!("Using {} worker threads", par_count);

- rt.block_on(async move { driver_main(opt).await });
+ if rt.block_on(async move { driver_main(opt).await }).is_err() {
+ std::process::exit(1);
+ };
}

#[tokio::test]

@@ -835,11 +838,11 @@ async fn test_driver_main() {
dry_run: false,
skip_root_check: true,
};
- let _ = sketching::test_init();
+ sketching::test_init();

println!("testing config");
// because it can't find the profile file it'll just stop
- assert_eq!(driver_main(testopt.clone()).await, ());
+ assert!(driver_main(testopt.clone()).await.is_err());
println!("done testing missing config");

let testopt = Opt {

@@ -847,10 +850,10 @@ async fn test_driver_main() {
ldap_sync_config: PathBuf::from(format!("{}/Cargo.toml", env!("CARGO_MANIFEST_DIR"))),
..testopt
};

println!("valid file path, invalid contents");
- assert_eq!(driver_main(testopt.clone()).await, ());
+ assert!(driver_main(testopt.clone()).await.is_err());
println!("done with valid file path, invalid contents");

let testopt = Opt {
client_config: PathBuf::from(format!(
"{}/../../../examples/iam_migration_ldap.toml",

@@ -864,6 +867,6 @@ async fn test_driver_main() {
};

println!("valid file path, invalid contents");
- assert_eq!(driver_main(testopt).await, ());
+ assert!(driver_main(testopt).await.is_err());
println!("done with valid file path, valid contents");
}
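driver_main now reports failure through Result<(), ()> instead of just returning, main turns an Err into a non-zero exit code, and the tests can assert .is_err() rather than comparing against (). A minimal sketch of that shape, assuming the tokio crate is available; the names are placeholders, not the real sync driver:

    // Placeholder for the real config-loading and sync work.
    async fn driver_main(config_path: &str) -> Result<(), ()> {
        if config_path.is_empty() {
            eprintln!("Failed to read config");
            return Err(());
        }
        Ok(())
    }

    fn main() {
        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .expect("failed to build runtime");
        // Propagate the error as an exit code instead of swallowing it.
        if rt.block_on(driver_main("")).is_err() {
            std::process::exit(1);
        }
    }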
@@ -20,7 +20,7 @@ pub fn supported() -> bool {
}
}

- fn do_setfscreatecon_for_path(path_raw: &String, labeler: &Labeler<File>) -> Result<(), String> {
+ fn do_setfscreatecon_for_path(path_raw: &str, labeler: &Labeler<File>) -> Result<(), String> {
match labeler.look_up(&CString::new(path_raw.to_owned()).unwrap(), 0) {
Ok(context) => {
if context.set_for_new_file_system_objects(true).is_err() {