Update to concread, add hooks for cache quiescing (#641)

This commit is contained in:
Firstyear 2022-02-16 09:20:37 +10:00 committed by GitHub
parent 840024f006
commit 6e1ed9ea07
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 259 additions and 239 deletions

438
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -28,6 +28,7 @@ exclude = [
# tokio-util = { git = "https://github.com/Firstyear/tokio.git", rev = "aa6fb48d9a1f3652ee79e3b018a2b9d0c9f89c1e" } # tokio-util = { git = "https://github.com/Firstyear/tokio.git", rev = "aa6fb48d9a1f3652ee79e3b018a2b9d0c9f89c1e" }
# concread = { path = "../concread" } # concread = { path = "../concread" }
concread = { git = "https://github.com/kanidm/concread.git" }
# idlset = { path = "../idlset" } # idlset = { path = "../idlset" }
# ldap3_server = { path = "../ldap3_server" } # ldap3_server = { path = "../ldap3_server" }
# webauthn-rs = { path = "../webauthn-rs" } # webauthn-rs = { path = "../webauthn-rs" }

View file

@@ -66,8 +66,8 @@ r2d2_sqlite = "0.19"
structopt = { version = "0.3", default-features = false } structopt = { version = "0.3", default-features = false }
time = { version = "0.2", features = ["serde", "std"] } time = { version = "0.2", features = ["serde", "std"] }
hashbrown = "0.11" hashbrown = { version = "0.11", features = ["serde", "inline-more", "ahash"] }
concread = "^0.2.21" concread = "^0.3"
smolset = "1.3" smolset = "1.3"
# concread = { version = "^0.2.9", features = ["simd_support"] } # concread = { version = "^0.2.9", features = ["simd_support"] }

View file

@@ -1327,11 +1327,16 @@ impl AccessControls {
*/ */
acp_resolve_filter_cache: ARCacheBuilder::new() acp_resolve_filter_cache: ARCacheBuilder::new()
.set_size(ACP_RESOLVE_FILTER_CACHE_MAX, ACP_RESOLVE_FILTER_CACHE_LOCAL) .set_size(ACP_RESOLVE_FILTER_CACHE_MAX, ACP_RESOLVE_FILTER_CACHE_LOCAL)
.set_reader_quiesce(true)
.build() .build()
.expect("Failed to construct acp_resolve_filter_cache"), .expect("Failed to construct acp_resolve_filter_cache"),
} }
} }
pub fn try_quiesce(&self) {
self.acp_resolve_filter_cache.try_quiesce();
}
pub fn read(&self) -> AccessControlsReadTransaction { pub fn read(&self) -> AccessControlsReadTransaction {
AccessControlsReadTransaction { AccessControlsReadTransaction {
inner: self.inner.read(), inner: self.inner.read(),

View file

@@ -1,5 +1,6 @@
use hashbrown::HashSet;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{collections::HashSet, time::Duration}; use std::time::Duration;
use url::Url; use url::Url;
use uuid::Uuid; use uuid::Uuid;
use webauthn_rs::proto::{COSEKey, UserVerificationPolicy}; use webauthn_rs::proto::{COSEKey, UserVerificationPolicy};

View file

@@ -1078,11 +1078,11 @@ impl IdlArcSqlite {
// Autotune heuristic. // Autotune heuristic.
let mut cache_size = cfg.arcsize.unwrap_or_else(|| { let mut cache_size = cfg.arcsize.unwrap_or_else(|| {
// For now I've noticed about 20% of the number of entries // Due to changes in concread, we can now scale this up! We now aim for 120%
// works well, but it may not be perfect ... // of entries.
db.get_allids_count() db.get_allids_count()
.map(|c| { .map(|c| {
let tmpsize = (c / 5) as usize; let tmpsize = ((c / 5) as usize) * 6;
// if our calculation's too small anyway, just set it to the minimum target // if our calculation's too small anyway, just set it to the minimum target
std::cmp::max(tmpsize, DEFAULT_CACHE_TARGET) std::cmp::max(tmpsize, DEFAULT_CACHE_TARGET)
}) })
@@ -1106,6 +1106,7 @@ impl IdlArcSqlite {
DEFAULT_CACHE_WMISS, DEFAULT_CACHE_WMISS,
false, false,
) )
.set_reader_quiesce(true)
.build() .build()
.ok_or_else(|| { .ok_or_else(|| {
admin_error!("Failed to construct entry_cache"); admin_error!("Failed to construct entry_cache");
@@ -1121,6 +1122,7 @@ impl IdlArcSqlite {
DEFAULT_CACHE_WMISS, DEFAULT_CACHE_WMISS,
false, false,
) )
.set_reader_quiesce(true)
.build() .build()
.ok_or_else(|| { .ok_or_else(|| {
admin_error!("Failed to construct idl_cache"); admin_error!("Failed to construct idl_cache");
@@ -1135,6 +1137,7 @@ impl IdlArcSqlite {
DEFAULT_CACHE_WMISS, DEFAULT_CACHE_WMISS,
true, true,
) )
.set_reader_quiesce(true)
.build() .build()
.ok_or_else(|| { .ok_or_else(|| {
admin_error!("Failed to construct name_cache"); admin_error!("Failed to construct name_cache");
@@ -1158,6 +1161,12 @@ impl IdlArcSqlite {
}) })
} }
pub fn try_quiesce(&self) {
self.entry_cache.try_quiesce();
self.idl_cache.try_quiesce();
self.name_cache.try_quiesce();
}
pub fn read(&self) -> IdlArcSqliteReadTransaction { pub fn read(&self) -> IdlArcSqliteReadTransaction {
// IMPORTANT! Always take entrycache FIRST // IMPORTANT! Always take entrycache FIRST
let entry_cache_read = self.entry_cache.read(); let entry_cache_read = self.entry_cache.read();

View file

@@ -1519,6 +1519,10 @@ impl Backend {
self.cfg.pool_size self.cfg.pool_size
} }
pub fn try_quiesce(&self) {
self.idlayer.try_quiesce();
}
pub fn read(&self) -> BackendReadTransaction { pub fn read(&self) -> BackendReadTransaction {
BackendReadTransaction { BackendReadTransaction {
idlayer: UnsafeCell::new(self.idlayer.read()), idlayer: UnsafeCell::new(self.idlayer.read()),

View file

@@ -1,12 +1,12 @@
use crate::be::dbvalue::DbBackupCodeV1; use crate::be::dbvalue::DbBackupCodeV1;
use crate::be::dbvalue::{DbCredTypeV1, DbCredV1, DbPasswordV1, DbWebauthnV1}; use crate::be::dbvalue::{DbCredTypeV1, DbCredV1, DbPasswordV1, DbWebauthnV1};
use hashbrown::HashMap as Map; use hashbrown::HashMap as Map;
use hashbrown::HashSet;
use kanidm_proto::v1::{BackupCodesView, CredentialDetail, CredentialDetailType, OperationError}; use kanidm_proto::v1::{BackupCodesView, CredentialDetail, CredentialDetailType, OperationError};
use openssl::hash::MessageDigest; use openssl::hash::MessageDigest;
use openssl::pkcs5::pbkdf2_hmac; use openssl::pkcs5::pbkdf2_hmac;
use openssl::sha::Sha512; use openssl::sha::Sha512;
use rand::prelude::*; use rand::prelude::*;
use std::collections::HashSet;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use uuid::Uuid; use uuid::Uuid;

View file

@@ -42,13 +42,12 @@ use tracing::trace;
use crate::be::dbentry::{DbEntry, DbEntryV1, DbEntryVers}; use crate::be::dbentry::{DbEntry, DbEntryV1, DbEntryVers};
use crate::be::{IdxKey, IdxSlope}; use crate::be::{IdxKey, IdxSlope};
use hashbrown::HashMap;
use ldap3_server::simple::{LdapPartialAttribute, LdapSearchResultEntry}; use ldap3_server::simple::{LdapPartialAttribute, LdapSearchResultEntry};
use smartstring::alias::String as AttrString;
use std::collections::BTreeMap as Map; use std::collections::BTreeMap as Map;
pub use std::collections::BTreeSet as Set; pub use std::collections::BTreeSet as Set;
use std::collections::BTreeSet; use std::collections::BTreeSet;
// use hashbrown::HashMap as Map;
use hashbrown::HashMap;
use smartstring::alias::String as AttrString;
use std::sync::Arc; use std::sync::Arc;
use time::OffsetDateTime; use time::OffsetDateTime;
use uuid::Uuid; use uuid::Uuid;

View file

@@ -1861,7 +1861,7 @@ mod tests {
let backup_code_good = readable_password_from_random(); let backup_code_good = readable_password_from_random();
let backup_code_bad = readable_password_from_random(); let backup_code_bad = readable_password_from_random();
assert!(backup_code_bad != backup_code_good); assert!(backup_code_bad != backup_code_good);
let mut code_set = std::collections::HashSet::new(); let mut code_set = HashSet::new();
code_set.insert(backup_code_good.clone()); code_set.insert(backup_code_good.clone());
let backup_codes = BackupCodes::new(code_set); let backup_codes = BackupCodes::new(code_set);

View file

@@ -531,9 +531,9 @@ mod tests {
use crate::ldap::LdapServer; use crate::ldap::LdapServer;
use crate::modify::{Modify, ModifyList}; use crate::modify::{Modify, ModifyList};
use async_std::task; use async_std::task;
use hashbrown::HashSet;
use ldap3_server::proto::{LdapFilter, LdapOp, LdapSearchScope}; use ldap3_server::proto::{LdapFilter, LdapOp, LdapSearchScope};
use ldap3_server::simple::*; use ldap3_server::simple::*;
use std::collections::HashSet;
const TEST_PASSWORD: &'static str = "ntaoeuntnaoeuhraohuercahu😍"; const TEST_PASSWORD: &'static str = "ntaoeuntnaoeuhraohuercahu😍";

View file

@@ -908,6 +908,7 @@ impl QueryServer {
resolve_filter_cache: Arc::new( resolve_filter_cache: Arc::new(
ARCacheBuilder::new() ARCacheBuilder::new()
.set_size(RESOLVE_FILTER_CACHE_MAX, RESOLVE_FILTER_CACHE_LOCAL) .set_size(RESOLVE_FILTER_CACHE_MAX, RESOLVE_FILTER_CACHE_LOCAL)
.set_reader_quiesce(true)
.build() .build()
.expect("Failer to build resolve_filter_cache"), .expect("Failer to build resolve_filter_cache"),
), ),
@@ -919,6 +920,12 @@ impl QueryServer {
task::block_on(self.read_async()) task::block_on(self.read_async())
} }
pub fn try_quiesce(&self) {
self.be.try_quiesce();
self.accesscontrols.try_quiesce();
self.resolve_filter_cache.try_quiesce();
}
pub async fn read_async(&self) -> QueryServerReadTransaction<'_> { pub async fn read_async(&self) -> QueryServerReadTransaction<'_> {
// We need to ensure a db conn will be available // We need to ensure a db conn will be available
#[allow(clippy::expect_used)] #[allow(clippy::expect_used)]

View file

@@ -47,8 +47,8 @@ impl EventTag {
use EventTag::*; use EventTag::*;
match self { match self {
AdminError | FilterError | RequestError | SecurityError => "🚨", AdminError | FilterError | RequestError | SecurityError => "🚨",
AdminWarn | FilterWarn | RequestWarn => "🚧", AdminWarn | FilterWarn | RequestWarn => "⚠️ ",
AdminInfo | FilterInfo | RequestInfo | SecurityInfo => "💬", AdminInfo | FilterInfo | RequestInfo | SecurityInfo => " ",
RequestTrace | FilterTrace | PerfTrace => "📍", RequestTrace | FilterTrace | PerfTrace => "📍",
SecurityCritical => "🔐", SecurityCritical => "🔐",
SecurityAccess => "🔓", SecurityAccess => "🔓",

View file

@@ -396,8 +396,8 @@ impl TreeEvent {
.map(EventTag::emoji) .map(EventTag::emoji)
.unwrap_or_else(|| match self.level { .unwrap_or_else(|| match self.level {
Level::ERROR => "🚨", Level::ERROR => "🚨",
Level::WARN => "🚧", Level::WARN => "⚠️ ",
Level::INFO => "💬", Level::INFO => " ",
Level::DEBUG => "🐛", Level::DEBUG => "🐛",
Level::TRACE => "📍", Level::TRACE => "📍",
}) })

View file

@@ -1,4 +1,4 @@
use std::collections::HashSet; use hashbrown::HashSet;
use std::io::ErrorKind; use std::io::ErrorKind;
use std::path::PathBuf; use std::path::PathBuf;
use std::time::{Duration, SystemTime}; use std::time::{Duration, SystemTime};