20240925 cleanups ()

Firstyear 2024-10-03 14:04:02 +10:00 committed by GitHub
parent 30a04f9b8b
commit cc662f184a
55 changed files with 296 additions and 2395 deletions

Cargo.lock (generated): 83 changes
View file

@ -620,7 +620,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"rustc-hash 1.1.0",
"shlex",
"syn 2.0.79",
"which",
@ -643,7 +643,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"rustc-hash 1.1.0",
"shlex",
"syn 2.0.79",
"which",
@ -1413,7 +1413,7 @@ dependencies = [
"lazy_static",
"mintex",
"parking_lot 0.12.3",
"rustc-hash",
"rustc-hash 1.1.0",
"serde",
"serde_json",
"thousands",
@ -2849,6 +2849,7 @@ dependencies = [
"hyper 1.4.1",
"hyper-util",
"rustls",
"rustls-native-certs",
"rustls-pki-types",
"tokio",
"tokio-rustls",
@ -4962,6 +4963,54 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quinn"
version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684"
dependencies = [
"bytes",
"pin-project-lite",
"quinn-proto",
"quinn-udp",
"rustc-hash 2.0.0",
"rustls",
"socket2",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "quinn-proto"
version = "0.11.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
dependencies = [
"bytes",
"rand",
"ring",
"rustc-hash 2.0.0",
"rustls",
"slab",
"thiserror",
"tinyvec",
"tracing",
]
[[package]]
name = "quinn-udp"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b"
dependencies = [
"libc",
"once_cell",
"socket2",
"tracing",
"windows-sys 0.59.0",
]
[[package]]
name = "quote"
version = "1.0.37"
@ -5153,24 +5202,26 @@ dependencies = [
"http-body-util",
"hyper 1.4.1",
"hyper-rustls",
"hyper-tls",
"hyper-util",
"ipnet",
"js-sys",
"log",
"mime",
"mime_guess",
"native-tls",
"once_cell",
"percent-encoding",
"pin-project-lite",
"quinn",
"rustls",
"rustls-native-certs",
"rustls-pemfile",
"rustls-pki-types",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 1.0.1",
"tokio",
"tokio-native-tls",
"tokio-rustls",
"tokio-util",
"tower-service",
"url",
@ -5307,6 +5358,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
[[package]]
name = "rusticata-macros"
version = "4.1.0"
@ -5336,12 +5393,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8"
dependencies = [
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki",
"subtle",
"zeroize",
]
[[package]]
name = "rustls-native-certs"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a"
dependencies = [
"openssl-probe",
"rustls-pemfile",
"rustls-pki-types",
"schannel",
"security-framework",
]
[[package]]
name = "rustls-pemfile"
version = "2.1.3"

View file

@ -244,11 +244,11 @@ reqwest = { version = "0.12.8", default-features = false, features = [
"http2",
"json",
"gzip",
"native-tls",
"native-tls-alpn",
"rustls-tls-native-roots",
] }
rpassword = "^7.3.1"
rusqlite = { version = "^0.28.0", features = ["array", "bundled"] }
rustls = { version = "0.23.13", default-features = false, features = ["aws_lc_rs"] }
sd-notify = "^0.4.2"
selinux = "^0.4.6"
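
The reqwest dependency above switches from native-tls to rustls with the platform's native root store (the Cargo.lock hunks above pick up rustls-native-certs and quinn for reqwest at the same time). A minimal sketch, not part of this commit, of a client built against that feature set, assuming tokio is available with its macros and rt features; use_rustls_tls() is an existing reqwest builder method, though with only rustls features enabled it is already the default backend:

// Minimal sketch: confirm a reqwest client built with `rustls-tls-native-roots`
// can negotiate TLS using the system trust store.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::builder()
        .use_rustls_tls() // explicit for clarity; rustls is the only backend compiled in
        .build()?;

    let resp = client.get("https://example.com").send().await?;
    println!("status: {}", resp.status());
    Ok(())
}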

View file

@ -161,6 +161,7 @@ pub enum OperationError {
DB0001MismatchedRestoreVersion,
DB0002MismatchedRestoreVersion,
DB0003FilterResolveCacheBuild,
DB0004DatabaseTooOld,
// SCIM
SC0001IncomingSshPublicKey,
@ -334,6 +335,7 @@ impl OperationError {
Self::DB0001MismatchedRestoreVersion => None,
Self::DB0002MismatchedRestoreVersion => None,
Self::DB0003FilterResolveCacheBuild => None,
Self::DB0004DatabaseTooOld => Some("The database is too old to be migrated.".into()),
Self::MG0004DomainLevelInDevelopment => None,
Self::MG0005GidConstraintsNotMet => None,
Self::MG0006SKConstraintsNotMet => Some("Migration Constraints Not Met - Security Keys should not be present.".into()),
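
The new DB0004DatabaseTooOld code follows the existing convention of pairing an error variant with optional operator-facing advice. A tiny stand-alone sketch of that lookup, using a stand-in enum rather than the real OperationError:

// Stand-in for the error-code -> optional advice pattern shown above.
#[derive(Debug, Clone, Copy)]
enum DbErrorCode {
    Db0001MismatchedRestoreVersion,
    Db0004DatabaseTooOld,
}

fn advice(code: DbErrorCode) -> Option<String> {
    match code {
        DbErrorCode::Db0001MismatchedRestoreVersion => None,
        DbErrorCode::Db0004DatabaseTooOld => {
            Some("The database is too old to be migrated.".into())
        }
    }
}

fn main() {
    assert!(advice(DbErrorCode::Db0001MismatchedRestoreVersion).is_none());
    if let Some(msg) = advice(DbErrorCode::Db0004DatabaseTooOld) {
        eprintln!("{msg}");
    }
}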

View file

@ -174,7 +174,6 @@ fn decode_length_checked_json<T: DeserializeOwned>(
});
// Trim to length.
if src.len() as u64 == req_len {
src.clear();
} else {

View file

@ -218,7 +218,7 @@ async fn repl_run_consumer_refresh(
.and_then(|mut write_txn| {
write_txn
.qs_write
.consumer_apply_refresh(&refresh)
.consumer_apply_refresh(refresh)
.and_then(|cs| write_txn.commit().map(|()| cs))
})
.map_err(|err| error!(?err, "Consumer was not able to apply refresh."))?;
@ -301,7 +301,7 @@ async fn repl_run_consumer(
match idms.proxy_write(ct).await.and_then(|mut write_txn| {
write_txn
.qs_write
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.and_then(|cs| write_txn.commit().map(|()| cs))
}) {
Ok(state) => state,
@ -358,7 +358,7 @@ async fn repl_run_consumer(
if let Err(err) = idms.proxy_write(ct).await.and_then(|mut write_txn| {
write_txn
.qs_write
.consumer_apply_refresh(&refresh)
.consumer_apply_refresh(refresh)
.and_then(|cs| write_txn.commit().map(|()| cs))
}) {
error!(?err, "consumer was not able to apply refresh.");

View file

@ -1,33 +1,19 @@
use std::collections::BTreeMap;
use std::time::Duration;
use nonempty::NonEmpty;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::dbrepl::{DbEntryChangeState, DbReplMeta};
use super::dbvalue::{DbValueEmailAddressV1, DbValuePhoneNumberV1, DbValueSetV2, DbValueV1};
use super::dbvalue::DbValueSetV2;
use super::keystorage::{KeyHandle, KeyHandleId};
use crate::prelude::entries::Attribute;
use crate::prelude::OperationError;
#[derive(Serialize, Deserialize, Debug)]
pub struct DbEntryV1 {
pub attrs: BTreeMap<Attribute, NonEmpty<DbValueV1>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct DbEntryV2 {
pub attrs: BTreeMap<Attribute, DbValueSetV2>,
}
// REMEMBER: If you add a new version here, you MUST
// update entry.rs to_dbentry to export to the latest
// type always!!
#[derive(Serialize, Deserialize, Debug)]
pub enum DbEntryVers {
V1(DbEntryV1),
V2(DbEntryV2),
V3 {
changestate: DbEntryChangeState,
attrs: BTreeMap<Attribute, DbValueSetV2>,
@ -90,377 +76,9 @@ pub enum DbBackup {
V1(Vec<DbEntry>),
}
fn from_vec_dbval1(attr_val: NonEmpty<DbValueV1>) -> Result<DbValueSetV2, OperationError> {
match attr_val.first() {
DbValueV1::Utf8(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Utf8(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Utf8),
DbValueV1::Iutf8(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Iutf8(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Iutf8),
DbValueV1::Iname(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Iname(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Iname),
DbValueV1::Uuid(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Uuid(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Uuid),
DbValueV1::Bool(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Bool(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Bool),
DbValueV1::SyntaxType(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::SyntaxType(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::SyntaxType),
DbValueV1::IndexType(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::IndexType(s) = dbv {
u16::try_from(s).map_err(|_| OperationError::InvalidValueState)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::IndexType),
DbValueV1::Reference(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Reference(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Reference),
DbValueV1::JsonFilter(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::JsonFilter(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::JsonFilter),
DbValueV1::Credential(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Credential(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Credential),
DbValueV1::SecretValue(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::SecretValue(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::SecretValue),
DbValueV1::SshKey(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::SshKey(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::SshKey),
DbValueV1::Spn(_, _) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Spn(n, d) = dbv {
Ok((n, d))
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Spn),
DbValueV1::Uint32(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Uint32(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Uint32),
DbValueV1::Cid(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Cid(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Cid),
DbValueV1::NsUniqueId(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::NsUniqueId(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::NsUniqueId),
DbValueV1::DateTime(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::DateTime(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::DateTime),
DbValueV1::EmailAddress(_) => {
let mut primary = None;
let vs: Result<Vec<_>, _> = attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::EmailAddress(DbValueEmailAddressV1 { d, p }) = dbv {
if p {
primary = Some(d.clone());
}
Ok(d)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect();
let primary = primary.ok_or(OperationError::InvalidValueState)?;
vs.map(|vs| DbValueSetV2::EmailAddress(primary, vs))
}
DbValueV1::PhoneNumber(_) => {
let mut primary = None;
let vs: Result<Vec<_>, _> = attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::PhoneNumber(DbValuePhoneNumberV1 { d, p }) = dbv {
if p {
primary = Some(d.clone());
}
Ok(d)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect();
let primary = primary.ok_or(OperationError::InvalidValueState)?;
vs.map(|vs| DbValueSetV2::PhoneNumber(primary, vs))
}
DbValueV1::Address(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Address(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Address),
DbValueV1::Url(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::Url(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::Url),
DbValueV1::OauthScope(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::OauthScope(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::OauthScope),
DbValueV1::OauthScopeMap(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::OauthScopeMap(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::OauthScopeMap),
DbValueV1::PrivateBinary(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::PrivateBinary(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::PrivateBinary),
DbValueV1::PublicBinary(_, _) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::PublicBinary(t, s) = dbv {
Ok((t, s))
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::PublicBinary),
DbValueV1::RestrictedString(_) => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::RestrictedString(s) = dbv {
Ok(s)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::RestrictedString),
DbValueV1::IntentToken { u: _, s: _ } => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::IntentToken { u, s } = dbv {
Ok((u.as_hyphenated().to_string(), s))
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::IntentToken),
DbValueV1::TrustedDeviceEnrollment { u: _ } => attr_val
.into_iter()
.map(|dbv| {
if let DbValueV1::TrustedDeviceEnrollment { u } = dbv {
Ok(u)
} else {
Err(OperationError::InvalidValueState)
}
})
.collect::<Result<Vec<_>, _>>()
.map(DbValueSetV2::TrustedDeviceEnrollment),
DbValueV1::Session { u: _ } => {
debug_assert!(false);
Err(OperationError::InvalidState)
}
}
}
impl DbEntry {
pub(crate) fn convert_to_v2(self) -> Result<Self, OperationError> {
if let DbEntryVers::V1(dbe) = self.ent {
dbe.attrs
.into_iter()
.map(|(attr_name, attr_val)| {
from_vec_dbval1(attr_val).map(|attr_val_2| (attr_name, attr_val_2))
})
.collect::<Result<BTreeMap<_, _>, _>>()
.map(|attrs| DbEntry {
ent: DbEntryVers::V2(DbEntryV2 { attrs }),
})
} else {
Ok(self)
}
}
}
impl std::fmt::Debug for DbEntry {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.ent {
DbEntryVers::V1(dbe_v1) => {
write!(f, "v1 - {{ ")?;
for (k, vs) in dbe_v1.attrs.iter() {
write!(f, "{k} - [")?;
for v in vs {
write!(f, "{v:?}, ")?;
}
write!(f, "], ")?;
}
write!(f, "}}")
}
DbEntryVers::V2(dbe_v2) => {
write!(f, "v2 - {{ ")?;
for (k, vs) in dbe_v2.attrs.iter() {
write!(f, "\n{k:>16} - ")?;
write!(f, "{vs:?}")?;
}
write!(f, "\n }}")
}
DbEntryVers::V3 { changestate, attrs } => {
write!(f, "v3 - {{ ")?;
match changestate {
@ -488,52 +106,6 @@ impl std::fmt::Debug for DbEntry {
impl std::fmt::Display for DbEntry {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.ent {
DbEntryVers::V1(dbe_v1) => {
write!(f, "v1 - {{ ")?;
match dbe_v1.attrs.get(&Attribute::Uuid) {
Some(uuids) => {
for uuid in uuids {
write!(f, "{uuid:?}, ")?;
}
}
None => write!(f, "Uuid(INVALID), ")?,
};
if let Some(names) = dbe_v1.attrs.get(&Attribute::Name) {
for name in names {
write!(f, "{name:?}, ")?;
}
}
if let Some(names) = dbe_v1.attrs.get(&Attribute::AttributeName) {
for name in names {
write!(f, "{name:?}, ")?;
}
}
if let Some(names) = dbe_v1.attrs.get(&Attribute::ClassName) {
for name in names {
write!(f, "{name:?}, ")?;
}
}
write!(f, "}}")
}
DbEntryVers::V2(dbe_v2) => {
write!(f, "v2 - {{ ")?;
match dbe_v2.attrs.get(&Attribute::Uuid) {
Some(uuids) => {
write!(f, "{uuids:?}, ")?;
}
None => write!(f, "Uuid(INVALID), ")?,
};
if let Some(names) = dbe_v2.attrs.get(&Attribute::Name) {
write!(f, "{names:?}, ")?;
}
if let Some(names) = dbe_v2.attrs.get(&Attribute::AttributeName) {
write!(f, "{names:?}, ")?;
}
if let Some(names) = dbe_v2.attrs.get(&Attribute::ClassName) {
write!(f, "{names:?}, ")?;
}
write!(f, "}}")
}
DbEntryVers::V3 { changestate, attrs } => {
write!(f, "v3 - {{ ")?;
match attrs.get(&Attribute::Uuid) {

View file

@ -50,7 +50,7 @@ impl fmt::Display for DbCidV1 {
}
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbValueIntentTokenStateV1 {
#[serde(rename = "v")]
Valid {
@ -90,14 +90,14 @@ pub enum DbValueIntentTokenStateV1 {
Consumed { max_ttl: Duration },
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbTotpAlgoV1 {
S1,
S256,
S512,
}
#[derive(Serialize, Deserialize)]
#[derive(Serialize, Deserialize, PartialEq, Eq)]
pub struct DbTotpV1 {
#[serde(rename = "l")]
pub label: String,
@ -137,7 +137,7 @@ pub struct DbWebauthnV1 {
pub registration_policy: UserVerificationPolicy,
}
#[derive(Serialize, Deserialize)]
#[derive(Serialize, Deserialize, PartialEq, Eq)]
pub struct DbBackupCodeV1 {
pub code_set: HashSet<String>, // has to use std::HashSet for serde
}
@ -221,6 +221,30 @@ pub enum DbCred {
},
}
impl DbCred {
fn uuid(&self) -> Uuid {
match self {
DbCred::Pw { uuid, .. }
| DbCred::GPw { uuid, .. }
| DbCred::PwMfa { uuid, .. }
| DbCred::Wn { uuid, .. }
| DbCred::TmpWn { uuid, .. }
| DbCred::V2PasswordMfa { uuid, .. }
| DbCred::V2Password { uuid, .. }
| DbCred::V2GenPassword { uuid, .. }
| DbCred::V3PasswordMfa { uuid, .. } => *uuid,
}
}
}
impl Eq for DbCred {}
impl PartialEq for DbCred {
fn eq(&self, other: &Self) -> bool {
self.uuid() == other.uuid()
}
}
impl fmt::Display for DbCred {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
@ -329,7 +353,7 @@ impl fmt::Display for DbCred {
}
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct DbValueCredV1 {
#[serde(rename = "t")]
pub tag: String,
@ -342,6 +366,27 @@ pub enum DbValuePasskeyV1 {
V4 { u: Uuid, t: String, k: PasskeyV4 },
}
impl Eq for DbValuePasskeyV1 {}
impl PartialEq for DbValuePasskeyV1 {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(
DbValuePasskeyV1::V4 {
u: self_uuid,
k: self_key,
t: _,
},
DbValuePasskeyV1::V4 {
u: other_uuid,
k: other_key,
t: _,
},
) => self_uuid == other_uuid && self_key.cred_id() == other_key.cred_id(),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub enum DbValueAttestedPasskeyV1 {
V4 {
@ -351,7 +396,28 @@ pub enum DbValueAttestedPasskeyV1 {
},
}
#[derive(Serialize, Deserialize, Debug)]
impl Eq for DbValueAttestedPasskeyV1 {}
impl PartialEq for DbValueAttestedPasskeyV1 {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(
DbValueAttestedPasskeyV1::V4 {
u: self_uuid,
k: self_key,
t: _,
},
DbValueAttestedPasskeyV1::V4 {
u: other_uuid,
k: other_key,
t: _,
},
) => self_uuid == other_uuid && self_key.cred_id() == other_key.cred_id(),
}
}
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct DbValueTaggedStringV1 {
#[serde(rename = "t")]
pub tag: String,
@ -359,7 +425,7 @@ pub struct DbValueTaggedStringV1 {
pub data: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct DbValueEmailAddressV1 {
pub d: String,
#[serde(skip_serializing_if = "is_false", default)]
@ -373,7 +439,7 @@ pub struct DbValuePhoneNumberV1 {
pub p: bool,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct DbValueAddressV1 {
#[serde(rename = "f")]
pub formatted: String,
@ -399,7 +465,7 @@ pub enum DbValueOauthClaimMapJoinV1 {
JsonArray,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbValueOauthClaimMap {
V1 {
#[serde(rename = "n")]
@ -411,7 +477,7 @@ pub enum DbValueOauthClaimMap {
},
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct DbValueOauthScopeMapV1 {
#[serde(rename = "u")]
pub refer: Uuid,
@ -543,7 +609,7 @@ pub enum DbValueSession {
},
}
#[derive(Serialize, Deserialize, Debug, Default)]
#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Eq)]
pub enum DbValueApiTokenScopeV1 {
#[serde(rename = "r")]
#[default]
@ -554,7 +620,7 @@ pub enum DbValueApiTokenScopeV1 {
Synchronise,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbValueApiToken {
V1 {
#[serde(rename = "u")]
@ -573,7 +639,7 @@ pub enum DbValueApiToken {
}
#[skip_serializing_none]
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbValueOauth2Session {
V1 {
#[serde(rename = "u")]
@ -667,72 +733,7 @@ pub enum DbValueApplicationPassword {
},
}
#[derive(Serialize, Deserialize, Debug)]
pub enum DbValueV1 {
#[serde(rename = "U8")]
Utf8(String),
#[serde(rename = "I8")]
Iutf8(String),
#[serde(rename = "N8")]
Iname(String),
#[serde(rename = "UU")]
Uuid(Uuid),
#[serde(rename = "BO")]
Bool(bool),
#[serde(rename = "SY")]
SyntaxType(u16),
#[serde(rename = "IN")]
IndexType(usize),
#[serde(rename = "RF")]
Reference(Uuid),
#[serde(rename = "JF")]
JsonFilter(String),
#[serde(rename = "CR")]
Credential(DbValueCredV1),
#[serde(rename = "RU")]
SecretValue(String),
#[serde(rename = "SK")]
SshKey(DbValueTaggedStringV1),
#[serde(rename = "SP")]
Spn(String, String),
#[serde(rename = "UI")]
Uint32(u32),
#[serde(rename = "CI")]
Cid(DbCidV1),
#[serde(rename = "NU")]
NsUniqueId(String),
#[serde(rename = "DT")]
DateTime(String),
#[serde(rename = "EM")]
EmailAddress(DbValueEmailAddressV1),
#[serde(rename = "PN")]
PhoneNumber(DbValuePhoneNumberV1),
#[serde(rename = "AD")]
Address(DbValueAddressV1),
#[serde(rename = "UR")]
Url(Url),
#[serde(rename = "OS")]
OauthScope(String),
#[serde(rename = "OM")]
OauthScopeMap(DbValueOauthScopeMapV1),
#[serde(rename = "E2")]
PrivateBinary(Vec<u8>),
#[serde(rename = "PB")]
PublicBinary(String, Vec<u8>),
#[serde(rename = "RS")]
RestrictedString(String),
#[serde(rename = "IT")]
IntentToken {
u: Uuid,
s: DbValueIntentTokenStateV1,
},
#[serde(rename = "TE")]
TrustedDeviceEnrollment { u: Uuid },
#[serde(rename = "AS")]
Session { u: Uuid },
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum DbValueSetV2 {
#[serde(rename = "U8")]
Utf8(Vec<String>),
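
The PartialEq/Eq impls added to DbCred, DbValuePasskeyV1 and DbValueAttestedPasskeyV1 above compare stable identifiers (the credential uuid, plus the webauthn credential id for passkeys) rather than every field, so renaming a tag does not change identity. A stand-alone sketch of that pattern with a hypothetical type, assuming the uuid crate with its v4 feature:

use uuid::Uuid;

// Hypothetical credential-like type: equality is keyed on the uuid alone,
// mirroring the DbCred impl above where the tag and payload are ignored.
#[derive(Debug)]
enum StoredCred {
    Password { uuid: Uuid, tag: String },
    Passkey { uuid: Uuid, tag: String },
}

impl StoredCred {
    fn uuid(&self) -> Uuid {
        match self {
            StoredCred::Password { uuid, .. } | StoredCred::Passkey { uuid, .. } => *uuid,
        }
    }
}

impl Eq for StoredCred {}

impl PartialEq for StoredCred {
    fn eq(&self, other: &Self) -> bool {
        self.uuid() == other.uuid()
    }
}

fn main() {
    let id = Uuid::new_v4();
    let a = StoredCred::Password { uuid: id, tag: "primary".into() };
    let b = StoredCred::Password { uuid: id, tag: "renamed".into() };
    let c = StoredCred::Passkey { uuid: Uuid::new_v4(), tag: "pk".into() };
    assert_eq!(a, b); // same uuid, different tag: still equal
    assert_ne!(a, c); // different uuid: not equal
}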

View file

@ -14,7 +14,7 @@ use rusqlite::vtab::array::Array;
use rusqlite::{Connection, OpenFlags, OptionalExtension};
use uuid::Uuid;
use crate::be::dbentry::{DbEntry, DbIdentSpn};
use crate::be::dbentry::DbIdentSpn;
use crate::be::dbvalue::DbCidV1;
use crate::be::{BackendConfig, IdList, IdRawEntry, IdxKey, IdxSlope};
use crate::entry::{Entry, EntryCommitted, EntrySealed};
@ -1003,43 +1003,6 @@ impl IdlSqliteWriteTransaction {
.map_err(sqlite_error)
}
fn migrate_dbentryv1_to_dbentryv2(&self) -> Result<(), OperationError> {
let allids = self.get_identry_raw(&IdList::AllIds)?;
let raw_entries: Result<Vec<IdRawEntry>, _> = allids
.into_iter()
.map(|raw| {
serde_cbor::from_slice(raw.data.as_slice())
.map_err(|e| {
admin_error!(?e, "Serde CBOR Error");
OperationError::SerdeCborError
})
.and_then(|dbe: DbEntry| dbe.convert_to_v2())
.and_then(|dbe| {
serde_json::to_vec(&dbe)
.map(|data| IdRawEntry { id: raw.id, data })
.map_err(|e| {
admin_error!(?e, "Serde Json Error");
OperationError::SerdeJsonError
})
})
})
.collect();
self.write_identries_raw(raw_entries?.into_iter())
}
fn migrate_dbentryv2_to_dbentryv3(&self) -> Result<(), OperationError> {
// To perform this migration we have to load everything to a valid entry, then
// write them all back down once their change states are created.
let all_entries = self.get_identry(&IdList::AllIds)?;
for entry in all_entries {
self.write_identry(&entry)?;
}
Ok(())
}
pub fn write_uuid2spn(&self, uuid: Uuid, k: Option<&Value>) -> Result<(), OperationError> {
let uuids = uuid.as_hyphenated().to_string();
match k {
@ -1597,6 +1560,14 @@ impl IdlSqliteWriteTransaction {
trace!(%dbv_id2entry);
if dbv_id2entry != 0 && dbv_id2entry < 9 {
error!(
?dbv_id2entry,
"Unable to perform database migrations. This instance is too old."
);
return Err(OperationError::DB0004DatabaseTooOld);
}
// Check db_version here.
// * if 0 -> create v1.
if dbv_id2entry == 0 {
@ -1679,7 +1650,6 @@ impl IdlSqliteWriteTransaction {
}
// * if v4 -> migrate v1 to v2 entries.
if dbv_id2entry == 4 {
self.migrate_dbentryv1_to_dbentryv2()?;
dbv_id2entry = 5;
info!(entry = %dbv_id2entry, "dbv_id2entry migrated (dbentryv1 -> dbentryv2)");
}
@ -1716,7 +1686,6 @@ impl IdlSqliteWriteTransaction {
}
// * if v8 -> migrate all entries to have a change state
if dbv_id2entry == 8 {
self.migrate_dbentryv2_to_dbentryv3()?;
dbv_id2entry = 9;
info!(entry = %dbv_id2entry, "dbv_id2entry migrated (dbentryv2 -> dbentryv3)");
}
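
With the v1 -> v2 and v2 -> v3 entry migrations deleted above, the migration path now refuses any on-disk database older than version 9 instead of trying to upgrade it. A stand-alone sketch of that gate with stand-in names (the real code returns OperationError::DB0004DatabaseTooOld):

// Stand-in sketch of the minimum on-disk version gate added in this file.
#[derive(Debug, PartialEq, Eq)]
enum MigrateError {
    DatabaseTooOld,
}

const MIN_SUPPORTED_DB_VERSION: u64 = 9;

fn check_db_version(dbv_id2entry: u64) -> Result<(), MigrateError> {
    // 0 means a brand-new database that will be created at the current version;
    // anything else below the minimum can no longer be migrated in place.
    if dbv_id2entry != 0 && dbv_id2entry < MIN_SUPPORTED_DB_VERSION {
        return Err(MigrateError::DatabaseTooOld);
    }
    Ok(())
}

fn main() {
    assert_eq!(check_db_version(0), Ok(())); // fresh database
    assert_eq!(check_db_version(9), Ok(())); // already at the minimum
    assert_eq!(check_db_version(4), Err(MigrateError::DatabaseTooOld)); // would have needed the deleted migrations
}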

View file

@ -56,7 +56,7 @@ const FILTER_SUBSTR_TEST_THRESHOLD: usize = 4;
#[derive(Debug, Clone)]
/// Limits on the resources a single event can consume. These are defined per-event
/// as they are derived from the userAuthToken based on that individual session
pub struct Limits {
pub(crate) struct Limits {
pub unindexed_allow: bool,
pub search_max_results: usize,
pub search_max_filter_test: usize,
@ -1890,12 +1890,6 @@ impl<'a> BackendWriteTransaction<'a> {
info!("Restoring {} entries ...", dbentries.len());
// Migrate any v1 entries to v2 if needed.
let dbentries = dbentries
.into_iter()
.map(|dbe| dbe.convert_to_v2())
.collect::<Result<Vec<_>, _>>()?;
// Now, we setup all the entries with new ids.
let mut id_max = 0;
let identries: Result<Vec<IdRawEntry>, _> = dbentries

View file

@ -9,7 +9,6 @@ use webauthn_rs::prelude::{AuthenticationResult, Passkey, SecurityKey};
use webauthn_rs_core::proto::{Credential as WebauthnCredential, CredentialV3};
use crate::be::dbvalue::{DbBackupCodeV1, DbCred};
use crate::repl::proto::{ReplBackupCodeV1, ReplCredV1, ReplPasskeyV4V1, ReplSecurityKeyV4V1};
pub mod apppwd;
pub mod softlock;
@ -50,16 +49,6 @@ impl TryFrom<DbBackupCodeV1> for BackupCodes {
}
}
impl TryFrom<&ReplBackupCodeV1> for BackupCodes {
type Error = ();
fn try_from(value: &ReplBackupCodeV1) -> Result<Self, Self::Error> {
Ok(BackupCodes {
code_set: value.codes.iter().cloned().collect(),
})
}
}
impl BackupCodes {
pub fn new(code_set: HashSet<String>) -> Self {
BackupCodes { code_set }
@ -78,12 +67,6 @@ impl BackupCodes {
code_set: self.code_set.clone(),
}
}
pub fn to_repl_v1(&self) -> ReplBackupCodeV1 {
ReplBackupCodeV1 {
codes: self.code_set.iter().cloned().collect(),
}
}
}
#[derive(Clone, Debug, PartialEq)]
@ -372,92 +355,6 @@ impl TryFrom<DbCred> for Credential {
}
impl Credential {
pub fn try_from_repl_v1(rc: &ReplCredV1) -> Result<(String, Self), OperationError> {
match rc {
ReplCredV1::TmpWn { tag, set } => {
let m_uuid: Option<Uuid> = set.first().map(|v| v.uuid);
let v_webauthn = set
.iter()
.map(|passkey| (passkey.tag.clone(), passkey.key.clone()))
.collect();
let type_ = CredentialType::Webauthn(v_webauthn);
match (m_uuid, type_.is_valid()) {
(Some(uuid), true) => Ok((tag.clone(), Credential { type_, uuid })),
_ => Err(OperationError::InvalidValueState),
}
}
ReplCredV1::Password {
tag,
password,
uuid,
} => {
let v_password =
Password::try_from(password).map_err(|()| OperationError::InvalidValueState)?;
let type_ = CredentialType::Password(v_password);
if type_.is_valid() {
Ok((tag.clone(), Credential { type_, uuid: *uuid }))
} else {
Err(OperationError::InvalidValueState)
}
}
ReplCredV1::GenPassword {
tag,
password,
uuid,
} => {
let v_password =
Password::try_from(password).map_err(|()| OperationError::InvalidValueState)?;
let type_ = CredentialType::GeneratedPassword(v_password);
if type_.is_valid() {
Ok((tag.clone(), Credential { type_, uuid: *uuid }))
} else {
Err(OperationError::InvalidValueState)
}
}
ReplCredV1::PasswordMfa {
tag,
password,
totp,
backup_code,
webauthn,
uuid,
} => {
let v_password =
Password::try_from(password).map_err(|()| OperationError::InvalidValueState)?;
let v_totp = totp
.iter()
.map(|(l, dbt)| Totp::try_from(dbt).map(|t| (l.clone(), t)))
.collect::<Result<Map<_, _>, _>>()
.map_err(|()| OperationError::InvalidValueState)?;
let v_backup_code = match backup_code {
Some(rbc) => Some(
BackupCodes::try_from(rbc)
.map_err(|()| OperationError::InvalidValueState)?,
),
None => None,
};
let v_webauthn = webauthn
.iter()
.map(|sk| (sk.tag.clone(), sk.key.clone()))
.collect();
let type_ =
CredentialType::PasswordMfa(v_password, v_totp, v_webauthn, v_backup_code);
if type_.is_valid() {
Ok((tag.clone(), Credential { type_, uuid: *uuid }))
} else {
Err(OperationError::InvalidValueState)
}
}
}
}
/// Create a new credential that contains a CredentialType::Password
pub fn new_password_only(
policy: &CryptoPolicy,
@ -734,51 +631,6 @@ impl Credential {
}
}
/// Extract this credential into it's Serialisable Replication form
pub fn to_repl_v1(&self, tag: String) -> ReplCredV1 {
let uuid = self.uuid;
match &self.type_ {
CredentialType::Password(pw) => ReplCredV1::Password {
tag,
password: pw.to_repl_v1(),
uuid,
},
CredentialType::GeneratedPassword(pw) => ReplCredV1::GenPassword {
tag,
password: pw.to_repl_v1(),
uuid,
},
CredentialType::PasswordMfa(pw, totp, map, backup_code) => ReplCredV1::PasswordMfa {
tag,
password: pw.to_repl_v1(),
totp: totp
.iter()
.map(|(l, t)| (l.clone(), t.to_repl_v1()))
.collect(),
backup_code: backup_code.as_ref().map(|b| b.to_repl_v1()),
webauthn: map
.iter()
.map(|(k, v)| ReplSecurityKeyV4V1 {
tag: k.clone(),
key: v.clone(),
})
.collect(),
uuid,
},
CredentialType::Webauthn(map) => ReplCredV1::TmpWn {
tag,
set: map
.iter()
.map(|(k, v)| ReplPasskeyV4V1 {
uuid,
tag: k.clone(),
key: v.clone(),
})
.collect(),
},
}
}
pub(crate) fn update_password(&self, pw: Password) -> Self {
let type_ = match &self.type_ {
CredentialType::Password(_) | CredentialType::GeneratedPassword(_) => {

View file

@ -8,7 +8,6 @@ use openssl::sign::Signer;
use rand::prelude::*;
use crate::be::dbvalue::{DbTotpAlgoV1, DbTotpV1};
use crate::repl::proto::{ReplTotpAlgoV1, ReplTotpV1};
// Update to match advice that totp hmac key should be the same
// number of bytes as the output.
@ -117,27 +116,6 @@ impl TryFrom<DbTotpV1> for Totp {
}
}
impl TryFrom<&ReplTotpV1> for Totp {
type Error = ();
fn try_from(value: &ReplTotpV1) -> Result<Self, Self::Error> {
let algo = match value.algo {
ReplTotpAlgoV1::S1 => TotpAlgo::Sha1,
ReplTotpAlgoV1::S256 => TotpAlgo::Sha256,
ReplTotpAlgoV1::S512 => TotpAlgo::Sha512,
};
let digits = TotpDigits::try_from(value.digits)?;
Ok(Totp {
secret: value.key.to_vec(),
step: value.step,
algo,
digits,
})
}
}
impl TryFrom<ProtoTotp> for Totp {
type Error = ();
@ -193,19 +171,6 @@ impl Totp {
}
}
pub(crate) fn to_repl_v1(&self) -> ReplTotpV1 {
ReplTotpV1 {
key: self.secret.clone().into(),
step: self.step,
algo: match self.algo {
TotpAlgo::Sha1 => ReplTotpAlgoV1::S1,
TotpAlgo::Sha256 => ReplTotpAlgoV1::S256,
TotpAlgo::Sha512 => ReplTotpAlgoV1::S512,
},
digits: self.digits.into(),
}
}
fn digest(&self, counter: u64) -> Result<u32, TotpError> {
let hmac = self.algo.digest(&self.secret, counter)?;
// Now take the hmac and encode it as hotp expects.

View file

@ -482,7 +482,7 @@ impl Entry<EntryInit, EntryNew> {
}
impl Entry<EntryRefresh, EntryNew> {
pub fn from_repl_entry_v1(repl_entry: &ReplEntryV1) -> Result<Self, OperationError> {
pub fn from_repl_entry_v1(repl_entry: ReplEntryV1) -> Result<Self, OperationError> {
// From the entry, we have to rebuild the ecstate and the attrs.
let (ecstate, mut attrs) = repl_entry.rehydrate()?;
@ -543,7 +543,7 @@ impl Entry<EntryIncremental, EntryNew> {
self.valid.ecstate.stub()
}
pub fn rehydrate(repl_inc_entry: &ReplIncrementalEntryV1) -> Result<Self, OperationError> {
pub fn rehydrate(repl_inc_entry: ReplIncrementalEntryV1) -> Result<Self, OperationError> {
let (uuid, ecstate, attrs) = repl_inc_entry.rehydrate()?;
Ok(Entry {
@ -1719,48 +1719,6 @@ impl Entry<EntrySealed, EntryCommitted> {
// Convert attrs from db format to value
let (attrs, ecstate) = match db_e.ent {
DbEntryVers::V1(_) => {
error!("Db V1 entry should have been migrated!");
return None;
}
DbEntryVers::V2(v2) => {
let r_attrs = v2
.attrs
.into_iter()
// Skip anything empty as new VS can't deal with it.
.filter(|(_k, vs)| !vs.is_empty())
.map(|(k, dbvs)| {
valueset::from_db_valueset_v2(dbvs)
.map(|vs: ValueSet| (k, vs))
.map_err(|e| {
error!(?e, "from_dbentry failed");
})
})
.collect::<Result<Eattrs, ()>>()
.ok()?;
/*
* ==== The Hack Zoen ====
*
* For now to make replication work, we are synthesising an in-memory change
* log, pinned to "the last time the entry was modified" as it's "create time".
*
* This should only be done *once* on entry load.
*/
let cid = r_attrs
.get(&Attribute::LastModifiedCid)
.and_then(|vs| vs.as_cid_set())
.and_then(|set| set.iter().next().cloned())
.or_else(|| {
error!("Unable to access last modified cid of entry, unable to proceed");
None
})?;
let ecstate = EntryChangeState::new_without_schema(&cid, &r_attrs);
(r_attrs, ecstate)
}
DbEntryVers::V3 { changestate, attrs } => {
let ecstate = EntryChangeState::from_db_changestate(changestate);

View file

@ -319,10 +319,6 @@ impl SearchEvent {
attrs: None,
}
}
pub(crate) fn get_limits(&self) -> &Limits {
&self.ident.limits
}
}
// Represents the decoded entries from the protocol -> internal entry representation
@ -402,10 +398,6 @@ impl ExistsEvent {
filter_orig: filter.into_valid(),
}
}
pub(crate) fn get_limits(&self) -> &Limits {
&self.ident.limits
}
}
#[derive(Debug)]

View file

@ -666,7 +666,7 @@ impl Filter<FilterInvalid> {
qs: &mut QueryServerReadTransaction,
) -> Result<Self, OperationError> {
let depth = DEFAULT_LIMIT_FILTER_DEPTH_MAX as usize;
let mut elems = ev.limits.filter_max_elements;
let mut elems = ev.limits().filter_max_elements;
Ok(Filter {
state: FilterInvalid {
inner: FilterComp::from_ro(f, qs, depth, &mut elems)?,
@ -681,7 +681,7 @@ impl Filter<FilterInvalid> {
qs: &mut QueryServerWriteTransaction,
) -> Result<Self, OperationError> {
let depth = DEFAULT_LIMIT_FILTER_DEPTH_MAX as usize;
let mut elems = ev.limits.filter_max_elements;
let mut elems = ev.limits().filter_max_elements;
Ok(Filter {
state: FilterInvalid {
inner: FilterComp::from_rw(f, qs, depth, &mut elems)?,
@ -696,7 +696,7 @@ impl Filter<FilterInvalid> {
qs: &mut QueryServerReadTransaction,
) -> Result<Self, OperationError> {
let depth = DEFAULT_LIMIT_FILTER_DEPTH_MAX as usize;
let mut elems = ev.limits.filter_max_elements;
let mut elems = ev.limits().filter_max_elements;
Ok(Filter {
state: FilterInvalid {
inner: FilterComp::from_ldap_ro(f, qs, depth, &mut elems)?,
@ -2174,7 +2174,7 @@ mod tests {
);
let mut ev = Identity::from_internal();
ev.limits.filter_max_elements = LIMIT;
ev.limits_mut().filter_max_elements = LIMIT;
// Test proto + read
let res = Filter::from_ro(&ev, &inv_proto, &mut r_txn);

View file

@ -290,6 +290,7 @@ pub struct Oauth2RS {
// Our internal exchange encryption material for this rs.
token_fernet: Fernet,
jws_signer: Oauth2JwsSigner,
// For oidc we also need our issuer url.
iss: Url,
// For discovery we need to build and keep a number of values.

View file

@ -683,13 +683,13 @@ pub trait IdmServerTransaction<'a> {
trace!(claims = ?entry.get_ava_set("claim"), "Applied claims");
*/
Ok(Identity {
origin: IdentType::User(IdentUser { entry }),
Ok(Identity::new(
IdentType::User(IdentUser { entry }),
source,
session_id: uat.session_id,
uat.session_id,
scope,
limits,
})
))
}
#[instrument(level = "debug", skip_all)]
@ -710,13 +710,13 @@ pub trait IdmServerTransaction<'a> {
let scope = (&apit.purpose).into();
let limits = Limits::api_token();
Ok(Identity {
origin: IdentType::User(IdentUser { entry }),
Ok(Identity::new(
IdentType::User(IdentUser { entry }),
source,
session_id: apit.token_id,
apit.token_id,
scope,
limits,
})
))
}
fn client_cert_info_entry(
@ -802,14 +802,14 @@ pub trait IdmServerTransaction<'a> {
let certificate_uuid = cert_entry.get_uuid();
Ok(Identity {
origin: IdentType::User(IdentUser { entry }),
Ok(Identity::new(
IdentType::User(IdentUser { entry }),
source,
// session_id is the certificate uuid.
session_id: certificate_uuid,
certificate_uuid,
scope,
limits,
})
))
}
#[instrument(level = "debug", skip_all)]
@ -900,13 +900,13 @@ pub trait IdmServerTransaction<'a> {
// Users via LDAP are always only granted anonymous rights unless
// they auth with an api-token
Ok(Identity {
origin: IdentType::User(IdentUser { entry: anon_entry }),
Ok(Identity::new(
IdentType::User(IdentUser { entry: anon_entry }),
source,
session_id,
scope: AccessScope::ReadOnly,
AccessScope::ReadOnly,
limits,
})
))
}
#[instrument(level = "debug", skip_all)]
@ -990,13 +990,13 @@ pub trait IdmServerTransaction<'a> {
let scope = (&sync_token.purpose).into();
let limits = Limits::unlimited();
Ok(Identity {
origin: IdentType::Synch(entry.get_uuid()),
source: client_auth_info.source,
session_id: sync_token.token_id,
Ok(Identity::new(
IdentType::Synch(entry.get_uuid()),
client_auth_info.source,
sync_token.token_id,
scope,
limits,
})
))
}
}

View file

@ -80,7 +80,6 @@ pub mod prelude {
pub use url::Url;
pub use uuid::{uuid, Uuid};
pub use crate::be::Limits;
pub use crate::constants::*;
pub use crate::entry::{
Entry, EntryCommitted, EntryIncrementalCommitted, EntryIncrementalNew, EntryInit,
@ -113,6 +112,11 @@ pub mod prelude {
ApiTokenScope, IndexType, PartialValue, SessionScope, SyntaxType, Value,
};
pub use time::format_description::well_known::Rfc3339;
#[cfg(test)]
pub use kanidmd_lib_macros::*;
pub(crate) use crate::valueset::{
ValueSet, ValueSetBool, ValueSetCid, ValueSetIndex, ValueSetIutf8, ValueSetRefer,
ValueSetSyntax, ValueSetT, ValueSetUtf8, ValueSetUuid,
@ -123,10 +127,5 @@ pub mod prelude {
ScimEntryHeader,
};
// pub(crate) use serde_json::Value as JsonValue;
#[cfg(test)]
pub use kanidmd_lib_macros::*;
pub use time::format_description::well_known::Rfc3339;
pub(crate) use crate::be::Limits;
}

View file

@ -10,7 +10,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
fn consumer_incremental_apply_entries(
&mut self,
ctx_entries: &[ReplIncrementalEntryV1],
ctx_entries: Vec<ReplIncrementalEntryV1>,
) -> Result<bool, OperationError> {
// trace!(?ctx_entries);
@ -30,7 +30,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
// I think we need to rehydrate all the repl content to a partial
// entry. This way all the types are consistent and ready.
let ctx_entries: Vec<_> = ctx_entries.iter().map(
let ctx_entries: Vec<_> = ctx_entries.into_iter().map(
EntryIncrementalNew::rehydrate
)
.collect::<Result<Vec<_>, _>>()
@ -283,7 +283,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
pub fn consumer_apply_changes(
&mut self,
ctx: &ReplIncrementalContext,
ctx: ReplIncrementalContext,
) -> Result<ConsumerState, OperationError> {
match ctx {
ReplIncrementalContext::DomainMismatch => {
@ -314,9 +314,9 @@ impl<'a> QueryServerWriteTransaction<'a> {
meta_entries,
entries,
} => self.consumer_apply_changes_v1(
*domain_version,
*domain_patch_level,
*domain_uuid,
domain_version,
domain_patch_level,
domain_uuid,
ranges,
schema_entries,
meta_entries,
@ -331,10 +331,10 @@ impl<'a> QueryServerWriteTransaction<'a> {
ctx_domain_version: DomainVersion,
ctx_domain_patch_level: u32,
ctx_domain_uuid: Uuid,
ctx_ranges: &BTreeMap<Uuid, ReplAnchoredCidRange>,
ctx_schema_entries: &[ReplIncrementalEntryV1],
ctx_meta_entries: &[ReplIncrementalEntryV1],
ctx_entries: &[ReplIncrementalEntryV1],
ctx_ranges: BTreeMap<Uuid, ReplAnchoredCidRange>,
ctx_schema_entries: Vec<ReplIncrementalEntryV1>,
ctx_meta_entries: Vec<ReplIncrementalEntryV1>,
ctx_entries: Vec<ReplIncrementalEntryV1>,
) -> Result<ConsumerState, OperationError> {
if ctx_domain_version < DOMAIN_MIN_LEVEL {
error!("Unable to proceed with consumer incremental - incoming domain level is lower than our minimum supported level. {} < {}", ctx_domain_version, DOMAIN_MIN_LEVEL);
@ -367,7 +367,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
let txn_cid = self.get_cid().clone();
let ruv = self.be_txn.get_ruv_write();
ruv.incremental_preflight_validate_ruv(ctx_ranges, &txn_cid)
ruv.incremental_preflight_validate_ruv(&ctx_ranges, &txn_cid)
.inspect_err(|err| {
error!(
?err,
@ -439,11 +439,11 @@ impl<'a> QueryServerWriteTransaction<'a> {
// context. Note that we get this in a writeable form!
let ruv = self.be_txn.get_ruv_write();
ruv.refresh_validate_ruv(ctx_ranges).inspect_err(|err| {
ruv.refresh_validate_ruv(&ctx_ranges).inspect_err(|err| {
error!(?err, "RUV ranges were not rebuilt correctly.");
})?;
ruv.refresh_update_ruv(ctx_ranges).inspect_err(|err| {
ruv.refresh_update_ruv(&ctx_ranges).inspect_err(|err| {
error!(?err, "Unable to update RUV with supplier ranges.");
})?;
@ -452,7 +452,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
pub fn consumer_apply_refresh(
&mut self,
ctx: &ReplRefreshContext,
ctx: ReplRefreshContext,
) -> Result<(), OperationError> {
match ctx {
ReplRefreshContext::V1 {
@ -464,9 +464,9 @@ impl<'a> QueryServerWriteTransaction<'a> {
meta_entries,
entries,
} => self.consumer_apply_refresh_v1(
*domain_version,
*domain_devel,
*domain_uuid,
domain_version,
domain_devel,
domain_uuid,
ranges,
schema_entries,
meta_entries,
@ -477,10 +477,10 @@ impl<'a> QueryServerWriteTransaction<'a> {
fn consumer_refresh_create_entries(
&mut self,
ctx_entries: &[ReplEntryV1],
ctx_entries: Vec<ReplEntryV1>,
) -> Result<(), OperationError> {
let candidates = ctx_entries
.iter()
.into_iter()
.map(EntryRefreshNew::from_repl_entry_v1)
.collect::<Result<Vec<EntryRefreshNew>, _>>()
.inspect_err(|err| {
@ -536,10 +536,10 @@ impl<'a> QueryServerWriteTransaction<'a> {
ctx_domain_version: DomainVersion,
ctx_domain_devel: bool,
ctx_domain_uuid: Uuid,
ctx_ranges: &BTreeMap<Uuid, ReplAnchoredCidRange>,
ctx_schema_entries: &[ReplEntryV1],
ctx_meta_entries: &[ReplEntryV1],
ctx_entries: &[ReplEntryV1],
ctx_ranges: BTreeMap<Uuid, ReplAnchoredCidRange>,
ctx_schema_entries: Vec<ReplEntryV1>,
ctx_meta_entries: Vec<ReplEntryV1>,
ctx_entries: Vec<ReplEntryV1>,
) -> Result<(), OperationError> {
// Can we apply the domain version validly?
// if domain_version >= min_support ...
@ -646,11 +646,11 @@ impl<'a> QueryServerWriteTransaction<'a> {
// context. Note that we get this in a writeable form!
let ruv = self.be_txn.get_ruv_write();
ruv.refresh_validate_ruv(ctx_ranges).inspect_err(|err| {
ruv.refresh_validate_ruv(&ctx_ranges).inspect_err(|err| {
error!(?err, "RUV ranges were not rebuilt correctly.");
})?;
ruv.refresh_update_ruv(ctx_ranges).inspect_err(|err| {
ruv.refresh_update_ruv(&ctx_ranges).inspect_err(|err| {
error!(?err, "Unable to update RUV with supplier ranges.");
})?;
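
consumer_apply_changes, consumer_apply_refresh and the helpers above now take their replication contexts by value rather than by reference, so rehydration can move strings, value sets and key material out of the context instead of cloning them. A minimal stand-alone sketch of that borrow-to-owned refactor with hypothetical types (the real ones are ReplEntryV1 / ReplIncrementalEntryV1):

// Hypothetical wire-format entry and rehydrated entry.
struct WireEntry {
    name: String,
    values: Vec<String>,
}

struct Entry {
    name: String,
    values: Vec<String>,
}

// Before: a borrowed context forces a clone of every owned field.
fn rehydrate_borrowed(e: &WireEntry) -> Entry {
    Entry { name: e.name.clone(), values: e.values.clone() }
}

// After: consuming the context lets the data move straight into the entry.
fn rehydrate_owned(e: WireEntry) -> Entry {
    Entry { name: e.name, values: e.values }
}

fn apply_refresh(entries: Vec<WireEntry>) -> Vec<Entry> {
    // into_iter() yields owned items, matching the .iter() -> .into_iter() changes above.
    entries.into_iter().map(rehydrate_owned).collect()
}

fn main() {
    let ctx = vec![WireEntry { name: "demo".into(), values: vec!["a".into()] }];
    let entries = apply_refresh(ctx); // ctx is consumed exactly once, no clones
    assert_eq!(entries[0].name, "demo");
    assert_eq!(entries[0].values, vec!["a".to_string()]);

    let borrowed = rehydrate_borrowed(&WireEntry { name: "x".into(), values: vec![] });
    assert!(borrowed.values.is_empty());
}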

View file

@ -1,30 +1,15 @@
use super::cid::Cid;
use super::entry::EntryChangeState;
use super::entry::State;
use crate::be::dbvalue::DbValueApplicationPassword;
use crate::be::dbvalue::DbValueCertificate;
use crate::be::dbvalue::DbValueImage;
use crate::be::dbvalue::DbValueKeyInternal;
use crate::be::dbvalue::DbValueOauthClaimMapJoinV1;
use crate::be::dbvalue::DbValueSession;
use crate::be::dbvalue::DbValueSetV2;
use crate::entry::Eattrs;
use crate::prelude::*;
use crate::schema::{SchemaReadTransaction, SchemaTransaction};
use crate::valueset;
use base64urlsafedata::Base64UrlSafeData;
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use std::collections::{BTreeMap, BTreeSet};
use std::collections::BTreeMap;
use std::fmt;
use webauthn_rs::prelude::{
AttestationCaList, AttestedPasskey as AttestedPasskeyV4, Passkey as PasskeyV4,
SecurityKey as SecurityKeyV4,
};
// Re-export this for our own usage.
pub use kanidm_lib_crypto::ReplPasswordV1;
pub enum ConsumerState {
Ok,
RefreshRequired,
@ -129,354 +114,10 @@ impl ReplRuvRange {
}
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct ReplAddressV1 {
#[serde(rename = "f")]
pub formatted: String,
#[serde(rename = "s")]
pub street_address: String,
#[serde(rename = "l")]
pub locality: String,
#[serde(rename = "r")]
pub region: String,
#[serde(rename = "p")]
pub postal_code: String,
#[serde(rename = "c")]
pub country: String,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplTotpAlgoV1 {
S1,
S256,
S512,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplTotpV1 {
pub key: Base64UrlSafeData,
pub step: u64,
pub algo: ReplTotpAlgoV1,
pub digits: u8,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplBackupCodeV1 {
pub codes: BTreeSet<String>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplCredV1 {
TmpWn {
tag: String,
set: Vec<ReplPasskeyV4V1>,
},
Password {
tag: String,
password: ReplPasswordV1,
uuid: Uuid,
},
GenPassword {
tag: String,
password: ReplPasswordV1,
uuid: Uuid,
},
PasswordMfa {
tag: String,
password: ReplPasswordV1,
totp: Vec<(String, ReplTotpV1)>,
backup_code: Option<ReplBackupCodeV1>,
webauthn: Vec<ReplSecurityKeyV4V1>,
uuid: Uuid,
},
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplIntentTokenV1 {
Valid {
token_id: String,
max_ttl: Duration,
#[serde(default)]
ext_cred_portal_can_view: bool,
#[serde(default)]
primary_can_edit: bool,
#[serde(default)]
passkeys_can_edit: bool,
#[serde(default)]
attested_passkeys_can_edit: bool,
#[serde(default)]
unixcred_can_edit: bool,
#[serde(default)]
sshpubkey_can_edit: bool,
},
InProgress {
token_id: String,
max_ttl: Duration,
session_id: Uuid,
session_ttl: Duration,
#[serde(default)]
ext_cred_portal_can_view: bool,
#[serde(default)]
primary_can_edit: bool,
#[serde(default)]
passkeys_can_edit: bool,
#[serde(default)]
attested_passkeys_can_edit: bool,
#[serde(default)]
unixcred_can_edit: bool,
#[serde(default)]
sshpubkey_can_edit: bool,
},
Consumed {
token_id: String,
max_ttl: Duration,
},
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ReplSecurityKeyV4V1 {
pub tag: String,
pub key: SecurityKeyV4,
}
impl Eq for ReplSecurityKeyV4V1 {}
impl PartialEq for ReplSecurityKeyV4V1 {
fn eq(&self, other: &Self) -> bool {
self.key.cred_id() == other.key.cred_id()
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplPasskeyV4V1 {
pub uuid: Uuid,
pub tag: String,
pub key: PasskeyV4,
}
impl Eq for ReplPasskeyV4V1 {}
impl PartialEq for ReplPasskeyV4V1 {
fn eq(&self, other: &Self) -> bool {
self.uuid == other.uuid && self.key.cred_id() == other.key.cred_id()
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplAttestedPasskeyV4V1 {
pub uuid: Uuid,
pub tag: String,
pub key: AttestedPasskeyV4,
}
impl Eq for ReplAttestedPasskeyV4V1 {}
impl PartialEq for ReplAttestedPasskeyV4V1 {
fn eq(&self, other: &Self) -> bool {
self.uuid == other.uuid && self.key.cred_id() == other.key.cred_id()
}
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplOauthScopeMapV1 {
pub refer: Uuid,
pub data: BTreeSet<String>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplOauthClaimMapV1 {
pub name: String,
pub join: DbValueOauthClaimMapJoinV1,
pub values: BTreeMap<Uuid, BTreeSet<String>>,
}
#[skip_serializing_none]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplOauth2SessionV1 {
pub refer: Uuid,
pub parent: Option<Uuid>,
pub state: ReplSessionStateV1,
// pub expiry: Option<String>,
pub issued_at: String,
pub rs_uuid: Uuid,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Default)]
pub enum ReplApiTokenScopeV1 {
#[default]
ReadOnly,
ReadWrite,
Synchronise,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplIdentityIdV1 {
Internal,
Uuid(Uuid),
Synch(Uuid),
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplSessionStateV1 {
ExpiresAt(String),
Never,
RevokedAt(ReplCidV1),
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplApiTokenV1 {
pub refer: Uuid,
pub label: String,
pub expiry: Option<String>,
pub issued_at: String,
pub issued_by: ReplIdentityIdV1,
pub scope: ReplApiTokenScopeV1,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub enum ReplAttrV1 {
Address {
set: Vec<ReplAddressV1>,
},
EmailAddress {
primary: String,
set: Vec<String>,
},
PublicBinary {
set: Vec<(String, Base64UrlSafeData)>,
},
PrivateBinary {
set: Vec<Base64UrlSafeData>,
},
Bool {
set: Vec<bool>,
},
Cid {
set: Vec<ReplCidV1>,
},
Credential {
set: Vec<ReplCredV1>,
},
IntentToken {
set: Vec<ReplIntentTokenV1>,
},
Passkey {
set: Vec<ReplPasskeyV4V1>,
},
AttestedPasskey {
set: Vec<ReplAttestedPasskeyV4V1>,
},
DateTime {
set: Vec<String>,
},
Iname {
set: Vec<String>,
},
IndexType {
set: Vec<u16>,
},
Iutf8 {
set: Vec<String>,
},
JsonFilter {
set: Vec<String>,
},
JwsKeyEs256 {
set: Vec<Base64UrlSafeData>,
},
JwsKeyRs256 {
set: Vec<Base64UrlSafeData>,
},
NsUniqueId {
set: Vec<String>,
},
SecretValue {
set: Vec<String>,
},
RestrictedString {
set: Vec<String>,
},
Uint32 {
set: Vec<u32>,
},
Url {
set: Vec<Url>,
},
Utf8 {
set: Vec<String>,
},
Uuid {
set: Vec<Uuid>,
},
Reference {
set: Vec<Uuid>,
},
SyntaxType {
set: Vec<u16>,
},
Spn {
set: Vec<(String, String)>,
},
UiHint {
set: Vec<u16>,
},
SshKey {
set: Vec<(String, String)>,
},
OauthScope {
set: Vec<String>,
},
OauthScopeMap {
set: Vec<ReplOauthScopeMapV1>,
},
OauthClaimMap {
set: Vec<ReplOauthClaimMapV1>,
},
Oauth2Session {
set: Vec<ReplOauth2SessionV1>,
},
Session {
set: Vec<DbValueSession>,
},
ApiToken {
set: Vec<ReplApiTokenV1>,
},
TotpSecret {
set: Vec<(String, ReplTotpV1)>,
},
AuditLogString {
map: Vec<(Cid, String)>,
},
EcKeyPrivate {
key: Vec<u8>,
},
Image {
set: Vec<DbValueImage>,
},
CredentialType {
set: Vec<u16>,
},
WebauthnAttestationCaList {
ca_list: AttestationCaList,
},
KeyInternal {
set: Vec<DbValueKeyInternal>,
},
HexString {
set: Vec<String>,
},
Certificate {
set: Vec<DbValueCertificate>,
},
ApplicationPassword {
set: Vec<DbValueApplicationPassword>,
},
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct ReplAttrStateV1 {
cid: ReplCidV1,
attr: Option<ReplAttrV1>,
attr: Option<DbValueSetV2>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
@ -525,7 +166,7 @@ impl ReplEntryV1 {
// to None and just send the Cid since they have the same result
// on how the entry/attr state looks at each end.
if maybe.len() > 0 {
Some(maybe.to_repl_v1())
Some(maybe.to_db_valueset_v2())
} else {
None
}
@ -549,8 +190,8 @@ impl ReplEntryV1 {
ReplEntryV1 { uuid, st }
}
pub fn rehydrate(&self) -> Result<(EntryChangeState, Eattrs), OperationError> {
match &self.st {
pub fn rehydrate(self) -> Result<(EntryChangeState, Eattrs), OperationError> {
match self.st {
ReplStateV1::Live { at, attrs } => {
trace!("{:?} {:#?}", at, attrs);
// We need to build two sets, one for the Entry Change States, and one for the
@ -558,11 +199,11 @@ impl ReplEntryV1 {
let mut changes = BTreeMap::default();
let mut eattrs = Eattrs::default();
for (attr_name, ReplAttrStateV1 { cid, attr }) in attrs.iter() {
for (attr_name, ReplAttrStateV1 { cid, attr }) in attrs.into_iter() {
let cid: Cid = cid.into();
if let Some(attr_value) = attr {
let v = valueset::from_repl_v1(attr_value).inspect_err(|err| {
let v = valueset::from_db_valueset_v2(attr_value).inspect_err(|err| {
error!(?err, "Unable to restore valueset for {}", attr_name);
})?;
if eattrs.insert(attr_name.clone(), v).is_some() {
@ -658,7 +299,7 @@ impl ReplIncrementalEntryV1 {
let cid = cid.into();
let attr = live_attr.and_then(|maybe| {
if maybe.len() > 0 {
Some(maybe.to_repl_v1())
Some(maybe.to_db_valueset_v2())
} else {
None
}
@ -683,18 +324,18 @@ impl ReplIncrementalEntryV1 {
ReplIncrementalEntryV1 { uuid, st }
}
pub fn rehydrate(&self) -> Result<(Uuid, EntryChangeState, Eattrs), OperationError> {
match &self.st {
pub fn rehydrate(self) -> Result<(Uuid, EntryChangeState, Eattrs), OperationError> {
match self.st {
ReplStateV1::Live { at, attrs } => {
trace!("{:?} {:#?}", at, attrs);
let mut changes = BTreeMap::default();
let mut eattrs = Eattrs::default();
for (attr_name, ReplAttrStateV1 { cid, attr }) in attrs.iter() {
for (attr_name, ReplAttrStateV1 { cid, attr }) in attrs.into_iter() {
let cid: Cid = cid.into();
if let Some(attr_value) = attr {
let v = valueset::from_repl_v1(attr_value).inspect_err(|err| {
let v = valueset::from_db_valueset_v2(attr_value).inspect_err(|err| {
error!(?err, "Unable to restore valueset for {}", attr_name);
})?;
if eattrs.insert(attr_name.clone(), v).is_some() {
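
ReplAttrStateV1 now carries the same DbValueSetV2 representation that is written to disk, replacing the parallel ReplAttrV1 family deleted above, so a single serde model serves both storage and the replication wire format, and rehydrate() consumes the message rather than cloning out of it. A small stand-alone sketch of the single-model idea, using stand-in types with the same short serde rename tags and assuming serde (with derive) and serde_json are available:

use serde::{Deserialize, Serialize};

// Stand-in for DbValueSetV2: one tagged enum reused by the database layer
// and the replication protocol.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
enum ValueSet {
    #[serde(rename = "U8")]
    Utf8(Vec<String>),
    #[serde(rename = "UI")]
    Uint32(Vec<u32>),
}

// Stand-in for ReplAttrStateV1: a cid plus the (optional) live value set.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
struct AttrState {
    cid: String,            // stand-in for ReplCidV1
    attr: Option<ValueSet>, // None means the attribute is deleted at this cid
}

fn main() {
    let state = AttrState {
        cid: "example-cid".into(),
        attr: Some(ValueSet::Utf8(vec!["demo".into()])),
    };
    let wire = serde_json::to_string(&state).expect("serialise");
    let back: AttrState = serde_json::from_str(&wire).expect("deserialise");
    assert_eq!(state, back);

    let counters = serde_json::to_string(&ValueSet::Uint32(vec![42])).expect("serialise");
    println!("{wire}\n{counters}");
}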

View file

@ -22,7 +22,7 @@ fn repl_initialise(
// eprintln!("{:#?}", refresh_context);
// Apply it to the server
to.consumer_apply_refresh(&refresh_context)?;
to.consumer_apply_refresh(refresh_context)?;
// Need same d_uuid
assert_eq!(from.get_domain_uuid(), to.get_domain_uuid());
@ -79,7 +79,7 @@ fn repl_incremental(
trace!(?changes, "supplying changes");
// Check the changes = should be empty.
to.consumer_apply_changes(&changes)
to.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
// RUV should be consistent again.
@ -221,7 +221,7 @@ async fn test_repl_increment_basic_entry_add(server_a: &QueryServer, server_b: &
let mut server_a_txn = server_a.write(duration_from_epoch_now()).await.unwrap();
server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
// Do a ruv check - should still be the same.
@ -304,7 +304,7 @@ async fn test_repl_increment_basic_entry_add(server_a: &QueryServer, server_b: &
let mut server_a_txn = server_a.write(duration_from_epoch_now()).await.unwrap();
server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
// RUV should be consistent again.
@ -554,7 +554,7 @@ async fn test_repl_increment_consumer_lagging_tombstone(
assert!(matches!(changes, ReplIncrementalContext::RefreshRequired));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::RefreshRequired));
@ -1800,7 +1800,7 @@ async fn test_repl_increment_consumer_lagging_attributes(
assert!(matches!(changes, ReplIncrementalContext::RefreshRequired));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::RefreshRequired));
@ -1823,7 +1823,7 @@ async fn test_repl_increment_consumer_lagging_attributes(
assert!(matches!(changes, ReplIncrementalContext::UnwillingToSupply));
let result = server_b_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -1928,7 +1928,7 @@ async fn test_repl_increment_consumer_ruv_trim_past_valid(
assert!(matches!(changes, ReplIncrementalContext::UnwillingToSupply));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -1955,7 +1955,7 @@ async fn test_repl_increment_consumer_ruv_trim_past_valid(
assert!(matches!(changes, ReplIncrementalContext::UnwillingToSupply));
let result = server_b_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -2061,7 +2061,7 @@ async fn test_repl_increment_consumer_ruv_trim_idle_servers(
assert!(matches!(changes, ReplIncrementalContext::V1 { .. }));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -2088,7 +2088,7 @@ async fn test_repl_increment_consumer_ruv_trim_idle_servers(
assert!(matches!(changes, ReplIncrementalContext::V1 { .. }));
let result = server_b_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -3211,7 +3211,7 @@ async fn test_repl_initial_consumer_join(server_a: &QueryServer, server_b: &Quer
assert!(matches!(changes, ReplIncrementalContext::DomainMismatch));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::RefreshRequired));
@ -3570,7 +3570,7 @@ async fn test_repl_increment_consumer_lagging_refresh(
// Apply it to the server
server_a_txn
.consumer_apply_refresh(&refresh_context)
.consumer_apply_refresh(refresh_context)
.expect("Unable to apply refresh");
// Need same d_uuid
@ -3697,7 +3697,7 @@ async fn test_repl_increment_consumer_lagging_refresh(
assert!(matches!(changes, ReplIncrementalContext::V1 { .. }));
let result = server_a_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));
@ -3724,7 +3724,7 @@ async fn test_repl_increment_consumer_lagging_refresh(
assert!(matches!(changes, ReplIncrementalContext::V1 { .. }));
let result = server_b_txn
.consumer_apply_changes(&changes)
.consumer_apply_changes(changes)
.expect("Unable to apply changes to consumer.");
assert!(matches!(result, ConsumerState::Ok));

View file

@ -111,12 +111,11 @@ impl From<&IdentType> for IdentityId {
/// and other info that can assist with server decision making.
pub struct Identity {
pub origin: IdentType,
pub(crate) source: Source,
// pub(crate) impersonate: bool,
// In a way I guess these are session claims?
#[allow(dead_code)]
source: Source,
pub(crate) session_id: Uuid,
pub(crate) scope: AccessScope,
pub(crate) limits: Limits,
limits: Limits,
}
impl std::fmt::Display for Identity {
@ -140,15 +139,37 @@ impl std::fmt::Display for Identity {
}
impl Identity {
pub fn source(&self) -> &Source {
pub(crate) fn new(
origin: IdentType,
source: Source,
session_id: Uuid,
scope: AccessScope,
limits: Limits,
) -> Self {
Self {
origin,
source,
session_id,
scope,
limits,
}
}
#[allow(dead_code)]
pub(crate) fn source(&self) -> &Source {
&self.source
}
pub fn limits(&self) -> &Limits {
pub(crate) fn limits(&self) -> &Limits {
&self.limits
}
pub fn from_internal() -> Self {
#[cfg(test)]
pub(crate) fn limits_mut(&mut self) -> &mut Limits {
&mut self.limits
}
pub(crate) fn from_internal() -> Self {
Identity {
origin: IdentType::Internal,
source: Source::Internal,
@ -159,7 +180,9 @@ impl Identity {
}
#[cfg(test)]
pub fn from_impersonate_entry_readonly(entry: Arc<Entry<EntrySealed, EntryCommitted>>) -> Self {
pub(crate) fn from_impersonate_entry_readonly(
entry: Arc<Entry<EntrySealed, EntryCommitted>>,
) -> Self {
Identity {
origin: IdentType::User(IdentUser { entry }),
source: Source::Internal,
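
Since `source` and `limits` are no longer `pub(crate)` fields, in-crate callers presumably construct an `Identity` through the new constructor and read state through accessors. A small stand-alone sketch of that encapsulation pattern (simplified stand-in types, not the real Kanidm definitions):

#[derive(Debug, Default)]
struct Limits {
    unindexed_allow: bool, // assumed field name, for illustration only
}

struct Identity {
    limits: Limits, // private: read via limits(), mutate in tests via limits_mut()
}

impl Identity {
    fn new(limits: Limits) -> Self {
        Self { limits }
    }

    fn limits(&self) -> &Limits {
        &self.limits
    }

    #[cfg(test)]
    fn limits_mut(&mut self) -> &mut Limits {
        &mut self.limits
    }
}

fn main() {
    let ident = Identity::new(Limits::default());
    // Mirrors the later hunk where `se.get_limits()` becomes `se.ident.limits()`.
    println!("{:?}", ident.limits());
}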

View file

@ -309,7 +309,7 @@ pub trait QueryServerTransaction<'a> {
e
})?;
let lims = se.get_limits();
let lims = se.ident.limits();
// NOTE: We currently can't build search plugins due to the inability to hand
// the QS wr/ro to the plugin trait. However, there shouldn't be a need for search
@ -345,7 +345,7 @@ pub trait QueryServerTransaction<'a> {
e
})?;
let lims = ee.get_limits();
let lims = ee.ident.limits();
if ee.ident.is_internal() {
// We take a fast-path on internal because we can skip loading entries

View file

@ -4,7 +4,6 @@ use smolset::SmolSet;
use crate::be::dbvalue::DbValueAddressV1;
use crate::prelude::*;
use crate::repl::proto::{ReplAddressV1, ReplAttrV1};
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::value::{Address, VALIDATE_EMAIL_RE};
@ -53,33 +52,6 @@ impl ValueSetAddress {
.collect();
Ok(Box::new(ValueSetAddress { set }))
}
pub fn from_repl_v1(data: &[ReplAddressV1]) -> Result<ValueSet, OperationError> {
let set = data
.iter()
.cloned()
.map(
|ReplAddressV1 {
formatted,
street_address,
locality,
region,
postal_code,
country,
}| {
Address {
formatted,
street_address,
locality,
region,
postal_code,
country,
}
},
)
.collect();
Ok(Box::new(ValueSetAddress { set }))
}
}
impl FromIterator<Address> for Option<Box<ValueSetAddress>> {
@ -197,23 +169,6 @@ impl ValueSetT for ValueSetAddress {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Address {
set: self
.set
.iter()
.map(|a| ReplAddressV1 {
formatted: a.formatted.clone(),
street_address: a.street_address.clone(),
locality: a.locality.clone(),
region: a.region.clone(),
postal_code: a.postal_code.clone(),
country: a.country.clone(),
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set
@ -491,13 +446,6 @@ impl ValueSetT for ValueSetEmailAddress {
DbValueSetV2::EmailAddress(self.primary.clone(), self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::EmailAddress {
primary: self.primary.clone(),
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::EmailAddress))
}

View file

@ -1,7 +1,6 @@
use crate::be::dbvalue::{DbValueApplicationPassword, DbValueSetV2};
use crate::credential::{apppwd::ApplicationPassword, Password};
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use std::collections::BTreeMap;
@ -56,10 +55,6 @@ impl ValueSetApplicationPassword {
Self::from_dbv_iter(data.into_iter())
}
pub fn from_repl_v1(data: &[DbValueApplicationPassword]) -> Result<ValueSet, OperationError> {
Self::from_dbv_iter(data.iter().cloned())
}
fn to_vec_dbvs(&self) -> Vec<DbValueApplicationPassword> {
self.map
.iter()
@ -207,11 +202,6 @@ impl ValueSetT for ValueSetApplicationPassword {
DbValueSetV2::ApplicationPassword(data)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
let set = self.to_vec_dbvs();
ReplAttrV1::ApplicationPassword { set }
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.map

View file

@ -1,6 +1,5 @@
use crate::prelude::*;
use crate::repl::cid::Cid;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
use kanidm_proto::scim_v1::server::ScimAuditString;
@ -30,21 +29,10 @@ impl ValueSetAuditLogString {
Box::new(ValueSetAuditLogString { map })
}
/*
pub fn push(&mut self, (c, s): AuditLogStringType) -> bool {
self.map.insert(c, s).is_none()
}
*/
pub fn from_dbvs2(data: Vec<AuditLogStringType>) -> Result<ValueSet, OperationError> {
let map = data.into_iter().collect();
Ok(Box::new(ValueSetAuditLogString { map }))
}
pub fn from_repl_v1(data: &[AuditLogStringType]) -> Result<ValueSet, OperationError> {
let map = data.iter().map(|e| (e.0.clone(), e.1.clone())).collect();
Ok(Box::new(ValueSetAuditLogString { map }))
}
}
impl ValueSetT for ValueSetAuditLogString {
@ -145,16 +133,6 @@ impl ValueSetT for ValueSetAuditLogString {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::AuditLogString {
map: self
.map
.iter()
.map(|(c, s)| (c.clone(), s.clone()))
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().map(|c| PartialValue::Cid(c.clone())))
}

View file

@ -5,7 +5,6 @@ use std::collections::BTreeMap;
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -116,12 +115,6 @@ impl ValueSetT for ValueSetPrivateBinary {
DbValueSetV2::PrivateBinary(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::PrivateBinary {
set: self.set.iter().cloned().map(|b| b.into()).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set
@ -187,11 +180,6 @@ impl ValueSetPublicBinary {
Ok(Box::new(ValueSetPublicBinary { map }))
}
pub fn from_repl_v1(data: &[(String, Base64UrlSafeData)]) -> Result<ValueSet, OperationError> {
let map = data.iter().map(|(k, v)| (k.clone(), v.to_vec())).collect();
Ok(Box::new(ValueSetPublicBinary { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -310,16 +298,6 @@ impl ValueSetT for ValueSetPublicBinary {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::PublicBinary {
set: self
.map
.iter()
.map(|(tag, bin)| (tag.clone(), bin.clone().into()))
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::PublicBinary))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetBool {
Ok(Box::new(ValueSetBool { set }))
}
pub fn from_repl_v1(data: &[bool]) -> Result<ValueSet, OperationError> {
let set = data.iter().copied().collect();
Ok(Box::new(ValueSetBool { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and bool is foreign.
#[allow(clippy::should_implement_trait)]
@ -127,12 +121,6 @@ impl ValueSetT for ValueSetBool {
DbValueSetV2::Bool(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Bool {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::new_bool))
}

View file

@ -1,6 +1,5 @@
use crate::be::dbvalue::DbValueCertificate;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
use kanidm_proto::scim_v1::server::ScimCertificate;
@ -37,10 +36,6 @@ impl ValueSetCertificate {
Self::from_dbv_iter(data.into_iter())
}
pub fn from_repl_v1(data: &[DbValueCertificate]) -> Result<ValueSet, OperationError> {
Self::from_dbv_iter(data.iter().cloned())
}
fn from_dbv_iter(
certs: impl Iterator<Item = DbValueCertificate>,
) -> Result<ValueSet, OperationError> {
@ -230,11 +225,6 @@ impl ValueSetT for ValueSetCertificate {
DbValueSetV2::Certificate(data)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
let set = self.to_vec_dbvs();
ReplAttrV1::Certificate { set }
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.map

View file

@ -3,7 +3,6 @@ use smolset::SmolSet;
use crate::be::dbvalue::DbCidV1;
use crate::prelude::*;
use crate::repl::cid::Cid;
use crate::repl::proto::{ReplAttrV1, ReplCidV1};
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -33,11 +32,6 @@ impl ValueSetCid {
.collect();
Ok(Box::new(ValueSetCid { set }))
}
pub fn from_repl_v1(data: &[ReplCidV1]) -> Result<ValueSet, OperationError> {
let set = data.iter().map(|dc| dc.into()).collect();
Ok(Box::new(ValueSetCid { set }))
}
}
impl FromIterator<Cid> for Option<Box<ValueSetCid>> {
@ -144,12 +138,6 @@ impl ValueSetT for ValueSetCid {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Cid {
set: self.set.iter().map(|c| c.into()).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::new_cid))
}

View file

@ -12,9 +12,6 @@ use crate::be::dbvalue::{
};
use crate::credential::Credential;
use crate::prelude::*;
use crate::repl::proto::{
ReplAttestedPasskeyV4V1, ReplAttrV1, ReplCredV1, ReplIntentTokenV1, ReplPasskeyV4V1,
};
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::value::{CredUpdateSessionPerms, CredentialType, IntentTokenState};
@ -51,14 +48,6 @@ impl ValueSetCredential {
Ok(Box::new(ValueSetCredential { map }))
}
pub fn from_repl_v1(data: &[ReplCredV1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(Credential::try_from_repl_v1)
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetCredential { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -170,16 +159,6 @@ impl ValueSetT for ValueSetCredential {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Credential {
set: self
.map
.iter()
.map(|(tag, cred)| cred.to_repl_v1(tag.clone()))
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Cred))
}
@ -299,69 +278,6 @@ impl ValueSetIntentToken {
Ok(Box::new(ValueSetIntentToken { map }))
}
pub fn from_repl_v1(data: &[ReplIntentTokenV1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(|dits| match dits {
ReplIntentTokenV1::Valid {
token_id,
max_ttl,
ext_cred_portal_can_view,
primary_can_edit,
passkeys_can_edit,
attested_passkeys_can_edit,
unixcred_can_edit,
sshpubkey_can_edit,
} => (
token_id.clone(),
IntentTokenState::Valid {
max_ttl: *max_ttl,
perms: CredUpdateSessionPerms {
ext_cred_portal_can_view: *ext_cred_portal_can_view,
primary_can_edit: *primary_can_edit,
passkeys_can_edit: *passkeys_can_edit,
attested_passkeys_can_edit: *attested_passkeys_can_edit,
unixcred_can_edit: *unixcred_can_edit,
sshpubkey_can_edit: *sshpubkey_can_edit,
},
},
),
ReplIntentTokenV1::InProgress {
token_id,
max_ttl,
session_id,
session_ttl,
ext_cred_portal_can_view,
primary_can_edit,
passkeys_can_edit,
attested_passkeys_can_edit,
unixcred_can_edit,
sshpubkey_can_edit,
} => (
token_id.clone(),
IntentTokenState::InProgress {
max_ttl: *max_ttl,
session_id: *session_id,
session_ttl: *session_ttl,
perms: CredUpdateSessionPerms {
ext_cred_portal_can_view: *ext_cred_portal_can_view,
primary_can_edit: *primary_can_edit,
passkeys_can_edit: *passkeys_can_edit,
attested_passkeys_can_edit: *attested_passkeys_can_edit,
unixcred_can_edit: *unixcred_can_edit,
sshpubkey_can_edit: *sshpubkey_can_edit,
},
},
),
ReplIntentTokenV1::Consumed { token_id, max_ttl } => (
token_id.clone(),
IntentTokenState::Consumed { max_ttl: *max_ttl },
),
})
.collect();
Ok(Box::new(ValueSetIntentToken { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -541,67 +457,6 @@ impl ValueSetT for ValueSetIntentToken {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::IntentToken {
set: self
.map
.iter()
.map(|(u, s)| match s {
IntentTokenState::Valid {
max_ttl,
perms:
CredUpdateSessionPerms {
ext_cred_portal_can_view,
primary_can_edit,
passkeys_can_edit,
attested_passkeys_can_edit,
unixcred_can_edit,
sshpubkey_can_edit,
},
} => ReplIntentTokenV1::Valid {
token_id: u.clone(),
max_ttl: *max_ttl,
ext_cred_portal_can_view: *ext_cred_portal_can_view,
primary_can_edit: *primary_can_edit,
passkeys_can_edit: *passkeys_can_edit,
attested_passkeys_can_edit: *attested_passkeys_can_edit,
unixcred_can_edit: *unixcred_can_edit,
sshpubkey_can_edit: *sshpubkey_can_edit,
},
IntentTokenState::InProgress {
max_ttl,
session_id,
session_ttl,
perms:
CredUpdateSessionPerms {
ext_cred_portal_can_view,
primary_can_edit,
passkeys_can_edit,
attested_passkeys_can_edit,
unixcred_can_edit,
sshpubkey_can_edit,
},
} => ReplIntentTokenV1::InProgress {
token_id: u.clone(),
max_ttl: *max_ttl,
session_id: *session_id,
session_ttl: *session_ttl,
ext_cred_portal_can_view: *ext_cred_portal_can_view,
primary_can_edit: *primary_can_edit,
passkeys_can_edit: *passkeys_can_edit,
attested_passkeys_can_edit: *attested_passkeys_can_edit,
unixcred_can_edit: *unixcred_can_edit,
sshpubkey_can_edit: *sshpubkey_can_edit,
},
IntentTokenState::Consumed { max_ttl } => ReplIntentTokenV1::Consumed {
token_id: u.clone(),
max_ttl: *max_ttl,
},
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::IntentToken))
}
@ -668,15 +523,6 @@ impl ValueSetPasskey {
Ok(Box::new(ValueSetPasskey { map }))
}
pub fn from_repl_v1(data: &[ReplPasskeyV4V1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.cloned()
.map(|ReplPasskeyV4V1 { uuid, tag, key }| Ok((uuid, (tag, key))))
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetPasskey { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -780,20 +626,6 @@ impl ValueSetT for ValueSetPasskey {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Passkey {
set: self
.map
.iter()
.map(|(u, (t, k))| ReplPasskeyV4V1 {
uuid: *u,
tag: t.clone(),
key: k.clone(),
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Passkey))
}
@ -864,15 +696,6 @@ impl ValueSetAttestedPasskey {
Ok(Box::new(ValueSetAttestedPasskey { map }))
}
pub fn from_repl_v1(data: &[ReplAttestedPasskeyV4V1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.cloned()
.map(|ReplAttestedPasskeyV4V1 { uuid, tag, key }| Ok((uuid, (tag, key))))
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetAttestedPasskey { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -976,20 +799,6 @@ impl ValueSetT for ValueSetAttestedPasskey {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::AttestedPasskey {
set: self
.map
.iter()
.map(|(u, (t, k))| ReplAttestedPasskeyV4V1 {
uuid: *u,
tag: t.clone(),
key: k.clone(),
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().copied().map(PartialValue::AttestedPasskey))
}
@ -1058,12 +867,6 @@ impl ValueSetCredentialType {
Ok(Box::new(ValueSetCredentialType { set }))
}
pub fn from_repl_v1(data: &[u16]) -> Result<ValueSet, OperationError> {
let set: Result<_, _> = data.iter().copied().map(CredentialType::try_from).collect();
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetCredentialType { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and uuid is foreign.
#[allow(clippy::should_implement_trait)]
@ -1154,12 +957,6 @@ impl ValueSetT for ValueSetCredentialType {
DbValueSetV2::CredentialType(self.set.iter().map(|s| *s as u16).collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::CredentialType {
set: self.set.iter().map(|s| *s as u16).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::CredentialType))
}
@ -1209,34 +1006,9 @@ impl ValueSetWebauthnAttestationCaList {
Box::new(ValueSetWebauthnAttestationCaList { ca_list })
}
/*
pub fn push(&mut self, u: CredentialType) -> bool {
self.set.insert(u)
}
*/
pub fn from_dbvs2(ca_list: AttestationCaList) -> Result<ValueSet, OperationError> {
Ok(Box::new(ValueSetWebauthnAttestationCaList { ca_list }))
}
pub fn from_repl_v1(ca_list: &AttestationCaList) -> Result<ValueSet, OperationError> {
Ok(Box::new(ValueSetWebauthnAttestationCaList {
ca_list: ca_list.clone(),
}))
}
/*
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and uuid is foreign.
#[allow(clippy::should_implement_trait)]
pub fn from_iter<T>(iter: T) -> Option<Box<Self>>
where
T: IntoIterator<Item = CredentialType>,
{
let set = iter.into_iter().collect();
Some(Box::new(ValueSetCredentialType { set }))
}
*/
}
impl ValueSetT for ValueSetWebauthnAttestationCaList {
@ -1258,25 +1030,11 @@ impl ValueSetT for ValueSetWebauthnAttestationCaList {
}
fn remove(&mut self, _pv: &PartialValue, _cid: &Cid) -> bool {
/*
match pv {
_ => {
debug_assert!(false);
true
}
}
*/
debug_assert!(false);
true
}
fn contains(&self, _pv: &PartialValue) -> bool {
/*
match pv {
PartialValue::CredentialType(u) => self.set.contains(u),
_ => false,
}
*/
false
}
@ -1342,12 +1100,6 @@ impl ValueSetT for ValueSetWebauthnAttestationCaList {
))
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::WebauthnAttestationCaList {
ca_list: self.ca_list.clone(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(std::iter::empty::<PartialValue>())
}

View file

@ -2,7 +2,6 @@ use smolset::SmolSet;
use time::OffsetDateTime;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -34,18 +33,6 @@ impl ValueSetDateTime {
Ok(Box::new(ValueSetDateTime { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data
.iter()
.map(|s| {
OffsetDateTime::parse(s, &Rfc3339)
.map(|odt| odt.to_offset(time::UtcOffset::UTC))
.map_err(|_| OperationError::InvalidValueState)
})
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetDateTime { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and offset date time is foreign
#[allow(clippy::should_implement_trait)]
@ -161,21 +148,6 @@ impl ValueSetT for ValueSetDateTime {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::DateTime {
set: self
.set
.iter()
.map(|odt| {
debug_assert_eq!(odt.offset(), time::UtcOffset::UTC);
#[allow(clippy::expect_used)]
odt.format(&Rfc3339)
.expect("Failed to format timestamp into RFC3339")
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::DateTime))
}

View file

@ -2,7 +2,6 @@ use std::iter::{self};
use crate::be::dbvalue::DbValueSetV2;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::value::{PartialValue, SyntaxType, Value};
use openssl::ec::EcKey;
use openssl::pkey::{Private, Public};
@ -65,10 +64,6 @@ impl ValueSetEcKeyPrivate {
pub fn from_dbvs2(key_der: &[u8]) -> Result<ValueSet, OperationError> {
Self::valueset_from_key_der(key_der)
}
pub fn from_repl_v1(key_der: &[u8]) -> Result<ValueSet, OperationError> {
Self::valueset_from_key_der(key_der)
}
}
impl ValueSetT for ValueSetEcKeyPrivate {
@ -151,20 +146,6 @@ impl ValueSetT for ValueSetEcKeyPrivate {
DbValueSetV2::EcKeyPrivate(key_der)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
#[allow(clippy::expect_used)]
let key_der = self
.set
.as_ref()
.map(|key| {
key.priv_key.private_key_to_der().expect(
"Unable to process eckey to der, likely corrupted. You must restore from backup.",
)
})
.unwrap_or_default();
ReplAttrV1::EcKeyPrivate { key: key_der }
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = crate::value::PartialValue> + '_> {
Box::new(iter::once(PartialValue::SecretValue))
}

View file

@ -1,7 +1,6 @@
use std::collections::BTreeSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetHexString {
Ok(Box::new(ValueSetHexString { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetHexString { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and str is foreign
#[allow(clippy::should_implement_trait)]
@ -148,12 +142,6 @@ impl ValueSetT for ValueSetHexString {
DbValueSetV2::HexString(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::HexString {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::HexString))
}

View file

@ -9,7 +9,6 @@ use kanidm_proto::internal::{ImageType, ImageValue};
use crate::be::dbvalue::DbValueImage;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -268,33 +267,6 @@ impl ValueSetImage {
}))
}
pub fn from_repl_v1(data: &[DbValueImage]) -> Result<ValueSet, OperationError> {
let mut set: HashSet<ImageValue> = HashSet::new();
for image in data {
let image = match image.clone() {
DbValueImage::V1 {
filename,
filetype,
contents,
} => ImageValue::new(filename, filetype, contents),
};
match image.validate_image() {
Ok(_) => {
set.insert(image.clone());
}
Err(err) => {
admin_error!(
"Image didn't pass validation, not adding to value! Error: {:?}",
err
);
return Err(OperationError::InvalidValueState);
}
}
}
Ok(Box::new(ValueSetImage { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and `ImageValue` is foreign.
#[allow(clippy::should_implement_trait)]
@ -448,21 +420,6 @@ impl ValueSetT for ValueSetImage {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Image {
set: self
.set
.iter()
.cloned()
.map(|e| DbValueImage::V1 {
filename: e.filename,
filetype: e.filetype,
contents: e.contents,
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set

View file

@ -1,7 +1,6 @@
use std::collections::BTreeSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -27,11 +26,6 @@ impl ValueSetIname {
Ok(Box::new(ValueSetIname { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetIname { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and str is foreign
#[allow(clippy::should_implement_trait)]
@ -159,12 +153,6 @@ impl ValueSetT for ValueSetIname {
DbValueSetV2::Iname(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Iname {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().map(|i| PartialValue::new_iname(i.as_str())))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -27,12 +26,6 @@ impl ValueSetIndex {
Ok(Box::new(ValueSetIndex { set }))
}
pub fn from_repl_v1(data: &[u16]) -> Result<ValueSet, OperationError> {
let set: Result<_, _> = data.iter().copied().map(IndexType::try_from).collect();
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetIndex { set }))
}
// We need to allow this, because there seems to be a bug using it fromiterator in entry.rs
#[allow(clippy::should_implement_trait)]
pub fn from_iter<T>(iter: T) -> Option<Box<ValueSetIndex>>
@ -122,12 +115,6 @@ impl ValueSetT for ValueSetIndex {
DbValueSetV2::IndexType(self.set.iter().map(|s| *s as u16).collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::IndexType {
set: self.set.iter().map(|s| *s as u16).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::Index))
}

View file

@ -2,7 +2,6 @@ use std::collections::BTreeSet;
use super::iname::ValueSetIname;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -28,11 +27,6 @@ impl ValueSetIutf8 {
Ok(Box::new(ValueSetIutf8 { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetIutf8 { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and str is foreign.
#[allow(clippy::should_implement_trait)]
@ -157,12 +151,6 @@ impl ValueSetT for ValueSetIutf8 {
DbValueSetV2::Iutf8(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Iutf8 {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().map(|i| PartialValue::new_iutf8(i.as_str())))
}

View file

@ -2,7 +2,6 @@ use kanidm_proto::internal::Filter as ProtoFilter;
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -30,14 +29,6 @@ impl ValueSetJsonFilter {
Ok(Box::new(ValueSetJsonFilter { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data
.iter()
.map(|s| serde_json::from_str(s).map_err(|_| OperationError::SerdeJsonError))
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetJsonFilter { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and protofilter is foreign.
#[allow(clippy::should_implement_trait)]
@ -160,22 +151,6 @@ impl ValueSetT for ValueSetJsonFilter {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::JsonFilter {
set: self
.set
.iter()
.filter_map(|s| {
serde_json::to_string(s)
.inspect_err(|err| {
error!(?err, "A json filter value was corrupted during run-time")
})
.ok()
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::JsonFilt))
}

View file

@ -3,7 +3,6 @@ use compact_jwt::{crypto::JwsRs256Signer, JwsEs256Signer, JwsSigner};
use hashbrown::HashSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -144,18 +143,6 @@ impl ValueSetT for ValueSetJwsKeyEs256 {
.collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::JwsKeyEs256 { set: self.set.iter()
.map(|k| {
#[allow(clippy::expect_used)]
k.private_key_to_der()
.expect("Unable to process private key to der, likely corrupted. You must restore from backup.")
})
.map(|b| b.into())
.collect()
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set
@ -229,19 +216,6 @@ impl ValueSetJwsKeyRs256 {
Ok(Box::new(ValueSetJwsKeyRs256 { set }))
}
pub fn from_repl_v1(data: &[Base64UrlSafeData]) -> Result<ValueSet, OperationError> {
let set = data
.iter()
.map(|b| {
JwsRs256Signer::from_rs256_der(b.as_slice()).map_err(|e| {
debug!(?e, "Error occurred parsing RS256 DER");
OperationError::InvalidValueState
})
})
.collect::<Result<HashSet<_>, _>>()?;
Ok(Box::new(ValueSetJwsKeyRs256 { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and jwssigner is foreign
#[allow(clippy::should_implement_trait)]
@ -334,18 +308,6 @@ impl ValueSetT for ValueSetJwsKeyRs256 {
.collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::JwsKeyRs256 { set: self.set.iter()
.map(|k| {
#[allow(clippy::expect_used)]
k.private_key_to_der()
.expect("Unable to process private key to der, likely corrupted. You must restore from backup.")
})
.map(|b| b.into())
.collect()
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set

View file

@ -1,6 +1,5 @@
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::server::keys::KeyId;
use crate::value::{KeyStatus, KeyUsage};
@ -118,10 +117,6 @@ impl ValueSetKeyInternal {
Self::from_dbv_iter(keys.into_iter())
}
pub fn from_repl_v1(keys: &[DbValueKeyInternal]) -> Result<ValueSet, OperationError> {
Self::from_dbv_iter(keys.iter().cloned())
}
fn to_vec_dbvs(&self) -> Vec<DbValueKeyInternal> {
self.map
.iter()
@ -311,11 +306,6 @@ impl ValueSetT for ValueSetKeyInternal {
DbValueSetV2::KeyInternal(keys)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
let set = self.to_vec_dbvs();
ReplAttrV1::KeyInternal { set }
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = crate::value::PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::HexString))
}

View file

@ -20,7 +20,7 @@ use kanidm_proto::internal::{Filter as ProtoFilter, UiHint};
use crate::be::dbvalue::DbValueSetV2;
use crate::credential::{apppwd::ApplicationPassword, totp::Totp, Credential};
use crate::prelude::*;
use crate::repl::{cid::Cid, proto::ReplAttrV1};
use crate::repl::cid::Cid;
use crate::schema::SchemaAttribute;
use crate::server::keys::KeyId;
use crate::value::{Address, ApiToken, CredentialType, IntentTokenState, Oauth2Session, Session};
@ -148,8 +148,6 @@ pub trait ValueSetT: std::fmt::Debug + DynClone {
fn to_db_valueset_v2(&self) -> DbValueSetV2;
fn to_repl_v1(&self) -> ReplAttrV1;
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_>;
fn to_value_iter(&self) -> Box<dyn Iterator<Item = Value> + '_>;
@ -877,60 +875,6 @@ pub fn from_db_valueset_v2(dbvs: DbValueSetV2) -> Result<ValueSet, OperationErro
}
}
pub fn from_repl_v1(rv1: &ReplAttrV1) -> Result<ValueSet, OperationError> {
match rv1 {
ReplAttrV1::Iutf8 { set } => ValueSetIutf8::from_repl_v1(set),
ReplAttrV1::Utf8 { set } => ValueSetUtf8::from_repl_v1(set),
ReplAttrV1::IndexType { set } => ValueSetIndex::from_repl_v1(set),
ReplAttrV1::SyntaxType { set } => ValueSetSyntax::from_repl_v1(set),
ReplAttrV1::Cid { set } => ValueSetCid::from_repl_v1(set),
ReplAttrV1::Bool { set } => ValueSetBool::from_repl_v1(set),
ReplAttrV1::Uuid { set } => ValueSetUuid::from_repl_v1(set),
ReplAttrV1::Uint32 { set } => ValueSetUint32::from_repl_v1(set),
ReplAttrV1::Iname { set } => ValueSetIname::from_repl_v1(set),
ReplAttrV1::PrivateBinary { set } => ValueSetPrivateBinary::from_repl_v1(set),
ReplAttrV1::SecretValue { set } => ValueSetSecret::from_repl_v1(set),
ReplAttrV1::Reference { set } => ValueSetRefer::from_repl_v1(set),
ReplAttrV1::JwsKeyEs256 { set } => ValueSetJwsKeyEs256::from_repl_v1(set),
ReplAttrV1::JwsKeyRs256 { set } => ValueSetJwsKeyRs256::from_repl_v1(set),
ReplAttrV1::Spn { set } => ValueSetSpn::from_repl_v1(set),
ReplAttrV1::JsonFilter { set } => ValueSetJsonFilter::from_repl_v1(set),
ReplAttrV1::UiHint { set } => ValueSetUiHint::from_repl_v1(set),
ReplAttrV1::Address { set } => ValueSetAddress::from_repl_v1(set),
ReplAttrV1::EmailAddress { primary, set } => {
ValueSetEmailAddress::from_repl_v1(primary, set)
}
ReplAttrV1::PublicBinary { set } => ValueSetPublicBinary::from_repl_v1(set),
ReplAttrV1::Credential { set } => ValueSetCredential::from_repl_v1(set),
ReplAttrV1::IntentToken { set } => ValueSetIntentToken::from_repl_v1(set),
ReplAttrV1::Passkey { set } => ValueSetPasskey::from_repl_v1(set),
ReplAttrV1::AttestedPasskey { set } => ValueSetAttestedPasskey::from_repl_v1(set),
ReplAttrV1::DateTime { set } => ValueSetDateTime::from_repl_v1(set),
ReplAttrV1::Url { set } => ValueSetUrl::from_repl_v1(set),
ReplAttrV1::NsUniqueId { set } => ValueSetNsUniqueId::from_repl_v1(set),
ReplAttrV1::RestrictedString { set } => ValueSetRestricted::from_repl_v1(set),
ReplAttrV1::SshKey { set } => ValueSetSshKey::from_repl_v1(set),
ReplAttrV1::OauthScope { set } => ValueSetOauthScope::from_repl_v1(set),
ReplAttrV1::OauthScopeMap { set } => ValueSetOauthScopeMap::from_repl_v1(set),
ReplAttrV1::Oauth2Session { set } => ValueSetOauth2Session::from_repl_v1(set),
ReplAttrV1::Session { set } => ValueSetSession::from_repl_v1(set),
ReplAttrV1::ApiToken { set } => ValueSetApiToken::from_repl_v1(set),
ReplAttrV1::TotpSecret { set } => ValueSetTotpSecret::from_repl_v1(set),
ReplAttrV1::AuditLogString { map } => ValueSetAuditLogString::from_repl_v1(map),
ReplAttrV1::EcKeyPrivate { key } => ValueSetEcKeyPrivate::from_repl_v1(key),
ReplAttrV1::Image { set } => ValueSetImage::from_repl_v1(set),
ReplAttrV1::CredentialType { set } => ValueSetCredentialType::from_repl_v1(set),
ReplAttrV1::WebauthnAttestationCaList { ca_list } => {
ValueSetWebauthnAttestationCaList::from_repl_v1(ca_list)
}
ReplAttrV1::OauthClaimMap { set } => ValueSetOauthClaimMap::from_repl_v1(set),
ReplAttrV1::KeyInternal { set } => ValueSetKeyInternal::from_repl_v1(set),
ReplAttrV1::HexString { set } => ValueSetHexString::from_repl_v1(set),
ReplAttrV1::Certificate { set } => ValueSetCertificate::from_repl_v1(set),
ReplAttrV1::ApplicationPassword { set } => ValueSetApplicationPassword::from_repl_v1(set),
}
}
#[cfg(test)]
pub(crate) fn scim_json_reflexive(vs: ValueSet, data: &str) {
let scim_value = vs.to_scim_value().unwrap();
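
With `to_repl_v1` dropped from the trait and the `from_repl_v1` dispatcher removed, value sets keep a single serialisation path through `DbValueSetV2`. A trimmed, self-contained sketch of what remains (stand-in types, not the real trait):

#[derive(Debug, Clone, PartialEq)]
enum DbValueSetV2 {
    Utf8(Vec<String>),
}

trait ValueSetT {
    // `fn to_repl_v1(&self) -> ReplAttrV1;` no longer exists on the trait.
    fn to_db_valueset_v2(&self) -> DbValueSetV2;
}

struct ValueSetUtf8 {
    set: Vec<String>,
}

impl ValueSetT for ValueSetUtf8 {
    fn to_db_valueset_v2(&self) -> DbValueSetV2 {
        DbValueSetV2::Utf8(self.set.clone())
    }
}

fn main() {
    let vs = ValueSetUtf8 {
        set: vec!["example".to_string()],
    };
    assert_eq!(
        vs.to_db_valueset_v2(),
        DbValueSetV2::Utf8(vec!["example".to_string()])
    );
}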

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::value::NSUNIQUEID_RE;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -27,11 +26,6 @@ impl ValueSetNsUniqueId {
Ok(Box::new(ValueSetNsUniqueId { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetNsUniqueId { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and String is foreign.
#[allow(clippy::should_implement_trait)]
@ -127,12 +121,6 @@ impl ValueSetT for ValueSetNsUniqueId {
DbValueSetV2::NsUniqueId(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::NsUniqueId {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::Nsuniqueid))
}

View file

@ -3,7 +3,6 @@ use std::collections::{BTreeMap, BTreeSet};
use crate::be::dbvalue::{DbValueOauthClaimMap, DbValueOauthScopeMapV1};
use crate::prelude::*;
use crate::repl::proto::{ReplAttrV1, ReplOauthClaimMapV1, ReplOauthScopeMapV1};
use crate::schema::SchemaAttribute;
use crate::utils::str_join;
use crate::value::{OauthClaimMapJoin, OAUTHSCOPE_RE};
@ -33,11 +32,6 @@ impl ValueSetOauthScope {
Ok(Box::new(ValueSetOauthScope { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetOauthScope { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and String is foreign.
#[allow(clippy::should_implement_trait)]
@ -126,12 +120,6 @@ impl ValueSetT for ValueSetOauthScope {
DbValueSetV2::OauthScope(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::OauthScope {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::OauthScope))
}
@ -201,14 +189,6 @@ impl ValueSetOauthScopeMap {
Ok(Box::new(ValueSetOauthScopeMap { map }))
}
pub fn from_repl_v1(data: &[ReplOauthScopeMapV1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(|ReplOauthScopeMapV1 { refer, data }| (*refer, data.clone()))
.collect();
Ok(Box::new(ValueSetOauthScopeMap { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -336,19 +316,6 @@ impl ValueSetT for ValueSetOauthScopeMap {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::OauthScopeMap {
set: self
.map
.iter()
.map(|(u, m)| ReplOauthScopeMapV1 {
refer: *u,
data: m.iter().cloned().collect(),
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Refer))
}
@ -436,12 +403,6 @@ impl ValueSetOauthClaimMap {
Box::new(ValueSetOauthClaimMap { map })
}
/*
pub(crate) fn push(&mut self, claim: String, mapping: OauthClaimMapping) -> bool {
self.map.insert(claim, mapping).is_none()
}
*/
pub(crate) fn from_dbvs2(data: Vec<DbValueOauthClaimMap>) -> Result<ValueSet, OperationError> {
let map = data
.into_iter()
@ -458,35 +419,6 @@ impl ValueSetOauthClaimMap {
Ok(Box::new(ValueSetOauthClaimMap { map }))
}
pub(crate) fn from_repl_v1(data: &[ReplOauthClaimMapV1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(|ReplOauthClaimMapV1 { name, join, values }| {
(
name.clone(),
OauthClaimMapping {
join: (*join).into(),
values: values.clone(),
},
)
})
.collect();
Ok(Box::new(ValueSetOauthClaimMap { map }))
}
/*
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
pub(crate) fn from_iter<T>(iter: T) -> Option<Box<Self>>
where
T: IntoIterator<Item = (String, OauthClaimMapping)>,
{
let map = iter.into_iter().collect();
Some(Box::new(ValueSetOauthClaimMap { map }))
}
*/
fn trim(&mut self) {
self.map
.values_mut()
@ -720,20 +652,6 @@ impl ValueSetT for ValueSetOauthClaimMap {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::OauthClaimMap {
set: self
.map
.iter()
.map(|(name, mapping)| ReplOauthClaimMapV1 {
name: name.clone(),
join: mapping.join.into(),
values: mapping.values.clone(),
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Iutf8))
}

View file

@ -1,7 +1,6 @@
use std::collections::BTreeSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -27,11 +26,6 @@ impl ValueSetRestricted {
Ok(Box::new(ValueSetRestricted { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetRestricted { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and String is foreign.
#[allow(clippy::should_implement_trait)]
@ -159,12 +153,6 @@ impl ValueSetT for ValueSetRestricted {
DbValueSetV2::RestrictedString(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::RestrictedString {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::RestrictedString))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetSecret {
Ok(Box::new(ValueSetSecret { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetSecret { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and String is foreign.
#[allow(clippy::should_implement_trait)]
@ -110,12 +104,6 @@ impl ValueSetT for ValueSetSecret {
DbValueSetV2::SecretValue(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::SecretValue {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().map(|_| PartialValue::SecretValue))
}

View file

@ -9,10 +9,6 @@ use crate::be::dbvalue::{
};
use crate::prelude::*;
use crate::repl::cid::Cid;
use crate::repl::proto::{
ReplApiTokenScopeV1, ReplApiTokenV1, ReplAttrV1, ReplIdentityIdV1, ReplOauth2SessionV1,
ReplSessionStateV1,
};
use crate::schema::SchemaAttribute;
use crate::value::{
ApiToken, ApiTokenScope, AuthType, Oauth2Session, Session, SessionScope, SessionState,
@ -199,10 +195,6 @@ impl ValueSetSession {
Self::from_dbv_iter(data.iter())
}
pub fn from_repl_v1(data: &[DbValueSession]) -> Result<ValueSet, OperationError> {
Self::from_dbv_iter(data.iter())
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -396,12 +388,6 @@ impl ValueSetT for ValueSetSession {
DbValueSetV2::Session(self.to_vec_dbvs())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Session {
set: self.to_vec_dbvs(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Refer))
}
@ -685,69 +671,6 @@ impl ValueSetOauth2Session {
Ok(Box::new(ValueSetOauth2Session { map, rs_filter }))
}
pub fn from_repl_v1(data: &[ReplOauth2SessionV1]) -> Result<ValueSet, OperationError> {
let mut rs_filter = u128::MIN;
let map = data
.iter()
.filter_map(
|ReplOauth2SessionV1 {
refer,
parent,
state,
issued_at,
rs_uuid,
}| {
// Convert things.
let issued_at = OffsetDateTime::parse(issued_at, &Rfc3339)
.map(|odt| odt.to_offset(time::UtcOffset::UTC))
.map_err(|e| {
admin_error!(
?e,
"Invalidating session {} due to invalid issued_at timestamp",
refer
)
})
.ok()?;
// This is a bit annoying. In the case we can't parse the optional
// expiry, we need to NOT return the session so that it's immediately
// invalidated. To do this we have to invert some of the options involved
// here.
let state = match state {
ReplSessionStateV1::ExpiresAt(e_inner) => {
OffsetDateTime::parse(e_inner, &Rfc3339)
.map(|odt| odt.to_offset(time::UtcOffset::UTC))
.map(SessionState::ExpiresAt)
.map_err(|e| {
admin_error!(
?e,
"Invalidating session {} due to invalid expiry timestamp",
refer
)
})
.ok()?
}
ReplSessionStateV1::Never => SessionState::NeverExpires,
ReplSessionStateV1::RevokedAt(rc) => SessionState::RevokedAt(rc.into()),
};
rs_filter |= rs_uuid.as_u128();
Some((
*refer,
Oauth2Session {
parent: *parent,
state,
issued_at,
rs_uuid: *rs_uuid,
},
))
},
)
.collect();
Ok(Box::new(ValueSetOauth2Session { rs_filter, map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -993,38 +916,6 @@ impl ValueSetT for ValueSetOauth2Session {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Oauth2Session {
set: self
.map
.iter()
.map(|(u, m)| ReplOauth2SessionV1 {
refer: *u,
parent: m.parent,
state: match &m.state {
SessionState::ExpiresAt(odt) => {
debug_assert_eq!(odt.offset(), time::UtcOffset::UTC);
#[allow(clippy::expect_used)]
odt.format(&Rfc3339)
.map(ReplSessionStateV1::ExpiresAt)
.expect("Failed to format timestamp into RFC3339!")
}
SessionState::NeverExpires => ReplSessionStateV1::Never,
SessionState::RevokedAt(c) => ReplSessionStateV1::RevokedAt(c.into()),
},
issued_at: {
debug_assert_eq!(m.issued_at.offset(), time::UtcOffset::UTC);
#[allow(clippy::expect_used)]
m.issued_at
.format(&Rfc3339)
.expect("Failed to format timestamp into RFC3339")
},
rs_uuid: m.rs_uuid,
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Refer))
}
@ -1210,82 +1101,6 @@ impl ValueSetApiToken {
Ok(Box::new(ValueSetApiToken { map }))
}
pub fn from_repl_v1(data: &[ReplApiTokenV1]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.filter_map(
|ReplApiTokenV1 {
refer,
label,
expiry,
issued_at,
issued_by,
scope,
}| {
// Convert things.
let issued_at = OffsetDateTime::parse(issued_at, &Rfc3339)
.map(|odt| odt.to_offset(time::UtcOffset::UTC))
.map_err(|e| {
admin_error!(
?e,
"Invalidating session {} due to invalid issued_at timestamp",
refer
)
})
.ok()?;
// This is a bit annoying. In the case we can't parse the optional
// expiry, we need to NOT return the session so that it's immediately
// invalidated. To do this we have to invert some of the options involved
// here.
let expiry = expiry
.as_ref()
.map(|e_inner| {
OffsetDateTime::parse(e_inner, &Rfc3339)
.map(|odt| odt.to_offset(time::UtcOffset::UTC))
// We now have an
// Option<Result<ODT, _>>
})
.transpose()
// Result<Option<ODT>, _>
.map_err(|e| {
admin_error!(
?e,
"Invalidating session {} due to invalid expiry timestamp",
refer
)
})
// Option<Option<ODT>>
.ok()?;
let issued_by = match issued_by {
ReplIdentityIdV1::Internal => IdentityId::Internal,
ReplIdentityIdV1::Uuid(u) => IdentityId::User(*u),
ReplIdentityIdV1::Synch(u) => IdentityId::Synch(*u),
};
let scope = match scope {
ReplApiTokenScopeV1::ReadOnly => ApiTokenScope::ReadOnly,
ReplApiTokenScopeV1::ReadWrite => ApiTokenScope::ReadWrite,
ReplApiTokenScopeV1::Synchronise => ApiTokenScope::Synchronise,
};
Some((
*refer,
ApiToken {
label: label.to_string(),
expiry,
issued_at,
issued_by,
scope,
},
))
},
)
.collect();
Ok(Box::new(ValueSetApiToken { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -1432,43 +1247,6 @@ impl ValueSetT for ValueSetApiToken {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::ApiToken {
set: self
.map
.iter()
.map(|(u, m)| ReplApiTokenV1 {
refer: *u,
label: m.label.clone(),
expiry: m.expiry.map(|odt| {
debug_assert_eq!(odt.offset(), time::UtcOffset::UTC);
#[allow(clippy::expect_used)]
odt.format(&Rfc3339)
.expect("Failed to format timestamp into RFC3339")
}),
issued_at: {
debug_assert_eq!(m.issued_at.offset(), time::UtcOffset::UTC);
#[allow(clippy::expect_used)]
m.issued_at
.format(&Rfc3339)
.expect("Failed to format timestamp into RFC3339")
},
issued_by: match m.issued_by {
IdentityId::Internal => ReplIdentityIdV1::Internal,
IdentityId::User(u) => ReplIdentityIdV1::Uuid(u),
IdentityId::Synch(u) => ReplIdentityIdV1::Synch(u),
},
scope: match m.scope {
ApiTokenScope::ReadOnly => ReplApiTokenScopeV1::ReadOnly,
ApiTokenScope::ReadWrite => ReplApiTokenScopeV1::ReadWrite,
ApiTokenScope::Synchronise => ReplApiTokenScopeV1::Synchronise,
},
})
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Refer))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetSpn {
Ok(Box::new(ValueSetSpn { set }))
}
pub fn from_repl_v1(data: &[(String, String)]) -> Result<ValueSet, OperationError> {
let set = data.iter().map(|(a, b)| (a.clone(), b.clone())).collect();
Ok(Box::new(ValueSetSpn { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -131,12 +125,6 @@ impl ValueSetT for ValueSetSpn {
DbValueSetV2::Spn(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Spn {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(
self.set

View file

@ -3,7 +3,6 @@ use std::collections::BTreeMap;
use crate::be::dbvalue::DbValueTaggedStringV1;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -43,22 +42,6 @@ impl ValueSetSshKey {
Ok(Box::new(ValueSetSshKey { map }))
}
pub fn from_repl_v1(data: &[(String, String)]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(|(tag, data)| {
SshPublicKey::from_string(data)
.map_err(|err| {
warn!(%tag, ?err, "discarding corrupted ssh public key");
OperationError::VS0001IncomingReplSshPublicKey
})
.map(|pk| (tag.clone(), pk))
})
.collect::<Result<BTreeMap<_, _>, _>>()?;
Ok(Box::new(ValueSetSshKey { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -178,16 +161,6 @@ impl ValueSetT for ValueSetSshKey {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::SshKey {
set: self
.map
.iter()
.map(|(tag, key)| (tag.clone(), key.to_string()))
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::SshKey))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,12 +25,6 @@ impl ValueSetSyntax {
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetSyntax { set }))
}
pub fn from_repl_v1(data: &[u16]) -> Result<ValueSet, OperationError> {
let set: Result<_, _> = data.iter().copied().map(SyntaxType::try_from).collect();
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetSyntax { set }))
}
}
impl FromIterator<SyntaxType> for Option<Box<ValueSetSyntax>> {
@ -122,12 +115,6 @@ impl ValueSetT for ValueSetSyntax {
DbValueSetV2::SyntaxType(self.set.iter().map(|s| *s as u16).collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::SyntaxType {
set: self.set.iter().map(|s| *s as u16).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::Syntax))
}

View file

@ -5,7 +5,6 @@ use crate::credential::totp::Totp;
use crate::prelude::*;
use crate::be::dbvalue::DbTotpV1;
use crate::repl::proto::{ReplAttrV1, ReplTotpV1};
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -37,18 +36,6 @@ impl ValueSetTotpSecret {
Ok(Box::new(ValueSetTotpSecret { map }))
}
pub fn from_repl_v1(data: &[(String, ReplTotpV1)]) -> Result<ValueSet, OperationError> {
let map = data
.iter()
.map(|(l, data)| {
Totp::try_from(data)
.map_err(|()| OperationError::InvalidValueState)
.map(|t| (l.clone(), t))
})
.collect::<Result<_, _>>()?;
Ok(Box::new(ValueSetTotpSecret { map }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and tuples are always foreign.
#[allow(clippy::should_implement_trait)]
@ -143,16 +130,6 @@ impl ValueSetT for ValueSetTotpSecret {
)
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::TotpSecret {
set: self
.map
.iter()
.map(|(label, totp)| (label.clone(), totp.to_repl_v1()))
.collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.map.keys().cloned().map(PartialValue::Utf8))
}

View file

@ -1,7 +1,6 @@
use std::collections::BTreeSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -28,12 +27,6 @@ impl ValueSetUiHint {
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetUiHint { set }))
}
pub fn from_repl_v1(data: &[u16]) -> Result<ValueSet, OperationError> {
let set: Result<_, _> = data.iter().copied().map(UiHint::try_from).collect();
let set = set.map_err(|_| OperationError::InvalidValueState)?;
Ok(Box::new(ValueSetUiHint { set }))
}
}
impl ValueSetT for ValueSetUiHint {
@ -111,12 +104,6 @@ impl ValueSetT for ValueSetUiHint {
DbValueSetV2::UiHint(self.set.iter().map(|u| *u as u16).collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::UiHint {
set: self.set.iter().map(|u| *u as u16).collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::UiHint))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetUint32 {
Ok(Box::new(ValueSetUint32 { set }))
}
pub fn from_repl_v1(data: &[u32]) -> Result<ValueSet, OperationError> {
let set = data.iter().copied().collect();
Ok(Box::new(ValueSetUint32 { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and u32 is foreign.
#[allow(clippy::should_implement_trait)]
@ -129,12 +123,6 @@ impl ValueSetT for ValueSetUint32 {
DbValueSetV2::Uint32(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Uint32 {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::new_uint32))
}

View file

@ -1,7 +1,6 @@
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetUrl {
Ok(Box::new(ValueSetUrl { set }))
}
pub fn from_repl_v1(data: &[Url]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetUrl { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and Url is foreign.
#[allow(clippy::should_implement_trait)]
@ -123,12 +117,6 @@ impl ValueSetT for ValueSetUrl {
DbValueSetV2::Url(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Url {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().cloned().map(PartialValue::Url))
}

View file

@ -1,7 +1,6 @@
use std::collections::BTreeSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::utils::trigraph_iter;
use crate::valueset::{DbValueSetV2, ValueSet};
@ -26,11 +25,6 @@ impl ValueSetUtf8 {
let set = data.into_iter().collect();
Ok(Box::new(ValueSetUtf8 { set }))
}
pub fn from_repl_v1(data: &[String]) -> Result<ValueSet, OperationError> {
let set = data.iter().cloned().collect();
Ok(Box::new(ValueSetUtf8 { set }))
}
}
impl ValueSetT for ValueSetUtf8 {
@ -161,12 +155,6 @@ impl ValueSetT for ValueSetUtf8 {
DbValueSetV2::Utf8(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Utf8 {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().map(|i| PartialValue::new_utf8s(i.as_str())))
}

View file

@ -3,7 +3,6 @@ use std::collections::BTreeSet;
use smolset::SmolSet;
use crate::prelude::*;
use crate::repl::proto::ReplAttrV1;
use crate::schema::SchemaAttribute;
use crate::valueset::{uuid_to_proto_string, DbValueSetV2, ValueSet};
@ -28,11 +27,6 @@ impl ValueSetUuid {
Ok(Box::new(ValueSetUuid { set }))
}
pub fn from_repl_v1(data: &[Uuid]) -> Result<ValueSet, OperationError> {
let set = data.iter().copied().collect();
Ok(Box::new(ValueSetUuid { set }))
}
// We need to allow this, because rust doesn't allow us to impl FromIterator on foreign
// types, and uuid is foreign.
#[allow(clippy::should_implement_trait)]
@ -127,12 +121,6 @@ impl ValueSetT for ValueSetUuid {
DbValueSetV2::Uuid(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Uuid {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::Uuid))
}
@ -302,12 +290,6 @@ impl ValueSetT for ValueSetRefer {
DbValueSetV2::Reference(self.set.iter().cloned().collect())
}
fn to_repl_v1(&self) -> ReplAttrV1 {
ReplAttrV1::Reference {
set: self.set.iter().cloned().collect(),
}
}
fn to_partialvalue_iter(&self) -> Box<dyn Iterator<Item = PartialValue> + '_> {
Box::new(self.set.iter().copied().map(PartialValue::Refer))
}