Remove older migrations

parent b361df2675
commit 722153eedf

server/lib/src
@@ -289,15 +289,6 @@ lazy_static! {
         ..Default::default()
     };

-    /// Self-write of mail
-    pub static ref IDM_PEOPLE_SELF_WRITE_MAIL_V1: BuiltinGroup = BuiltinGroup {
-        name: "idm_people_self_write_mail",
-        description: "Builtin IDM Group for people accounts to update their own mail.",
-        uuid: UUID_IDM_PEOPLE_SELF_MAIL_WRITE,
-        members: Vec::with_capacity(0),
-        ..Default::default()
-    };
-
     /// Self-write of mail
     pub static ref IDM_PEOPLE_SELF_MAIL_WRITE_DL7: BuiltinGroup = BuiltinGroup {
         name: "idm_people_self_mail_write",
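Both definitions above lean on Rust's struct-update syntax: name the fields you care about, and take the rest from Default. A self-contained sketch of the pattern follows; the types are illustrative stand-ins, not kanidm's real BuiltinGroup.

// Hypothetical stand-in for kanidm's BuiltinGroup; field names are invented.
#[derive(Debug, Default)]
struct BuiltinGroup {
    name: &'static str,
    description: &'static str,
    members: Vec<u128>, // placeholder for member UUIDs
    dyngroup: bool,
}

fn main() {
    // Explicit fields first; every remaining field comes from Default::default().
    let group = BuiltinGroup {
        name: "idm_people_self_mail_write",
        description: "Builtin IDM Group for people accounts to update their own mail.",
        members: Vec::with_capacity(0),
        ..Default::default()
    };
    assert!(!group.dyngroup); // defaulted, never named above
    println!("{group:?}");
}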
@@ -465,10 +456,12 @@ pub fn idm_builtin_non_admin_groups() -> Vec<&'static BuiltinGroup> {
         &IDM_ALL_ACCOUNTS,
         &BUILTIN_IDM_RADIUS_SERVERS_V1,
         &BUILTIN_IDM_MAIL_SERVERS_DL8,
-        &IDM_PEOPLE_SELF_WRITE_MAIL_V1,
+        &BUILTIN_GROUP_PEOPLE_SELF_NAME_WRITE_DL7,
+        &IDM_PEOPLE_SELF_MAIL_WRITE_DL7,
+        &BUILTIN_GROUP_CLIENT_CERTIFICATE_ADMINS_DL7,
         // Write deps on read, so write must be added first.
         // All members must exist before we write HP
-        &IDM_HIGH_PRIVILEGE_V1,
+        &IDM_HIGH_PRIVILEGE_DL7,
         // other things
         &IDM_UI_ENABLE_EXPERIMENTAL_FEATURES,
         &IDM_ACCOUNT_MAIL_READ,
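The ordering comments in this list matter: a group whose member list refers to other groups can only be created once those groups exist, which is why the high-privilege aggregate comes after the groups it contains. A toy model of that constraint (names and shapes hypothetical, not kanidm's API):

use std::collections::HashSet;

// Each entry is (group name, names of groups it lists as members).
fn create_in_order(groups: &[(&str, &[&str])]) -> Result<(), String> {
    let mut existing: HashSet<&str> = HashSet::new();
    for (name, members) in groups.iter().copied() {
        for m in members {
            if !existing.contains(m) {
                return Err(format!("{name}: member {m} does not exist yet"));
            }
        }
        existing.insert(name);
    }
    Ok(())
}

fn main() {
    // The aggregate comes last, like IDM_HIGH_PRIVILEGE_DL7 above.
    let ordered: &[(&str, &[&str])] = &[
        ("idm_people_self_mail_write", &[]),
        ("idm_client_certificate_admins", &[]),
        ("idm_high_privilege", &["idm_client_certificate_admins"]),
    ];
    assert!(create_in_order(ordered).is_ok());
}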
@@ -54,13 +54,9 @@ pub type DomainVersion = u32;
 /// previously.
 pub const DOMAIN_LEVEL_0: DomainVersion = 0;

-/// Deprecated as of 1.3.0
-pub const DOMAIN_LEVEL_5: DomainVersion = 5;
-
 /// Domain Level introduced with 1.2.0.
 /// Deprecated as of 1.4.0
 pub const DOMAIN_LEVEL_6: DomainVersion = 6;
-pub const PATCH_LEVEL_1: u32 = 1;

 /// Domain Level introduced with 1.3.0.
 /// Deprecated as of 1.5.0
@@ -85,7 +81,7 @@ pub const DOMAIN_LEVEL_11: DomainVersion = 11;

 // The minimum level that we can re-migrate from.
 // This should be DOMAIN_TGT_LEVEL minus 2
-pub const DOMAIN_MIN_REMIGRATION_LEVEL: DomainVersion = DOMAIN_LEVEL_7;
+pub const DOMAIN_MIN_REMIGRATION_LEVEL: DomainVersion = DOMAIN_LEVEL_6;
 // The minimum supported domain functional level (for replication)
 pub const DOMAIN_MIN_LEVEL: DomainVersion = DOMAIN_TGT_LEVEL;
 // The previous releases domain functional level
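The "should be DOMAIN_TGT_LEVEL minus 2" rule above is enforced only by a comment. As a sketch, the same invariant could be checked at compile time with a const assertion; the constants and values here are illustrative, not the crate's real ones.

type DomainVersion = u32;

const DOMAIN_TGT_LEVEL: DomainVersion = 8; // assumed target level for the sketch
const DOMAIN_MIN_REMIGRATION_LEVEL: DomainVersion = 6;

// Fails the build if the two constants ever drift apart.
const _: () = assert!(DOMAIN_MIN_REMIGRATION_LEVEL == DOMAIN_TGT_LEVEL - 2);

fn main() {
    println!("remigration floor: {DOMAIN_MIN_REMIGRATION_LEVEL}");
}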
@@ -7,8 +7,6 @@
 use std::iter::once;
-use std::sync::Arc;

-use compact_jwt::JwsEs256Signer;
 use rand::prelude::*;
 use regex::Regex;
 use tracing::trace;

@@ -61,13 +59,6 @@ impl Plugin for Domain {
     }
 }

-fn generate_domain_cookie_key() -> Value {
-    let mut key = [0; 64];
-    let mut rng = StdRng::from_entropy();
-    rng.fill(&mut key);
-    Value::new_privatebinary(&key)
-}
-
 impl Domain {
     /// Generates the cookie key for the domain.
     fn modify_inner<T: Clone + std::fmt::Debug>(
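The deleted generate_domain_cookie_key() reduces to filling a 64-byte buffer from a CSPRNG. A standalone equivalent, assuming rand 0.8 and using RngCore::fill_bytes in place of the Fill trait:

use rand::rngs::StdRng;
use rand::{RngCore, SeedableRng};

fn generate_cookie_key() -> [u8; 64] {
    // Seed a cryptographically secure generator from OS entropy,
    // then fill the whole buffer.
    let mut key = [0u8; 64];
    let mut rng = StdRng::from_entropy();
    rng.fill_bytes(&mut key);
    key
}

fn main() {
    let key = generate_cookie_key();
    assert_eq!(key.len(), 64);
}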
@@ -79,11 +70,14 @@ impl Domain {
             && e.attribute_equality(Attribute::Uuid, &PVUUID_DOMAIN_INFO)
         {
             // Validate the domain ldap basedn syntax.
-            if let Some(basedn) = e
-                .get_ava_single_iutf8(Attribute::DomainLdapBasedn) {
-
+            if let Some(basedn) = e.get_ava_single_iutf8(Attribute::DomainLdapBasedn) {
                 if !DOMAIN_LDAP_BASEDN_RE.is_match(basedn) {
-                    error!("Invalid {} '{}'. Must pass regex \"{}\"", Attribute::DomainLdapBasedn,basedn, *DOMAIN_LDAP_BASEDN_RE);
+                    error!(
+                        "Invalid {} '{}'. Must pass regex \"{}\"",
+                        Attribute::DomainLdapBasedn,
+                        basedn,
+                        *DOMAIN_LDAP_BASEDN_RE
+                    );
                     return Err(OperationError::InvalidState);
                 }
             }
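The same check, pulled out into a sketch built on the regex crate. The pattern below is hypothetical, since the real DOMAIN_LDAP_BASEDN_RE is defined elsewhere in the crate, but the shape (lazily built regex, reject on mismatch) matches the code above.

use regex::Regex;
use std::sync::OnceLock;

fn basedn_re() -> &'static Regex {
    static RE: OnceLock<Regex> = OnceLock::new();
    RE.get_or_init(|| {
        // Hypothetical pattern: one or more dc=/o=/ou= components, comma separated.
        Regex::new(r"^(dc|o|ou)=[a-zA-Z0-9]+(,(dc|o|ou)=[a-zA-Z0-9]+)*$")
            .expect("invalid regex")
    })
}

fn validate_basedn(basedn: &str) -> Result<(), String> {
    if basedn_re().is_match(basedn) {
        Ok(())
    } else {
        Err(format!(
            "Invalid basedn '{basedn}'. Must pass regex \"{}\"",
            basedn_re()
        ))
    }
}

fn main() {
    assert!(validate_basedn("dc=example,dc=com").is_ok());
    assert!(validate_basedn("not a basedn").is_err());
}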
@@ -117,38 +111,18 @@ impl Domain {
             // So effectively we only skip setting this value after we know that we are at DL12
             // since we could never go back to anything lower than 10 at that point.
             if DOMAIN_MIN_REMIGRATION_LEVEL < DOMAIN_LEVEL_10
-                && !e.attribute_pres(Attribute::DomainDisplayName) {
-                let domain_display_name = Value::new_utf8(format!("Kanidm {}", qs.get_domain_name()));
-                security_info!("plugin_domain: setting default domain_display_name to {:?}", domain_display_name);
+                && !e.attribute_pres(Attribute::DomainDisplayName)
+            {
+                let domain_display_name =
+                    Value::new_utf8(format!("Kanidm {}", qs.get_domain_name()));
+                security_info!(
+                    "plugin_domain: setting default domain_display_name to {:?}",
+                    domain_display_name
+                );

                 e.set_ava(&Attribute::DomainDisplayName, once(domain_display_name));
             }
-
-            if qs.get_domain_version() < DOMAIN_LEVEL_6 && !e.attribute_pres(Attribute::FernetPrivateKeyStr) {
-                security_info!("regenerating domain token encryption key");
-                let k = fernet::Fernet::generate_key();
-                let v = Value::new_secret_str(&k);
-                e.add_ava(Attribute::FernetPrivateKeyStr, v);
-            }
-
-            if qs.get_domain_version() < DOMAIN_LEVEL_6 && !e.attribute_pres(Attribute::Es256PrivateKeyDer) {
-                security_info!("regenerating domain es256 private key");
-                let der = JwsEs256Signer::generate_es256()
-                    .and_then(|jws| jws.private_key_to_der())
-                    .map_err(|e| {
-                        admin_error!(err = ?e, "Unable to generate ES256 JwsSigner private key");
-                        OperationError::CryptographyError
-                    })?;
-                let v = Value::new_privatebinary(&der);
-                e.add_ava(Attribute::Es256PrivateKeyDer, v);
-            }
-
-            if qs.get_domain_version() < DOMAIN_LEVEL_6 && !e.attribute_pres(Attribute::PrivateCookieKey) {
-                security_info!("regenerating domain cookie key");
-                e.add_ava(Attribute::PrivateCookieKey, generate_domain_cookie_key());
-            }
-
             trace!(?e);
             Ok(())
         } else {
             Ok(())
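Of the three deleted key-regeneration branches, the fernet one is the simplest to show standalone. A sketch assuming the fernet crate, with a round-trip added to prove the generated key is usable:

fn main() {
    // Generate a fresh base64 key, exactly as the deleted branch did.
    let k = fernet::Fernet::generate_key();
    let f = fernet::Fernet::new(&k).expect("generated key must be valid");

    // Round-trip a payload to demonstrate the key works.
    let token = f.encrypt(b"domain token payload");
    let plain = f.decrypt(&token).expect("token must decrypt");
    assert_eq!(plain, b"domain token payload");
}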
@@ -45,7 +45,7 @@ impl Plugin for JwsKeygen {

 impl JwsKeygen {
     fn modify_inner<T: Clone>(
-        qs: &mut QueryServerWriteTransaction,
+        _qs: &mut QueryServerWriteTransaction,
         cand: &mut [Entry<EntryInvalid, T>],
     ) -> Result<(), OperationError> {
         cand.iter_mut().try_for_each(|e| {
@@ -88,20 +88,6 @@ impl JwsKeygen {
             }
         }

-        if qs.get_domain_version() < DOMAIN_LEVEL_6 &&
-            (e.attribute_equality(Attribute::Class, &EntryClass::ServiceAccount.into()) ||
-             e.attribute_equality(Attribute::Class, &EntryClass::SyncAccount.into())) &&
-            !e.attribute_pres(Attribute::JwsEs256PrivateKey) {
-            security_info!("regenerating jws es256 private key");
-            let jwssigner = JwsEs256Signer::generate_es256()
-                .map_err(|e| {
-                    admin_error!(err = ?e, "Unable to generate ES256 JwsSigner private key");
-                    OperationError::CryptographyError
-                })?;
-            let v = Value::JwsKeyEs256(jwssigner);
-            e.add_ava(Attribute::JwsEs256PrivateKey, v);
-        }
-
         Ok(())
     })
 }
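The ES256 branches deleted here and in the domain plugin above boil down to two compact_jwt calls that are visible verbatim in the diff. A reduced sketch, with the error mapped to a plain string instead of OperationError::CryptographyError:

use compact_jwt::JwsEs256Signer;

fn generate_es256_private_key_der() -> Result<Vec<u8>, &'static str> {
    // Both calls appear verbatim in the removed code paths.
    JwsEs256Signer::generate_es256()
        .and_then(|jws| jws.private_key_to_der())
        .map_err(|_| "Unable to generate ES256 JwsSigner private key")
}

fn main() {
    let der = generate_es256_private_key_der().expect("key generation failed");
    assert!(!der.is_empty());
    println!("generated {} DER bytes", der.len());
}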
@@ -294,238 +294,6 @@ impl QueryServerWriteTransaction<'_> {
         }
     }

-    /// Migration domain level 6 to 7
-    #[instrument(level = "info", skip_all)]
-    pub(crate) fn migrate_domain_6_to_7(&mut self) -> Result<(), OperationError> {
-        if !cfg!(test) && DOMAIN_MAX_LEVEL < DOMAIN_LEVEL_7 {
-            error!("Unable to raise domain level from 6 to 7.");
-            return Err(OperationError::MG0004DomainLevelInDevelopment);
-        }
-
-        // ============== Apply constraints ===============
-
-        // Due to changes in gidnumber allocation, in the *extremely* unlikely
-        // case that a user's ID was generated outside the valid range, we re-request
-        // the creation of their gid number to proceed.
-        let filter = filter!(f_and!([
-            f_or!([
-                f_eq(Attribute::Class, EntryClass::PosixAccount.into()),
-                f_eq(Attribute::Class, EntryClass::PosixGroup.into())
-            ]),
-            // This logic gets a bit messy but it would be:
-            // If ! (
-            //    (GID_REGULAR_USER_MIN < value < GID_REGULAR_USER_MAX) ||
-            //    (GID_UNUSED_A_MIN < value < GID_UNUSED_A_MAX) ||
-            //    (GID_UNUSED_B_MIN < value < GID_UNUSED_B_MAX) ||
-            //    (GID_UNUSED_C_MIN < value < GID_UNUSED_D_MAX)
-            // )
-            f_andnot(f_or!([
-                f_and!([
-                    // The gid value must be less than GID_REGULAR_USER_MAX
-                    f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_REGULAR_USER_MAX)
-                    ),
-                    // This bit of mental gymnastics is "greater than".
-                    // The gid value must not be less than USER_MIN
-                    f_andnot(f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_REGULAR_USER_MIN)
-                    ))
-                ]),
-                f_and!([
-                    f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_A_MAX)
-                    ),
-                    f_andnot(f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_A_MIN)
-                    ))
-                ]),
-                f_and!([
-                    f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_B_MAX)
-                    ),
-                    f_andnot(f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_B_MIN)
-                    ))
-                ]),
-                // If both of these conditions are true we get:
-                // C_MIN < value < D_MAX, which the outer and-not inverts.
-                f_and!([
-                    // The gid value must be less than GID_UNUSED_D_MAX
-                    f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_D_MAX)
-                    ),
-                    // This bit of mental gymnastics is "greater than".
-                    // The gid value must not be less than C_MIN
-                    f_andnot(f_lt(
-                        Attribute::GidNumber,
-                        PartialValue::Uint32(crate::plugins::gidnumber::GID_UNUSED_C_MIN)
-                    ))
-                ]),
-            ]))
-        ]));
-
-        let results = self.internal_search(filter).map_err(|err| {
-            error!(?err, "migrate_domain_6_to_7 -> Error");
-            err
-        })?;
-
-        if !results.is_empty() {
-            error!("Unable to proceed. Not all entries meet gid/uid constraints.");
-            for entry in results {
-                error!(gid_invalid = ?entry.get_display_id());
-            }
-            return Err(OperationError::MG0005GidConstraintsNotMet);
-        }
-
-        // =========== Apply changes ==============
-
-        // For each oauth2 client, if it is missing a landing page then we clone the origin
-        // into landing. This is because previously we implied the landing to be origin if
-        // unset, but now landing is the primary url and implies an origin.
-        let filter = filter!(f_and!([
-            f_eq(Attribute::Class, EntryClass::OAuth2ResourceServer.into()),
-            f_pres(Attribute::OAuth2RsOrigin),
-            f_andnot(f_pres(Attribute::OAuth2RsOriginLanding)),
-        ]));
-
-        let pre_candidates = self.internal_search(filter).map_err(|err| {
-            error!(?err, "migrate_domain_6_to_7 internal search failure");
-            err
-        })?;
-
-        let modset: Vec<_> = pre_candidates
-            .into_iter()
-            .filter_map(|ent| {
-                ent.get_ava_single_url(Attribute::OAuth2RsOrigin)
-                    .map(|origin_url| {
-                        // Copy the origin url to the landing.
-                        let modlist = vec![Modify::Present(
-                            Attribute::OAuth2RsOriginLanding,
-                            Value::Url(origin_url.clone()),
-                        )];
-
-                        (ent.get_uuid(), ModifyList::new_list(modlist))
-                    })
-            })
-            .collect();
-
-        // If there is nothing, we don't need to do anything.
-        if !modset.is_empty() {
-            self.internal_batch_modify(modset.into_iter())?;
-        }
-
-        // Do this before schema change since domain info has cookie key
-        // as may at this point.
-        //
-        // Domain info should have the attribute private cookie key removed.
-        let modlist = ModifyList::new_list(vec![
-            Modify::Purged(Attribute::PrivateCookieKey),
-            Modify::Purged(Attribute::Es256PrivateKeyDer),
-            Modify::Purged(Attribute::FernetPrivateKeyStr),
-        ]);
-
-        self.internal_modify_uuid(UUID_DOMAIN_INFO, &modlist)?;
-
-        let filter = filter!(f_or!([
-            f_eq(Attribute::Class, EntryClass::ServiceAccount.into()),
-            f_eq(Attribute::Class, EntryClass::SyncAccount.into())
-        ]));
-
-        let modlist = ModifyList::new_list(vec![Modify::Purged(Attribute::JwsEs256PrivateKey)]);
-
-        self.internal_modify(&filter, &modlist)?;
-
-        // Now update schema
-        let idm_schema_classes = [
-            SCHEMA_ATTR_PATCH_LEVEL_DL7.clone().into(),
-            SCHEMA_ATTR_DOMAIN_DEVELOPMENT_TAINT_DL7.clone().into(),
-            SCHEMA_ATTR_REFERS_DL7.clone().into(),
-            SCHEMA_ATTR_CERTIFICATE_DL7.clone().into(),
-            SCHEMA_ATTR_OAUTH2_RS_ORIGIN_DL7.clone().into(),
-            SCHEMA_ATTR_OAUTH2_STRICT_REDIRECT_URI_DL7.clone().into(),
-            SCHEMA_ATTR_MAIL_DL7.clone().into(),
-            SCHEMA_ATTR_LEGALNAME_DL7.clone().into(),
-            SCHEMA_ATTR_DISPLAYNAME_DL7.clone().into(),
-            SCHEMA_CLASS_DOMAIN_INFO_DL7.clone().into(),
-            SCHEMA_CLASS_SERVICE_ACCOUNT_DL7.clone().into(),
-            SCHEMA_CLASS_SYNC_ACCOUNT_DL7.clone().into(),
-            SCHEMA_CLASS_CLIENT_CERTIFICATE_DL7.clone().into(),
-            SCHEMA_CLASS_OAUTH2_RS_DL7.clone().into(),
-        ];
-
-        idm_schema_classes
-            .into_iter()
-            .try_for_each(|entry| self.internal_migrate_or_create(entry))
-            .map_err(|err| {
-                error!(?err, "migrate_domain_6_to_7 -> Error");
-                err
-            })?;
-
-        self.reload()?;
-
-        // Update access controls
-        let idm_data = [
-            BUILTIN_GROUP_PEOPLE_SELF_NAME_WRITE_DL7
-                .clone()
-                .try_into()?,
-            IDM_PEOPLE_SELF_MAIL_WRITE_DL7.clone().try_into()?,
-            BUILTIN_GROUP_CLIENT_CERTIFICATE_ADMINS_DL7
-                .clone()
-                .try_into()?,
-            IDM_HIGH_PRIVILEGE_DL7.clone().try_into()?,
-        ];
-
-        idm_data
-            .into_iter()
-            .try_for_each(|entry| {
-                self.internal_migrate_or_create_ignore_attrs(entry, &[Attribute::Member])
-            })
-            .map_err(|err| {
-                error!(?err, "migrate_domain_6_to_7 -> Error");
-                err
-            })?;
-
-        let idm_data = [
-            IDM_ACP_SELF_WRITE_DL7.clone().into(),
-            IDM_ACP_SELF_NAME_WRITE_DL7.clone().into(),
-            IDM_ACP_HP_CLIENT_CERTIFICATE_MANAGER_DL7.clone().into(),
-            IDM_ACP_OAUTH2_MANAGE_DL7.clone().into(),
-        ];
-
-        idm_data
-            .into_iter()
-            .try_for_each(|entry| self.internal_migrate_or_create(entry))
-            .map_err(|err| {
-                error!(?err, "migrate_domain_6_to_7 -> Error");
-                err
-            })?;
-
-        Ok(())
-    }
-
-    /// Patch Application - This triggers a one-shot fixup task for issue #2756
-    /// to correct the content of dyngroups after the dyngroups are now loaded.
-    #[instrument(level = "info", skip_all)]
-    pub(crate) fn migrate_domain_patch_level_1(&mut self) -> Result<(), OperationError> {
-        admin_warn!("applying domain patch 1.");
-
-        debug_assert!(*self.phase >= ServerPhase::SchemaReady);
-
-        let filter = filter!(f_eq(Attribute::Class, EntryClass::DynGroup.into()));
-        let modlist = modlist!([m_pres(Attribute::Class, &EntryClass::DynGroup.into())]);
-
-        self.internal_modify(&filter, &modlist).map(|()| {
-            info!("forced dyngroups to re-calculate memberships");
-        })
-    }
-
     /// Migration domain level 7 to 8
     #[instrument(level = "info", skip_all)]
     pub(crate) fn migrate_domain_7_to_8(&mut self) -> Result<(), OperationError> {
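The heart of the deleted migration is that nested filter: because the filter DSL only offers f_lt ("less than"), "greater than or equal" is spelled f_andnot(f_lt(..)), and the whole expression means "the gidnumber falls outside every allowed range". The same predicate in plain Rust; the range bounds below are illustrative stand-ins, not the gidnumber plugin's real constants.

const GID_REGULAR_USER_MIN: u32 = 1_000;
const GID_REGULAR_USER_MAX: u32 = 60_000;
const GID_UNUSED_A_MIN: u32 = 60_001;
const GID_UNUSED_A_MAX: u32 = 65_000;
const GID_UNUSED_B_MIN: u32 = 65_536;
const GID_UNUSED_B_MAX: u32 = 524_288;
const GID_UNUSED_C_MIN: u32 = 524_289;
const GID_UNUSED_D_MAX: u32 = u32::MAX;

fn in_range(value: u32, min: u32, max: u32) -> bool {
    // f_and!([ f_lt(max), f_andnot(f_lt(min)) ])
    value < max && !(value < min)
}

/// True when a gidnumber must be re-allocated (the outer f_andnot).
fn gid_out_of_bounds(value: u32) -> bool {
    !(in_range(value, GID_REGULAR_USER_MIN, GID_REGULAR_USER_MAX)
        || in_range(value, GID_UNUSED_A_MIN, GID_UNUSED_A_MAX)
        || in_range(value, GID_UNUSED_B_MIN, GID_UNUSED_B_MAX)
        || in_range(value, GID_UNUSED_C_MIN, GID_UNUSED_D_MAX))
}

fn main() {
    assert!(!gid_out_of_bounds(1_500)); // regular user range: fine
    assert!(gid_out_of_bounds(100));    // below every range: flagged
}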
@@ -843,7 +611,7 @@ impl QueryServerWriteTransaction<'_> {
         //
         // DO NOT MODIFY THIS DEFINITION
         let idm_schema: Vec<EntryInitNew> = vec![
-            SCHEMA_ATTR_MAIL.clone().into(),
+            // SCHEMA_ATTR_MAIL.clone().into(),
             SCHEMA_ATTR_ACCOUNT_EXPIRE.clone().into(),
             SCHEMA_ATTR_ACCOUNT_VALID_FROM.clone().into(),
             SCHEMA_ATTR_API_TOKEN_SESSION.clone().into(),
@@ -853,7 +621,7 @@ impl QueryServerWriteTransaction<'_> {
             SCHEMA_ATTR_BADLIST_PASSWORD.clone().into(),
             SCHEMA_ATTR_CREDENTIAL_UPDATE_INTENT_TOKEN.clone().into(),
             SCHEMA_ATTR_ATTESTED_PASSKEYS.clone().into(),
-            SCHEMA_ATTR_DISPLAYNAME.clone().into(),
+            // SCHEMA_ATTR_DISPLAYNAME.clone().into(),
             SCHEMA_ATTR_DOMAIN_DISPLAY_NAME.clone().into(),
             SCHEMA_ATTR_DOMAIN_LDAP_BASEDN.clone().into(),
             SCHEMA_ATTR_DOMAIN_NAME.clone().into(),
@@ -868,7 +636,7 @@ impl QueryServerWriteTransaction<'_> {
             SCHEMA_ATTR_GIDNUMBER.clone().into(),
             SCHEMA_ATTR_GRANT_UI_HINT.clone().into(),
             SCHEMA_ATTR_JWS_ES256_PRIVATE_KEY.clone().into(),
-            SCHEMA_ATTR_LEGALNAME.clone().into(),
+            // SCHEMA_ATTR_LEGALNAME.clone().into(),
             SCHEMA_ATTR_LOGINSHELL.clone().into(),
             SCHEMA_ATTR_NAME_HISTORY.clone().into(),
             SCHEMA_ATTR_NSUNIQUEID.clone().into(),
@@ -882,7 +650,7 @@ impl QueryServerWriteTransaction<'_> {
             SCHEMA_ATTR_OAUTH2_RS_IMPLICIT_SCOPES.clone().into(),
             SCHEMA_ATTR_OAUTH2_RS_NAME.clone().into(),
             SCHEMA_ATTR_OAUTH2_RS_ORIGIN_LANDING.clone().into(),
-            SCHEMA_ATTR_OAUTH2_RS_ORIGIN.clone().into(),
+            // SCHEMA_ATTR_OAUTH2_RS_ORIGIN.clone().into(),
             SCHEMA_ATTR_OAUTH2_RS_SCOPE_MAP.clone().into(),
             SCHEMA_ATTR_OAUTH2_RS_SUP_SCOPE_MAP.clone().into(),
             SCHEMA_ATTR_OAUTH2_RS_TOKEN_KEY.clone().into(),
@@ -917,6 +685,13 @@ impl QueryServerWriteTransaction<'_> {
             // DL7
             SCHEMA_ATTR_PATCH_LEVEL_DL7.clone().into(),
             SCHEMA_ATTR_DOMAIN_DEVELOPMENT_TAINT_DL7.clone().into(),
+            SCHEMA_ATTR_REFERS_DL7.clone().into(),
+            SCHEMA_ATTR_CERTIFICATE_DL7.clone().into(),
+            SCHEMA_ATTR_OAUTH2_RS_ORIGIN_DL7.clone().into(),
+            SCHEMA_ATTR_OAUTH2_STRICT_REDIRECT_URI_DL7.clone().into(),
+            SCHEMA_ATTR_MAIL_DL7.clone().into(),
+            SCHEMA_ATTR_LEGALNAME_DL7.clone().into(),
+            SCHEMA_ATTR_DISPLAYNAME_DL7.clone().into(),
         ];

         let r = idm_schema
@@ -945,12 +720,12 @@ impl QueryServerWriteTransaction<'_> {
             // DL5
             SCHEMA_CLASS_PERSON_DL5.clone().into(),
             SCHEMA_CLASS_ACCOUNT_DL5.clone().into(),
-            SCHEMA_CLASS_OAUTH2_RS_DL5.clone().into(),
+            // SCHEMA_CLASS_OAUTH2_RS_DL5.clone().into(),
             SCHEMA_CLASS_OAUTH2_RS_BASIC_DL5.clone().into(),
             // DL6
             SCHEMA_CLASS_ACCOUNT_POLICY_DL6.clone().into(),
-            SCHEMA_CLASS_SERVICE_ACCOUNT_DL6.clone().into(),
-            SCHEMA_CLASS_SYNC_ACCOUNT_DL6.clone().into(),
+            // SCHEMA_CLASS_SERVICE_ACCOUNT_DL6.clone().into(),
+            // SCHEMA_CLASS_SYNC_ACCOUNT_DL6.clone().into(),
             SCHEMA_CLASS_GROUP_DL6.clone().into(),
             SCHEMA_CLASS_KEY_PROVIDER_DL6.clone().into(),
             SCHEMA_CLASS_KEY_PROVIDER_INTERNAL_DL6.clone().into(),
@@ -958,7 +733,13 @@ impl QueryServerWriteTransaction<'_> {
             SCHEMA_CLASS_KEY_OBJECT_JWT_ES256_DL6.clone().into(),
             SCHEMA_CLASS_KEY_OBJECT_JWE_A128GCM_DL6.clone().into(),
             SCHEMA_CLASS_KEY_OBJECT_INTERNAL_DL6.clone().into(),
-            SCHEMA_CLASS_DOMAIN_INFO_DL6.clone().into(),
+            // SCHEMA_CLASS_DOMAIN_INFO_DL6.clone().into(),
+            // DL7
+            SCHEMA_CLASS_DOMAIN_INFO_DL7.clone().into(),
+            SCHEMA_CLASS_SERVICE_ACCOUNT_DL7.clone().into(),
+            SCHEMA_CLASS_SYNC_ACCOUNT_DL7.clone().into(),
+            SCHEMA_CLASS_CLIENT_CERTIFICATE_DL7.clone().into(),
+            SCHEMA_CLASS_OAUTH2_RS_DL7.clone().into(),
         ];

         let r: Result<(), _> = idm_schema_classes_dl1
@@ -1050,9 +831,9 @@ impl QueryServerWriteTransaction<'_> {
             IDM_ACP_RADIUS_SECRET_MANAGE_V1.clone(),
             IDM_ACP_PEOPLE_SELF_WRITE_MAIL_V1.clone(),
             IDM_ACP_SELF_READ_V1.clone(),
-            IDM_ACP_SELF_WRITE_V1.clone(),
+            // IDM_ACP_SELF_WRITE_V1.clone(),
             IDM_ACP_ACCOUNT_SELF_WRITE_V1.clone(),
-            IDM_ACP_SELF_NAME_WRITE_V1.clone(),
+            // IDM_ACP_SELF_NAME_WRITE_V1.clone(),
             IDM_ACP_ALL_ACCOUNTS_POSIX_READ_V1.clone(),
             IDM_ACP_SYSTEM_CONFIG_ACCOUNT_POLICY_MANAGE_V1.clone(),
             IDM_ACP_GROUP_UNIX_MANAGE_V1.clone(),
@@ -1074,13 +855,18 @@ impl QueryServerWriteTransaction<'_> {
             IDM_ACP_SERVICE_ACCOUNT_MANAGE_V1.clone(),
             // DL4
             // DL5
-            IDM_ACP_OAUTH2_MANAGE_DL5.clone(),
+            // IDM_ACP_OAUTH2_MANAGE_DL5.clone(),
             // DL6
             IDM_ACP_GROUP_ACCOUNT_POLICY_MANAGE_DL6.clone(),
             IDM_ACP_PEOPLE_CREATE_DL6.clone(),
             IDM_ACP_GROUP_MANAGE_DL6.clone(),
             IDM_ACP_ACCOUNT_MAIL_READ_DL6.clone(),
             IDM_ACP_DOMAIN_ADMIN_DL6.clone(),
+            // DL7
+            IDM_ACP_SELF_WRITE_DL7.clone(),
+            IDM_ACP_SELF_NAME_WRITE_DL7.clone(),
+            IDM_ACP_HP_CLIENT_CERTIFICATE_MANAGER_DL7.clone(),
+            IDM_ACP_OAUTH2_MANAGE_DL7.clone(),
         ];

         let res: Result<(), _> = idm_entries
@@ -1344,79 +1130,6 @@ mod tests {
         }
     }

-    #[qs_test(domain_level=DOMAIN_LEVEL_6)]
-    async fn test_migrations_dl6_dl7(server: &QueryServer) {
-        // Assert our instance was setup to version 6
-        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
-
-        let db_domain_version = write_txn
-            .internal_search_uuid(UUID_DOMAIN_INFO)
-            .expect("unable to access domain entry")
-            .get_ava_single_uint32(Attribute::Version)
-            .expect("Attribute Version not present");
-
-        assert_eq!(db_domain_version, DOMAIN_LEVEL_6);
-
-        // Create an oauth2 client that doesn't have a landing url set.
-        let oauth2_client_uuid = Uuid::new_v4();
-
-        let ea: Entry<EntryInit, EntryNew> = entry_init!(
-            (Attribute::Class, EntryClass::Object.to_value()),
-            (Attribute::Class, EntryClass::Account.to_value()),
-            (Attribute::Uuid, Value::Uuid(oauth2_client_uuid)),
-            (
-                Attribute::Class,
-                EntryClass::OAuth2ResourceServer.to_value()
-            ),
-            (
-                Attribute::Class,
-                EntryClass::OAuth2ResourceServerPublic.to_value()
-            ),
-            (Attribute::Name, Value::new_iname("test_resource_server")),
-            (
-                Attribute::DisplayName,
-                Value::new_utf8s("test_resource_server")
-            ),
-            (
-                Attribute::OAuth2RsOrigin,
-                Value::new_url_s("https://demo.example.com").unwrap()
-            )
-        );
-
-        write_txn
-            .internal_create(vec![ea])
-            .expect("Unable to create oauth2 client");
-
-        // Set the version to 7.
-        write_txn
-            .internal_apply_domain_migration(DOMAIN_LEVEL_7)
-            .expect("Unable to set domain level to version 7");
-
-        // post migration verification.
-        let domain_entry = write_txn
-            .internal_search_uuid(UUID_DOMAIN_INFO)
-            .expect("Unable to access domain entry");
-
-        assert!(!domain_entry.attribute_pres(Attribute::PrivateCookieKey));
-
-        let oauth2_entry = write_txn
-            .internal_search_uuid(oauth2_client_uuid)
-            .expect("Unable to access oauth2 client entry");
-
-        let origin = oauth2_entry
-            .get_ava_single_url(Attribute::OAuth2RsOrigin)
-            .expect("Unable to access oauth2 client origin");
-
-        // The origin should have been cloned to the landing.
-        let landing = oauth2_entry
-            .get_ava_single_url(Attribute::OAuth2RsOriginLanding)
-            .expect("Unable to access oauth2 client landing");
-
-        assert_eq!(origin, landing);
-
-        write_txn.commit().expect("Unable to commit");
-    }
-
     #[qs_test(domain_level=DOMAIN_LEVEL_7)]
     async fn test_migrations_dl7_dl8(server: &QueryServer) {
         // Assert our instance was setup to version 7
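The removed dl6-to-dl7 test pinned down one behaviour worth remembering: a client with an origin but no landing page gets the origin copied into the landing. The same rule on a toy struct instead of real entries:

#[derive(Debug, PartialEq)]
struct OAuth2Client {
    origin: String,
    landing: Option<String>,
}

/// If no landing page is set, the origin becomes the landing page.
fn fixup_landing(client: &mut OAuth2Client) {
    if client.landing.is_none() {
        client.landing = Some(client.origin.clone());
    }
}

fn main() {
    let mut client = OAuth2Client {
        origin: "https://demo.example.com".to_string(),
        landing: None,
    };
    fixup_landing(&mut client);
    assert_eq!(client.landing.as_deref(), Some("https://demo.example.com"));
}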
@@ -1511,11 +1224,104 @@ mod tests {
     }

     #[qs_test(domain_level=DOMAIN_LEVEL_8)]
-    async fn test_migrations_dl8_dl9(_server: &QueryServer) {}
+    async fn test_migrations_dl8_dl9(server: &QueryServer) {
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        let db_domain_version = write_txn
+            .internal_search_uuid(UUID_DOMAIN_INFO)
+            .expect("unable to access domain entry")
+            .get_ava_single_uint32(Attribute::Version)
+            .expect("Attribute Version not present");
+
+        assert_eq!(db_domain_version, DOMAIN_LEVEL_8);
+
+        write_txn.commit().expect("Unable to commit");
+
+        // == pre migration verification. ==
+        // check we currently would fail a migration.
+
+        // let mut read_txn = server.read().await.unwrap();
+        // drop(read_txn);
+
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        // Fix any issues
+
+        // == Increase the version ==
+        write_txn
+            .internal_apply_domain_migration(DOMAIN_LEVEL_9)
+            .expect("Unable to set domain level to version 9");
+
+        // post migration verification.
+
+        write_txn.commit().expect("Unable to commit");
+    }

     #[qs_test(domain_level=DOMAIN_LEVEL_9)]
-    async fn test_migrations_dl9_dl10(_server: &QueryServer) {}
+    async fn test_migrations_dl9_dl10(server: &QueryServer) {
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        let db_domain_version = write_txn
+            .internal_search_uuid(UUID_DOMAIN_INFO)
+            .expect("unable to access domain entry")
+            .get_ava_single_uint32(Attribute::Version)
+            .expect("Attribute Version not present");
+
+        assert_eq!(db_domain_version, DOMAIN_LEVEL_9);
+
+        write_txn.commit().expect("Unable to commit");
+
+        // == pre migration verification. ==
+        // check we currently would fail a migration.
+
+        // let mut read_txn = server.read().await.unwrap();
+        // drop(read_txn);
+
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        // Fix any issues
+
+        // == Increase the version ==
+        write_txn
+            .internal_apply_domain_migration(DOMAIN_LEVEL_10)
+            .expect("Unable to set domain level to version 10");
+
+        // post migration verification.
+
+        write_txn.commit().expect("Unable to commit");
+    }

     #[qs_test(domain_level=DOMAIN_LEVEL_10)]
-    async fn test_migrations_dl10_dl11(_server: &QueryServer) {}
+    async fn test_migrations_dl10_dl11(server: &QueryServer) {
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        let db_domain_version = write_txn
+            .internal_search_uuid(UUID_DOMAIN_INFO)
+            .expect("unable to access domain entry")
+            .get_ava_single_uint32(Attribute::Version)
+            .expect("Attribute Version not present");
+
+        assert_eq!(db_domain_version, DOMAIN_LEVEL_10);
+
+        write_txn.commit().expect("Unable to commit");
+
+        // == pre migration verification. ==
+        // check we currently would fail a migration.
+
+        // let mut read_txn = server.read().await.unwrap();
+        // drop(read_txn);
+
+        let mut write_txn = server.write(duration_from_epoch_now()).await.unwrap();
+
+        // Fix any issues
+
+        // == Increase the version ==
+        write_txn
+            .internal_apply_domain_migration(DOMAIN_LEVEL_11)
+            .expect("Unable to set domain level to version 11");
+
+        // post migration verification.
+
+        write_txn.commit().expect("Unable to commit");
+    }
 }
@@ -2335,7 +2335,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
         debug!(domain_previous_patch_level = ?previous_patch_level, domain_target_patch_level = ?domain_info_patch_level);

         // We have to check for DL0 since that's the initialisation level.
-        if previous_version <= DOMAIN_LEVEL_5 && previous_version != DOMAIN_LEVEL_0 {
+        if previous_version <= DOMAIN_MIN_REMIGRATION_LEVEL && previous_version != DOMAIN_LEVEL_0 {
             error!("UNABLE TO PROCEED. You are attempting a Skip update which is NOT SUPPORTED. You must upgrade one-version of Kanidm at a time.");
             error!("For more see: https://kanidm.github.io/kanidm/stable/support.html#upgrade-policy and https://kanidm.github.io/kanidm/stable/server_updates.html");
             error!(domain_previous_version = ?previous_version, domain_target_version = ?domain_info_version);
@@ -2343,21 +2343,13 @@ impl<'a> QueryServerWriteTransaction<'a> {
             return Err(OperationError::MG0008SkipUpgradeAttempted);
         }

-        if previous_version <= DOMAIN_LEVEL_6 && domain_info_version >= DOMAIN_LEVEL_7 {
-            self.migrate_domain_6_to_7()?;
-        }
-
-        // Similar to the older system info migration handler, these allow "one shot" fixes
-        // to be issued and run by bumping the patch level.
-        if previous_patch_level < PATCH_LEVEL_1 && domain_info_patch_level >= PATCH_LEVEL_1 {
-            self.migrate_domain_patch_level_1()?;
-        }
-
         if previous_version <= DOMAIN_LEVEL_7 && domain_info_version >= DOMAIN_LEVEL_8 {
+            // 1.3 -> 1.4
             self.migrate_domain_7_to_8()?;
         }

         if previous_version <= DOMAIN_LEVEL_8 && domain_info_version >= DOMAIN_LEVEL_9 {
+            // 1.4 -> 1.5
             self.migrate_domain_8_to_9()?;
         }

@@ -2366,10 +2358,12 @@ impl<'a> QueryServerWriteTransaction<'a> {
         }

         if previous_version <= DOMAIN_LEVEL_9 && domain_info_version >= DOMAIN_LEVEL_10 {
+            // 1.5 -> 1.6
             self.migrate_domain_9_to_10()?;
         }

         if previous_version <= DOMAIN_LEVEL_10 && domain_info_version >= DOMAIN_LEVEL_11 {
+            // 1.6 -> 1.7
             self.migrate_domain_10_to_11()?;
         }

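Taken together, the gating above forms a chain: each step fires only if the previous version is at or below the step's source level and the target is at or beyond its destination, and anything at or below the remigration floor is rejected outright, so no instance can skip a step. A compact model of the dispatch (function names and level values are illustrative, not the crate's API):

type DomainVersion = u32;

fn apply_migrations(
    previous: DomainVersion,
    target: DomainVersion,
) -> Result<Vec<&'static str>, &'static str> {
    const MIN_REMIGRATION_LEVEL: DomainVersion = 6;
    // Mirrors the skip-upgrade guard: DL0 is the initialisation level.
    if previous <= MIN_REMIGRATION_LEVEL && previous != 0 {
        return Err("skip upgrade attempted; upgrade one version at a time");
    }
    let mut ran = Vec::new();
    if previous <= 7 && target >= 8 {
        ran.push("migrate_domain_7_to_8"); // 1.3 -> 1.4
    }
    if previous <= 8 && target >= 9 {
        ran.push("migrate_domain_8_to_9"); // 1.4 -> 1.5
    }
    if previous <= 9 && target >= 10 {
        ran.push("migrate_domain_9_to_10"); // 1.5 -> 1.6
    }
    if previous <= 10 && target >= 11 {
        ran.push("migrate_domain_10_to_11"); // 1.6 -> 1.7
    }
    Ok(ran)
}

fn main() {
    // Upgrading a DL7 instance to DL9 runs both intermediate steps in order.
    assert_eq!(
        apply_migrations(7, 9).unwrap(),
        vec!["migrate_domain_7_to_8", "migrate_domain_8_to_9"]
    );
    // Coming from DL6 or lower is now rejected outright.
    assert!(apply_migrations(6, 9).is_err());
}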