Mirror of https://github.com/kanidm/kanidm.git (synced 2025-02-23 20:47:01 +01:00)
Dynamic schema. This allows classes and attributes to be added and modified "live",
so that a restart is not required for object schema changes to take effect. This is good for customisation and other extensions by advanced users, and doing it now makes it "easier" to supply extra schema from the project core into the initialise_idm function.
This commit is contained in:
parent
c374c8c96b
commit
c7c88d6965
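For orientation before the diff: the commit replaces the single initialise step with an initialise_helper that runs initialise_schema_core, initialise_schema_idm and then initialise_idm, each in its own write transaction with a commit (and therefore a schema reload) in between, so each later phase validates against schema made live by the earlier one. The sketch below only illustrates that phase/commit/reload pattern; Schema and WriteTxn here are toy stand-ins, not the kanidm API.

// Minimal, self-contained sketch (toy types, illustrative only).
struct Schema {
    classes: Vec<String>,
    attributes: Vec<String>,
}

struct WriteTxn<'a> {
    schema: &'a mut Schema,
    pending: Vec<(&'static str, String)>,
}

impl<'a> WriteTxn<'a> {
    fn add(&mut self, kind: &'static str, name: &str) {
        self.pending.push((kind, name.to_string()));
    }

    // Committing applies the pending entries, making the new classes and
    // attributes "live" without a server restart.
    fn commit(self) {
        for (kind, name) in self.pending {
            match kind {
                "attributetype" => self.schema.attributes.push(name),
                "classtype" => self.schema.classes.push(name),
                _ => {}
            }
        }
    }
}

fn main() {
    let mut schema = Schema { classes: vec![], attributes: vec![] };

    // Phase 1: core schema (analogous to initialise_schema_core).
    let mut txn = WriteTxn { schema: &mut schema, pending: vec![] };
    txn.add("classtype", "object");
    txn.commit();

    // Phase 2: IDM schema (analogous to initialise_schema_idm) can now rely
    // on what phase 1 committed and reloaded.
    let mut txn = WriteTxn { schema: &mut schema, pending: vec![] };
    txn.add("attributetype", "displayname");
    txn.add("classtype", "person");
    txn.commit();

    println!("live classes: {:?}, attributes: {:?}", schema.classes, schema.attributes);
}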
@@ -347,7 +347,7 @@ impl BackendWriteTransaction {
 })
 .collect();

-audit_log!(au, "serialising: {:?}", ser_entries);
+// audit_log!(au, "serialising: {:?}", ser_entries);

 let ser_entries = ser_entries?;

@@ -444,7 +444,7 @@ impl BackendWriteTransaction {

 let ser_entries = try_audit!(au, ser_entries);

-audit_log!(au, "serialising: {:?}", ser_entries);
+// audit_log!(au, "serialising: {:?}", ser_entries);

 // Simple: If the list of id's is not the same as the input list, we are missing id's
 // TODO: This check won't be needed once I rebuild the entry state types.
@@ -16,7 +16,6 @@ pub static JSON_ADMIN_V1: &'static str = r#"{
 "name": ["admin"],
 "uuid": ["00000000-0000-0000-0000-000000000000"],
 "description": ["Builtin Admin account."],
-"version": ["1"],
 "displayname": ["Administrator"]
 }
 }"#;

@@ -32,7 +31,6 @@ pub static JSON_IDM_ADMINS_V1: &'static str = r#"{
 "name": ["idm_admins"],
 "uuid": ["00000000-0000-0000-0000-000000000001"],
 "description": ["Builtin IDM Administrators Group."],
-"version": ["1"],
 "member": ["00000000-0000-0000-0000-000000000000"]
 }
 }"#;

@@ -63,7 +61,6 @@ pub static JSON_IDM_ADMINS_ACP_SEARCH_V1: &'static str = r#"{
 "name": ["idm_admins_acp_search"],
 "uuid": ["00000000-0000-0000-0000-ffffff000002"],
 "description": ["Builtin IDM Administrators Access Controls."],
-"version": ["1"],
 "acp_enable": ["true"],
 "acp_receiver": [
 "{\"Eq\":[\"memberof\",\"00000000-0000-0000-0000-000000000001\"]}"

@@ -86,7 +83,6 @@ pub static JSON_IDM_ADMINS_ACP_REVIVE_V1: &'static str = r#"{
 "name": ["idm_admins_acp_revive"],
 "uuid": ["00000000-0000-0000-0000-ffffff000003"],
 "description": ["Builtin IDM Administrators Access Controls."],
-"version": ["1"],
 "acp_enable": ["true"],
 "acp_receiver": [
 "{\"Eq\":[\"memberof\",\"00000000-0000-0000-0000-000000000001\"]}"

@@ -110,7 +106,6 @@ pub static JSON_IDM_SELF_ACP_READ_V1: &'static str = r#"{
 "name": ["idm_self_acp_read"],
 "uuid": ["00000000-0000-0000-0000-ffffff000004"],
 "description": ["Builtin IDM Control for self read - required for whoami."],
-"version": ["1"],
 "acp_enable": ["true"],
 "acp_receiver": [
 "\"Self\""

@@ -133,7 +128,6 @@ pub static JSON_ANONYMOUS_V1: &'static str = r#"{
 "name": ["anonymous"],
 "uuid": ["00000000-0000-0000-0000-ffffffffffff"],
 "description": ["Anonymous access account."],
-"version": ["1"],
 "displayname": ["Anonymous"]
 }
 }"#;
@@ -153,27 +147,14 @@ pub static UUID_SCHEMA_ATTR_SYSTEMMAY: &'static str = "f3842165-90ad-4465-ad71-1
 pub static UUID_SCHEMA_ATTR_MAY: &'static str = "7adb7e2d-af8f-492e-8f1c-c5d9b7c47b5f";
 pub static UUID_SCHEMA_ATTR_SYSTEMMUST: &'static str = "e2e4abc4-7083-41ea-a663-43d904d949ce";
 pub static UUID_SCHEMA_ATTR_MUST: &'static str = "40e88ca8-06d7-4a51-b538-1125e51c02e0";

-pub static UUID_SCHEMA_CLASS_ATTRIBUTETYPE: &'static str = "ed65a356-a4d9-45a8-b4b9-5d40d9acdb7e";
-pub static UUID_SCHEMA_CLASS_CLASSTYPE: &'static str = "ec1964f6-0c72-4373-954f-f3a603c5f8bb";
-pub static UUID_SCHEMA_CLASS_OBJECT: &'static str = "579bb16d-1d85-4f8e-bb3b-6fc55af582fe";
-pub static UUID_SCHEMA_CLASS_EXTENSIBLEOBJECT: &'static str =
-    "0fb2171d-372b-4d0d-9194-9a4d6846c324";
-
-pub static UUID_SCHEMA_CLASS_RECYCLED: &'static str = "813bb7e3-dadf-413d-acc4-197b03d55a4f";
-pub static UUID_SCHEMA_CLASS_TOMBSTONE: &'static str = "848a1224-0c3c-465f-abd0-10a32e21830e";
-
-// system supplementary
-pub static UUID_SCHEMA_ATTR_DISPLAYNAME: &'static str = "201bc966-954b-48f5-bf25-99ffed759861";
-pub static UUID_SCHEMA_ATTR_MAIL: &'static str = "fae94676-720b-461b-9438-bfe8cfd7e6cd";
 pub static UUID_SCHEMA_ATTR_MEMBEROF: &'static str = "2ff1abc8-2f64-4f41-9e3d-33d44616a112";
-pub static UUID_SCHEMA_ATTR_SSH_PUBLICKEY: &'static str = "52f2f13f-d35c-4cca-9f43-90a12c968f72";
-pub static UUID_SCHEMA_ATTR_PASSWORD: &'static str = "a5121082-be54-4624-a307-383839b0366b";
 pub static UUID_SCHEMA_ATTR_MEMBER: &'static str = "cbb7cb55-1d48-4b89-8da7-8d570e755b47";
 pub static UUID_SCHEMA_ATTR_DIRECTMEMBEROF: &'static str = "63f6a766-3838-48e3-bd78-0fb1152b862f";
 pub static UUID_SCHEMA_ATTR_VERSION: &'static str = "896d5095-b3ae-451e-a91f-4314165b5395";
 pub static UUID_SCHEMA_ATTR_DOMAIN: &'static str = "c9926716-eaaa-4c83-a1ab-1ed4372a7491";
+/*
 pub static UUID_SCHEMA_ATTR_ACP_ALLOW: &'static str = "160ebaaf-5251-444c-aa41-8d1a572c147a";
+*/
 pub static UUID_SCHEMA_ATTR_ACP_ENABLE: &'static str = "7346a4a6-3dae-4e48-b606-a1c0c2abeba0";
 pub static UUID_SCHEMA_ATTR_ACP_RECEIVER: &'static str = "8e48d272-7818-400f-ac74-6ae05e6c79b8";
 pub static UUID_SCHEMA_ATTR_ACP_TARGETSCOPE: &'static str = "424bb6f3-44a1-4af8-8f4c-782a18f9240e";
@@ -186,9 +167,15 @@ pub static UUID_SCHEMA_ATTR_ACP_MODIFY_PRESENTATTR: &'static str =
 "2df6a429-e060-40e1-8551-be101aff3496";
 pub static UUID_SCHEMA_ATTR_ACP_MODIFY_CLASS: &'static str = "a6f0cf6e-c748-4aa4-8c77-9f1cb89b7f4a";

-pub static UUID_SCHEMA_CLASS_PERSON: &'static str = "86c4d9e8-3820-45d7-8a8c-d3c522287010";
-pub static UUID_SCHEMA_CLASS_GROUP: &'static str = "c0e4e58c-1a2e-4bc3-ad56-5950ef810ea7";
-pub static UUID_SCHEMA_CLASS_ACCOUNT: &'static str = "8bbff87c-1731-455e-a0e7-bf1d0908e983";
+pub static UUID_SCHEMA_CLASS_ATTRIBUTETYPE: &'static str = "ed65a356-a4d9-45a8-b4b9-5d40d9acdb7e";
+pub static UUID_SCHEMA_CLASS_CLASSTYPE: &'static str = "ec1964f6-0c72-4373-954f-f3a603c5f8bb";
+pub static UUID_SCHEMA_CLASS_OBJECT: &'static str = "579bb16d-1d85-4f8e-bb3b-6fc55af582fe";
+pub static UUID_SCHEMA_CLASS_EXTENSIBLEOBJECT: &'static str =
+    "0fb2171d-372b-4d0d-9194-9a4d6846c324";
+pub static UUID_SCHEMA_CLASS_MEMBEROF: &'static str = "6c02b1d2-5f05-4216-8a82-b39a8b681be9";
+
+pub static UUID_SCHEMA_CLASS_RECYCLED: &'static str = "813bb7e3-dadf-413d-acc4-197b03d55a4f";
+pub static UUID_SCHEMA_CLASS_TOMBSTONE: &'static str = "848a1224-0c3c-465f-abd0-10a32e21830e";
 pub static UUID_SCHEMA_CLASS_SYSTEM_INFO: &'static str = "510b2a38-0577-4680-b0ad-836ca3415e6c";
 pub static UUID_SCHEMA_CLASS_ACCESS_CONTROL_PROFILE: &'static str =
 "78dd6c9a-ac61-4e53-928f-6e8b810a469b";
@@ -201,6 +188,254 @@ pub static UUID_SCHEMA_CLASS_ACCESS_CONTROL_MODIFY: &'static str =
 pub static UUID_SCHEMA_CLASS_ACCESS_CONTROL_CREATE: &'static str =
 "58c5c197-51d8-4c30-9a8e-b8a0bb0eaacd";

+// system supplementary
+pub static UUID_SCHEMA_ATTR_DISPLAYNAME: &'static str = "201bc966-954b-48f5-bf25-99ffed759861";
+pub static JSON_SCHEMA_ATTR_DISPLAYNAME: &'static str = r#"{
+    "valid": { "uuid": "201bc966-954b-48f5-bf25-99ffed759861" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "attributetype"],
+        "description": ["The publicly visible display name of this person"],
+        "index": ["EQUALITY"],
+        "multivalue": ["false"],
+        "name": ["displayname"],
+        "secret": ["false"],
+        "syntax": ["UTF8STRING"],
+        "system": ["true"],
+        "uuid": ["201bc966-954b-48f5-bf25-99ffed759861"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_ATTR_MAIL: &'static str = "fae94676-720b-461b-9438-bfe8cfd7e6cd";
+pub static JSON_SCHEMA_ATTR_MAIL: &'static str = r#"{
+    "valid": { "uuid": "fae94676-720b-461b-9438-bfe8cfd7e6cd" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "attributetype"],
+        "description": ["mail addresses of the object"],
+        "index": ["EQUALITY"],
+        "multivalue": ["true"],
+        "name": ["mail"],
+        "secret": ["false"],
+        "syntax": ["UTF8STRING"],
+        "system": ["true"],
+        "uuid": ["fae94676-720b-461b-9438-bfe8cfd7e6cd"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_ATTR_SSH_PUBLICKEY: &'static str = "52f2f13f-d35c-4cca-9f43-90a12c968f72";
+pub static JSON_SCHEMA_ATTR_SSH_PUBLICKEY: &'static str = r#"{
+    "valid": { "uuid": "52f2f13f-d35c-4cca-9f43-90a12c968f72" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "attributetype"],
+        "description": ["SSH public keys of the object"],
+        "index": [],
+        "multivalue": ["true"],
+        "name": ["ssh_publickey"],
+        "secret": ["false"],
+        "syntax": ["UTF8STRING"],
+        "system": ["true"],
+        "uuid": ["52f2f13f-d35c-4cca-9f43-90a12c968f72"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_ATTR_PASSWORD: &'static str = "a5121082-be54-4624-a307-383839b0366b";
+pub static JSON_SCHEMA_ATTR_PASSWORD: &'static str = r#"{
+    "valid": { "uuid": "a5121082-be54-4624-a307-383839b0366b" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "attributetype"],
+        "description": ["password hash material of the object for authentication"],
+        "index": [],
+        "multivalue": ["true"],
+        "name": ["password"],
+        "secret": ["true"],
+        "syntax": ["UTF8STRING"],
+        "system": ["true"],
+        "uuid": ["a5121082-be54-4624-a307-383839b0366b"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_CLASS_PERSON: &'static str = "86c4d9e8-3820-45d7-8a8c-d3c522287010";
+pub static JSON_SCHEMA_CLASS_PERSON: &'static str = r#"{
+    "valid": { "uuid": "86c4d9e8-3820-45d7-8a8c-d3c522287010" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "classtype"],
+        "description": ["Object representation of a person"],
+        "name": ["person"],
+        "systemmay": ["mail", "memberof"],
+        "systemmust": ["displayname", "name"],
+        "uuid": ["86c4d9e8-3820-45d7-8a8c-d3c522287010"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_CLASS_GROUP: &'static str = "c0e4e58c-1a2e-4bc3-ad56-5950ef810ea7";
+pub static JSON_SCHEMA_CLASS_GROUP: &'static str = r#"{
+    "valid": { "uuid": "c0e4e58c-1a2e-4bc3-ad56-5950ef810ea7" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "classtype"],
+        "description": ["Object representation of a group"],
+        "name": ["group"],
+        "systemmay": ["member"],
+        "systemmust": ["name"],
+        "uuid": ["c0e4e58c-1a2e-4bc3-ad56-5950ef810ea7"]
+    }
+}"#;
+
+pub static UUID_SCHEMA_CLASS_ACCOUNT: &'static str = "8bbff87c-1731-455e-a0e7-bf1d0908e983";
+pub static JSON_SCHEMA_CLASS_ACCOUNT: &'static str = r#"{
+    "valid": { "uuid": "8bbff87c-1731-455e-a0e7-bf1d0908e983" },
+    "state": null,
+    "attrs": {
+        "class": ["object", "classtype"],
+        "description": ["Object representation of a person"],
+        "name": ["account"],
+        "systemmay": ["password", "ssh_publickey"],
+        "systemmust": ["displayname", "name"],
+        "uuid": ["8bbff87c-1731-455e-a0e7-bf1d0908e983"]
+    }
+}"#;
+
 // ============ TEST DATA ============
 #[cfg(test)]
 pub static JSON_TESTPERSON1: &'static str = r#"{
@@ -19,8 +19,8 @@ use crate::interval::IntervalActor;
 use crate::proto::v1::actors::QueryServerV1;
 use crate::proto::v1::messages::{AuthMessage, WhoamiMessage};
 use crate::proto::v1::{
-    AuthRequest, AuthResponse, AuthState, CreateRequest, DeleteRequest, ModifyRequest,
-    SearchRequest, UserAuthToken, WhoamiRequest, WhoamiResponse,
+    AuthRequest, AuthState, CreateRequest, DeleteRequest, ModifyRequest, SearchRequest,
+    UserAuthToken,
 };

 use uuid::Uuid;
src/lib/entry.rs (251 changed lines)
@@ -4,7 +4,7 @@ use crate::error::{OperationError, SchemaError};
 use crate::filter::{Filter, FilterInvalid, FilterResolved, FilterValidResolved};
 use crate::modify::{Modify, ModifyInvalid, ModifyList, ModifyValid};
 use crate::proto::v1::Entry as ProtoEntry;
-use crate::proto::v1::UserAuthToken;
+use crate::schema::{IndexType, SyntaxType};
 use crate::schema::{SchemaAttribute, SchemaClass, SchemaTransaction};
 use crate::server::{QueryServerTransaction, QueryServerWriteTransaction};

@@ -17,6 +17,9 @@ use std::collections::HashMap;
 use std::iter::ExactSizeIterator;
 use std::slice::Iter as SliceIter;

+use std::convert::TryFrom;
+use std::str::FromStr;
+
 #[cfg(test)]
 use uuid::Uuid;
|
|
||||||
|
@@ -251,6 +254,7 @@ impl<STATE> Entry<EntryNormalised, STATE> {
 {
 // First, check we have class on the object ....
 if !ne.attribute_pres("class") {
+    debug!("Missing attribute class");
     return Err(SchemaError::InvalidClass);
 }

@@ -265,6 +269,7 @@ impl<STATE> Entry<EntryNormalised, STATE> {
 .collect();

 if classes.len() != entry_classes_size {
+    debug!("Class on entry not found in schema?");
     return Err(SchemaError::InvalidClass);
 };

@@ -280,6 +285,9 @@ impl<STATE> Entry<EntryNormalised, STATE> {
 // Now from the set of valid classes make a list of must/may
 // FIXME: This is clone on read, which may be really slow. It also may
 // be inefficent on duplicates etc.
+//
+// NOTE: We still need this on extensible, because we still need to satisfy
+// our other must conditions too!
 let must: Result<HashMap<String, &SchemaAttribute>, _> = classes
 .iter()
 // Join our class systemmmust + must into one iter

@@ -296,9 +304,6 @@ impl<STATE> Entry<EntryNormalised, STATE> {

 let must = must?;

-// FIXME: Error needs to say what is missing
-// We need to return *all* missing attributes.
-
 // Check that all must are inplace
 // for each attr in must, check it's present on our ent
 // FIXME: Could we iter over only the attr_name
@ -309,22 +314,76 @@ impl<STATE> Entry<EntryNormalised, STATE> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check that any other attributes are in may
|
if extensible {
|
||||||
// for each attr on the object, check it's in the may+must set
|
for (attr_name, avas) in ne.avas() {
|
||||||
for (attr_name, avas) in ne.avas() {
|
match schema_attributes.get(attr_name) {
|
||||||
match schema_attributes.get(attr_name) {
|
Some(a_schema) => {
|
||||||
Some(a_schema) => {
|
// Now, for each type we do a *full* check of the syntax
|
||||||
// Now, for each type we do a *full* check of the syntax
|
// and validity of the ava.
|
||||||
// and validity of the ava.
|
let r = a_schema.validate_ava(avas);
|
||||||
let r = a_schema.validate_ava(avas);
|
if r.is_err() {
|
||||||
// We have to destructure here to make type checker happy
|
debug!("Failed to validate: {}", attr_name);
|
||||||
match r {
|
return Err(r.unwrap_err());
|
||||||
Ok(_) => {}
|
}
|
||||||
Err(e) => return Err(e),
|
// We have to destructure here to make type checker happy
|
||||||
|
match r {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
debug!("Invalid Attribute for extensible object");
|
||||||
|
return Err(SchemaError::InvalidAttribute);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
}
|
||||||
if !extensible {
|
} else {
|
||||||
|
let may: Result<HashMap<String, &SchemaAttribute>, _> = classes
|
||||||
|
.iter()
|
||||||
|
// Join our class systemmmust + must + systemmay + may into one.
|
||||||
|
.flat_map(|(_, cls)| {
|
||||||
|
cls.systemmust
|
||||||
|
.iter()
|
||||||
|
.chain(cls.must.iter())
|
||||||
|
.chain(cls.systemmay.iter())
|
||||||
|
.chain(cls.may.iter())
|
||||||
|
})
|
||||||
|
.map(|s| {
|
||||||
|
// This should NOT fail - if it does, it means our schema is
|
||||||
|
// in an invalid state!
|
||||||
|
Ok((
|
||||||
|
s.clone(),
|
||||||
|
schema_attributes.get(s).ok_or(SchemaError::Corrupted)?,
|
||||||
|
))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let may = may?;
|
||||||
|
|
||||||
|
// FIXME: Error needs to say what is missing
|
||||||
|
// We need to return *all* missing attributes.
|
||||||
|
|
||||||
|
// Check that any other attributes are in may
|
||||||
|
// for each attr on the object, check it's in the may+must set
|
||||||
|
for (attr_name, avas) in ne.avas() {
|
||||||
|
debug!("Checking {}", attr_name);
|
||||||
|
match may.get(attr_name) {
|
||||||
|
Some(a_schema) => {
|
||||||
|
// Now, for each type we do a *full* check of the syntax
|
||||||
|
// and validity of the ava.
|
||||||
|
let r = a_schema.validate_ava(avas);
|
||||||
|
if r.is_err() {
|
||||||
|
debug!("Failed to validate: {}", attr_name);
|
||||||
|
return Err(r.unwrap_err());
|
||||||
|
}
|
||||||
|
// We have to destructure here to make type checker happy
|
||||||
|
match r {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
debug!("Invalid Attribute for may+must set");
|
||||||
return Err(SchemaError::InvalidAttribute);
|
return Err(SchemaError::InvalidAttribute);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -634,6 +693,52 @@ impl Entry<EntryValid, EntryCommitted> {
|
||||||
attrs: f_attrs,
|
attrs: f_attrs,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// These are special types to allow returning typed values from
|
||||||
|
// an entry, if we "know" what we expect to receive.
|
||||||
|
|
||||||
|
/// This returns an array of IndexTypes, when the type is an Optional
|
||||||
|
/// multivalue in schema - IE this will *not* fail if the attribute is
|
||||||
|
/// empty, yielding and empty array instead.
|
||||||
|
///
|
||||||
|
/// However, the converstion to IndexType is fallaible, so in case of a failure
|
||||||
|
/// to convert, an Err is returned.
|
||||||
|
pub(crate) fn get_ava_opt_index(&self, attr: &str) -> Result<Vec<IndexType>, ()> {
|
||||||
|
match self.attrs.get(attr) {
|
||||||
|
Some(av) => {
|
||||||
|
let r: Result<Vec<_>, _> =
|
||||||
|
av.iter().map(|v| IndexType::try_from(v.as_str())).collect();
|
||||||
|
r
|
||||||
|
}
|
||||||
|
None => Ok(Vec::new()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a bool from an ava
|
||||||
|
pub fn get_ava_single_bool(&self, attr: &str) -> Option<bool> {
|
||||||
|
match self.get_ava_single(attr) {
|
||||||
|
Some(a) => bool::from_str(a.as_str()).ok(),
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_ava_single_syntax(&self, attr: &str) -> Option<SyntaxType> {
|
||||||
|
match self.get_ava_single(attr) {
|
||||||
|
Some(a) => SyntaxType::try_from(a.as_str()).ok(),
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This is a cloning interface on getting ava's with optional
|
||||||
|
/// existance. It's used in the schema code for must/may/systemmust/systemmay
|
||||||
|
/// access. It should probably be avoided due to the clone unless you
|
||||||
|
/// are aware of the consequences.
|
||||||
|
pub(crate) fn get_ava_opt(&self, attr: &str) -> Vec<String> {
|
||||||
|
match self.attrs.get(attr) {
|
||||||
|
Some(a) => a.clone(),
|
||||||
|
None => Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<STATE> Entry<EntryValid, STATE> {
|
impl<STATE> Entry<EntryValid, STATE> {
|
||||||
|
@ -990,37 +1095,91 @@ impl<VALID, STATE> PartialEq for Entry<VALID, STATE> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
impl From<&SchemaAttribute> for Entry<EntryValid, EntryNew> {
|
||||||
enum Credential {
|
fn from(s: &SchemaAttribute) -> Self {
|
||||||
Password {
|
// Convert an Attribute to an entry ... make it good!
|
||||||
name: String,
|
let uuid_str = s.uuid.to_hyphenated().to_string();
|
||||||
hash: String,
|
let uuid_v = vec![uuid_str.clone()];
|
||||||
},
|
|
||||||
TOTPPassword {
|
let name_v = vec![s.name.clone()];
|
||||||
name: String,
|
let desc_v = vec![s.description.clone()];
|
||||||
hash: String,
|
|
||||||
totp_secret: String,
|
let system_v = vec![if s.system {
|
||||||
},
|
"true".to_string()
|
||||||
SshPublicKey {
|
} else {
|
||||||
name: String,
|
"false".to_string()
|
||||||
data: String,
|
}];
|
||||||
},
|
|
||||||
|
let secret_v = vec![if s.secret {
|
||||||
|
"true".to_string()
|
||||||
|
} else {
|
||||||
|
"false".to_string()
|
||||||
|
}];
|
||||||
|
|
||||||
|
let multivalue_v = vec![if s.multivalue {
|
||||||
|
"true".to_string()
|
||||||
|
} else {
|
||||||
|
"false".to_string()
|
||||||
|
}];
|
||||||
|
|
||||||
|
let index_v: Vec<_> = s.index.iter().map(|i| i.to_string()).collect();
|
||||||
|
|
||||||
|
let syntax_v = vec![s.syntax.to_string()];
|
||||||
|
|
||||||
|
// Build the BTreeMap of the attributes relevant
|
||||||
|
let mut attrs: BTreeMap<String, Vec<String>> = BTreeMap::new();
|
||||||
|
attrs.insert("name".to_string(), name_v);
|
||||||
|
attrs.insert("description".to_string(), desc_v);
|
||||||
|
attrs.insert("uuid".to_string(), uuid_v);
|
||||||
|
attrs.insert("system".to_string(), system_v);
|
||||||
|
attrs.insert("secret".to_string(), secret_v);
|
||||||
|
attrs.insert("multivalue".to_string(), multivalue_v);
|
||||||
|
attrs.insert("index".to_string(), index_v);
|
||||||
|
attrs.insert("syntax".to_string(), syntax_v);
|
||||||
|
attrs.insert(
|
||||||
|
"class".to_string(),
|
||||||
|
vec!["object".to_string(), "attributetype".to_string()],
|
||||||
|
);
|
||||||
|
|
||||||
|
// Insert stuff.
|
||||||
|
|
||||||
|
Entry {
|
||||||
|
valid: EntryValid {
|
||||||
|
uuid: uuid_str.clone(),
|
||||||
|
},
|
||||||
|
state: EntryNew,
|
||||||
|
attrs: attrs,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug)]
|
impl From<&SchemaClass> for Entry<EntryValid, EntryNew> {
|
||||||
struct User {
|
fn from(s: &SchemaClass) -> Self {
|
||||||
username: String,
|
let uuid_str = s.uuid.to_hyphenated().to_string();
|
||||||
// Could this be derived from self? Do we even need schema?
|
let uuid_v = vec![uuid_str.clone()];
|
||||||
class: Vec<String>,
|
|
||||||
displayname: String,
|
|
||||||
legalname: Option<String>,
|
|
||||||
email: Vec<String>,
|
|
||||||
// uuid?
|
|
||||||
// need to support deref later ...
|
|
||||||
memberof: Vec<String>,
|
|
||||||
sshpublickey: Vec<String>,
|
|
||||||
|
|
||||||
credentials: Vec<Credential>,
|
let name_v = vec![s.name.clone()];
|
||||||
|
let desc_v = vec![s.description.clone()];
|
||||||
|
|
||||||
|
let mut attrs: BTreeMap<String, Vec<String>> = BTreeMap::new();
|
||||||
|
attrs.insert("name".to_string(), name_v);
|
||||||
|
attrs.insert("description".to_string(), desc_v);
|
||||||
|
attrs.insert("uuid".to_string(), uuid_v);
|
||||||
|
attrs.insert(
|
||||||
|
"class".to_string(),
|
||||||
|
vec!["object".to_string(), "classtype".to_string()],
|
||||||
|
);
|
||||||
|
attrs.insert("systemmay".to_string(), s.systemmay.clone());
|
||||||
|
attrs.insert("systemmust".to_string(), s.systemmust.clone());
|
||||||
|
|
||||||
|
Entry {
|
||||||
|
valid: EntryValid {
|
||||||
|
uuid: uuid_str.clone(),
|
||||||
|
},
|
||||||
|
state: EntryNew,
|
||||||
|
attrs: attrs,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|
|
@@ -29,6 +29,7 @@ pub enum OperationError {
 InvalidState,
 InvalidEntryState,
 InvalidACPState(&'static str),
+InvalidSchemaState(&'static str),
 InvalidAccountState(&'static str),
 BackendEngine,
 SQLiteError, //(RusqliteError)
|
|
|
@ -3,9 +3,9 @@ use crate::entry::{Entry, EntryCommitted, EntryInvalid, EntryNew, EntryValid};
|
||||||
use crate::filter::{Filter, FilterValid};
|
use crate::filter::{Filter, FilterValid};
|
||||||
use crate::proto::v1::Entry as ProtoEntry;
|
use crate::proto::v1::Entry as ProtoEntry;
|
||||||
use crate::proto::v1::{
|
use crate::proto::v1::{
|
||||||
AuthAllowed, AuthCredential, AuthRequest, AuthResponse, AuthState, AuthStep, CreateRequest,
|
AuthCredential, AuthResponse, AuthState, AuthStep, CreateRequest, DeleteRequest, ModifyRequest,
|
||||||
DeleteRequest, ModifyRequest, OperationResponse, ReviveRecycledRequest, SearchRequest,
|
OperationResponse, ReviveRecycledRequest, SearchRequest, SearchResponse, UserAuthToken,
|
||||||
SearchResponse, UserAuthToken, WhoamiRequest, WhoamiResponse,
|
WhoamiResponse,
|
||||||
};
|
};
|
||||||
// use error::OperationError;
|
// use error::OperationError;
|
||||||
use crate::error::OperationError;
|
use crate::error::OperationError;
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
use crate::entry::{Entry, EntryCommitted, EntryValid};
|
use crate::entry::{Entry, EntryCommitted, EntryValid};
|
||||||
use crate::error::OperationError;
|
use crate::error::OperationError;
|
||||||
|
|
||||||
use crate::proto::v1::{AuthAllowed, UserAuthToken};
|
use crate::proto::v1::UserAuthToken;
|
||||||
|
|
||||||
use crate::idm::claim::Claim;
|
use crate::idm::claim::Claim;
|
||||||
use crate::idm::group::Group;
|
use crate::idm::group::Group;
|
||||||
|
|
||||||
use std::convert::TryFrom;
|
use std::convert::TryFrom;
|
||||||
use uuid::Uuid;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub(crate) struct Account {
|
pub(crate) struct Account {
|
||||||
|
|
|
@ -36,6 +36,10 @@ impl CredHandler {
|
||||||
match self {
|
match self {
|
||||||
CredHandler::Anonymous => {
|
CredHandler::Anonymous => {
|
||||||
creds.iter().fold(CredState::Denied, |acc, cred| {
|
creds.iter().fold(CredState::Denied, |acc, cred| {
|
||||||
|
// TODO: if denied, continue returning denied.
|
||||||
|
// TODO: if continue, contunue returning continue.
|
||||||
|
// How to do this correctly?
|
||||||
|
|
||||||
// There is no "continuation" from this type.
|
// There is no "continuation" from this type.
|
||||||
match cred {
|
match cred {
|
||||||
AuthCredential::Anonymous => {
|
AuthCredential::Anonymous => {
|
||||||
|
@ -43,8 +47,9 @@ impl CredHandler {
|
||||||
CredState::Success(Vec::new())
|
CredState::Success(Vec::new())
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
// Should we have a reason in Denied so that we
|
// Should we have a reason in Denied so that we can say why denied?
|
||||||
CredState::Denied
|
acc
|
||||||
|
// CredState::Denied
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -163,8 +168,6 @@ impl AuthSession {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::constants::JSON_ANONYMOUS_V1;
|
use crate::constants::JSON_ANONYMOUS_V1;
|
||||||
use crate::entry::{Entry, EntryNew, EntryValid};
|
|
||||||
use crate::idm::account::Account;
|
|
||||||
use crate::idm::authsession::AuthSession;
|
use crate::idm::authsession::AuthSession;
|
||||||
use crate::proto::v1::AuthAllowed;
|
use crate::proto::v1::AuthAllowed;
|
||||||
|
|
||||||
|
|
|
@ -20,7 +20,6 @@ macro_rules! run_idm_test {
|
||||||
use crate::idm::server::IdmServer;
|
use crate::idm::server::IdmServer;
|
||||||
use crate::schema::Schema;
|
use crate::schema::Schema;
|
||||||
use crate::server::QueryServer;
|
use crate::server::QueryServer;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use env_logger;
|
use env_logger;
|
||||||
::std::env::set_var("RUST_LOG", "actix_web=debug,rsidm=debug");
|
::std::env::set_var("RUST_LOG", "actix_web=debug,rsidm=debug");
|
||||||
|
@ -30,21 +29,11 @@ macro_rules! run_idm_test {
|
||||||
|
|
||||||
let be = Backend::new(&mut audit, "").expect("Failed to init be");
|
let be = Backend::new(&mut audit, "").expect("Failed to init be");
|
||||||
let schema_outer = Schema::new(&mut audit).expect("Failed to init schema");
|
let schema_outer = Schema::new(&mut audit).expect("Failed to init schema");
|
||||||
{
|
|
||||||
let mut schema = schema_outer.write();
|
|
||||||
schema
|
|
||||||
.bootstrap_core(&mut audit)
|
|
||||||
.expect("Failed to bootstrap schema");
|
|
||||||
schema.commit().expect("Failed to commit schema");
|
|
||||||
}
|
|
||||||
|
|
||||||
let test_server = QueryServer::new(be, schema_outer);
|
let test_server = QueryServer::new(be, schema_outer);
|
||||||
|
test_server
|
||||||
{
|
.initialise_helper(&mut audit)
|
||||||
let ts_write = test_server.write();
|
.expect("init failed");
|
||||||
ts_write.initialise(&mut audit).expect("Init failed!");
|
|
||||||
ts_write.commit(&mut audit).expect("Commit failed!");
|
|
||||||
}
|
|
||||||
|
|
||||||
let test_idm_server = IdmServer::new(test_server.clone());
|
let test_idm_server = IdmServer::new(test_server.clone());
|
||||||
|
|
||||||
|
|
|
@ -1,18 +1,14 @@
|
||||||
use crate::audit::AuditScope;
|
use crate::audit::AuditScope;
|
||||||
use crate::be::Backend;
|
|
||||||
use crate::constants::UUID_ANONYMOUS;
|
|
||||||
use crate::error::OperationError;
|
use crate::error::OperationError;
|
||||||
use crate::event::{AuthEvent, AuthEventStep, AuthEventStepInit, AuthResult, SearchEvent};
|
use crate::event::{AuthEvent, AuthEventStep, AuthResult};
|
||||||
use crate::idm::account::Account;
|
use crate::idm::account::Account;
|
||||||
use crate::idm::authsession::AuthSession;
|
use crate::idm::authsession::AuthSession;
|
||||||
use crate::proto::v1::{AuthResponse, AuthState, UserAuthToken};
|
use crate::proto::v1::AuthState;
|
||||||
use crate::schema::Schema;
|
|
||||||
use crate::server::{QueryServer, QueryServerTransaction};
|
use crate::server::{QueryServer, QueryServerTransaction};
|
||||||
use concread::cowcell::{CowCell, CowCellReadTxn, CowCellWriteTxn};
|
use concread::cowcell::{CowCell, CowCellWriteTxn};
|
||||||
|
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::convert::TryFrom;
|
use std::convert::TryFrom;
|
||||||
use std::sync::Arc;
|
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
// use lru::LruCache;
|
// use lru::LruCache;
|
||||||
|
|
||||||
|
|
|
@ -14,20 +14,11 @@ macro_rules! run_test {
|
||||||
|
|
||||||
let be = Backend::new(&mut audit, "").expect("Failed to init be");
|
let be = Backend::new(&mut audit, "").expect("Failed to init be");
|
||||||
let schema_outer = Schema::new(&mut audit).expect("Failed to init schema");
|
let schema_outer = Schema::new(&mut audit).expect("Failed to init schema");
|
||||||
{
|
|
||||||
let mut schema = schema_outer.write();
|
|
||||||
schema
|
|
||||||
.bootstrap_core(&mut audit)
|
|
||||||
.expect("Failed to bootstrap schema");
|
|
||||||
schema.commit().expect("Failed to commit schema");
|
|
||||||
}
|
|
||||||
let test_server = QueryServer::new(be, schema_outer);
|
let test_server = QueryServer::new(be, schema_outer);
|
||||||
|
|
||||||
{
|
test_server
|
||||||
let ts_write = test_server.write();
|
.initialise_helper(&mut audit)
|
||||||
ts_write.initialise(&mut audit).expect("Init failed!");
|
.expect("init failed!");
|
||||||
ts_write.commit(&mut audit).expect("Commit failed!");
|
|
||||||
}
|
|
||||||
|
|
||||||
$test_fn(&test_server, &mut audit);
|
$test_fn(&test_server, &mut audit);
|
||||||
// Any needed teardown?
|
// Any needed teardown?
|
||||||
|
|
|
@ -4,16 +4,16 @@ macro_rules! setup_test {
|
||||||
$au:expr,
|
$au:expr,
|
||||||
$preload_entries:ident
|
$preload_entries:ident
|
||||||
) => {{
|
) => {{
|
||||||
|
use env_logger;
|
||||||
|
::std::env::set_var("RUST_LOG", "actix_web=debug,rsidm=debug");
|
||||||
|
let _ = env_logger::builder().is_test(true).try_init();
|
||||||
|
|
||||||
// Create an in memory BE
|
// Create an in memory BE
|
||||||
let be = Backend::new($au, "").expect("Failed to init BE");
|
let be = Backend::new($au, "").expect("Failed to init BE");
|
||||||
|
|
||||||
let schema_outer = Schema::new($au).expect("Failed to init schema");
|
let schema_outer = Schema::new($au).expect("Failed to init schema");
|
||||||
{
|
|
||||||
let mut schema = schema_outer.write();
|
|
||||||
schema.bootstrap_core($au).expect("Failed to init schema");
|
|
||||||
schema.commit().expect("Failed to commit schema");
|
|
||||||
}
|
|
||||||
let qs = QueryServer::new(be, schema_outer);
|
let qs = QueryServer::new(be, schema_outer);
|
||||||
|
qs.initialise_helper($au).expect("init failed!");
|
||||||
|
|
||||||
if !$preload_entries.is_empty() {
|
if !$preload_entries.is_empty() {
|
||||||
let qs_write = qs.write();
|
let qs_write = qs.write();
|
||||||
|
@ -58,6 +58,7 @@ macro_rules! run_create_test {
|
||||||
{
|
{
|
||||||
let qs_write = qs.write();
|
let qs_write = qs.write();
|
||||||
let r = qs_write.create(&mut au_test, &ce);
|
let r = qs_write.create(&mut au_test, &ce);
|
||||||
|
debug!("r: {:?}", r);
|
||||||
assert!(r == $expect);
|
assert!(r == $expect);
|
||||||
$check(&mut au_test, &qs_write);
|
$check(&mut au_test, &qs_write);
|
||||||
match r {
|
match r {
|
||||||
|
|
|
@ -129,7 +129,10 @@ fn apply_memberof(
|
||||||
// first add a purged memberof to remove all mo we no longer
|
// first add a purged memberof to remove all mo we no longer
|
||||||
// support.
|
// support.
|
||||||
// TODO: Could this be more efficient
|
// TODO: Could this be more efficient
|
||||||
|
// TODO: Could this affect replication? Or should the CL work out the
|
||||||
|
// true diff of the operation?
|
||||||
let mo_purge = vec![
|
let mo_purge = vec![
|
||||||
|
Modify::Present("class".to_string(), "memberof".to_string()),
|
||||||
Modify::Purged("memberof".to_string()),
|
Modify::Purged("memberof".to_string()),
|
||||||
Modify::Purged("directmemberof".to_string()),
|
Modify::Purged("directmemberof".to_string()),
|
||||||
];
|
];
|
||||||
|
@ -368,7 +371,7 @@ mod tests {
|
||||||
"valid": null,
|
"valid": null,
|
||||||
"state": null,
|
"state": null,
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"class": ["group"],
|
"class": ["group", "memberof"],
|
||||||
"name": ["testgroup_a"],
|
"name": ["testgroup_a"],
|
||||||
"uuid": ["aaaaaaaa-f82e-4484-a407-181aa03bda5c"]
|
"uuid": ["aaaaaaaa-f82e-4484-a407-181aa03bda5c"]
|
||||||
}
|
}
|
||||||
|
@ -380,7 +383,7 @@ mod tests {
|
||||||
"valid": null,
|
"valid": null,
|
||||||
"state": null,
|
"state": null,
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"class": ["group"],
|
"class": ["group", "memberof"],
|
||||||
"name": ["testgroup_b"],
|
"name": ["testgroup_b"],
|
||||||
"uuid": ["bbbbbbbb-2438-4384-9891-48f4c8172e9b"]
|
"uuid": ["bbbbbbbb-2438-4384-9891-48f4c8172e9b"]
|
||||||
}
|
}
|
||||||
|
@ -392,7 +395,7 @@ mod tests {
|
||||||
"valid": null,
|
"valid": null,
|
||||||
"state": null,
|
"state": null,
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"class": ["group"],
|
"class": ["group", "memberof"],
|
||||||
"name": ["testgroup_c"],
|
"name": ["testgroup_c"],
|
||||||
"uuid": ["cccccccc-9b01-423f-9ba6-51aa4bbd5dd2"]
|
"uuid": ["cccccccc-9b01-423f-9ba6-51aa4bbd5dd2"]
|
||||||
}
|
}
|
||||||
|
@ -404,7 +407,7 @@ mod tests {
|
||||||
"valid": null,
|
"valid": null,
|
||||||
"state": null,
|
"state": null,
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"class": ["group"],
|
"class": ["group", "memberof"],
|
||||||
"name": ["testgroup_d"],
|
"name": ["testgroup_d"],
|
||||||
"uuid": ["dddddddd-2ab3-48e3-938d-1b4754cd2984"]
|
"uuid": ["dddddddd-2ab3-48e3-938d-1b4754cd2984"]
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,18 +7,17 @@ use crate::be::Backend;
|
||||||
use crate::async_log::EventLog;
|
use crate::async_log::EventLog;
|
||||||
use crate::error::OperationError;
|
use crate::error::OperationError;
|
||||||
use crate::event::{
|
use crate::event::{
|
||||||
AuthEvent, AuthResult, CreateEvent, DeleteEvent, ModifyEvent, PurgeRecycledEvent,
|
AuthEvent, CreateEvent, DeleteEvent, ModifyEvent, PurgeRecycledEvent, PurgeTombstoneEvent,
|
||||||
PurgeTombstoneEvent, SearchEvent, SearchResult, WhoamiResult,
|
SearchEvent, SearchResult, WhoamiResult,
|
||||||
};
|
};
|
||||||
use crate::schema::{Schema, SchemaTransaction};
|
use crate::schema::Schema;
|
||||||
|
|
||||||
use crate::constants::UUID_ANONYMOUS;
|
|
||||||
use crate::idm::server::IdmServer;
|
use crate::idm::server::IdmServer;
|
||||||
use crate::server::{QueryServer, QueryServerTransaction};
|
use crate::server::{QueryServer, QueryServerTransaction};
|
||||||
|
|
||||||
use crate::proto::v1::{
|
use crate::proto::v1::{
|
||||||
AuthRequest, AuthResponse, AuthState, CreateRequest, DeleteRequest, ModifyRequest,
|
AuthResponse, CreateRequest, DeleteRequest, ModifyRequest, OperationResponse, SearchRequest,
|
||||||
OperationResponse, SearchRequest, SearchResponse, UserAuthToken, WhoamiRequest, WhoamiResponse,
|
SearchResponse, WhoamiResponse,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::proto::v1::messages::{AuthMessage, WhoamiMessage};
|
use crate::proto::v1::messages::{AuthMessage, WhoamiMessage};
|
||||||
|
@ -68,60 +67,19 @@ impl QueryServerV1 {
|
||||||
Err(e) => return Err(e),
|
Err(e) => return Err(e),
|
||||||
};
|
};
|
||||||
|
|
||||||
{
|
|
||||||
let be_txn = be.write();
|
|
||||||
let mut schema_write = schema.write();
|
|
||||||
|
|
||||||
// Now, we have the initial schema in memory. Use this to trigger
|
|
||||||
// an index of the be for the core schema.
|
|
||||||
|
|
||||||
// Now search for the schema itself, and validate that the system
|
|
||||||
// in memory matches the BE on disk, and that it's syntactically correct.
|
|
||||||
// Write it out if changes are needed.
|
|
||||||
|
|
||||||
// Now load the remaining backend schema into memory.
|
|
||||||
// TODO: Schema elements should be versioned individually.
|
|
||||||
match schema_write
|
|
||||||
.bootstrap_core(&mut audit)
|
|
||||||
// TODO: Backend setup indexes as needed from schema, for the core
|
|
||||||
// system schema.
|
|
||||||
// TODO: Trigger an index? This could be costly ...
|
|
||||||
// Perhaps a config option to say if we index on startup or not.
|
|
||||||
// TODO: Check the results!
|
|
||||||
.and_then(|_| {
|
|
||||||
let r = schema_write.validate(&mut audit);
|
|
||||||
if r.len() == 0 {
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(OperationError::ConsistencyError(r))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.and_then(|_| be_txn.commit())
|
|
||||||
.and_then(|_| schema_write.commit())
|
|
||||||
{
|
|
||||||
Ok(_) => {}
|
|
||||||
Err(e) => return Err(e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a query_server implementation
|
// Create a query_server implementation
|
||||||
// TODO: FIXME: CRITICAL: Schema must be ARC/Cow properly!!! Right now it's
|
|
||||||
// not!!!
|
|
||||||
let query_server = QueryServer::new(be, schema);
|
let query_server = QueryServer::new(be, schema);
|
||||||
|
|
||||||
let mut audit_qsc = AuditScope::new("query_server_init");
|
let mut audit_qsc = AuditScope::new("query_server_init");
|
||||||
// This may need to be two parts, one for schema, one for everything else
|
// TODO: Should the IDM parts be broken out to the IdmSerner?
|
||||||
// that way we can reload schema in between.
|
|
||||||
let query_server_write = query_server.write();
|
|
||||||
match query_server_write.initialise(&mut audit_qsc).and_then(|_| {
|
|
||||||
audit_segment!(audit_qsc, || query_server_write.commit(&mut audit_qsc))
|
|
||||||
}) {
|
|
||||||
// We are good to go! Finally commit and consume the txn.
|
|
||||||
Ok(_) => {}
|
|
||||||
Err(e) => return Err(e),
|
|
||||||
};
|
|
||||||
// What's important about this initial setup here is that it also triggers
|
// What's important about this initial setup here is that it also triggers
|
||||||
// the schema and acp reload, so they are now configured correctly!
|
// the schema and acp reload, so they are now configured correctly!
|
||||||
|
// Initialise the schema core.
|
||||||
|
//
|
||||||
|
// Now search for the schema itself, and validate that the system
|
||||||
|
// in memory matches the BE on disk, and that it's syntactically correct.
|
||||||
|
// Write it out if changes are needed.
|
||||||
|
query_server.initialise_helper(&mut audit_qsc)?;
|
||||||
|
|
||||||
// We generate a SINGLE idms only!
|
// We generate a SINGLE idms only!
|
||||||
|
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
// attempt to reflect and map to a simple representation of
|
// attempt to reflect and map to a simple representation of
|
||||||
// the protocol, which was intended to be easy-to-use and accessible.
|
// the protocol, which was intended to be easy-to-use and accessible.
|
||||||
|
|
||||||
|
/*
|
||||||
struct ClientV1 {}
|
struct ClientV1 {}
|
||||||
|
|
||||||
impl ClientV1 {
|
impl ClientV1 {
|
||||||
|
@ -24,3 +25,4 @@ impl ClientV1 {
|
||||||
|
|
||||||
fn delete() -> () {}
|
fn delete() -> () {}
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
|
|
@ -2,7 +2,7 @@ use crate::error::OperationError;
|
||||||
use actix::prelude::*;
|
use actix::prelude::*;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
use crate::proto::v1::{AuthRequest, AuthResponse, UserAuthToken, WhoamiRequest, WhoamiResponse};
|
use crate::proto::v1::{AuthRequest, AuthResponse, UserAuthToken, WhoamiResponse};
|
||||||
|
|
||||||
// These are used when the request (IE Get) has no intrising request
|
// These are used when the request (IE Get) has no intrising request
|
||||||
// type. Additionally, they are used in some requests where we need
|
// type. Additionally, they are used in some requests where we need
|
||||||
|
|
src/lib/schema.rs (1236 changed lines): file diff suppressed because it is too large.

@@ -13,7 +13,10 @@ use crate::access::{
 };
 use crate::constants::{
     JSON_ADMIN_V1, JSON_ANONYMOUS_V1, JSON_IDM_ADMINS_ACP_REVIVE_V1, JSON_IDM_ADMINS_ACP_SEARCH_V1,
-    JSON_IDM_ADMINS_V1, JSON_IDM_SELF_ACP_READ_V1, JSON_SYSTEM_INFO_V1,
+    JSON_IDM_ADMINS_V1, JSON_IDM_SELF_ACP_READ_V1, JSON_SCHEMA_ATTR_DISPLAYNAME,
+    JSON_SCHEMA_ATTR_MAIL, JSON_SCHEMA_ATTR_PASSWORD, JSON_SCHEMA_ATTR_SSH_PUBLICKEY,
+    JSON_SCHEMA_CLASS_ACCOUNT, JSON_SCHEMA_CLASS_GROUP, JSON_SCHEMA_CLASS_PERSON,
+    JSON_SYSTEM_INFO_V1,
 };
 use crate::entry::{Entry, EntryCommitted, EntryInvalid, EntryNew, EntryNormalised, EntryValid};
 use crate::error::{ConsistencyError, OperationError, SchemaError};

@@ -25,7 +28,8 @@ use crate::filter::{Filter, FilterInvalid, FilterValid};
 use crate::modify::{Modify, ModifyInvalid, ModifyList, ModifyValid};
 use crate::plugins::Plugins;
 use crate::schema::{
-    Schema, SchemaReadTransaction, SchemaTransaction, SchemaWriteTransaction, SyntaxType,
+    Schema, SchemaAttribute, SchemaClass, SchemaReadTransaction, SchemaTransaction,
+    SchemaWriteTransaction, SyntaxType,
 };

 // This is the core of the server. It implements all
|
||||||
|
@ -595,6 +599,23 @@ impl QueryServer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn initialise_helper(&self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
||||||
|
let ts_write_1 = self.write();
|
||||||
|
ts_write_1
|
||||||
|
.initialise_schema_core(audit)
|
||||||
|
.and_then(|_| ts_write_1.commit(audit))?;
|
||||||
|
|
||||||
|
let ts_write_2 = self.write();
|
||||||
|
ts_write_2
|
||||||
|
.initialise_schema_idm(audit)
|
||||||
|
.and_then(|_| ts_write_2.commit(audit))?;
|
||||||
|
|
||||||
|
let ts_write_3 = self.write();
|
||||||
|
ts_write_3
|
||||||
|
.initialise_idm(audit)
|
||||||
|
.and_then(|_| ts_write_3.commit(audit))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn verify(&self, au: &mut AuditScope) -> Vec<Result<(), ConsistencyError>> {
|
pub fn verify(&self, au: &mut AuditScope) -> Vec<Result<(), ConsistencyError>> {
|
||||||
let r_txn = self.read();
|
let r_txn = self.read();
|
||||||
r_txn.verify(au)
|
r_txn.verify(au)
|
||||||
|
@ -1188,7 +1209,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
|
||||||
// attributes in the situation.
|
// attributes in the situation.
|
||||||
// If not exist, create from Entry B
|
// If not exist, create from Entry B
|
||||||
//
|
//
|
||||||
// WARNING: this requires schema awareness for multivalue types!
|
// TODO: WARNING: this requires schema awareness for multivalue types!
|
||||||
// We need to either do a schema aware merge, or we just overwrite those
|
// We need to either do a schema aware merge, or we just overwrite those
|
||||||
// few attributes.
|
// few attributes.
|
||||||
//
|
//
|
||||||
|
@ -1269,8 +1290,52 @@ impl<'a> QueryServerWriteTransaction<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn initialise_schema_core(&self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
||||||
|
// Load in all the "core" schema, that we already have in "memory".
|
||||||
|
let entries = self.schema.to_entries();
|
||||||
|
|
||||||
|
// internal_migrate_or_create.
|
||||||
|
let r: Result<_, _> = entries
|
||||||
|
.into_iter()
|
||||||
|
.map(|e| {
|
||||||
|
audit_log!(
|
||||||
|
audit,
|
||||||
|
"init schema -> {}",
|
||||||
|
serde_json::to_string_pretty(&e).unwrap()
|
||||||
|
);
|
||||||
|
self.internal_migrate_or_create(audit, e)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
assert!(r.is_ok());
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn initialise_schema_idm(&self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
||||||
|
// List of IDM schemas to init.
|
||||||
|
let idm_schema: Vec<&str> = vec![
|
||||||
|
JSON_SCHEMA_ATTR_DISPLAYNAME,
|
||||||
|
JSON_SCHEMA_ATTR_MAIL,
|
||||||
|
JSON_SCHEMA_ATTR_SSH_PUBLICKEY,
|
||||||
|
JSON_SCHEMA_ATTR_PASSWORD,
|
||||||
|
JSON_SCHEMA_CLASS_PERSON,
|
||||||
|
JSON_SCHEMA_CLASS_GROUP,
|
||||||
|
JSON_SCHEMA_CLASS_ACCOUNT,
|
||||||
|
];
|
||||||
|
|
||||||
|
let mut audit_si = AuditScope::new("start_initialise_schema_idm");
|
||||||
|
let r: Result<Vec<()>, _> = idm_schema
|
||||||
|
.iter()
|
||||||
|
.map(|e_str| self.internal_migrate_or_create_str(&mut audit_si, e_str))
|
||||||
|
.collect();
|
||||||
|
audit.append_scope(audit_si);
|
||||||
|
assert!(r.is_ok());
|
||||||
|
|
||||||
|
// TODO: Should we log the set of failures some how?
|
||||||
|
r.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
// This function is idempotent
|
// This function is idempotent
|
||||||
pub fn initialise(&self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
pub fn initialise_idm(&self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
||||||
// First, check the system_info object. This stores some server information
|
// First, check the system_info object. This stores some server information
|
||||||
// and details. It's a pretty static thing.
|
// and details. It's a pretty static thing.
|
||||||
let mut audit_si = AuditScope::new("start_system_info");
|
let mut audit_si = AuditScope::new("start_system_info");
|
||||||
|
@ -1331,9 +1396,42 @@ impl<'a> QueryServerWriteTransaction<'a> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn reload_schema(&mut self, _audit: &mut AuditScope) -> Result<(), OperationError> {
|
fn reload_schema(&mut self, audit: &mut AuditScope) -> Result<(), OperationError> {
|
||||||
// supply entries to the writable schema to reload from.
|
// supply entries to the writable schema to reload from.
|
||||||
Ok(())
|
// find all attributes.
|
||||||
|
let filt = filter!(f_eq("class", "attributetype"));
|
||||||
|
let res = try_audit!(audit, self.internal_search(audit, filt));
|
||||||
|
// load them.
|
||||||
|
let attributetypes: Result<Vec<_>, _> = res
|
||||||
|
.iter()
|
||||||
|
.map(|e| SchemaAttribute::try_from(audit, e))
|
||||||
|
.collect();
|
||||||
|
let attributetypes = try_audit!(audit, attributetypes);
|
||||||
|
|
||||||
|
try_audit!(audit, self.schema.update_attributes(attributetypes));
|
||||||
|
|
||||||
|
// find all classes
|
||||||
|
let filt = filter!(f_eq("class", "classtype"));
|
||||||
|
let res = try_audit!(audit, self.internal_search(audit, filt));
|
||||||
|
// load them.
|
||||||
|
let classtypes: Result<Vec<_>, _> = res
|
||||||
|
.iter()
|
||||||
|
.map(|e| SchemaClass::try_from(audit, e))
|
||||||
|
.collect();
|
||||||
|
let classtypes = try_audit!(audit, classtypes);
|
||||||
|
|
||||||
|
try_audit!(audit, self.schema.update_classes(classtypes));
|
||||||
|
|
||||||
|
// validate.
|
||||||
|
let valid_r = self.schema.validate(audit);
|
||||||
|
|
||||||
|
// Translate the result.
|
||||||
|
if valid_r.len() == 0 {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
// Log the failures?
|
||||||
|
unimplemented!();
|
||||||
|
}
|
||||||
}
|
}
|
    fn reload_accesscontrols(&mut self, audit: &mut AuditScope) -> Result<(), OperationError> {
@ -1414,6 +1512,9 @@ impl<'a> QueryServerWriteTransaction<'a> {
    }

    pub fn commit(mut self, audit: &mut AuditScope) -> Result<(), OperationError> {
        // TODO: This could be faster if we cache the set of classes changed
        // in an operation so we can check if we need to do the reload or not
        //
        // Reload the schema from qs.
        self.reload_schema(audit)?;
        // Determine if we need to update access control profiles
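With reload_schema wired into commit, a schema change only becomes effective once its transaction commits. A minimal sketch of the resulting flow, assuming the same entries and calls the tests below use:

// Sketch mirroring test_qs_dynamic_schema_class below.
let server_txn = server.write();
// Create a new classtype entry; the live schema is unchanged inside this txn.
let ce_class = CreateEvent::new_internal(vec![e_cd.clone()]);
assert!(server_txn.create(audit, &ce_class).is_ok());
// commit() calls reload_schema(), so the new class is usable afterwards.
server_txn.commit(audit).expect("should not fail");

// A later transaction can now create entries carrying the new class.
let server_txn = server.write();
let ce_work = CreateEvent::new_internal(vec![e1.clone()]);
assert!(server_txn.create(audit, &ce_work).is_ok());
server_txn.commit(audit).expect("should not fail");
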
@ -1514,30 +1615,31 @@ mod tests {
    }

    #[test]
    fn test_qs_init_idempotent_schema_core() {
        run_test!(|server: &QueryServer, audit: &mut AuditScope| {
            {
                // Setup and abort.
                let server_txn = server.write();
                assert!(server_txn.initialise_schema_core(audit).is_ok());
            }
            {
                let server_txn = server.write();
                assert!(server_txn.initialise_schema_core(audit).is_ok());
                assert!(server_txn.initialise_schema_core(audit).is_ok());
                assert!(server_txn.commit(audit).is_ok());
            }
            {
                // Now do it again in a new txn, but abort
                let server_txn = server.write();
                assert!(server_txn.initialise_schema_core(audit).is_ok());
            }
            {
                // Now do it again in a new txn.
                let server_txn = server.write();
                assert!(server_txn.initialise_schema_core(audit).is_ok());
                assert!(server_txn.commit(audit).is_ok());
            }
            // TODO: Check the content is as expected
        });
    }

@ -2235,4 +2337,168 @@ mod tests {
            assert!(r4 == Ok("cc8e95b4-c24f-4d68-ba54-8bed76f63930".to_string()));
        })
    }

    #[test]
    fn test_qs_dynamic_schema_class() {
        run_test!(|server: &QueryServer, audit: &mut AuditScope| {
            let e1: Entry<EntryInvalid, EntryNew> = serde_json::from_str(
                r#"{
                "valid": null,
                "state": null,
                "attrs": {
                    "class": ["object", "testclass"],
                    "name": ["testobj1"],
                    "uuid": ["cc8e95b4-c24f-4d68-ba54-8bed76f63930"]
                }
            }"#,
            )
            .expect("json failure");

            // Class definition
            let e_cd: Entry<EntryInvalid, EntryNew> = serde_json::from_str(
                r#"{
                "valid": null,
                "state": null,
                "attrs": {
                    "class": ["object", "classtype"],
                    "name": ["testclass"],
                    "uuid": ["cfcae205-31c3-484b-8ced-667d1709c5e3"],
                    "description": ["Test Class"]
                }
            }"#,
            )
            .expect("json failure");

            let server_txn = server.write();
            // Add a new class.
            let ce_class = CreateEvent::new_internal(vec![e_cd.clone()]);
            assert!(server_txn.create(audit, &ce_class).is_ok());
            // Trying to add it now should fail.
            let ce_fail = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_fail).is_err());

            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // Add the class to an object
            // should work
            let ce_work = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_work).is_ok());

            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // delete the class
            let de_class =
                unsafe { DeleteEvent::new_internal_invalid(filter!(f_eq("name", "testclass"))) };
            assert!(server_txn.delete(audit, &de_class).is_ok());
            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // Trying to add now should fail
            let ce_fail = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_fail).is_err());
            // Search our entry
            let testobj1 = server_txn
                .internal_search_uuid(audit, "cc8e95b4-c24f-4d68-ba54-8bed76f63930")
                .expect("failed");
            assert!(testobj1.attribute_value_pres("class", "testclass"));

            // Should still be good
            server_txn.commit(audit).expect("should not fail");
            // Commit.
        })
    }

    #[test]
    fn test_qs_dynamic_schema_attr() {
        run_test!(|server: &QueryServer, audit: &mut AuditScope| {
            let e1: Entry<EntryInvalid, EntryNew> = serde_json::from_str(
                r#"{
                "valid": null,
                "state": null,
                "attrs": {
                    "class": ["object", "extensibleobject"],
                    "name": ["testobj1"],
                    "uuid": ["cc8e95b4-c24f-4d68-ba54-8bed76f63930"],
                    "testattr": ["test"]
                }
            }"#,
            )
            .expect("json failure");

            // Attribute definition
            let e_ad: Entry<EntryInvalid, EntryNew> = serde_json::from_str(
                r#"{
                "valid": null,
                "state": null,
                "attrs": {
                    "class": ["object", "attributetype"],
                    "name": ["testattr"],
                    "uuid": ["cfcae205-31c3-484b-8ced-667d1709c5e3"],
                    "description": ["Test Attribute"],
                    "multivalue": ["false"],
                    "secret": ["false"],
                    "syntax": ["UTF8STRING"],
                    "system": ["false"]
                }
            }"#,
            )
            .expect("json failure");

            let server_txn = server.write();
            // Add a new attribute.
            let ce_attr = CreateEvent::new_internal(vec![e_ad.clone()]);
            assert!(server_txn.create(audit, &ce_attr).is_ok());
            // Trying to add it now should fail. (use extensible object)
            let ce_fail = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_fail).is_err());

            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // Add the attr to an object
            // should work
            let ce_work = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_work).is_ok());

            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // delete the attr
            let de_attr =
                unsafe { DeleteEvent::new_internal_invalid(filter!(f_eq("name", "testattr"))) };
            assert!(server_txn.delete(audit, &de_attr).is_ok());
            // Commit
            server_txn.commit(audit).expect("should not fail");

            // Start a new write
            let server_txn = server.write();
            // Trying to add now should fail
            let ce_fail = CreateEvent::new_internal(vec![e1.clone()]);
            assert!(server_txn.create(audit, &ce_fail).is_err());
            // Search our attribute - should FAIL
            let filt = filter!(f_eq("testattr", "test"));
            assert!(server_txn.internal_search(audit, filt).is_err());
            // Search the entry - the attribute will still be present
            // even if we can't search on it.
            let testobj1 = server_txn
                .internal_search_uuid(audit, "cc8e95b4-c24f-4d68-ba54-8bed76f63930")
                .expect("failed");
            assert!(testobj1.attribute_value_pres("testattr", "test"));

            server_txn.commit(audit).expect("should not fail");
            // Commit.
        })
    }
}