2018-11-11 01:39:11 +01:00
|
|
|
// use serde_json::{Error, Value};
|
2019-04-18 03:28:33 +02:00
|
|
|
use crate::audit::AuditScope;
|
2019-09-04 03:06:37 +02:00
|
|
|
use crate::credential::Credential;
|
2019-05-24 07:11:06 +02:00
|
|
|
use crate::filter::{Filter, FilterInvalid, FilterResolved, FilterValidResolved};
|
2019-04-18 03:28:33 +02:00
|
|
|
use crate::modify::{Modify, ModifyInvalid, ModifyList, ModifyValid};
|
2019-05-24 07:11:06 +02:00
|
|
|
use crate::schema::{SchemaAttribute, SchemaClass, SchemaTransaction};
|
|
|
|
use crate::server::{QueryServerTransaction, QueryServerWriteTransaction};
|
2019-08-27 01:36:54 +02:00
|
|
|
use crate::value::{IndexType, SyntaxType};
|
|
|
|
use crate::value::{PartialValue, Value};
|
2019-09-04 03:06:37 +02:00
|
|
|
use rsidm_proto::v1::Entry as ProtoEntry;
|
|
|
|
use rsidm_proto::v1::Filter as ProtoFilter;
|
|
|
|
use rsidm_proto::v1::{OperationError, SchemaError};
|
2019-05-01 06:06:22 +02:00
|
|
|
|
|
|
|
use crate::be::dbentry::{DbEntry, DbEntryV1, DbEntryVers};
|
|
|
|
|
2018-11-15 04:49:08 +01:00
|
|
|
use std::collections::btree_map::{Iter as BTreeIter, IterMut as BTreeIterMut};
|
2019-08-27 01:36:54 +02:00
|
|
|
use std::collections::btree_set::Iter as BTreeSetIter;
|
2018-10-03 13:21:21 +02:00
|
|
|
use std::collections::BTreeMap;
|
2019-06-07 11:19:09 +02:00
|
|
|
use std::collections::BTreeSet;
|
2019-01-28 08:53:58 +01:00
|
|
|
use std::collections::HashMap;
|
2019-01-29 07:17:02 +01:00
|
|
|
use std::iter::ExactSizeIterator;
|
2019-06-07 11:19:09 +02:00
|
|
|
use uuid::Uuid;
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// use std::convert::TryFrom;
|
|
|
|
// use std::str::FromStr;
|
|
|
|
|
2018-09-29 09:54:16 +02:00
|
|
|
// make a trait entry for everything to adhere to?
|
|
|
|
// * How to get indexs out?
|
|
|
|
// * How to track pending diffs?
|
|
|
|
|
2018-10-03 13:21:21 +02:00
|
|
|
// Entry is really similar to serde Value, but limits the possibility
|
|
|
|
// of what certain types could be.
|
|
|
|
//
|
|
|
|
// The idea of an entry is that we have
|
|
|
|
// an entry that looks like:
|
|
|
|
//
|
|
|
|
// {
|
|
|
|
// 'class': ['object', ...],
|
|
|
|
// 'attr': ['value', ...],
|
|
|
|
// 'attr': ['value', ...],
|
|
|
|
// ...
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
// When we send this as a result to clients, we could embed other objects as:
|
|
|
|
//
|
|
|
|
// {
|
|
|
|
// 'attr': [
|
|
|
|
// 'value': {
|
|
|
|
// },
|
|
|
|
// ],
|
|
|
|
// }
|
|
|
|
//
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
lazy_static! {
    // Pre-built PartialValue for the "extensibleobject" class, so validation
    // does not have to re-construct it on every entry check.
    static ref CLASS_EXTENSIBLE: PartialValue = PartialValue::new_class("extensibleobject");
}
|
|
|
|
|
2018-11-13 08:14:26 +01:00
|
|
|
/// Iterator over the values of an entry's "class" attribute.
pub struct EntryClasses<'a> {
    // Number of class values; cached so ExactSizeIterator::len is O(1).
    size: usize,
    // None when the entry has no "class" attribute at all; then the
    // iterator yields nothing.
    inner: Option<BTreeSetIter<'a, Value>>,
    // _p: &'a PhantomData<()>,
}
|
|
|
|
|
|
|
|
impl<'a> Iterator for EntryClasses<'a> {
|
2019-08-27 01:36:54 +02:00
|
|
|
type Item = &'a Value;
|
2018-11-13 08:14:26 +01:00
|
|
|
|
|
|
|
#[inline]
|
2019-08-27 01:36:54 +02:00
|
|
|
fn next(&mut self) -> Option<(&'a Value)> {
|
2018-11-13 08:14:26 +01:00
|
|
|
match self.inner.iter_mut().next() {
|
|
|
|
Some(i) => i.next(),
|
|
|
|
None => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
|
|
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
|
|
|
match self.inner.iter().next() {
|
|
|
|
Some(i) => i.size_hint(),
|
|
|
|
None => (0, None),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-29 07:17:02 +01:00
|
|
|
impl<'a> ExactSizeIterator for EntryClasses<'a> {
    /// Exact number of class values, captured when the iterator was created.
    fn len(&self) -> usize {
        self.size
    }
}
|
|
|
|
|
2018-11-13 08:14:26 +01:00
|
|
|
/// Iterator over every (attribute name, value set) pair of an entry.
pub struct EntryAvas<'a> {
    // Borrowed iterator over the entry's underlying attrs BTreeMap.
    inner: BTreeIter<'a, String, BTreeSet<Value>>,
}
|
|
|
|
|
|
|
|
impl<'a> Iterator for EntryAvas<'a> {
    type Item = (&'a String, &'a BTreeSet<Value>);

    /// Straight delegation to the underlying BTreeMap iterator.
    #[inline]
    fn next(&mut self) -> Option<(&'a String, &'a BTreeSet<Value>)> {
        self.inner.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
|
|
|
|
|
2018-11-15 04:49:08 +01:00
|
|
|
/// Mutable counterpart of `EntryAvas`: iterates (attribute name, value set)
/// pairs allowing the value sets to be modified in place.
pub struct EntryAvasMut<'a> {
    inner: BTreeIterMut<'a, String, BTreeSet<Value>>,
}
|
|
|
|
|
|
|
|
impl<'a> Iterator for EntryAvasMut<'a> {
    type Item = (&'a String, &'a mut BTreeSet<Value>);

    /// Straight delegation to the underlying mutable BTreeMap iterator.
    #[inline]
    fn next(&mut self) -> Option<(&'a String, &'a mut BTreeSet<Value>)> {
        self.inner.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
|
|
|
|
|
2019-01-22 02:39:56 +01:00
|
|
|
// This is a BE concept, so move it there!
|
2019-01-28 04:54:17 +01:00
|
|
|
|
|
|
|
// Entry should have a lifecycle of types. THis is Raw (modifiable) and Entry (verified).
|
|
|
|
// This way, we can move between them, but only certain actions are possible on either
|
|
|
|
// This means modifications happen on Raw, but to move to Entry, you schema normalise.
|
|
|
|
// Vice versa, you can for free, move to Raw, but you lose the validation.
|
|
|
|
|
|
|
|
// Because this is type system it's "free" in the end, and means we force validation
|
|
|
|
// at the correct and required points of the entries life.
|
|
|
|
|
|
|
|
// This is specifically important for the commit to the backend, as we only want to
|
|
|
|
// commit validated types.
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// State marker: the entry has never been committed to the database.
#[derive(Clone, Copy, Debug)]
pub struct EntryNew; // new
|
2019-08-27 01:36:54 +02:00
|
|
|
// State marker: the entry has been stored in the backend, and therefore
// carries the database-assigned id.
#[derive(Clone, Copy, Debug)]
pub struct EntryCommitted {
    // Backend row/entry identifier assigned at commit time.
    id: u64,
} // It's been in the DB, so it has an id
// pub struct EntryPurged;
|
2019-01-28 06:10:28 +01:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// Validity marker: the entry has passed schema validation.
#[derive(Clone, Debug)]
pub struct EntryValid {
    // Asserted with schema, so we know it has a UUID now ...
    uuid: Uuid,
}
|
|
|
|
|
|
|
|
// Modified, can't be sure of its content! We therefore disregard the UUID
// and on validate, we check it again.
#[derive(Clone, Copy, Debug)]
pub struct EntryInvalid;
|
2019-01-28 06:10:28 +01:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// This state can't exist because everything is normalised now with Value types
|
|
|
|
// #[derive(Clone, Copy, Debug, Deserialize, Serialize)]
|
|
|
|
// pub struct EntryNormalised;
|
2019-06-07 11:19:09 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// Validity marker: the entry's attributes have been reduced (filtered) for
// presentation, e.g. by access controls; see `reduce_attributes`.
#[derive(Clone, Copy, Debug)]
pub struct EntryReduced;
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
/// The core entry type. `VALID` tracks whether the entry has been schema
/// validated (`EntryValid` / `EntryInvalid` / `EntryReduced`), and `STATE`
/// tracks its lifecycle (`EntryNew` / `EntryCommitted`). The type-state
/// pattern forces validation at the required points of an entry's life.
#[derive(Debug)]
pub struct Entry<VALID, STATE> {
    valid: VALID,
    state: STATE,
    // We may need to change this to BTreeSet to allow borrow of Value -> PartialValue for lookups.
    attrs: BTreeMap<String, BTreeSet<Value>>,
}
|
2018-09-29 09:54:16 +02:00
|
|
|
|
2019-05-08 02:39:46 +02:00
|
|
|
// A valid entry is displayed as its uuid (get_uuid is available because
// EntryValid guarantees a uuid is present).
impl<STATE> std::fmt::Display for Entry<EntryValid, STATE> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.get_uuid())
    }
}
|
|
|
|
|
2019-01-28 06:10:28 +01:00
|
|
|
impl Entry<EntryInvalid, EntryNew> {
    /// Create a completely empty, never-committed, not-yet-validated entry.
    #[cfg(test)]
    pub fn new() -> Self {
        Entry {
            // This means NEVER COMMITED
            valid: EntryInvalid,
            state: EntryNew,
            attrs: BTreeMap::new(),
        }
    }

    // Could we consume protoentry?
    //
    // I think we could, but that would limit us to how protoentry works,
    // where we are likely to actually change the Entry type here and how
    // we store and represent types and data.

    /// Build an invalid (not yet schema-checked) new entry from a protocol
    /// entry, converting every string value into the server's internal
    /// `Value` type via `qs.clone_value` (which resolves typed/ref values).
    ///
    /// Returns the first conversion error encountered, if any.
    pub fn from_proto_entry(
        audit: &mut AuditScope,
        e: &ProtoEntry,
        qs: &QueryServerWriteTransaction,
    ) -> Result<Self, OperationError> {
        // Why not the trait? In the future we may want to extend
        // this with server aware functions for changes of the
        // incoming data.

        // Somehow we need to take the tree of e attrs, and convert
        // all ref types to our types ...
        let map2: Result<BTreeMap<String, BTreeSet<Value>>, OperationError> = e
            .attrs
            .iter()
            .map(|(k, v)| {
                // Convert each raw value of this attribute; collect
                // short-circuits on the first Err.
                let nv: Result<BTreeSet<Value>, _> =
                    v.iter().map(|vr| qs.clone_value(audit, &k, vr)).collect();
                match nv {
                    Ok(nvi) => Ok((k.clone(), nvi)),
                    Err(e) => Err(e),
                }
            })
            .collect();

        let x = map2?;

        Ok(Entry {
            // For now, we do a straight move, and we sort the incoming data
            // sets so that BST works.
            state: EntryNew,
            valid: EntryInvalid,
            attrs: x,
        })
    }

    /// As `from_proto_entry`, but first deserialise the proto entry from a
    /// JSON string. Deserialisation failures are audited and mapped to
    /// `OperationError::SerdeJsonError`.
    pub fn from_proto_entry_str(
        audit: &mut AuditScope,
        es: &str,
        qs: &QueryServerWriteTransaction,
    ) -> Result<Self, OperationError> {
        // str -> Proto entry
        let pe: ProtoEntry = try_audit!(
            audit,
            serde_json::from_str(es).map_err(|_| OperationError::SerdeJsonError)
        );
        // now call from_proto_entry
        Self::from_proto_entry(audit, &pe, qs)
    }

    /// Test-only constructor: parse a JSON proto entry and map each known
    /// attribute name to its typed `Value` directly (no query server
    /// involved). Unknown attributes, or values that fail their typed
    /// parse, fall back to plain UTF8 strings with a warning.
    #[cfg(test)]
    pub(crate) fn unsafe_from_entry_str(es: &str) -> Self {
        // Just use log directly here, it's testing
        // str -> proto entry
        let pe: ProtoEntry = serde_json::from_str(es).expect("Invalid Proto Entry");
        // use a static map to convert str -> ava
        let x: BTreeMap<String, BTreeSet<Value>> = pe.attrs.into_iter()
            .map(|(k, vs)| {
                // Attribute names are compared case-insensitively.
                let attr = k.to_lowercase();
                let vv: BTreeSet<Value> = match attr.as_str() {
                    "name" | "version" | "domain" => {
                        vs.into_iter().map(|v| Value::new_iutf8(v)).collect()
                    }
                    "userid" | "uidnumber" => {
                        warn!("WARNING: Use of unstabilised attributes userid/uidnumber");
                        vs.into_iter().map(|v| Value::new_iutf8(v)).collect()
                    }
                    "class" | "acp_create_class" | "acp_modify_class" => {
                        vs.into_iter().map(|v| Value::new_class(v.as_str())).collect()
                    }
                    "acp_create_attr" | "acp_search_attr" | "acp_modify_removedattr" | "acp_modify_presentattr" |
                    "systemmay" | "may" | "systemmust" | "must"
                    => {
                        vs.into_iter().map(|v| Value::new_attr(v.as_str())).collect()
                    }
                    "uuid" => {
                        // Fallible parse; fall back to UTF8 on failure.
                        vs.into_iter().map(|v| Value::new_uuids(v.as_str())
                            .unwrap_or_else(|| {
                                warn!("WARNING: Allowing syntax incorrect attribute to be presented UTF8 string");
                                Value::new_utf8(v)
                            })
                        ).collect()
                    }
                    "member" | "memberof" | "directmemberof" => {
                        vs.into_iter().map(|v| Value::new_refer_s(v.as_str()).unwrap() ).collect()
                    }
                    "acp_enable" | "multivalue" => {
                        vs.into_iter().map(|v| Value::new_bools(v.as_str())
                            .unwrap_or_else(|| {
                                warn!("WARNING: Allowing syntax incorrect attribute to be presented UTF8 string");
                                Value::new_utf8(v)
                            })
                        ).collect()
                    }
                    "syntax" => {
                        vs.into_iter().map(|v| Value::new_syntaxs(v.as_str())
                            .unwrap_or_else(|| {
                                warn!("WARNING: Allowing syntax incorrect attribute to be presented UTF8 string");
                                Value::new_utf8(v)
                            })
                        ).collect()
                    }
                    "index" => {
                        vs.into_iter().map(|v| Value::new_indexs(v.as_str())
                            .unwrap_or_else(|| {
                                warn!("WARNING: Allowing syntax incorrect attribute to be presented UTF8 string");
                                Value::new_utf8(v)
                            })
                        ).collect()
                    }
                    "acp_targetscope" | "acp_receiver" => {
                        vs.into_iter().map(|v| Value::new_json_filter(v.as_str())
                            .unwrap_or_else(|| {
                                warn!("WARNING: Allowing syntax incorrect attribute to be presented UTF8 string");
                                Value::new_utf8(v)
                            })
                        ).collect()
                    }
                    "displayname" | "description" => {
                        vs.into_iter().map(|v| Value::new_utf8(v)).collect()
                    }
                    ia => {
                        // Unknown attribute: keep the data, but loudly.
                        warn!("WARNING: Allowing invalid attribute {} to be interpretted as UTF8 string. YOU MAY ENCOUNTER ODD BEHAVIOUR!!!", ia);
                        vs.into_iter().map(|v| Value::new_utf8(v)).collect()
                    }
                };
                (attr, vv)
            })
            .collect();

        // return the entry!
        Entry {
            state: EntryNew,
            valid: EntryInvalid,
            attrs: x,
        }
    }
}
|
2018-10-03 13:21:21 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
impl<STATE> Entry<EntryInvalid, STATE> {
    /// Best-effort uuid lookup on an unvalidated entry: the first value of
    /// the "uuid" attribute, if present and actually uuid-typed.
    // This is only used in tests today, but I don't want to cfg test it.
    #[allow(dead_code)]
    fn get_uuid(&self) -> Option<&Uuid> {
        match self.attrs.get("uuid") {
            Some(vs) => match vs.iter().take(1).next() {
                // Uv is a value that might contain uuid - we hope it does!
                Some(uv) => uv.to_uuid(),
                _ => None,
            },
            None => None,
        }
    }

    /// Validate this entry against schema, promoting it to
    /// `Entry<EntryValid, STATE>` on success.
    ///
    /// Checks performed:
    /// - a parseable "uuid" attribute must be present;
    /// - a "class" attribute must be present and every class must exist in
    ///   schema;
    /// - all `must` attributes of the entry's classes are present;
    /// - every attribute's values satisfy its schema syntax; unless the
    ///   entry is extensibleobject, attributes must also be in the combined
    ///   must+may set of its classes.
    pub fn validate(
        self,
        schema: &SchemaTransaction,
    ) -> Result<Entry<EntryValid, STATE>, SchemaError> {
        let schema_classes = schema.get_classes();
        let schema_attributes = schema.get_attributes();

        // Extract the uuid first - without it the entry cannot be valid.
        let uuid: Uuid = match &self.attrs.get("uuid") {
            Some(vs) => match vs.iter().take(1).next() {
                Some(uuid_v) => match uuid_v.to_uuid() {
                    Some(uuid) => *uuid,
                    None => return Err(SchemaError::InvalidAttribute),
                },
                None => return Err(SchemaError::MissingMustAttribute("uuid".to_string())),
            },
            None => return Err(SchemaError::MissingMustAttribute("uuid".to_string())),
        };

        // Build the new valid entry ...
        let ne = Entry {
            valid: EntryValid { uuid },
            state: self.state,
            attrs: self.attrs,
        };
        // Now validate it!

        // We scope here to limit the time of borrow of ne.
        {
            // First, check we have class on the object ....
            if !ne.attribute_pres("class") {
                debug!("Missing attribute class");
                return Err(SchemaError::InvalidClass);
            }

            // Do we have extensible?
            let extensible = ne.attribute_value_pres("class", &CLASS_EXTENSIBLE);

            let entry_classes = ne.classes().ok_or(SchemaError::InvalidClass)?;
            let entry_classes_size = entry_classes.len();

            let classes: Vec<&SchemaClass> = entry_classes
                // we specify types here to help me clarify a few things in the
                // development process :)
                .filter_map(|c: &Value| {
                    let x: Option<&SchemaClass> = c.as_string().and_then(|s| schema_classes.get(s));
                    x
                })
                .collect();

            // Any class not found in schema was silently dropped by
            // filter_map above; detect that by comparing counts.
            if classes.len() != entry_classes_size {
                debug!("Class on entry not found in schema?");
                return Err(SchemaError::InvalidClass);
            };

            // What this is really doing is taking a set of classes, and building an
            // "overall" class that describes this exact object for checking. IE we
            // build a super must/may set from the small class must/may sets.

            // for each class
            // add systemmust/must and systemmay/may to their lists
            // add anything from must also into may

            // Now from the set of valid classes make a list of must/may
            //
            // NOTE: We still need this on extensible, because we still need to satisfy
            // our other must conditions as well!
            let must: Result<Vec<&SchemaAttribute>, _> = classes
                .iter()
                // Join our class systemmmust + must into one iter
                .flat_map(|cls| cls.systemmust.iter().chain(cls.must.iter()))
                .map(|s| {
                    // This should NOT fail - if it does, it means our schema is
                    // in an invalid state!
                    Ok(schema_attributes.get(s).ok_or(SchemaError::Corrupted)?)
                })
                .collect();

            let must = must?;

            // Check that all must are inplace
            // for each attr in must, check it's present on our ent
            for attr in must {
                let avas = ne.get_ava(&attr.name);
                if avas.is_none() {
                    return Err(SchemaError::MissingMustAttribute(attr.name.clone()));
                }
            }

            debug!("Extensible object -> {}", extensible);

            if extensible {
                // Extensible: any attribute known to schema is allowed, but
                // its values must still match the attribute's syntax.
                for (attr_name, avas) in ne.avas() {
                    match schema_attributes.get(attr_name) {
                        Some(a_schema) => {
                            // Now, for each type we do a *full* check of the syntax
                            // and validity of the ava.
                            let r = a_schema.validate_ava(avas);
                            match r {
                                Ok(_) => {}
                                Err(e) => {
                                    debug!("Failed to validate: {}", attr_name);
                                    return Err(e);
                                }
                            }
                        }
                        None => {
                            debug!("Invalid Attribute {} for extensible object", attr_name);
                            return Err(SchemaError::InvalidAttribute);
                        }
                    }
                }
            } else {
                // We clone string here, but it's so we can check all
                // the values in "may" ar here - so we can't avoid this look up. What we
                // could do though, is have &String based on the schemaattribute though?;
                let may: Result<HashMap<&String, &SchemaAttribute>, _> = classes
                    .iter()
                    // Join our class systemmmust + must + systemmay + may into one.
                    .flat_map(|cls| {
                        cls.systemmust
                            .iter()
                            .chain(cls.must.iter())
                            .chain(cls.systemmay.iter())
                            .chain(cls.may.iter())
                    })
                    .map(|s| {
                        // This should NOT fail - if it does, it means our schema is
                        // in an invalid state!
                        Ok((s, schema_attributes.get(s).ok_or(SchemaError::Corrupted)?))
                    })
                    .collect();

                let may = may?;

                // TODO #70: Error needs to say what is missing
                // We need to return *all* missing attributes, not just the first error
                // we find. This will probably take a rewrite of the function definition
                // to return a result<_, vec<schemaerror>> and for the schema errors to take
                // information about what is invalid. It's pretty nontrivial.

                // Check that any other attributes are in may
                // for each attr on the object, check it's in the may+must set
                for (attr_name, avas) in ne.avas() {
                    match may.get(attr_name) {
                        Some(a_schema) => {
                            // Now, for each type we do a *full* check of the syntax
                            // and validity of the ava.
                            let r = a_schema.validate_ava(avas);
                            match r {
                                Ok(_) => {}
                                Err(e) => {
                                    debug!("Failed to validate: {}", attr_name);
                                    return Err(e);
                                }
                            }
                        }
                        None => {
                            debug!("Invalid Attribute {} for may+must set", attr_name);
                            return Err(SchemaError::InvalidAttribute);
                        }
                    }
                }
            }
        } // unborrow ne.

        // Well, we got here, so okay!
        Ok(ne)
    }
}
|
2018-12-29 10:56:03 +01:00
|
|
|
|
2019-01-29 08:17:28 +01:00
|
|
|
// Manual Clone rather than derive: STATE only needs Copy (the markers are
// tiny), and this works uniformly across all marker combinations.
impl<VALID, STATE> Clone for Entry<VALID, STATE>
where
    VALID: Clone,
    STATE: Copy,
{
    // Dirty modifiable state. Works on any other state to dirty them.
    fn clone(&self) -> Entry<VALID, STATE> {
        Entry {
            valid: self.valid.clone(),
            state: self.state,
            attrs: self.attrs.clone(),
        }
    }
}
|
2018-11-13 08:14:26 +01:00
|
|
|
|
2019-01-28 10:16:53 +01:00
|
|
|
/*
|
|
|
|
* A series of unsafe transitions allowing entries to skip certain steps in
|
|
|
|
* the process to facilitate eq/checks.
|
|
|
|
*/
|
2019-05-15 02:36:18 +02:00
|
|
|
impl Entry<EntryInvalid, EntryCommitted> {
    /// Test-only unsafe transition: drop the committed state and assert
    /// validity as a new entry. Panics if the uuid attribute is missing or
    /// not uuid-typed.
    #[cfg(test)]
    pub unsafe fn to_valid_new(self) -> Entry<EntryValid, EntryNew> {
        Entry {
            valid: EntryValid {
                uuid: self.get_uuid().expect("Invalid uuid").clone(),
            },
            state: EntryNew,
            attrs: self.attrs,
        }
    }
}
|
|
|
|
// Both invalid states can be reached from "entry -> invalidate"
|
2019-01-28 10:16:53 +01:00
|
|
|
|
2019-05-15 02:36:18 +02:00
|
|
|
impl Entry<EntryInvalid, EntryNew> {
    /// Test-only unsafe transition: assert validity without schema checking.
    /// Panics if the uuid attribute is missing or not uuid-typed.
    #[cfg(test)]
    pub unsafe fn to_valid_new(self) -> Entry<EntryValid, EntryNew> {
        Entry {
            valid: EntryValid {
                uuid: self.get_uuid().expect("Invalid uuid").clone(),
            },
            state: EntryNew,
            attrs: self.attrs,
        }
    }

    /// Test-only unsafe transition: assert validity and committed state
    /// (id 0). If no usable uuid attribute is present, a fresh random uuid
    /// is generated instead of panicking.
    #[cfg(test)]
    pub unsafe fn to_valid_committed(self) -> Entry<EntryValid, EntryCommitted> {
        Entry {
            valid: EntryValid {
                // .cloned() replaces the former
                // `.and_then(|u| Some(u.clone()))` (clippy:
                // bind_instead_of_map); the function reference form replaces
                // the redundant `|| Uuid::new_v4()` closure.
                uuid: self.get_uuid().cloned().unwrap_or_else(Uuid::new_v4),
            },
            state: EntryCommitted { id: 0 },
            attrs: self.attrs,
        }
    }
}
|
2019-01-28 10:16:53 +01:00
|
|
|
|
2019-05-15 02:36:18 +02:00
|
|
|
impl Entry<EntryInvalid, EntryCommitted> {
    /// Test-only unsafe transition: assert validity while keeping the
    /// committed state (and its id). Panics if the uuid attribute is
    /// missing or not uuid-typed.
    #[cfg(test)]
    pub unsafe fn to_valid_committed(self) -> Entry<EntryValid, EntryCommitted> {
        Entry {
            valid: EntryValid {
                uuid: self.get_uuid().expect("Missing UUID!").clone(),
            },
            state: self.state,
            attrs: self.attrs,
        }
    }
}
|
|
|
|
|
|
|
|
impl Entry<EntryValid, EntryNew> {
    /// Test-only unsafe transition: pretend this validated new entry has
    /// been committed, with a placeholder id of 0.
    #[cfg(test)]
    pub unsafe fn to_valid_committed(self) -> Entry<EntryValid, EntryCommitted> {
        Entry {
            valid: self.valid,
            state: EntryCommitted { id: 0 },
            attrs: self.attrs,
        }
    }

    /// Compare attribute content with a committed entry; state and validity
    /// markers are deliberately ignored.
    pub fn compare(&self, rhs: &Entry<EntryValid, EntryCommitted>) -> bool {
        self.attrs == rhs.attrs
    }
}
|
|
|
|
|
|
|
|
impl Entry<EntryValid, EntryCommitted> {
|
2019-05-15 02:36:18 +02:00
|
|
|
    /// Test-only: identity transition so test macros can call
    /// `to_valid_committed` uniformly on any entry state.
    #[cfg(test)]
    pub unsafe fn to_valid_committed(self) -> Entry<EntryValid, EntryCommitted> {
        // NO-OP to satisfy macros.
        self
    }
|
|
|
|
|
2019-01-28 10:16:53 +01:00
|
|
|
    /// Compare attribute content with a validated new entry; state and
    /// validity markers are deliberately ignored.
    pub fn compare(&self, rhs: &Entry<EntryValid, EntryNew>) -> bool {
        self.attrs == rhs.attrs
    }
|
2019-02-24 05:15:28 +01:00
|
|
|
|
|
|
|
    /// Duplicate this entry as a tombstone: same uuid and state, but the
    /// attributes are replaced by just `class = {object, tombstone}` and
    /// `uuid`, erasing all other content.
    pub fn to_tombstone(&self) -> Self {
        // Duplicate this to a tombstone entry
        let class_ava = btreeset![Value::new_class("object"), Value::new_class("tombstone")];

        let mut attrs_new: BTreeMap<String, BTreeSet<Value>> = BTreeMap::new();

        attrs_new.insert(
            "uuid".to_string(),
            btreeset![Value::new_uuidr(&self.valid.uuid)],
        );
        attrs_new.insert("class".to_string(), class_ava);

        Entry {
            valid: self.valid.clone(),
            state: self.state,
            attrs: attrs_new,
        }
    }
|
2019-04-17 05:00:03 +02:00
|
|
|
|
2019-05-01 06:06:22 +02:00
|
|
|
    /// The backend id assigned at commit time.
    pub fn get_id(&self) -> u64 {
        self.state.id
    }
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn from_dbentry(db_e: DbEntry, id: u64) -> Result<Self, ()> {
|
|
|
|
// Convert attrs from db format to value
|
|
|
|
let r_attrs: Result<BTreeMap<String, BTreeSet<Value>>, ()> = match db_e.ent {
|
|
|
|
DbEntryVers::V1(v1) => v1
|
|
|
|
.attrs
|
|
|
|
.into_iter()
|
|
|
|
.map(|(k, vs)| {
|
|
|
|
let vv: Result<BTreeSet<Value>, ()> =
|
|
|
|
vs.into_iter().map(|v| Value::from_db_valuev1(v)).collect();
|
|
|
|
match vv {
|
|
|
|
Ok(vv) => Ok((k, vv)),
|
|
|
|
Err(e) => Err(e),
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect(),
|
2019-05-15 02:36:18 +02:00
|
|
|
};
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let attrs = r_attrs?;
|
|
|
|
|
|
|
|
let uuid: Uuid = match attrs.get("uuid") {
|
|
|
|
Some(vs) => vs.iter().take(1).next(),
|
2019-05-15 02:36:18 +02:00
|
|
|
None => None,
|
2019-08-27 01:36:54 +02:00
|
|
|
}
|
|
|
|
.ok_or(())?
|
|
|
|
// Now map value -> uuid
|
|
|
|
.to_uuid()
|
|
|
|
.ok_or(())?
|
2019-05-15 02:36:18 +02:00
|
|
|
.clone();
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
Ok(Entry {
|
2019-05-15 02:36:18 +02:00
|
|
|
valid: EntryValid { uuid: uuid },
|
2019-05-01 06:06:22 +02:00
|
|
|
state: EntryCommitted { id },
|
2019-05-15 02:36:18 +02:00
|
|
|
attrs: attrs,
|
2019-07-26 10:13:58 +02:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Test-only: mark this entry as reduced without actually filtering any
    /// attributes (compare `reduce_attributes`).
    #[cfg(test)]
    pub fn to_reduced(self) -> Entry<EntryReduced, EntryCommitted> {
        Entry {
            valid: EntryReduced,
            state: self.state,
            attrs: self.attrs,
        }
    }
|
2019-06-07 11:19:09 +02:00
|
|
|
|
2019-07-26 10:13:58 +02:00
|
|
|
pub fn reduce_attributes(
|
|
|
|
self,
|
|
|
|
allowed_attrs: BTreeSet<&str>,
|
|
|
|
) -> Entry<EntryReduced, EntryCommitted> {
|
2019-06-07 11:19:09 +02:00
|
|
|
// Remove all attrs from our tree that are NOT in the allowed set.
|
|
|
|
|
|
|
|
let Entry {
|
2019-07-29 09:09:09 +02:00
|
|
|
valid: _s_valid,
|
2019-06-07 11:19:09 +02:00
|
|
|
state: s_state,
|
|
|
|
attrs: s_attrs,
|
|
|
|
} = self;
|
|
|
|
|
|
|
|
let f_attrs: BTreeMap<_, _> = s_attrs
|
|
|
|
.into_iter()
|
|
|
|
.filter_map(|(k, v)| {
|
|
|
|
if allowed_attrs.contains(k.as_str()) {
|
|
|
|
Some((k, v))
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
Entry {
|
2019-07-26 10:13:58 +02:00
|
|
|
valid: EntryReduced,
|
2019-06-07 11:19:09 +02:00
|
|
|
state: s_state,
|
|
|
|
attrs: f_attrs,
|
|
|
|
}
|
|
|
|
}
|
2019-07-15 08:56:55 +02:00
|
|
|
|
|
|
|
// These are special types to allow returning typed values from
// an entry, if we "know" what we expect to receive.

/// This returns an array of IndexTypes, when the type is an Optional
/// multivalue in schema - IE this will *not* fail if the attribute is
/// empty, yielding an empty array instead.
///
/// However, the conversion to IndexType is fallible, so in case of a failure
/// to convert, an Err is returned.
pub(crate) fn get_ava_opt_index(&self, attr: &str) -> Result<Vec<&IndexType>, ()> {
    match self.attrs.get(attr) {
        Some(av) => {
            // Collecting Result short-circuits: any value that cannot
            // present as an IndexType fails the whole call.
            let r: Result<Vec<_>, _> = av.iter().map(|v| v.to_indextype().ok_or(())).collect();
            r
        }
        // Absent attribute is not an error - empty result.
        None => Ok(Vec::new()),
    }
}
|
|
|
|
|
|
|
|
/// Get a bool from an ava
|
|
|
|
pub fn get_ava_single_bool(&self, attr: &str) -> Option<bool> {
|
|
|
|
match self.get_ava_single(attr) {
|
2019-08-27 01:36:54 +02:00
|
|
|
Some(a) => a.to_bool(),
|
2019-07-15 08:56:55 +02:00
|
|
|
None => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
/// Get a borrowed SyntaxType from an ava: Some only when the attribute has
/// exactly one value and that value presents as a syntax type.
pub fn get_ava_single_syntax(&self, attr: &str) -> Option<&SyntaxType> {
    // and_then replaces the Some/None match - same semantics, less noise.
    self.get_ava_single(attr).and_then(|a| a.to_syntaxtype())
}
|
|
|
|
|
2019-09-04 03:06:37 +02:00
|
|
|
/// Get a borrowed Credential from an ava: Some only when the attribute has
/// exactly one value and that value presents as a credential.
pub fn get_ava_single_credential(&self, attr: &str) -> Option<&Credential> {
    // and_then replaces the Some/None match - same semantics, less noise.
    self.get_ava_single(attr).and_then(|a| a.to_credential())
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
/// Return the referenced uuids of this attribute's values.
/// If any value is NOT a reference, return none!
pub fn get_ava_reference_uuid(&self, attr: &str) -> Option<Vec<&Uuid>> {
    // Collecting an iterator of Option into Option<Vec<_>> yields None
    // as soon as any value fails to present as a reference uuid.
    self.attrs
        .get(attr)
        .and_then(|av| av.iter().map(|e| e.to_ref_uuid()).collect())
}
|
2019-08-27 01:36:54 +02:00
|
|
|
|
|
|
|
/*
|
|
|
|
/// This interface will get &str (if possible).
|
|
|
|
pub(crate) fn get_ava_opt_str(&self, attr: &str) -> Option<Vec<&str>> {
|
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(a) => {
|
|
|
|
let r: Vec<_> = a.iter().filter_map(|v| v.to_str()).collect();
|
|
|
|
if r.len() == 0 {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(r)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => Some(Vec::new()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
*/
|
|
|
|
|
|
|
|
pub(crate) fn get_ava_opt_string(&self, attr: &str) -> Option<Vec<String>> {
|
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(a) => {
|
|
|
|
let r: Vec<String> = a
|
|
|
|
.iter()
|
|
|
|
.filter_map(|v| v.as_string().map(|s| s.clone()))
|
|
|
|
.collect();
|
|
|
|
if r.len() == 0 {
|
|
|
|
// Corrupt?
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(r)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => Some(Vec::new()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub(crate) fn get_ava_string(&self, attr: &str) -> Option<Vec<String>> {
|
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(a) => {
|
|
|
|
let r: Vec<String> = a
|
|
|
|
.iter()
|
|
|
|
.filter_map(|v| v.as_string().map(|s| s.clone()))
|
|
|
|
.collect();
|
|
|
|
if r.len() == 0 {
|
|
|
|
// Corrupt?
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(r)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Get a borrowed &str from a single-valued, string-presentable ava.
pub fn get_ava_single_str(&self, attr: &str) -> Option<&str> {
    self.get_ava_single(attr).and_then(|v| v.to_str())
}
|
|
|
|
|
|
|
|
pub fn get_ava_single_string(&self, attr: &str) -> Option<String> {
|
|
|
|
self.get_ava_single(attr)
|
|
|
|
.and_then(|v: &Value| v.as_string())
|
|
|
|
.and_then(|s: &String| Some((*s).clone()))
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn get_ava_single_protofilter(&self, attr: &str) -> Option<ProtoFilter> {
|
|
|
|
self.get_ava_single(attr)
|
|
|
|
.and_then(|v: &Value| {
|
|
|
|
debug!("get_ava_single_protofilter -> {:?}", v);
|
|
|
|
v.as_json_filter()
|
|
|
|
})
|
|
|
|
.and_then(|f: &ProtoFilter| Some((*f).clone()))
|
|
|
|
}
|
2019-01-28 10:16:53 +01:00
|
|
|
}
|
|
|
|
|
2019-01-28 06:10:28 +01:00
|
|
|
impl<STATE> Entry<EntryValid, STATE> {
|
2019-05-01 06:06:22 +02:00
|
|
|
// Returns the entry in the latest DbEntry format we are aware of.
|
|
|
|
pub fn into_dbentry(&self) -> DbEntry {
|
|
|
|
// In the future this will do extra work to process uuid
|
|
|
|
// into "attributes" suitable for dbentry storage.
|
|
|
|
|
|
|
|
// How will this work with replication?
|
|
|
|
//
|
|
|
|
// Alternately, we may have higher-level types that translate entry
|
|
|
|
// into proper structures, and they themself emit/modify entries?
|
|
|
|
|
|
|
|
DbEntry {
|
|
|
|
ent: DbEntryVers::V1(DbEntryV1 {
|
2019-08-27 01:36:54 +02:00
|
|
|
attrs: self
|
|
|
|
.attrs
|
|
|
|
.iter()
|
|
|
|
.map(|(k, vs)| {
|
|
|
|
let dbvs: Vec<_> = vs.iter().map(|v| v.to_db_valuev1()).collect();
|
|
|
|
(k.clone(), dbvs)
|
|
|
|
})
|
|
|
|
.collect(),
|
2019-05-01 06:06:22 +02:00
|
|
|
}),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-28 06:10:28 +01:00
|
|
|
pub fn invalidate(self) -> Entry<EntryInvalid, STATE> {
|
|
|
|
Entry {
|
2019-01-28 10:16:53 +01:00
|
|
|
valid: EntryInvalid,
|
|
|
|
state: self.state,
|
2019-01-29 08:17:28 +01:00
|
|
|
attrs: self.attrs,
|
2018-11-13 08:14:26 +01:00
|
|
|
}
|
|
|
|
}
|
2018-11-15 04:49:08 +01:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn get_uuid(&self) -> &Uuid {
|
2019-05-15 02:36:18 +02:00
|
|
|
&self.valid.uuid
|
2019-05-08 02:39:46 +02:00
|
|
|
}
|
2018-11-27 11:48:21 +01:00
|
|
|
|
2019-05-24 07:11:06 +02:00
|
|
|
pub fn filter_from_attrs(&self, attrs: &Vec<String>) -> Option<Filter<FilterInvalid>> {
|
|
|
|
// Because we are a valid entry, a filter we create still may not
|
|
|
|
// be valid because the internal server entry templates are still
|
|
|
|
// created by humans! Plus double checking something already valid
|
|
|
|
// is not bad ...
|
2019-02-11 10:49:15 +01:00
|
|
|
//
|
2019-01-20 00:51:22 +01:00
|
|
|
// Generate a filter from the attributes requested and defined.
|
|
|
|
// Basically, this is a series of nested and's (which will be
|
|
|
|
// optimised down later: but if someone wants to solve flatten() ...)
|
|
|
|
|
|
|
|
// Take name: (a, b), name: (c, d) -> (name, a), (name, b), (name, c), (name, d)
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let mut pairs: Vec<(&str, &Value)> = Vec::new();
|
2019-01-20 00:51:22 +01:00
|
|
|
|
|
|
|
for attr in attrs {
|
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(values) => {
|
|
|
|
for v in values {
|
2019-05-24 07:11:06 +02:00
|
|
|
pairs.push((attr, v))
|
2019-01-20 00:51:22 +01:00
|
|
|
}
|
|
|
|
}
|
2019-01-20 01:46:17 +01:00
|
|
|
None => return None,
|
2019-01-20 00:51:22 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-07 11:19:09 +02:00
|
|
|
Some(filter_all!(f_and(
|
2019-05-24 07:11:06 +02:00
|
|
|
pairs
|
|
|
|
.into_iter()
|
2019-08-27 01:36:54 +02:00
|
|
|
.map(|(attr, value)| {
|
|
|
|
// We use FC directly here instead of f_eq to avoid an excess clone.
|
|
|
|
FC::Eq(attr, value.to_partialvalue())
|
|
|
|
})
|
2019-05-24 07:11:06 +02:00
|
|
|
.collect()
|
|
|
|
)))
|
2018-12-30 03:17:09 +01:00
|
|
|
}
|
|
|
|
|
2019-03-01 07:35:06 +01:00
|
|
|
pub fn gen_modlist_assert(
|
|
|
|
&self,
|
2019-05-24 07:11:06 +02:00
|
|
|
schema: &SchemaTransaction,
|
2019-03-12 06:20:08 +01:00
|
|
|
) -> Result<ModifyList<ModifyInvalid>, SchemaError> {
|
2019-01-28 04:54:17 +01:00
|
|
|
// Create a modlist from this entry. We make this assuming we want the entry
|
|
|
|
// to have this one as a subset of values. This means if we have single
|
|
|
|
// values, we'll replace, if they are multivalue, we present them.
|
|
|
|
let mut mods = ModifyList::new();
|
|
|
|
|
|
|
|
for (k, vs) in self.attrs.iter() {
|
2019-04-27 08:26:08 +02:00
|
|
|
// WHY?! We skip uuid here because it is INVALID for a UUID
|
|
|
|
// to be in a modlist, and the base.rs plugin will fail if it
|
|
|
|
// is there. This actually doesn't matter, because to apply the
|
|
|
|
// modlist in these situations we already know the entry MUST
|
|
|
|
// exist with that UUID, we only need to conform it's other
|
|
|
|
// attributes into the same state.
|
|
|
|
//
|
|
|
|
// In the future, if we make uuid a real entry type, then this
|
|
|
|
// check can "go away" because uuid will never exist as an ava.
|
|
|
|
//
|
2019-07-28 13:18:25 +02:00
|
|
|
// NOTE: Remove this check when uuid becomes a real attribute.
|
2019-07-26 10:13:58 +02:00
|
|
|
// UUID is now a real attribute, but it also has an ava for db_entry
|
|
|
|
// conversion - so what do? If we remove it here, we could have CSN issue with
|
|
|
|
// repl on uuid conflict, but it probably shouldn't be an ava either ...
|
2019-07-28 13:18:25 +02:00
|
|
|
// as a result, I think we need to keep this continue line to not cause issues.
|
2019-04-27 08:26:08 +02:00
|
|
|
if k == "uuid" {
|
|
|
|
continue;
|
|
|
|
}
|
2019-01-28 04:54:17 +01:00
|
|
|
// Get the schema attribute type out.
|
|
|
|
match schema.is_multivalue(k) {
|
|
|
|
Ok(r) => {
|
|
|
|
if !r {
|
|
|
|
// As this is single value, purge then present to maintain this
|
|
|
|
// invariant
|
|
|
|
mods.push_mod(Modify::Purged(k.clone()));
|
|
|
|
}
|
|
|
|
}
|
2019-04-27 08:26:08 +02:00
|
|
|
// A schema error happened, fail the whole operation.
|
2019-03-01 07:35:06 +01:00
|
|
|
Err(e) => return Err(e),
|
2019-01-28 04:54:17 +01:00
|
|
|
}
|
|
|
|
for v in vs {
|
|
|
|
mods.push_mod(Modify::Present(k.clone(), v.clone()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(mods)
|
|
|
|
}
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-07-26 10:13:58 +02:00
|
|
|
impl Entry<EntryReduced, EntryCommitted> {
|
|
|
|
pub fn into_pe(&self) -> ProtoEntry {
|
2019-08-27 01:36:54 +02:00
|
|
|
// Turn values -> Strings.
|
2019-07-26 10:13:58 +02:00
|
|
|
ProtoEntry {
|
2019-08-27 01:36:54 +02:00
|
|
|
attrs: self
|
|
|
|
.attrs
|
|
|
|
.iter()
|
|
|
|
.map(|(k, vs)| {
|
|
|
|
let pvs: Vec<_> = vs.iter().map(|v| v.to_proto_string_clone()).collect();
|
|
|
|
(k.clone(), pvs)
|
|
|
|
})
|
|
|
|
.collect(),
|
2019-07-26 10:13:58 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// impl<STATE> Entry<EntryValid, STATE> {
|
2019-01-28 06:10:28 +01:00
|
|
|
impl<VALID, STATE> Entry<VALID, STATE> {
|
2019-07-26 10:13:58 +02:00
|
|
|
/*
|
|
|
|
* WARNING: Should these TODO move to EntryValid only?
|
|
|
|
* I've tried to do this once, but the issue is that there
|
|
|
|
* is a lot of code in normalised and other states that
|
|
|
|
* relies on the ability to get ava. I think we may not be
|
|
|
|
* able to do so "easily".
|
|
|
|
*/
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn get_ava(&self, attr: &str) -> Option<Vec<&Value>> {
|
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(vs) => {
|
|
|
|
let x: Vec<_> = vs.iter().collect();
|
|
|
|
Some(x)
|
|
|
|
}
|
|
|
|
None => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn get_ava_set(&self, attr: &str) -> Option<BTreeSet<&Value>> {
|
|
|
|
self.attrs
|
|
|
|
.get(attr)
|
|
|
|
.and_then(|vs| Some(vs.iter().collect()))
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn get_ava_set_str(&self, attr: &str) -> Option<BTreeSet<&str>> {
|
|
|
|
self.attrs.get(attr).and_then(|vs| {
|
|
|
|
let x: Option<BTreeSet<_>> = vs.iter().map(|s| s.to_str()).collect();
|
|
|
|
x
|
2019-06-07 11:19:09 +02:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2019-05-24 07:11:06 +02:00
|
|
|
// Returns NONE if there is more than ONE!!!!
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn get_ava_single(&self, attr: &str) -> Option<&Value> {
|
2019-05-24 07:11:06 +02:00
|
|
|
match self.attrs.get(attr) {
|
|
|
|
Some(vs) => {
|
|
|
|
if vs.len() != 1 {
|
|
|
|
None
|
|
|
|
} else {
|
2019-08-27 01:36:54 +02:00
|
|
|
vs.iter().take(1).next()
|
2019-05-24 07:11:06 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
None => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-07 11:19:09 +02:00
|
|
|
pub fn get_ava_names(&self) -> BTreeSet<&str> {
|
|
|
|
// Get the set of all attribute names in the entry
|
|
|
|
let r: BTreeSet<&str> = self.attrs.keys().map(|a| a.as_str()).collect();
|
|
|
|
r
|
|
|
|
}
|
|
|
|
|
2019-01-28 06:10:28 +01:00
|
|
|
pub fn attribute_pres(&self, attr: &str) -> bool {
|
2019-07-26 10:13:58 +02:00
|
|
|
// Note, we don't normalise attr name, but I think that's not
|
|
|
|
// something we should over-optimise on.
|
2019-01-28 06:10:28 +01:00
|
|
|
self.attrs.contains_key(attr)
|
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
#[inline]
|
|
|
|
pub fn attribute_value_pres(&self, attr: &str, value: &PartialValue) -> bool {
|
2019-07-26 10:13:58 +02:00
|
|
|
// Yeah, this is techdebt, but both names of this fn are valid - we are
|
|
|
|
// checking if an attribute-value is equal to, or asserting it's present
|
|
|
|
// as a pair. So I leave both, and let the compiler work it out.
|
|
|
|
self.attribute_equality(attr, value)
|
2019-05-08 02:39:46 +02:00
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn attribute_equality(&self, attr: &str, value: &PartialValue) -> bool {
|
2019-01-28 06:10:28 +01:00
|
|
|
// we assume based on schema normalisation on the way in
|
|
|
|
// that the equality here of the raw values MUST be correct.
|
|
|
|
// We also normalise filters, to ensure that their values are
|
|
|
|
// syntax valid and will correctly match here with our indexes.
|
2019-07-26 10:13:58 +02:00
|
|
|
match self.attrs.get(attr) {
|
2019-08-27 01:36:54 +02:00
|
|
|
Some(v_list) => v_list.contains(value),
|
2019-07-26 10:13:58 +02:00
|
|
|
None => false,
|
|
|
|
}
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn attribute_substring(&self, attr: &str, subvalue: &PartialValue) -> bool {
|
2019-07-26 10:13:58 +02:00
|
|
|
match self.attrs.get(attr) {
|
2019-07-27 08:54:31 +02:00
|
|
|
Some(v_list) => v_list
|
|
|
|
.iter()
|
|
|
|
.fold(false, |acc, v| if acc { acc } else { v.contains(subvalue) }),
|
2019-07-26 10:13:58 +02:00
|
|
|
None => false,
|
|
|
|
}
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-07-26 10:13:58 +02:00
|
|
|
pub fn classes(&self) -> Option<EntryClasses> {
|
2019-01-28 06:10:28 +01:00
|
|
|
// Get the class vec, if any?
|
|
|
|
// How do we indicate "empty?"
|
2019-07-26 10:13:58 +02:00
|
|
|
let v = self.attrs.get("class").map(|c| c.len())?;
|
2019-01-28 06:10:28 +01:00
|
|
|
let c = self.attrs.get("class").map(|c| c.iter());
|
2019-07-26 10:13:58 +02:00
|
|
|
Some(EntryClasses { size: v, inner: c })
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn avas(&self) -> EntryAvas {
|
|
|
|
EntryAvas {
|
|
|
|
inner: self.attrs.iter(),
|
|
|
|
}
|
|
|
|
}
|
2019-06-07 11:19:09 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
// Since EntryValid/Invalid is just about class adherenece, not Value correctness, we
|
|
|
|
// can now apply filters to invalid entries - why? Because even if they aren't class
|
|
|
|
// valid, we still have strict typing checks between the filter -> entry to guarantee
|
|
|
|
// they should be functional. We'll never match something that isn't syntactially valid.
|
|
|
|
pub fn entry_match_no_index(&self, filter: &Filter<FilterValidResolved>) -> bool {
|
|
|
|
self.entry_match_no_index_inner(filter.to_inner())
|
|
|
|
}
|
|
|
|
|
2019-06-07 11:19:09 +02:00
|
|
|
// This is private, but exists on all types, so that valid and normal can then
|
|
|
|
// expose the simpler wrapper for entry_match_no_index only.
|
|
|
|
// Assert if this filter matches the entry (no index)
|
|
|
|
fn entry_match_no_index_inner(&self, filter: &FilterResolved) -> bool {
|
|
|
|
// Go through the filter components and check them in the entry.
|
|
|
|
// This is recursive!!!!
|
|
|
|
match filter {
|
2019-08-27 01:36:54 +02:00
|
|
|
FilterResolved::Eq(attr, value) => self.attribute_equality(attr.as_str(), value),
|
2019-06-07 11:19:09 +02:00
|
|
|
FilterResolved::Sub(attr, subvalue) => {
|
2019-08-27 01:36:54 +02:00
|
|
|
self.attribute_substring(attr.as_str(), subvalue)
|
2019-06-07 11:19:09 +02:00
|
|
|
}
|
|
|
|
FilterResolved::Pres(attr) => {
|
|
|
|
// Given attr, is is present in the entry?
|
|
|
|
self.attribute_pres(attr.as_str())
|
|
|
|
}
|
|
|
|
FilterResolved::Or(l) => l.iter().fold(false, |acc, f| {
|
|
|
|
// Check with ftweedal about or filter zero len correctness.
|
|
|
|
if acc {
|
|
|
|
acc
|
|
|
|
} else {
|
|
|
|
self.entry_match_no_index_inner(f)
|
|
|
|
}
|
|
|
|
}),
|
|
|
|
FilterResolved::And(l) => l.iter().fold(true, |acc, f| {
|
|
|
|
// Check with ftweedal about and filter zero len correctness.
|
|
|
|
if acc {
|
|
|
|
self.entry_match_no_index_inner(f)
|
|
|
|
} else {
|
|
|
|
acc
|
|
|
|
}
|
|
|
|
}),
|
|
|
|
FilterResolved::AndNot(f) => !self.entry_match_no_index_inner(f),
|
|
|
|
}
|
|
|
|
}
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-01-29 07:17:02 +01:00
|
|
|
impl<STATE> Entry<EntryInvalid, STATE>
|
2019-01-29 08:17:28 +01:00
|
|
|
where
|
|
|
|
STATE: Copy,
|
2019-01-29 07:17:02 +01:00
|
|
|
{
|
2019-01-28 06:10:28 +01:00
|
|
|
// This should always work? It's only on validate that we'll build
|
|
|
|
// a list of syntax violations ...
|
|
|
|
// If this already exists, we silently drop the event? Is that an
|
|
|
|
// acceptable interface?
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn add_ava(&mut self, attr: &str, value: &Value) {
|
2019-01-28 06:10:28 +01:00
|
|
|
// How do we make this turn into an ok / err?
|
|
|
|
self.attrs
|
2019-05-24 07:11:06 +02:00
|
|
|
.entry(attr.to_string())
|
2019-01-28 06:10:28 +01:00
|
|
|
.and_modify(|v| {
|
|
|
|
// Here we need to actually do a check/binary search ...
|
2019-08-27 01:36:54 +02:00
|
|
|
if v.contains(value) {
|
2019-01-28 06:10:28 +01:00
|
|
|
// It already exists, done!
|
2019-08-27 01:36:54 +02:00
|
|
|
} else {
|
|
|
|
v.insert(value.clone());
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
})
|
2019-08-27 01:36:54 +02:00
|
|
|
.or_insert(btreeset![value.clone()]);
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
fn remove_ava(&mut self, attr: &str, value: &PartialValue) {
|
2019-07-26 10:13:58 +02:00
|
|
|
// It would be great to remove these extra allocations, but they
|
|
|
|
// really don't cost much :(
|
|
|
|
self.attrs.entry(attr.to_string()).and_modify(|v| {
|
|
|
|
// Here we need to actually do a check/binary search ...
|
2019-08-27 01:36:54 +02:00
|
|
|
v.remove(value);
|
2019-07-26 10:13:58 +02:00
|
|
|
});
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
2019-05-24 07:11:06 +02:00
|
|
|
pub fn purge_ava(&mut self, attr: &str) {
|
2019-01-28 06:10:28 +01:00
|
|
|
self.attrs.remove(attr);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Overwrite the existing avas.
|
2019-08-27 01:36:54 +02:00
|
|
|
pub fn set_avas(&mut self, attr: &str, values: Vec<Value>) {
|
|
|
|
// Overwrite the existing value, build a tree from the list.
|
|
|
|
let x: BTreeSet<_> = values.into_iter().collect();
|
|
|
|
let _ = self.attrs.insert(attr.to_string(), x);
|
2019-01-28 06:10:28 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn avas_mut(&mut self) -> EntryAvasMut {
|
|
|
|
EntryAvasMut {
|
|
|
|
inner: self.attrs.iter_mut(),
|
|
|
|
}
|
|
|
|
}
|
2019-01-28 04:54:17 +01:00
|
|
|
|
|
|
|
// Should this be schemaless, relying on checks of the modlist, and the entry validate after?
|
2019-07-26 10:13:58 +02:00
|
|
|
// YES. Makes it very cheap.
|
2019-05-08 02:39:46 +02:00
|
|
|
pub fn apply_modlist(&mut self, modlist: &ModifyList<ModifyValid>) {
|
|
|
|
// -> Result<Entry<EntryInvalid, STATE>, OperationError> {
|
2019-01-28 04:54:17 +01:00
|
|
|
// Apply a modlist, generating a new entry that conforms to the changes.
|
|
|
|
// This is effectively clone-and-transform
|
|
|
|
|
|
|
|
// mutate
|
2019-03-12 06:20:08 +01:00
|
|
|
for modify in modlist {
|
2019-01-28 04:54:17 +01:00
|
|
|
match modify {
|
2019-08-27 01:36:54 +02:00
|
|
|
Modify::Present(a, v) => self.add_ava(a.as_str(), v),
|
|
|
|
Modify::Removed(a, v) => self.remove_ava(a.as_str(), v),
|
2019-05-24 07:11:06 +02:00
|
|
|
Modify::Purged(a) => self.purge_ava(a.as_str()),
|
2019-01-28 04:54:17 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-09-29 09:54:16 +02:00
|
|
|
}
|
|
|
|
|
2019-01-28 06:10:28 +01:00
|
|
|
impl<VALID, STATE> PartialEq for Entry<VALID, STATE> {
    // Equality considers only the attribute maps of the two entries.
    fn eq(&self, rhs: &Entry<VALID, STATE>) -> bool {
        // This may look naive - but it is correct. This is because
        // all items that end up in an item MUST have passed through
        // schema validation and normalisation so we can assume that
        // all rules were applied correctly. Thus we can just simply
        // do a char-compare like this.
        //
        // Of course, this is only true on the "Valid" types ... the others
        // are not guaranteed to support this ... but more likely that will
        // just end in eager false-results. We'll never say something is true
        // that should NOT be.
        self.attrs == rhs.attrs
    }
}
|
|
|
|
|
2019-07-15 08:56:55 +02:00
|
|
|
impl From<&SchemaAttribute> for Entry<EntryValid, EntryNew> {
|
|
|
|
fn from(s: &SchemaAttribute) -> Self {
|
|
|
|
// Convert an Attribute to an entry ... make it good!
|
2019-08-27 01:36:54 +02:00
|
|
|
let uuid = s.uuid.clone();
|
|
|
|
let uuid_v = btreeset![Value::new_uuidr(&uuid)];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let name_v = btreeset![Value::new_iutf8(s.name.clone())];
|
|
|
|
let desc_v = btreeset![Value::new_utf8(s.description.clone())];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let multivalue_v = btreeset![Value::from(s.multivalue)];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let index_v: BTreeSet<_> = s.index.iter().map(|i| Value::from(i.clone())).collect();
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let syntax_v = btreeset![Value::from(s.syntax.clone())];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
|
|
|
// Build the BTreeMap of the attributes relevant
|
2019-08-27 01:36:54 +02:00
|
|
|
let mut attrs: BTreeMap<String, BTreeSet<Value>> = BTreeMap::new();
|
2019-07-15 08:56:55 +02:00
|
|
|
attrs.insert("name".to_string(), name_v);
|
|
|
|
attrs.insert("description".to_string(), desc_v);
|
|
|
|
attrs.insert("uuid".to_string(), uuid_v);
|
|
|
|
attrs.insert("multivalue".to_string(), multivalue_v);
|
|
|
|
attrs.insert("index".to_string(), index_v);
|
|
|
|
attrs.insert("syntax".to_string(), syntax_v);
|
|
|
|
attrs.insert(
|
|
|
|
"class".to_string(),
|
2019-08-27 01:36:54 +02:00
|
|
|
btreeset![
|
|
|
|
Value::new_class("object"),
|
|
|
|
Value::new_class("system"),
|
|
|
|
Value::new_class("attributetype")
|
2019-07-20 04:04:38 +02:00
|
|
|
],
|
2019-07-15 08:56:55 +02:00
|
|
|
);
|
|
|
|
|
|
|
|
// Insert stuff.
|
|
|
|
|
|
|
|
Entry {
|
2019-08-27 01:36:54 +02:00
|
|
|
valid: EntryValid { uuid: uuid },
|
2019-07-15 08:56:55 +02:00
|
|
|
state: EntryNew,
|
|
|
|
attrs: attrs,
|
|
|
|
}
|
|
|
|
}
|
2018-09-29 09:54:16 +02:00
|
|
|
}
|
|
|
|
|
2019-07-15 08:56:55 +02:00
|
|
|
impl From<&SchemaClass> for Entry<EntryValid, EntryNew> {
|
|
|
|
fn from(s: &SchemaClass) -> Self {
|
2019-08-27 01:36:54 +02:00
|
|
|
let uuid = s.uuid.clone();
|
|
|
|
let uuid_v = btreeset![Value::new_uuidr(&uuid)];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let name_v = btreeset![Value::new_iutf8(s.name.clone())];
|
|
|
|
let desc_v = btreeset![Value::new_utf8(s.description.clone())];
|
2019-07-15 08:56:55 +02:00
|
|
|
|
2019-08-27 01:36:54 +02:00
|
|
|
let mut attrs: BTreeMap<String, BTreeSet<Value>> = BTreeMap::new();
|
2019-07-15 08:56:55 +02:00
|
|
|
attrs.insert("name".to_string(), name_v);
|
|
|
|
attrs.insert("description".to_string(), desc_v);
|
|
|
|
attrs.insert("uuid".to_string(), uuid_v);
|
|
|
|
attrs.insert(
|
|
|
|
"class".to_string(),
|
2019-08-27 01:36:54 +02:00
|
|
|
btreeset![
|
|
|
|
Value::new_class("object"),
|
|
|
|
Value::new_class("system"),
|
|
|
|
Value::new_class("classtype")
|
2019-07-20 04:04:38 +02:00
|
|
|
],
|
2019-07-15 08:56:55 +02:00
|
|
|
);
|
2019-08-27 01:36:54 +02:00
|
|
|
|
|
|
|
if s.systemmay.len() > 0 {
|
|
|
|
attrs.insert(
|
|
|
|
"systemmay".to_string(),
|
|
|
|
s.systemmay
|
|
|
|
.iter()
|
|
|
|
.map(|sm| Value::new_attr(sm.as_str()))
|
|
|
|
.collect(),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
if s.systemmust.len() > 0 {
|
|
|
|
attrs.insert(
|
|
|
|
"systemmust".to_string(),
|
|
|
|
s.systemmust
|
|
|
|
.iter()
|
|
|
|
.map(|sm| Value::new_attr(sm.as_str()))
|
|
|
|
.collect(),
|
|
|
|
);
|
|
|
|
}
|
2019-07-15 08:56:55 +02:00
|
|
|
|
|
|
|
Entry {
|
2019-08-27 01:36:54 +02:00
|
|
|
valid: EntryValid { uuid: uuid },
|
2019-07-15 08:56:55 +02:00
|
|
|
state: EntryNew,
|
|
|
|
attrs: attrs,
|
|
|
|
}
|
|
|
|
}
|
2018-09-29 09:54:16 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// Unit tests for entry construction, ava manipulation, matching and
// modlist application.
#[cfg(test)]
mod tests {
    use crate::entry::{Entry, EntryInvalid, EntryNew};
    use crate::modify::{Modify, ModifyList};
    use crate::value::{PartialValue, Value};

    #[test]
    fn test_entry_basic() {
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();

        e.add_ava("userid", &Value::from("william"));
    }

    #[test]
    fn test_entry_dup_value() {
        // Schema doesn't matter here because we are duplicating a value
        // it should fail!

        // We still probably need schema here anyway to validate what we
        // are adding ... Or do we validate after the changes are made in
        // total?
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();
        e.add_ava("userid", &Value::from("william"));
        e.add_ava("userid", &Value::from("william"));

        let values = e.get_ava("userid").expect("Failed to get ava");
        // Should only be one value!
        assert_eq!(values.len(), 1)
    }

    #[test]
    fn test_entry_pres() {
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();
        e.add_ava("userid", &Value::from("william"));

        assert!(e.attribute_pres("userid"));
        assert!(!e.attribute_pres("name"));
    }

    #[test]
    fn test_entry_equality() {
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();

        e.add_ava("userid", &Value::from("william"));

        assert!(e.attribute_equality("userid", &PartialValue::new_utf8s("william")));
        assert!(!e.attribute_equality("userid", &PartialValue::new_utf8s("test")));
        assert!(!e.attribute_equality("nonexist", &PartialValue::new_utf8s("william")));
        // Also test non-matching attr syntax
        assert!(!e.attribute_equality("userid", &PartialValue::new_class("william")));
    }

    #[test]
    fn test_entry_substring() {
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();

        e.add_ava("userid", &Value::from("william"));

        assert!(e.attribute_substring("userid", &PartialValue::new_utf8s("william")));
        assert!(e.attribute_substring("userid", &PartialValue::new_utf8s("will")));
        assert!(e.attribute_substring("userid", &PartialValue::new_utf8s("liam")));
        assert!(e.attribute_substring("userid", &PartialValue::new_utf8s("lli")));
        assert!(!e.attribute_substring("userid", &PartialValue::new_utf8s("llim")));
        assert!(!e.attribute_substring("userid", &PartialValue::new_utf8s("bob")));
        assert!(!e.attribute_substring("userid", &PartialValue::new_utf8s("wl")));
    }

    #[test]
    fn test_entry_apply_modlist() {
        // Test application of changes to an entry.
        let mut e: Entry<EntryInvalid, EntryNew> = Entry::new();
        e.add_ava("userid", &Value::from("william"));

        let mods = unsafe {
            ModifyList::new_valid_list(vec![Modify::Present(
                String::from("attr"),
                Value::new_iutf8s("value"),
            )])
        };

        e.apply_modlist(&mods);

        // Assert the changes are there
        assert!(e.attribute_equality("attr", &PartialValue::new_iutf8s("value")));

        // Assert present for multivalue
        // Assert purge on single/multi/empty value
        // Assert removed on value that exists and doesn't exist
    }
}
|