add normalise entries

William Brown 2018-11-15 16:49:08 +13:00
parent dddd04898c
commit a5ebac54c7
3 changed files with 129 additions and 11 deletions

View file

@@ -1,5 +1,5 @@
 // use serde_json::{Error, Value};
-use std::collections::btree_map::Iter as BTreeIter;
+use std::collections::btree_map::{Iter as BTreeIter, IterMut as BTreeIterMut};
 use std::collections::BTreeMap;
 use std::marker::PhantomData;
 use std::slice::Iter as SliceIter;
@@ -74,6 +74,24 @@ impl<'a> Iterator for EntryAvas<'a> {
     }
 }
 
+pub struct EntryAvasMut<'a> {
+    inner: BTreeIterMut<'a, String, Vec<String>>,
+}
+
+impl<'a> Iterator for EntryAvasMut<'a> {
+    type Item = (&'a String, &'a mut Vec<String>);
+
+    #[inline]
+    fn next(&mut self) -> Option<(&'a String, &'a mut Vec<String>)> {
+        self.inner.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 pub struct Entry {
     attrs: BTreeMap<String, Vec<String>>,
@@ -106,6 +124,12 @@ impl Entry {
             .or_insert(vec![value]);
     }
 
+    // FIXME: Should this collect from iter instead?
+    pub fn add_avas(&mut self, attr: String, values: Vec<String>) {
+        // Overwrite the existing value
+        let _ = self.attrs.insert(attr, values);
+    }
+
     pub fn get_ava(&self, attr: &String) -> Option<&Vec<String>> {
         self.attrs.get(attr)
     }
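Aside (illustrative only, not part of the diff): unlike the single-value add helper, add_avas replaces the entire value list for an attribute. A minimal sketch, assuming the existing add_ava(attr: String, value: String) method referenced elsewhere in this commit:

    let mut e = Entry::new();
    e.add_ava(String::from("class"), String::from("object"));
    // add_avas overwrites: any previous "class" values are dropped.
    e.add_avas(
        String::from("class"),
        vec![String::from("object"), String::from("person")],
    );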
@@ -141,11 +165,16 @@ impl Entry {
     }
 
     pub fn avas(&self) -> EntryAvas {
-        // Get all attr:value pairs.
         EntryAvas {
             inner: self.attrs.iter(),
         }
     }
+
+    pub fn avas_mut(&mut self) -> EntryAvasMut {
+        EntryAvasMut {
+            inner: self.attrs.iter_mut(),
+        }
+    }
 }
 
 impl Clone for Entry {
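For illustration (not part of the diff), the new avas_mut accessor lets a caller rewrite attribute values in place:

    let mut e = Entry::new();
    e.add_avas(String::from("name"), vec![String::from("TestPerson")]);
    // Lower-case every value of every attribute, in place.
    for (_attr, values) in e.avas_mut() {
        for v in values.iter_mut() {
            *v = v.to_lowercase();
        }
    }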

View file

@@ -181,14 +181,25 @@ impl SchemaAttribute {
         }
     }
 
-    pub fn normalise_ava(&self, attr_type: String, attr_value: String) {
-        // Given some types, we can normalise them in sane and consistent
-        // ways. This is generally used in add_ava, and filter
-        // modification.
-        // Given the attr_type load the schema_attribute
-        // given the syntax, normalise.
+    pub fn normalise_syntax(&self, v: &String) -> String {
+        v.to_uppercase()
+    }
+
+    pub fn normalise_index(&self, v: &String) -> String {
+        v.to_uppercase()
+    }
+
+    pub fn normalise_utf8string_insensitive(&self, v: &String) -> String {
+        v.to_lowercase()
+    }
+
+    pub fn normalise_value(&self, v: &String) -> String {
+        match self.syntax {
+            SyntaxType::SYNTAX_ID => self.normalise_syntax(v),
+            SyntaxType::INDEX_ID => self.normalise_index(v),
+            SyntaxType::UTF8STRING_INSENSITIVE => self.normalise_utf8string_insensitive(v),
+            _ => v.clone(),
+        }
     }
 }
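Concretely (illustrative only), normalise_value folds case by syntax: SYNTAX_ID and INDEX_ID values are uppercased, UTF8STRING_INSENSITIVE values are lowercased, and anything else is returned unchanged. Assuming attr is a SchemaAttribute whose syntax is UTF8STRING_INSENSITIVE:

    assert_eq!(attr.normalise_value(&String::from("TestPerson")), "testperson");
    // With SYNTAX_ID or INDEX_ID syntax the same call uppercases instead,
    // e.g. "utf8string" -> "UTF8STRING", "equality" -> "EQUALITY".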
@@ -705,8 +716,39 @@ impl Schema {
         Ok(())
     }
 
-    // Normalise also validates?
-    pub fn normalise_entry(&mut self) {}
+    pub fn normalise_entry(&mut self, entry: &Entry) -> Entry {
+        // We duplicate the entry here, because we can't
+        // modify what we got on the protocol level. It also
+        // lets us extend and change things.
+        let mut entry_new: Entry = Entry::new();
+        // Better hope we have the attribute type ...
+        let schema_attr_name = self.attributes.get("name").unwrap();
+        // For each ava
+        for (attr_name, avas) in entry.avas() {
+            let attr_name_normal: String = schema_attr_name.normalise_value(attr_name);
+            // Get the needed schema type
+            let schema_a_r = self.attributes.get(&attr_name_normal);
+            // if we can't find schema_a, clone and push
+            // else
+            let avas_normal: Vec<String> = match schema_a_r {
+                Some(schema_a) => {
+                    avas.iter()
+                        .map(|av| {
+                            // normalise those based on schema?
+                            schema_a.normalise_value(av)
+                        })
+                        .collect()
+                }
+                None => avas.clone(),
+            };
+            // now push those to the new entry.
+            entry_new.add_avas(attr_name_normal, avas_normal);
+        }
+        // Done!
+        entry_new
+    }
 
     // This needs to be recursive?
     pub fn validate_filter(&self, filt: &Filter) -> Result<(), SchemaError> {
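To make the flow concrete, a minimal usage sketch (not part of the diff; assumes an already-deserialised Entry named e):

    let mut schema = Schema::new();
    schema.bootstrap_core();
    // Attribute names are folded through the schema's own "name" attribute
    // (case-insensitive, so lowercased); each value is then normalised by
    // its attribute's syntax, and values of unknown attributes are copied unchanged.
    let e_normal = schema.normalise_entry(&e);
    assert_eq!(schema.validate_entry(&e_normal), Ok(()));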
@@ -1045,10 +1087,56 @@ mod tests {
             }"#,
         )
         .unwrap();
 
         assert_eq!(schema.validate_entry(&e_ok), Ok(()));
     }
 
+    #[test]
+    fn test_schema_entry_normalise() {
+        // Check that entries can be normalised sanely
+        let mut schema = Schema::new();
+        schema.bootstrap_core();
+
+        // Check syntax to upper
+        // check index to upper
+        // insense to lower
+        // attr name to lower
+        let e_test: Entry = serde_json::from_str(
+            r#"{
+                "attrs": {
+                    "class": ["extensibleobject"],
+                    "name": ["TestPerson"],
+                    "displayName": ["testperson"],
+                    "syntax": ["utf8string"],
+                    "index": ["equality"]
+                }
+            }"#,
+        )
+        .unwrap();
+
+        assert_eq!(
+            schema.validate_entry(&e_test),
+            Err(SchemaError::INVALID_ATTRIBUTE_SYNTAX)
+        );
+
+        let e_expect: Entry = serde_json::from_str(
+            r#"{
+                "attrs": {
+                    "class": ["extensibleobject"],
+                    "name": ["testperson"],
+                    "displayname": ["testperson"],
+                    "syntax": ["UTF8STRING"],
+                    "index": ["EQUALITY"]
+                }
+            }"#,
+        )
+        .unwrap();
+
+        assert_eq!(schema.validate_entry(&e_expect), Ok(()));
+
+        let e_normalised = schema.normalise_entry(&e_test);
+
+        assert_eq!(schema.validate_entry(&e_normalised), Ok(()));
+        assert_eq!(e_expect, e_normalised);
+    }
+
     #[test]
     fn test_schema_extensible() {
         let schema = Schema::new();

View file

@@ -69,6 +69,7 @@ impl QueryServer {
     pub fn create(&mut self, au: &mut AuditEvent, ce: &CreateEvent) -> Result<(), ()> {
         // Start a txn
         // Run any pre checks
+        // FIXME: Normalise all entries incoming
         let r = ce.entries.iter().fold(Ok(()), |acc, e| {
             if acc.is_ok() {
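The FIXME above points at the eventual wiring: each incoming entry should be run through schema normalisation before it is validated and written. A rough sketch only, assuming the server holds a schema handle (called schema here purely for illustration):

    // Hypothetical pre-processing step inside create():
    let entries_normal: Vec<Entry> = ce
        .entries
        .iter()
        .map(|e| schema.normalise_entry(e))
        .collect();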