Major rework of filter to have a valid/invalid state type associated
commit a22c8d56aa (parent df9bad5bea)

@@ -1,7 +1,285 @@
* Filters are security checked for access
* attribute request lists are checked for access

Access Profiles
---------------

Access profiles are a way of expressing which persons are allowed to perform which actions
on any database record (object) in the system.

As a result, there are specific requirements to what these can control and how they are
expressed.

Access profiles define an action of allow or deny: Denies are enforced before allows, and
will override even if applicable. They should only be created by system access profiles,
because we have certain requirements to deny certain changes.

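A rough sketch of that precedence (the types here are illustrative only, not the server's
real structures): any applicable deny wins over any number of applicable allows, and no
applicable profile means no access:

    // Illustrative only: deny-over-allow precedence for the set of profiles
    // that apply to a given request.
    #[derive(Clone, Copy, PartialEq)]
    enum Action {
        Allow,
        Deny,
    }

    fn effective_access(applicable: &[Action]) -> bool {
        if applicable.iter().any(|a| *a == Action::Deny) {
            // A deny overrides, even when allows are also applicable.
            return false;
        }
        applicable.iter().any(|a| *a == Action::Allow)
    }

    fn main() {
        assert!(effective_access(&[Action::Allow]));
        assert!(!effective_access(&[Action::Allow, Action::Deny]));
        assert!(!effective_access(&[]));
    }
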
Access profiles are stored as entries and are dynamically loaded into a structure that is
more efficient for use at runtime. Schema and its transactions are a similar implementation.

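A minimal sketch of that idea (all names here are made up for illustration): parse the
stored profile entries once on reload into a compact structure that each read transaction
can then share:

    // Illustrative sketch only; none of these are the real types.
    #[derive(Debug)]
    struct SearchProfile {
        receiver: String,    // would really be a parsed, validated filter
        targetscope: String, // would really be a parsed, validated filter
        targetattr: Vec<String>,
    }

    #[derive(Debug, Default)]
    struct AccessControlCache {
        search: Vec<SearchProfile>,
    }

    impl AccessControlCache {
        // Rebuilt when the stored profile entries change, then handed out
        // read-only to each operation, much like the schema cache.
        fn reload(search: Vec<SearchProfile>) -> Self {
            AccessControlCache { search }
        }
    }

    fn main() {
        let cache = AccessControlCache::reload(vec![SearchProfile {
            receiver: r#"Eq("memberof", "admins")"#.to_string(),
            targetscope: r#"Pres("class")"#.to_string(),
            targetattr: vec!["legalName".to_string(), "displayName".to_string()],
        }]);
        println!("{:?}", cache);
    }
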
Search Requirements
-------------------

A search access profile must be able to limit the content of a search request and its
scoping.

A search access profile must be able to limit the returned set of data from the objects
visible.

An example is that user Alice should only be able to search for objects where the class
is person, and where they are a memberOf "visible" group. Alice should only be able to
see those users' displayNames (not their legalName, for example), and their public email.

Delete Requirements
-------------------

A delete profile must contain the content and scope of a delete.

An example is that user Alice should only be able to delete objects where the memberOf is
"purgeable", and where they are not marked as "protected".

Create Requirements
-------------------

A create profile defines a filtering limit on what content can be created and its requirements.

A create profile defines a limit on what attributes can be created in addition to the filtering
requirements.

An example is user Alice should only be able to create objects where the class is group, and can
only name the group - they cannot add members to the group.

A content requirement could be something such as the value an attribute can contain must conform to a
regex, i.e. you can create a group of any name, except where the name contains "admin" somewhere
in its name. Arguably, this is partially possible with filtering.

For example, we want to be able to limit the classes that someone *could* create on something
because classes often are used as a security type.


Modify Requirements
-------------------

A modify profile defines a filter limit of what can be modified in the directory.

A modify profile defines a limit of what attributes can be altered in the modification.

A modify profile defines a limit on the modlist actions: for example, you may only be allowed to
ensure presence of a value. (Modify allowing purge, not-present, and presence).

Content requirements (see create requirements) are out of scope at the moment.

An example is Alice should only be able to modify a user's password if that user is a member of the
students group.

Note: modify does not imply *read* of the attribute. Care should be taken that we don't disclose
the current value in any error messages if the operation fails.


Targeting Requirements
----------------------

The target of an access profile should be a filter defining the objects that this applies to.

The filter limit for the profiles of what they are acting on requires a single special operation,
which is the concept of "targeting self". For example, we could define a rule that says "members
of group X are allowed self-write mobile phone number".

An extension to the filter code could allow an extra filter enum of "Self", that would allow this
to operate correctly, and would consume the entry in the event as the target of "Self". This would
be best implemented as a compilation of self -> eq(uuid, self.uuid).

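A sketch of that compilation step, using a simplified stand-in filter enum (not the real
Filter type): every "Self" node is rewritten to an Eq on the uuid of the event's identity
before the filter is used:

    // Simplified stand-in enum for illustration only.
    #[derive(Debug, Clone, PartialEq)]
    enum DemoFilter {
        Eq(String, String),
        And(Vec<DemoFilter>),
        Or(Vec<DemoFilter>),
        Not(Box<DemoFilter>),
        SelfUuid,
    }

    // Replace every SelfUuid node with Eq("uuid", <uuid of the event identity>).
    fn resolve_self(f: DemoFilter, event_uuid: &str) -> DemoFilter {
        match f {
            DemoFilter::SelfUuid => DemoFilter::Eq("uuid".to_string(), event_uuid.to_string()),
            DemoFilter::And(fs) => {
                DemoFilter::And(fs.into_iter().map(|x| resolve_self(x, event_uuid)).collect())
            }
            DemoFilter::Or(fs) => {
                DemoFilter::Or(fs.into_iter().map(|x| resolve_self(x, event_uuid)).collect())
            }
            DemoFilter::Not(inner) => DemoFilter::Not(Box::new(resolve_self(*inner, event_uuid))),
            other => other,
        }
    }

    fn main() {
        // "members of group X are allowed self-write" style target:
        let f = DemoFilter::And(vec![
            DemoFilter::Eq("memberof".to_string(), "group_x".to_string()),
            DemoFilter::SelfUuid,
        ]);
        let compiled = resolve_self(f, "00000000-0000-0000-0000-000000000000");
        println!("{:?}", compiled);
    }
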
Implementation Details
----------------------

Example profiles:

search {
    action: allow
    receiver: Eq("memberof", "admins")
    targetscope: Pres("class")
    targetattr: legalName
    targetattr: displayName
    description: Allow admins to read all users' names
}

search {
    action: allow
    receiver: Self
    targetscope: Self
    targetattr: homeAddress
    description: Allow everyone to read only their own homeAddress
}

delete {
    action: allow
    receiver: Or(Eq("memberof", "admins"), Eq("memberof", "servicedesk"))
    targetscope: Eq("memberof", "tempaccount")
    description: Allow admins or servicedesk to delete any member of "temp accounts".
}

// This difference in targetscope behaviour could be justification to change the keyword here
// to prevent confusion.
create {
    action: allow
    receiver: Eq("name", "alice")
    createscope: And(Eq("class", "person"), Eq("location", "AU"))
    createattr: location
    createattr: legalName
    createattr: mail
    createclass: person
    createclass: object
    description: Allow alice to make new persons, only with class person+object, and only set
        the attributes mail, location and legalName. The created object must conform to targetscope.
}

modify {
    action: allow
    receiver: Eq("name", "claire")
    targetscope: And(Eq("class", "group"), Eq("name", "admins"))
    presentattr: member
    description: Allow claire to promote people as members of the admins group.
}

modify {
    action: allow
    receiver: Eq("name", "claire")
    targetscope: And(Eq("class", "person"), Eq("memberof", "students"))
    presentattr: sshkeys
    presentattr: class
    targetclass: unixuser
    description: Allow claire to modify persons in the students group, and to grant them the
        class of unixuser (only this class can be granted!). Subsequently, she may then give
        the sshkeys values as a modification.
}

modify {
    action: allow
    receiver: Eq("name", "alice")
    targetscope: Eq("memberof", "students")
    purgedattr: sshkeys
    description: Allow alice to purge sshkeys from members of the students group.
}

modify {
    action: allow
    receiver: Eq("name", "alice")
    targetscope: Eq("memberof", "students")
    purgedattr: sshkeys
    removedattr: sshkeys
    presentattr: sshkeys
    description: Allow alice full control over the ssh keys attribute on members of students.
}

// This may not be valid: Perhaps if <*>attr: is on modify/create, then targetclass must
// be set, else class is considered empty.
//
// This profile could in fact be an invalid example, because presentattr: class, but not
// targetclass, so nothing could be granted.
modify {
    action: allow
    receiver: Eq("name", "alice")
    targetscope: Eq("memberof", "students")
    presentattr: class
    description: Allow alice to grant any class to members of students.
}


Search Application
------------------

The set of access controls is checked, and the set where receiver matches the current identified
user is collected. These then are added to the user's requested search as:

And(<User Search Request>, Or(<Set of Search Profile Filters>))

In this manner, the search security is easily applied: if a target does not conform to one of the
required search profile filters, the outer And condition is not satisfied and no results are returned.

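A sketch of that wrapping, again with a simplified stand-in filter type: note that if no
search profile applies to the receiver, the inner Or is empty and can match nothing, which
gives the "no results" behaviour described above:

    // Simplified stand-in type, not the real Filter.
    #[derive(Debug, Clone)]
    enum DemoFilter {
        Eq(String, String),
        Pres(String),
        And(Vec<DemoFilter>),
        Or(Vec<DemoFilter>),
    }

    // And(<User Search Request>, Or(<Set of Search Profile Filters>))
    fn apply_search_access(user_request: DemoFilter, profile_scopes: Vec<DemoFilter>) -> DemoFilter {
        DemoFilter::And(vec![user_request, DemoFilter::Or(profile_scopes)])
    }

    fn main() {
        let request = DemoFilter::Pres("class".to_string());
        let scopes = vec![
            DemoFilter::Eq("class".to_string(), "person".to_string()),
            DemoFilter::Eq("memberof".to_string(), "visible".to_string()),
        ];
        println!("{:?}", apply_search_access(request, scopes));
    }
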
Once complete, in the translation of the entry -> proto_entry, each access control and its allowed
set of attrs has to be checked to determine what of that entry can be displayed. Consider there are
three entries: A, B, C. An ACI that allows read of "name" on A, B exists, and a read of "mail" on
B, C. The correct behaviour is then:

A: name
B: name, mail
C: mail

So this means that the entry -> proto entry part is likely the most expensive part of the access
control operation, but also one of the most important. It may be possible to compile to some kind
of faster method, but initially a simple version is needed.

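A sketch of that reduction over the A/B/C example, with stand-in types: for each entry, take
the union of the attribute lists of every ACI whose target scope matched that entry, then
strip everything else before building the proto entry:

    use std::collections::BTreeMap;

    // Stand-in ACI: in the real design "matches" would be the targetscope
    // filter evaluated against the entry, not a precomputed list of names.
    struct DemoAci {
        allowed_attrs: Vec<&'static str>,
        matches: Vec<&'static str>,
    }

    fn reduce(entry: &str, attrs: &BTreeMap<&str, &str>, acis: &[DemoAci]) -> BTreeMap<String, String> {
        let mut out = BTreeMap::new();
        // Union the allowed attrs of every ACI that matched this entry.
        for aci in acis.iter().filter(|a| a.matches.iter().any(|m| *m == entry)) {
            for attr in &aci.allowed_attrs {
                if let Some(v) = attrs.get(*attr) {
                    out.insert(attr.to_string(), v.to_string());
                }
            }
        }
        out
    }

    fn main() {
        let acis = [
            DemoAci { allowed_attrs: vec!["name"], matches: vec!["A", "B"] },
            DemoAci { allowed_attrs: vec!["mail"], matches: vec!["B", "C"] },
        ];
        let attrs = BTreeMap::from([("name", "x"), ("mail", "y")]);
        // Prints A: name, B: name + mail, C: mail.
        for e in ["A", "B", "C"] {
            println!("{}: {:?}", e, reduce(e, &attrs, &acis));
        }
    }
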
Delete Application
------------------

Delete is similar to search; however, there is the risk that the user may say something like:

Pres("class").

Now, were we to approach this like search, this would then have "everything the identified user
is allowed to delete, is deleted". A consideration here is that Pres("class") would delete "all"
objects in the directory, but with the access control present, it would limit the delete to the
set of allowed deletes.

In a sense, this is a correct behaviour - they were allowed to delete everything they asked to
delete. However, in another it's not valid: the request was broad and they were not allowed access
to delete everything they requested.

The possible abuse here is that you could then use deletes to determine existence of entries in
the database that you do not have access to. This, however, requires someone to HAVE a delete
privilege, which is itself a very high level of access, so this risk may be minimal.

So the choices are:

* Treat it like search and allow the user to delete "what they are allowed to delete"
* Deny the request, because their delete was too broad, and they should specify better
  what they want to delete.

Option 2 seems more correct because the delete request is an explicit request, not a request where
you want partial results - imagine someone wants to delete users A, B at the same time, but only
has access to A. They want this request to fail so they KNOW B was not deleted, rather than
succeed and have B still exist with a partial delete status.

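A sketch of option 2 with stand-in types: the delete only proceeds when every candidate
matched by the request is also inside the allowed scope, otherwise the whole request is
refused:

    #[derive(Debug, PartialEq)]
    enum DeleteOutcome {
        Deleted(usize),
        AccessDenied,
    }

    // "candidates" are the entries the request matched; "allowed" are those
    // the receiver's delete profiles permit. Illustrative names only.
    fn apply_delete(candidates: &[&str], allowed: &[&str]) -> DeleteOutcome {
        if candidates.iter().all(|c| allowed.iter().any(|a| a == c)) {
            DeleteOutcome::Deleted(candidates.len())
        } else {
            // Too broad: fail loudly so the caller knows nothing was removed.
            DeleteOutcome::AccessDenied
        }
    }

    fn main() {
        assert_eq!(apply_delete(&["a"], &["a", "b"]), DeleteOutcome::Deleted(1));
        // The request names a and b, but only a is allowed: the whole delete fails.
        assert_eq!(apply_delete(&["a", "b"], &["a"]), DeleteOutcome::AccessDenied);
    }
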
Create Application
------------------

Create seems like the easiest to apply. Ensure that only the attributes in createattr are in the
createevent, ensure the classes only contain the set in createclass, then finally apply
filter_no_index to the entry. If all of this passes, the create is allowed.

A key point is that there is no union of create ACIs - the WHOLE ACI must pass, not parts of
multiple.

An important consideration is how to handle overlapping ACIs. If two ACIs *could* match the create,
should we enforce both conditions are upheld? Or does only a single upheld ACI allow the create?

In some cases it may not be possible to satisfy both, and that would block creates. The intent
of the access profile is that "something like this CAN" be created, so I believe that provided
only a single control passes, the create should be allowed.

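A sketch of that check with stand-in types: every requested attribute must appear in
createattr, every requested class in createclass, and the assembled entry must match the
createscope filter (matched without indexes, in the spirit of entry_match_no_index). All
names here are illustrative:

    use std::collections::BTreeSet;

    struct CreateProfile {
        createattr: BTreeSet<&'static str>,
        createclass: BTreeSet<&'static str>,
    }

    fn create_allowed(
        profile: &CreateProfile,
        requested_attrs: &[&str],
        requested_classes: &[&str],
        scope_matches: bool, // result of matching createscope against the new entry
    ) -> bool {
        requested_attrs.iter().all(|a| profile.createattr.iter().any(|p| p == a))
            && requested_classes.iter().all(|c| profile.createclass.iter().any(|p| p == c))
            && scope_matches
    }

    fn main() {
        let profile = CreateProfile {
            createattr: BTreeSet::from(["location", "legalName", "mail"]),
            createclass: BTreeSet::from(["person", "object"]),
        };
        // Alice creates a person with a mail attribute: allowed.
        assert!(create_allowed(&profile, &["mail"], &["person", "object"], true));
        // Adding a member attribute on a group is outside this profile: denied.
        assert!(!create_allowed(&profile, &["member"], &["group"], true));
    }
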
Modify Application
------------------

Modify is similar to the above; however, we specifically filter on the modlist action of present,
removed or purged. Otherwise, the rules of create stand: provided all requirements of the modify
are "upheld", it is allowed, so long as at least a single profile allows the change.

A key difference is that if the profile lists multiple presentattr types, the modify is conforming
so long as it has at least one presentattr of the profile. I.e. we say "presentattr: name, email",
but we only attempt to modify "email".

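A sketch of that check with stand-in types: each mod in the modlist is matched against the
attribute set allowed for its action (presentattr, removedattr, purgedattr), and the change
is allowed when at least one applicable profile permits every mod in the list. Names are
illustrative only:

    use std::collections::BTreeSet;

    enum DemoMod {
        Present(&'static str),
        Removed(&'static str),
        Purged(&'static str),
    }

    struct ModifyProfile {
        presentattr: BTreeSet<&'static str>,
        removedattr: BTreeSet<&'static str>,
        purgedattr: BTreeSet<&'static str>,
    }

    fn profile_permits(profile: &ModifyProfile, modlist: &[DemoMod]) -> bool {
        modlist.iter().all(|m| match m {
            DemoMod::Present(a) => profile.presentattr.contains(a),
            DemoMod::Removed(a) => profile.removedattr.contains(a),
            DemoMod::Purged(a) => profile.purgedattr.contains(a),
        })
    }

    fn modify_allowed(applicable: &[ModifyProfile], modlist: &[DemoMod]) -> bool {
        applicable.iter().any(|p| profile_permits(p, modlist))
    }

    fn main() {
        let p = ModifyProfile {
            presentattr: BTreeSet::from(["name", "email"]),
            removedattr: BTreeSet::new(),
            purgedattr: BTreeSet::new(),
        };
        // Only "email" is touched even though the profile also lists "name".
        assert!(modify_allowed(&[p], &[DemoMod::Present("email")]));
    }
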
Considerations
--------------

* When should access controls be applied? During an operation, we only schema validate after
  pre plugins, so likely it has to be "at that point", to ensure schema validity of the entries
  we want to assert changes to.
* Self filter keyword should compile to eq("uuid", "...."). When do we do this and how?
* memberof could take name or uuid; we need to be able to resolve this correctly, but this is likely
  a memberof issue we need to address, i.e. memberofuuid vs memberof attr.
* Content controls in create and modify will be important to get right to avoid the security issues
  of LDAP access controls. Given that class has special importance, it's only right to give it extra
  consideration in these controls.
* In the future when recyclebin is added, a re-animation access profile should be created allowing
  revival of entries given certain conditions of the entry we are attempting to revive.

* profiles work on filters
*

@@ -9,7 +9,7 @@ use serde_json;

use audit::AuditScope;
use entry::{Entry, EntryCommitted, EntryNew, EntryValid};
use filter::Filter;
use filter::{Filter, FilterValid};

mod idl;
mod mem_be;

@@ -56,7 +56,7 @@ pub trait BackendReadTransaction {
    fn search(
        &self,
        au: &mut AuditScope,
        filt: &Filter,
        filt: &Filter<FilterValid>,
    ) -> Result<Vec<Entry<EntryValid, EntryCommitted>>, BackendError> {
        // Do things
        // Alloc a vec for the entries.

@@ -120,7 +120,11 @@ pub trait BackendReadTransaction {
    /// Basically, this is a specialised case of search, where we don't need to
    /// load any candidates if they match. This is heavily used in uuid
    /// refint and attr uniqueness.
    fn exists(&self, au: &mut AuditScope, filt: &Filter) -> Result<bool, BackendError> {
    fn exists(
        &self,
        au: &mut AuditScope,
        filt: &Filter<FilterValid>,
    ) -> Result<bool, BackendError> {
        // Do a final optimise of the filter
        // At the moment, technically search will do this, but it won't always be the
        // case once this becomes a standalone function.

@@ -1,7 +1,7 @@
// use serde_json::{Error, Value};
use super::proto_v1::Entry as ProtoEntry;
use error::SchemaError;
use filter::Filter;
use filter::{Filter, FilterValid};
use modify::{Modify, ModifyList};
use schema::{SchemaAttribute, SchemaClass, SchemaReadTransaction};
use std::collections::btree_map::{Iter as BTreeIter, IterMut as BTreeIterMut};

@@ -397,7 +397,7 @@ impl<STATE> Entry<EntryValid, STATE> {
    }

    // Assert if this filter matches the entry (no index)
    pub fn entry_match_no_index(&self, filter: &Filter) -> bool {
    pub fn entry_match_no_index(&self, filter: &Filter<FilterValid>) -> bool {
        // Go through the filter components and check them in the entry.
        // This is recursive!!!!
        match filter {

@@ -424,10 +424,16 @@ impl<STATE> Entry<EntryValid, STATE> {
                }
            }),
            Filter::Not(f) => !self.entry_match_no_index(f),
            Filter::invalid(_) => {
                // TODO: Is there a better way to not need to match the phantom?
                unimplemented!()
            }
        }
    }

    pub fn filter_from_attrs(&self, attrs: &Vec<String>) -> Option<Filter> {
    pub fn filter_from_attrs(&self, attrs: &Vec<String>) -> Option<Filter<FilterValid>> {
        // Because we are a valid entry, a filter we create *must* be valid
        //
        // Generate a filter from the attributes requested and defined.
        // Basically, this is a series of nested and's (which will be
        // optimised down later: but if someone wants to solve flatten() ...)

@@ -1,4 +1,4 @@
use super::filter::Filter;
use super::filter::{Filter, FilterInvalid};
use super::proto_v1::Entry as ProtoEntry;
use super::proto_v1::{
    AuthRequest, AuthResponse, AuthStatus, CreateRequest, DeleteRequest, ModifyRequest, Response,

@@ -58,7 +58,7 @@ impl SearchResult {
#[derive(Debug)]
pub struct SearchEvent {
    pub internal: bool,
    pub filter: Filter,
    pub filter: Filter<FilterInvalid>,
    class: (), // String
}

@@ -70,12 +70,12 @@ impl SearchEvent {
    pub fn from_request(request: SearchRequest) -> Self {
        SearchEvent {
            internal: false,
            filter: request.filter,
            filter: Filter::from(&request.filter),
            class: (),
        }
    }

    pub fn new_internal(filter: Filter) -> Self {
    pub fn new_internal(filter: Filter<FilterInvalid>) -> Self {
        SearchEvent {
            internal: true,
            filter: filter,

@@ -132,7 +132,7 @@ impl CreateEvent {

#[derive(Debug)]
pub struct ExistsEvent {
    pub filter: Filter,
    pub filter: Filter<FilterInvalid>,
    pub internal: bool,
}

@@ -141,7 +141,7 @@ impl Message for ExistsEvent {
}

impl ExistsEvent {
    pub fn new_internal(filter: Filter) -> Self {
    pub fn new_internal(filter: Filter<FilterInvalid>) -> Self {
        ExistsEvent {
            filter: filter,
            internal: true,

@@ -151,7 +151,7 @@ impl ExistsEvent {

#[derive(Debug)]
pub struct DeleteEvent {
    pub filter: Filter,
    pub filter: Filter<FilterInvalid>,
    pub internal: bool,
}

@@ -165,14 +165,14 @@ impl DeleteEvent {
    }

    #[cfg(test)]
    pub fn from_filter(filter: Filter) -> Self {
    pub fn from_filter(filter: Filter<FilterInvalid>) -> Self {
        DeleteEvent {
            filter: filter,
            internal: false,
        }
    }

    pub fn new_internal(filter: Filter) -> Self {
    pub fn new_internal(filter: Filter<FilterInvalid>) -> Self {
        DeleteEvent {
            filter: filter,
            internal: true,

@@ -182,7 +182,7 @@ impl DeleteEvent {

#[derive(Debug)]
pub struct ModifyEvent {
    pub filter: Filter,
    pub filter: Filter<FilterInvalid>,
    pub modlist: ModifyList,
    pub internal: bool,
}

@@ -197,7 +197,7 @@ impl ModifyEvent {
    }

    #[cfg(test)]
    pub fn from_filter(filter: Filter, modlist: ModifyList) -> Self {
    pub fn from_filter(filter: Filter<FilterInvalid>, modlist: ModifyList) -> Self {
        ModifyEvent {
            filter: filter,
            modlist: modlist,

@@ -205,7 +205,7 @@ impl ModifyEvent {
        }
    }

    pub fn new_internal(filter: Filter, modlist: ModifyList) -> Self {
    pub fn new_internal(filter: Filter<FilterInvalid>, modlist: ModifyList) -> Self {
        ModifyEvent {
            filter: filter,
            modlist: modlist,

@@ -2,28 +2,38 @@
// in parallel map/reduce style, or directly on a single
// entry to assert it matches.

use error::SchemaError;
use proto_v1::Filter as ProtoFilter;
use regex::Regex;
use schema::{SchemaAttribute, SchemaClass, SchemaReadTransaction};
use std::cmp::{Ordering, PartialOrd};
use std::marker::PhantomData;

// Perhaps make these json serialisable. Certainly would make parsing
// simpler ...

#[derive(Serialize, Deserialize, Debug)]
pub enum Filter {
#[derive(Debug)]
pub struct FilterValid;
#[derive(Debug)]
pub struct FilterInvalid;

#[derive(Debug)]
pub enum Filter<VALID> {
    // This is attr - value
    Eq(String, String),
    Sub(String, String),
    Pres(String),
    Or(Vec<Filter>),
    And(Vec<Filter>),
    Not(Box<Filter>),
    Or(Vec<Filter<VALID>>),
    And(Vec<Filter<VALID>>),
    Not(Box<Filter<VALID>>),
    invalid(PhantomData<VALID>),
}

// Change this so you have RawFilter and Filter. RawFilter is the "builder", and then
// given a "schema" you can emit a Filter. For us internally, we can create Filter
// directly still ...

impl Filter {
impl Filter<FilterValid> {
    // Does this need mut self? Aren't we returning
    // a new copied filter?
    pub fn optimise(&self) -> Self {

@@ -41,9 +51,108 @@ impl Filter {
        // If its the root item?
        self.clone()
    }

    pub fn invalidate(self) -> Filter<FilterInvalid> {
        // Not used a lot, but probably a chance for improvement?
        unimplemented!()
    }
}

impl Clone for Filter {
impl Filter<FilterInvalid> {
    pub fn validate(
        &self,
        schema: &SchemaReadTransaction,
    ) -> Result<Filter<FilterValid>, SchemaError> {
        // TODO:
        // First, normalise (if possible)
        // Then, validate

        // Optimisation is done at another stage.

        // This probably needs some rework

        let schema_attributes = schema.get_attributes();
        let schema_name = schema_attributes
            .get("name")
            .expect("Critical: Core schema corrupt or missing.");

        match self {
            Filter::Eq(attr, value) => {
                // Validate/normalise the attr name.
                let attr_norm = schema_name.normalise_value(attr);
                // Now check it exists
                match schema_attributes.get(&attr_norm) {
                    Some(schema_a) => {
                        let value_norm = schema_a.normalise_value(value);
                        schema_a
                            .validate_value(value)
                            // Okay, it worked, transform to a filter component
                            .map(|_| Filter::Eq(attr_norm, value_norm))
                        // On error, pass the error back out.
                    }
                    None => Err(SchemaError::InvalidAttribute),
                }
            }
            _ => unimplemented!(),
        }

        /*
        match self {
            Filter::Eq(attr, value) => match schema_attributes.get(attr) {
                Some(schema_a) => schema_a.validate_value(value),
                None => Err(SchemaError::InvalidAttribute),
            },
            Filter::Sub(attr, value) => match schema_attributes.get(attr) {
                Some(schema_a) => schema_a.validate_value(value),
                None => Err(SchemaError::InvalidAttribute),
            },
            Filter::Pres(attr) => {
                // This could be better as a contains_key
                // because we never use the value
                match schema_attributes.get(attr) {
                    Some(_) => Ok(()),
                    None => Err(SchemaError::InvalidAttribute),
                }
            }
            Filter::Or(filters) => {
                // This should never happen because
                // optimising should remove them as invalid parts?
                if filters.len() == 0 {
                    return Err(SchemaError::EmptyFilter);
                };
                filters.iter().fold(Ok(()), |acc, filt| {
                    if acc.is_ok() {
                        self.validate(filt)
                    } else {
                        acc
                    }
                })
            }
            Filter::And(filters) => {
                // This should never happen because
                // optimising should remove them as invalid parts?
                if filters.len() == 0 {
                    return Err(SchemaError::EmptyFilter);
                };
                filters.iter().fold(Ok(()), |acc, filt| {
                    if acc.is_ok() {
                        self.validate(filt)
                    } else {
                        acc
                    }
                })
            }
            Filter::Not(filter) => self.validate(filter),
        }
        */
    }

    pub fn from(f: &ProtoFilter) -> Self {
        unimplemented!()
    }
}

impl Clone for Filter<FilterValid> {
    fn clone(&self) -> Self {
        // I think we only need to match self then new + clone?
        match self {

@@ -53,12 +162,34 @@ impl Clone for Filter {
            Filter::Or(l) => Filter::Or(l.clone()),
            Filter::And(l) => Filter::And(l.clone()),
            Filter::Not(l) => Filter::Not(l.clone()),
            Filter::invalid(_) => {
                // TODO: Is there a better way to not need to match the phantom?
                unimplemented!()
            }
        }
    }
}

impl PartialEq for Filter {
    fn eq(&self, rhs: &Filter) -> bool {
impl Clone for Filter<FilterInvalid> {
    fn clone(&self) -> Self {
        // I think we only need to match self then new + clone?
        match self {
            Filter::Eq(a, v) => Filter::Eq(a.clone(), v.clone()),
            Filter::Sub(a, v) => Filter::Sub(a.clone(), v.clone()),
            Filter::Pres(a) => Filter::Pres(a.clone()),
            Filter::Or(l) => Filter::Or(l.clone()),
            Filter::And(l) => Filter::And(l.clone()),
            Filter::Not(l) => Filter::Not(l.clone()),
            Filter::invalid(_) => {
                // TODO: Is there a better way to not need to match the phantom?
                unimplemented!()
            }
        }
    }
}

impl PartialEq for Filter<FilterValid> {
    fn eq(&self, rhs: &Filter<FilterValid>) -> bool {
        match (self, rhs) {
            (Filter::Eq(a1, v1), Filter::Eq(a2, v2)) => a1 == a2 && v1 == v2,
            (Filter::Sub(a1, v1), Filter::Sub(a2, v2)) => a1 == a2 && v1 == v2,

@@ -73,8 +204,8 @@ impl PartialEq for Filter {

// remember, this isn't ordering by alphanumeric, this is ordering of
// optimisation preference!
impl PartialOrd for Filter {
    fn partial_cmp(&self, rhs: &Filter) -> Option<Ordering> {
impl PartialOrd for Filter<FilterValid> {
    fn partial_cmp(&self, rhs: &Filter<FilterValid>) -> Option<Ordering> {
        match (self, rhs) {
            (Filter::Eq(a1, _), Filter::Eq(a2, _)) => {
                // Order attr name, then value

@@ -101,26 +232,22 @@ impl PartialOrd for Filter {

#[cfg(test)]
mod tests {
    use super::Filter;
    use super::{Filter, FilterValid, FilterInvalid};
    use entry::{Entry, EntryNew, EntryValid};
    use serde_json;
    use std::cmp::{Ordering, PartialOrd};

    #[test]
    fn test_filter_simple() {
        let filt = Filter::Eq(String::from("class"), String::from("user"));
        let j = serde_json::to_string_pretty(&filt);
        println!("{}", j.unwrap());
        let filt: Filter<FilterInvalid> = Filter::Eq(String::from("class"), String::from("user"));

        let complex_filt = Filter::And(vec![
        let complex_filt: Filter<FilterInvalid> = Filter::And(vec![
            Filter::Or(vec![
                Filter::Eq(String::from("userid"), String::from("test_a")),
                Filter::Eq(String::from("userid"), String::from("test_b")),
            ]),
            Filter::Eq(String::from("class"), String::from("user")),
        ]);
        let y = serde_json::to_string_pretty(&complex_filt);
        println!("{}", y.unwrap());
    }

    #[test]

@@ -223,7 +350,7 @@ mod tests {
        ]);
        assert!(e.entry_match_no_index(&f_t2a));

        let f_t3a = Filter::Or(vec![
        let f_t3a: Filter<FilterInvalid> = Filter::Or(vec![
            Filter::Eq(String::from("userid"), String::from("alice")),
            Filter::Eq(String::from("uidNumber"), String::from("1000")),
        ]);

@@ -26,4 +26,8 @@ impl ModifyList {
    pub fn push_mod(&mut self, modify: Modify) {
        self.mods.push(modify)
    }

    pub fn len(&self) -> usize {
        self.mods.len()
    }
}

@@ -1,5 +1,5 @@
// use super::entry::Entry;
use super::filter::Filter;
// use super::filter::Filter;
use std::collections::BTreeMap;

// These proto implementations are here because they have public definitions

@@ -16,6 +16,17 @@ pub struct Entry {
    pub attrs: BTreeMap<String, Vec<String>>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum Filter {
    // This is attr - value
    Eq(String, String),
    Sub(String, String),
    Pres(String),
    Or(Vec<Filter>),
    And(Vec<Filter>),
    Not(Box<Filter>),
}

// FIXME: Do I need proto filter?
// Probably yes, don't be shit william.

@@ -2,7 +2,7 @@ use super::audit::AuditScope;
use super::constants::*;
// use super::entry::Entry;
use super::error::SchemaError;
use super::filter::Filter;
// use super::filter::Filter;
use std::collections::HashMap;
// Apparently this is nightly only?
use modify::ModifyList;

@@ -293,10 +293,6 @@ pub trait SchemaReadTransaction {
        self.get_inner().validate(audit)
    }

    fn validate_filter(&self, filt: &Filter) -> Result<(), SchemaError> {
        self.get_inner().validate_filter(filt)
    }

    fn normalise_modlist(&self, modlist: &ModifyList) -> ModifyList {
        unimplemented!()
    }

@@ -388,7 +384,7 @@ impl SchemaInner {
                description: String::from("A description of an attribute, object or class"),
                system: true,
                secret: false,
                multivalue: false,
                multivalue: true,
                index: vec![],
                syntax: SyntaxType::UTF8STRING,
            },

@@ -870,57 +866,6 @@ impl SchemaInner {
        Ok(())
    }

    // This needs to be recursive?
    pub fn validate_filter(&self, filt: &Filter) -> Result<(), SchemaError> {
        match filt {
            Filter::Eq(attr, value) => match self.attributes.get(attr) {
                Some(schema_a) => schema_a.validate_value(value),
                None => Err(SchemaError::InvalidAttribute),
            },
            Filter::Sub(attr, value) => match self.attributes.get(attr) {
                Some(schema_a) => schema_a.validate_value(value),
                None => Err(SchemaError::InvalidAttribute),
            },
            Filter::Pres(attr) => {
                // This could be better as a contains_key
                // because we never use the value
                match self.attributes.get(attr) {
                    Some(_) => Ok(()),
                    None => Err(SchemaError::InvalidAttribute),
                }
            }
            Filter::Or(filters) => {
                // This should never happen because
                // optimising should remove them as invalid parts?
                if filters.len() == 0 {
                    return Err(SchemaError::EmptyFilter);
                };
                filters.iter().fold(Ok(()), |acc, filt| {
                    if acc.is_ok() {
                        self.validate_filter(filt)
                    } else {
                        acc
                    }
                })
            }
            Filter::And(filters) => {
                // This should never happen because
                // optimising should remove them as invalid parts?
                if filters.len() == 0 {
                    return Err(SchemaError::EmptyFilter);
                };
                filters.iter().fold(Ok(()), |acc, filt| {
                    if acc.is_ok() {
                        self.validate_filter(filt)
                    } else {
                        acc
                    }
                })
            }
            Filter::Not(filter) => self.validate_filter(filter),
        }
    }

    // Normalise *does not* validate.
    // Normalise just fixes some possible common issues, but it
    // can't fix *everything* possibly wrong ...

@@ -1007,7 +952,7 @@ mod tests {
    use super::super::constants::*;
    use super::super::entry::{Entry, EntryInvalid, EntryNew, EntryValid};
    use super::super::error::SchemaError;
    use super::super::filter::Filter;
    use super::super::filter::{Filter, FilterInvalid, FilterValid};
    use super::{IndexType, Schema, SchemaAttribute, SchemaClass, SyntaxType};
    use schema::SchemaReadTransaction;
    use serde_json;

@@ -1474,90 +1419,52 @@ mod tests {
        let schema_outer = Schema::new(&mut audit).unwrap();
        let schema = schema_outer.read();
        // Test mixed case attr name
        let f_mixed: Filter = serde_json::from_str(
            r#"{
                "Eq": [
                    "ClAsS", "attributetype"
                ]
            }"#,
        )
        .unwrap();
        let f_mixed = Filter::Eq("ClAsS".to_string(), "attributetype".to_string());
        assert_eq!(
            schema.validate_filter(&f_mixed),
            f_mixed.validate(&schema),
            Err(SchemaError::InvalidAttribute)
        );
        // test syntax of bool
        let f_bool: Filter = serde_json::from_str(
            r#"{
                "Eq": [
                    "secret", "zzzz"
                ]
            }"#,
        )
        .unwrap();
        let f_bool = Filter::Eq("secret".to_string(), "zzzz".to_string());
        assert_eq!(
            schema.validate_filter(&f_bool),
            f_bool.validate(&schema),
            Err(SchemaError::InvalidAttributeSyntax)
        );
        // test insensitise values
        let f_insense: Filter = serde_json::from_str(
            r#"{
                "Eq": [
                    "class", "AttributeType"
                ]
            }"#,
        )
        .unwrap();
        // test insensitive values
        let f_insense = Filter::Eq("class".to_string(), "AttributeType".to_string());
        assert_eq!(
            schema.validate_filter(&f_insense),
            f_insense.validate(&schema),
            Err(SchemaError::InvalidAttributeSyntax)
        );
        // Test the recursive structures validate
        let f_or_empty: Filter = serde_json::from_str(
            r#"{
                "Or": []
            }"#,
        )
        .unwrap();
        let f_or_empty = Filter::Or(Vec::new());
        assert_eq!(f_or_empty.validate(&schema), Err(SchemaError::EmptyFilter));
        let f_or = Filter::Or(vec![
            Filter::Eq("class".to_string(), "AttributeType".to_string()),
        ]);
        assert_eq!(
            schema.validate_filter(&f_or_empty),
            Err(SchemaError::EmptyFilter)
        );
        let f_or: Filter = serde_json::from_str(
            r#"{
                "Or": [
                    { "Eq": ["class", "AttributeType"] }
                ]
            }"#,
        )
        .unwrap();
        assert_eq!(
            schema.validate_filter(&f_or),
            f_or.validate(&schema),
            Err(SchemaError::InvalidAttributeSyntax)
        );
        let f_or_mult: Filter = serde_json::from_str(
            r#"{
                "Or": [
                    { "Eq": ["class", "attributetype"] },
                    { "Eq": ["class", "AttributeType"] }
                ]
            }"#,
        )
        .unwrap();
        let f_or_mult = Filter::Or(vec![
            Filter::Eq("class".to_string(), "attributetype".to_string()),
            Filter::Eq("class".to_string(), "AttributeType".to_string()),
        ]);
        assert_eq!(
            schema.validate_filter(&f_or_mult),
            f_or_mult.validate(&schema),
            Err(SchemaError::InvalidAttributeSyntax)
        );
        let f_or_ok: Filter = serde_json::from_str(
            r#"{
                "Or": [
                    { "Eq": ["class", "attributetype"] },
                    { "Eq": ["class", "classtype"] }
                ]
            }"#,
        )
        .unwrap();
        assert_eq!(schema.validate_filter(&f_or_ok), Ok(()));
        let f_or_ok = Filter::Or(vec![
            Filter::Eq("class".to_string(), "attributetype".to_string()),
            Filter::Eq("class".to_string(), "classtype".to_string()),
        ]);
        assert_eq!(
            f_or_ok.validate(&schema),
            Ok(Filter::Or::<FilterValid>(vec![
                Filter::Eq("class".to_string(), "attributetype".to_string()),
                Filter::Eq("class".to_string(), "classtype".to_string()),
            ]))
        );
        println!("{}", audit);
    }

@@ -16,7 +16,7 @@ use event::{
    AuthEvent, AuthResult, CreateEvent, DeleteEvent, ExistsEvent, ModifyEvent, OpResult,
    SearchEvent, SearchResult,
};
use filter::Filter;
use filter::{Filter, FilterInvalid};
use log::EventLog;
use modify::ModifyList;
use plugins::Plugins;

@@ -88,15 +88,17 @@ pub fn start(log: actix::Addr<EventLog>, path: &str, threads: usize) -> actix::A
// the backend
pub trait QueryServerReadTransaction {
    type BackendTransactionType: BackendReadTransaction;

    fn get_be_txn(&self) -> &Self::BackendTransactionType;

    type SchemaTransactionType: SchemaReadTransaction;
    fn get_schema(&self) -> &Self::SchemaTransactionType;

    fn search(
        &self,
        au: &mut AuditScope,
        se: &SearchEvent,
    ) -> Result<Vec<Entry<EntryValid, EntryCommitted>>, OperationError> {
        // TODO: Validate the filter
        // How to get schema?
        // This is an important security step because it prevents us from
        // performing un-indexed searches on attr's that don't exist in the
        // server. This is why ExtensibleObject can only take schema that

@@ -104,6 +106,12 @@ pub trait QueryServerReadTransaction {

        // TODO: Normalise the filter

        // TODO: Validate the filter
        let vf = match se.filter.validate(self.get_schema()) {
            Ok(f) => f,
            Err(e) => return Err(OperationError::SchemaViolation),
        };

        // TODO: Assert access control allows the filter and requested attrs.

        // TODO: Pre-search plugins

@@ -111,7 +119,7 @@ pub trait QueryServerReadTransaction {
        let mut audit_be = AuditScope::new("backend_search");
        let res = self
            .get_be_txn()
            .search(&mut audit_be, &se.filter)
            .search(&mut audit_be, &vf)
            .map(|r| r)
            .map_err(|_| OperationError::Backend);
        au.append_scope(audit_be);

@@ -126,9 +134,16 @@ pub trait QueryServerReadTransaction {

    fn exists(&self, au: &mut AuditScope, ee: &ExistsEvent) -> Result<bool, OperationError> {
        let mut audit_be = AuditScope::new("backend_exists");

        // How to get schema?
        let vf = match ee.filter.validate(self.get_schema()) {
            Ok(f) => f,
            Err(e) => return Err(OperationError::SchemaViolation),
        };

        let res = self
            .get_be_txn()
            .exists(&mut audit_be, &ee.filter)
            .exists(&mut audit_be, &vf)
            .map(|r| r)
            .map_err(|_| OperationError::Backend);
        au.append_scope(audit_be);

@@ -136,7 +151,7 @@ pub trait QueryServerReadTransaction {
    }

    // From internal, generate an exists event and dispatch
    fn internal_exists(&self, au: &mut AuditScope, filter: Filter) -> Result<bool, OperationError> {
    fn internal_exists(
        &self,
        au: &mut AuditScope,
        filter: Filter<FilterInvalid>,
    ) -> Result<bool, OperationError> {
        let mut audit_int = AuditScope::new("internal_exists");
        // Build an exists event
        let ee = ExistsEvent::new_internal(filter);

@@ -150,7 +169,7 @@ pub trait QueryServerReadTransaction {
    fn internal_search(
        &self,
        audit: &mut AuditScope,
        filter: Filter,
        filter: Filter<FilterInvalid>,
    ) -> Result<Vec<Entry<EntryValid, EntryCommitted>>, OperationError> {
        let mut audit_int = AuditScope::new("internal_search");
        let se = SearchEvent::new_internal(filter);

@@ -176,6 +195,12 @@ impl QueryServerReadTransaction for QueryServerTransaction {
    fn get_be_txn(&self) -> &BackendTransaction {
        &self.be_txn
    }

    type SchemaTransactionType = SchemaTransaction;

    fn get_schema(&self) -> &SchemaTransaction {
        &self.schema
    }
}

pub struct QueryServerWriteTransaction<'a> {

@@ -193,6 +218,12 @@ impl<'a> QueryServerReadTransaction for QueryServerWriteTransaction<'a> {
    fn get_be_txn(&self) -> &BackendWriteTransaction {
        &self.be_txn
    }

    type SchemaTransactionType = SchemaWriteTransaction<'a>;

    fn get_schema(&self) -> &SchemaWriteTransaction<'a> {
        &self.schema
    }
}

pub struct QueryServer {

@@ -332,6 +363,16 @@ impl<'a> QueryServerWriteTransaction<'a> {
        // Get the candidates.
        // Modify applies a modlist to a filter, so we need to internal search
        // then apply.

        // Validate input.

        // Is the modlist non zero?
        if me.modlist.len() == 0 {
            return Err(OperationError::EmptyRequest);
        }

        // Is the filter invalid to schema?

        // WARNING! Check access controls here!!!!
        // How can we do the search with the permissions of the caller?

@@ -344,6 +385,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
        };

        if pre_candidates.len() == 0 {
            audit_log!(au, "modify: no candidates match filter {:?}", me.filter);
            return Err(OperationError::NoMatchingEntries);
        };

@@ -439,7 +481,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
    pub fn internal_delete(
        &self,
        audit: &mut AuditScope,
        filter: Filter,
        filter: Filter<FilterInvalid>,
    ) -> Result<(), OperationError> {
        let mut audit_int = AuditScope::new("internal_delete");
        let de = DeleteEvent::new_internal(filter);

@@ -451,7 +493,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
    pub fn internal_modify(
        &self,
        audit: &mut AuditScope,
        filter: Filter,
        filter: Filter<FilterInvalid>,
        modlist: ModifyList,
    ) -> Result<(), OperationError> {
        let mut audit_int = AuditScope::new("internal_modify");

@@ -492,7 +534,7 @@ impl<'a> QueryServerWriteTransaction<'a> {
        //
        // This will extra classes an attributes alone!
        let filt = match e.filter_from_attrs(&vec![String::from("uuid")]) {
            Some(f) => f,
            Some(f) => f.invalidate(),
            None => return Err(OperationError::FilterGeneration),
        };

@@ -537,7 +579,7 @@ impl<'a> QueryServerWriteTransaction<'a> {

        // Create a filter from the entry for assertion.
        let filt = match e.filter_from_attrs(&vec![String::from("uuid")]) {
            Some(f) => f,
            Some(f) => f.invalidate(),
            None => return Err(OperationError::FilterGeneration),
        };

@@ -774,11 +816,13 @@ mod tests {
    use super::super::audit::AuditScope;
    use super::super::be::{Backend, BackendTransaction};
    use super::super::entry::{Entry, EntryCommitted, EntryInvalid, EntryNew, EntryValid};
    use super::super::error::OperationError;
    use super::super::event::{CreateEvent, DeleteEvent, ModifyEvent, SearchEvent};
    use super::super::filter::Filter;
    use super::super::log;
    use super::super::modify::{Modify, ModifyList};
    use super::super::proto_v1::Entry as ProtoEntry;
    use super::super::proto_v1::Filter as ProtoFilter;
    use super::super::proto_v1::{CreateRequest, SearchRequest};
    use super::super::schema::Schema;
    use super::super::server::{

@@ -818,7 +862,7 @@ mod tests {
    fn test_qs_create_user() {
        run_test!(|_log, mut server: QueryServer, audit: &mut AuditScope| {
            let mut server_txn = server.write();
            let filt = Filter::Pres(String::from("name"));
            let filt = ProtoFilter::Pres(String::from("name"));

            let se1 = SearchEvent::from_request(SearchRequest::new(filt.clone()));
            let se2 = SearchEvent::from_request(SearchRequest::new(filt));

@@ -936,7 +980,7 @@ mod tests {
                Filter::Pres(String::from("class")),
                ModifyList::new_list(vec![]),
            );
            assert!(server_txn.modify(audit, &me_emp).is_err());
            assert!(server_txn.modify(audit, &me_emp) == Err(OperationError::EmptyRequest));

            // Mod changes no objects
            let me_nochg = ModifyEvent::from_filter(

@@ -946,7 +990,7 @@ mod tests {
                    String::from("anusaosu"),
                )]),
            );
            assert!(server_txn.modify(audit, &me_nochg).is_err());
            assert!(server_txn.modify(audit, &me_nochg) == Err(OperationError::NoMatchingEntries));

            // Filter is invalid to schema
            let me_inv_f = ModifyEvent::from_filter(

@@ -956,7 +1000,7 @@ mod tests {
                    String::from("anusaosu"),
                )]),
            );
            assert!(server_txn.modify(audit, &me_inv_f).is_err());
            assert!(server_txn.modify(audit, &me_inv_f) == Err(OperationError::SchemaViolation));

            // Mod is invalid to schema
            let me_inv_m = ModifyEvent::from_filter(

@@ -966,7 +1010,7 @@ mod tests {
                    String::from("anusaosu"),
                )]),
            );
            assert!(server_txn.modify(audit, &me_inv_m).is_err());
            assert!(server_txn.modify(audit, &me_inv_m) == Err(OperationError::SchemaViolation));

            // Mod single object
            let me_sin = ModifyEvent::from_filter(

@@ -980,7 +1024,7 @@ mod tests {

            // Mod multiple object
            let me_mult = ModifyEvent::from_filter(
                Filter::And(vec![
                    Filter::Or(vec![
                        Filter::Eq(String::from("name"), String::from("testperson1")),
                        Filter::Eq(String::from("name"), String::from("testperson2")),
                    ]),