Compare commits

...

12 commits

Author SHA1 Message Date
Mark Dietzer 2bd3f15713
Merge 998e56d648 into ad012cd6fd 2025-04-05 03:31:43 +02:00
Arian van Putten ad012cd6fd
implement notify-reload protocol () 2025-04-04 09:24:14 +10:00
Firstyear 82a883089f
Allow versioning of server configs ()
This allows our server configuration to be versioned, in preparation
for a change related to the proxy protocol additions.
2025-04-02 02:44:19 +00:00
Firstyear a2eae53328
20250314 remove protected plugin ()
Moves the protected plugin into an access control module so that its outputs can be properly represented in effective access checks.
2025-04-01 01:00:56 +00:00
dependabot[bot] ec3db91da0
Bump the all group with 10 updates ()
* Bump the all group with 10 updates

Bumps the all group with 10 updates:

| Package | From | To |
| --- | --- | --- |
| [clap](https://github.com/clap-rs/clap) | `4.5.32` | `4.5.34` |
| [itertools](https://github.com/rust-itertools/itertools) | `0.13.0` | `0.14.0` |
| [lru](https://github.com/jeromefroe/lru-rs) | `0.12.5` | `0.13.0` |
| [rand](https://github.com/rust-random/rand) | `0.8.5` | `0.9.0` |
| [rand_chacha](https://github.com/rust-random/rand) | `0.3.1` | `0.9.0` |
| [whoami](https://github.com/ardaku/whoami) | `1.5.2` | `1.6.0` |
| [axum-extra](https://github.com/tokio-rs/axum) | `0.9.6` | `0.10.1` |
| [axum-macros](https://github.com/tokio-rs/axum) | `0.4.2` | `0.5.0` |
| [fantoccini](https://github.com/jonhoo/fantoccini) | `0.21.4` | `0.21.5` |
| [jsonschema](https://github.com/Stranger6667/jsonschema) | `0.29.0` | `0.29.1` |


Updates `clap` from 4.5.32 to 4.5.34
- [Release notes](https://github.com/clap-rs/clap/releases)
- [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md)
- [Commits](https://github.com/clap-rs/clap/compare/clap_complete-v4.5.32...clap_complete-v4.5.34)

Updates `itertools` from 0.13.0 to 0.14.0
- [Changelog](https://github.com/rust-itertools/itertools/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rust-itertools/itertools/compare/v0.13.0...v0.14.0)

Updates `lru` from 0.12.5 to 0.13.0
- [Changelog](https://github.com/jeromefroe/lru-rs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/jeromefroe/lru-rs/compare/0.12.5...0.13.0)

Updates `rand` from 0.8.5 to 0.9.0
- [Release notes](https://github.com/rust-random/rand/releases)
- [Changelog](https://github.com/rust-random/rand/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rust-random/rand/compare/0.8.5...0.9.0)

Updates `rand_chacha` from 0.3.1 to 0.9.0
- [Release notes](https://github.com/rust-random/rand/releases)
- [Changelog](https://github.com/rust-random/rand/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rust-random/rand/compare/rand_chacha-0.3.1...0.9.0)

Updates `whoami` from 1.5.2 to 1.6.0
- [Release notes](https://github.com/ardaku/whoami/releases)
- [Changelog](https://github.com/ardaku/whoami/blob/v1.6.0/CHANGELOG.md)
- [Commits](https://github.com/ardaku/whoami/compare/v1.5.2...v1.6.0)

Updates `axum-extra` from 0.9.6 to 0.10.1
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-extra-v0.9.6...axum-extra-v0.10.1)

Updates `axum-macros` from 0.4.2 to 0.5.0
- [Release notes](https://github.com/tokio-rs/axum/releases)
- [Changelog](https://github.com/tokio-rs/axum/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/axum/compare/axum-macros-v0.4.2...axum-macros-v0.5.0)

Updates `fantoccini` from 0.21.4 to 0.21.5
- [Commits](https://github.com/jonhoo/fantoccini/compare/v0.21.4...v0.21.5)

Updates `jsonschema` from 0.29.0 to 0.29.1
- [Release notes](https://github.com/Stranger6667/jsonschema/releases)
- [Changelog](https://github.com/Stranger6667/jsonschema/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Stranger6667/jsonschema/compare/rust-v0.29.0...rust-v0.29.1)

---
updated-dependencies:
- dependency-name: clap
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: all
- dependency-name: itertools
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: lru
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: rand
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: rand_chacha
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: whoami
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: axum-extra
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: axum-macros
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: fantoccini
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: all
- dependency-name: jsonschema
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: all
...

Signed-off-by: dependabot[bot] <support@github.com>

* maint: revert rand and axum packages

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: James Hodgkinson <james@terminaloutcomes.com>
2025-03-31 00:28:22 +00:00
dependabot[bot] efaef70abe
Bump mozilla-actions/sccache-action from 0.0.8 to 0.0.9 in the all group ()
Bumps the all group with 1 update: [mozilla-actions/sccache-action](https://github.com/mozilla-actions/sccache-action).


Updates `mozilla-actions/sccache-action` from 0.0.8 to 0.0.9
- [Release notes](https://github.com/mozilla-actions/sccache-action/releases)
- [Commits](https://github.com/mozilla-actions/sccache-action/compare/v0.0.8...v0.0.9)

---
updated-dependencies:
- dependency-name: mozilla-actions/sccache-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: all
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-30 23:25:40 +00:00
dependabot[bot] 5b48f1dfe3
Bump the all group in /pykanidm with 4 updates ()
Bumps the all group in /pykanidm with 4 updates: [pydantic](https://github.com/pydantic/pydantic), [types-requests](https://github.com/python/typeshed), [mkdocs-material](https://github.com/squidfunk/mkdocs-material) and [mkdocstrings-python](https://github.com/mkdocstrings/python).


Updates `pydantic` from 2.10.6 to 2.11.1
- [Release notes](https://github.com/pydantic/pydantic/releases)
- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md)
- [Commits](https://github.com/pydantic/pydantic/compare/v2.10.6...v2.11.1)

Updates `types-requests` from 2.32.0.20250306 to 2.32.0.20250328
- [Commits](https://github.com/python/typeshed/commits)

Updates `mkdocs-material` from 9.6.9 to 9.6.10
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases)
- [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG)
- [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.6.9...9.6.10)

Updates `mkdocstrings-python` from 1.16.7 to 1.16.8
- [Release notes](https://github.com/mkdocstrings/python/releases)
- [Changelog](https://github.com/mkdocstrings/python/blob/main/CHANGELOG.md)
- [Commits](https://github.com/mkdocstrings/python/compare/1.16.7...1.16.8)

---
updated-dependencies:
- dependency-name: pydantic
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: all
- dependency-name: types-requests
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: all
- dependency-name: mkdocs-material
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: all
- dependency-name: mkdocstrings-python
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: all
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-31 07:54:28 +10:00
Doridian 998e56d648 begin reworking 2025-01-07 17:09:31 -08:00
Mark Dietzer 2e3f4f30ae
Merge branch 'master' into feat/initgroups 2024-12-31 17:23:49 -08:00
Doridian 15410a7830 Simplify logic 2024-12-30 00:04:02 -08:00
Doridian 8af51175f5 Implement libnss side possibly 2024-12-29 21:46:24 -08:00
Doridian 685746796e Add and implement basic NssGroupsByMember call 2024-12-29 21:29:28 -08:00
50 changed files with 2125 additions and 1849 deletions

View file

@ -19,7 +19,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install dependencies
run: |
sudo apt-get update && \
@ -39,6 +39,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: "Run cargo fmt"
run: cargo fmt --check

View file

@ -24,7 +24,7 @@ jobs:
with:
ref: ${{ inputs.tag }}
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install deps
run: |
sudo apt-get update

View file

@ -27,7 +27,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install dependencies
run: |
sudo apt-get update && \
@ -72,7 +72,7 @@ jobs:
with:
toolchain: ${{ matrix.rust_version }}
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install dependencies
run: |
sudo apt-get update && \
@ -112,7 +112,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- name: Install dependencies
run: |
sudo apt-get update && \

View file

@ -28,7 +28,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.8
uses: mozilla-actions/sccache-action@v0.0.9
- run: cargo build --locked -p kanidm_client -p kanidm_tools --bin kanidm
# yamllint disable-line rule:line-length
- run: cargo test -p kanidm_client -p kanidm_tools

100
Cargo.lock generated
View file

@ -232,9 +232,9 @@ dependencies = [
[[package]]
name = "async-compression"
version = "0.4.21"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0cf008e5e1a9e9e22a7d3c9a4992e21a350290069e36d8fb72304ed17e8f2d2"
checksum = "59a194f9d963d8099596278594b3107448656ba73831c9d8c783e613ce86da64"
dependencies = [
"flate2",
"futures-core",
@ -536,7 +536,7 @@ dependencies = [
"bitflags 2.9.0",
"cexpr",
"clang-sys",
"itertools 0.13.0",
"itertools 0.10.5",
"log",
"prettyplease",
"proc-macro2",
@ -724,9 +724,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.32"
version = "4.5.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83"
checksum = "e958897981290da2a852763fe9cdb89cd36977a5d729023127095fa94d95e2ff"
dependencies = [
"clap_builder",
"clap_derive",
@ -734,9 +734,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.32"
version = "4.5.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8"
checksum = "83b0f35019843db2160b5bb19ae09b4e6411ac33fc6a712003c33e03090e2489"
dependencies = [
"anstream",
"anstyle",
@ -818,7 +818,7 @@ dependencies = [
"crossbeam-epoch",
"crossbeam-queue",
"crossbeam-utils",
"lru 0.13.0",
"lru",
"smallvec",
"sptr",
"tokio",
@ -1167,9 +1167,9 @@ dependencies = [
[[package]]
name = "deranged"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
checksum = "28cfac68e08048ae1883171632c2aef3ebc555621ae56fbccce1cbf22dd7f058"
dependencies = [
"powerfmt",
"serde",
@ -1441,13 +1441,12 @@ dependencies = [
[[package]]
name = "fantoccini"
version = "0.21.4"
version = "0.21.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7722aeee9c2be6fa131166990295089d73d973012b758a2208b9ba51af5dd024"
checksum = "e3a6a7a9a454c24453f9807c7f12b37e31ae43f3eb41888ae1f79a9a3e3be3f5"
dependencies = [
"base64 0.22.1",
"cookie 0.18.1",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body-util",
@ -2533,14 +2532,15 @@ dependencies = [
[[package]]
name = "iana-time-zone"
version = "0.1.61"
version = "0.1.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
checksum = "b2fd658b06e56721792c5df4475705b6cda790e9298d19d2f8af083457bcd127"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"wasm-bindgen",
"windows-core",
]
@ -2595,9 +2595,9 @@ dependencies = [
[[package]]
name = "icu_locid_transform_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d"
[[package]]
name = "icu_normalizer"
@ -2619,9 +2619,9 @@ dependencies = [
[[package]]
name = "icu_normalizer_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7"
[[package]]
name = "icu_properties"
@ -2640,9 +2640,9 @@ dependencies = [
[[package]]
name = "icu_properties_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2"
[[package]]
name = "icu_provider"
@ -2801,15 +2801,6 @@ dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.14.0"
@ -2843,9 +2834,9 @@ dependencies = [
[[package]]
name = "jsonschema"
version = "0.29.0"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c59cb1733c34377b6067a0419befd7f25073c5249ec3b0614a482bf499e1df5"
checksum = "161c33c3ec738cfea3288c5c53dfcdb32fd4fc2954de86ea06f71b5a1a40bfcd"
dependencies = [
"ahash",
"base64 0.22.1",
@ -3105,7 +3096,7 @@ dependencies = [
"kanidmd_core",
"kanidmd_testkit",
"libc",
"lru 0.12.5",
"lru",
"mimalloc",
"notify-debouncer-full",
"prctl",
@ -3203,7 +3194,7 @@ dependencies = [
"hex",
"idlset",
"image 0.24.9",
"itertools 0.13.0",
"itertools 0.14.0",
"kanidm_build_profiles",
"kanidm_lib_crypto",
"kanidm_proto",
@ -3493,18 +3484,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.26"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
[[package]]
name = "lru"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
dependencies = [
"hashbrown 0.15.2",
]
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "lru"
@ -3967,9 +3949,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.21.1"
version = "1.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc"
checksum = "c2806eaa3524762875e21c3dcd057bc4b7bfa01ce4da8d46be1cd43649e1cc6b"
[[package]]
name = "openssl"
@ -4502,9 +4484,9 @@ dependencies = [
[[package]]
name = "quinn-udp"
version = "0.5.10"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e46f3055866785f6b92bc6164b76be02ca8f2eb4b002c0354b28cf4c119e5944"
checksum = "541d0f57c6ec747a90738a52741d3221f7960e8ac2f0ff4b1a63680e033b4ab5"
dependencies = [
"cfg_aliases",
"libc",
@ -4637,9 +4619,9 @@ checksum = "5daffa8f5ca827e146485577fa9dba9bd9c6921e06e954ab8f6408c10f753086"
[[package]]
name = "referencing"
version = "0.29.0"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce52678d53e5ee37e4af0a9036ca834d0cd34b33c82457c6b06a24f8d783344"
checksum = "40a64b3a635fad9000648b4d8a59c8710c523ab61a23d392a7d91d47683f5adc"
dependencies = [
"ahash",
"fluent-uri",
@ -4966,9 +4948,9 @@ dependencies = [
[[package]]
name = "rustls-webpki"
version = "0.103.0"
version = "0.103.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0aa4eeac2588ffff23e9d7a7e9b3f971c5fb5b7ebc9452745e0c232c64f83b2f"
checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03"
dependencies = [
"ring",
"rustls-pki-types",
@ -5595,9 +5577,9 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.40"
version = "0.3.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d9c75b47bdff86fa3334a3db91356b8d7d86a9b839dab7d0bdc5c3d3a077618"
checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
dependencies = [
"deranged",
"itoa",
@ -5618,9 +5600,9 @@ checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "time-macros"
version = "0.2.21"
version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29aa485584182073ed57fd5004aa09c371f021325014694e432313345865fd04"
checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
dependencies = [
"num-conv",
"time-core",
@ -6482,9 +6464,9 @@ dependencies = [
[[package]]
name = "whoami"
version = "1.5.2"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d"
checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7"
dependencies = [
"redox_syscall",
"wasite",

View file

@ -159,7 +159,7 @@ base64 = "^0.22.1"
base64urlsafedata = "0.5.1"
bitflags = "^2.8.0"
bytes = "^1.9.0"
clap = { version = "^4.5.27", features = ["derive", "env"] }
clap = { version = "^4.5.34", features = ["derive", "env"] }
clap_complete = "^4.5.42"
# Forced by saffron/cron
chrono = "^0.4.39"
@ -190,7 +190,7 @@ image = { version = "0.24.9", default-features = false, features = [
"jpeg",
"webp",
] }
itertools = "0.13.0"
itertools = "0.14.0"
enum-iterator = "2.1.0"
kanidmd_web_ui_shared = { path = "./server/web_ui/shared" }
# REMOVE this
@ -202,7 +202,7 @@ libc = "^0.2.168"
libnss = "^0.8.0"
libsqlite3-sys = "^0.25.2"
lodepng = "3.11.0"
lru = "^0.12.5"
lru = "^0.13.0"
mathru = "^0.13.0"
md-5 = "0.10.6"
mimalloc = "0.1.43"
@ -294,7 +294,7 @@ webauthn-rs = { version = "0.5.1", features = ["preview-features"] }
webauthn-rs-core = "0.5.1"
webauthn-rs-proto = "0.5.1"
whoami = "^1.5.2"
whoami = "^1.6.0"
walkdir = "2"
x509-cert = "0.2.5"

View file

@ -53,7 +53,6 @@
- [Service Integration Examples](examples/readme.md)
- [Kubernetes Ingress](examples/kubernetes_ingress.md)
- [OAuth2 Examples](integrations/oauth2/examples.md)
- [Traefik](examples/traefik.md)
- [Replication](repl/readme.md)

View file

@ -1,3 +1,6 @@
# The server configuration file version.
version = "2"
# The webserver bind address. Requires TLS certificates.
# If the port is set to 443 you may require the
# NET_BIND_SERVICE capability.

View file

@ -1,3 +1,6 @@
# The server configuration file version.
version = "2"
# The webserver bind address. Requires TLS certificates.
# If the port is set to 443 you may require the
# NET_BIND_SERVICE capability.

View file

@ -22,6 +22,8 @@ pub enum Attribute {
AcpCreateClass,
AcpEnable,
AcpModifyClass,
AcpModifyPresentClass,
AcpModifyRemoveClass,
AcpModifyPresentAttr,
AcpModifyRemovedAttr,
AcpReceiver,
@ -255,6 +257,8 @@ impl Attribute {
Attribute::AcpCreateClass => ATTR_ACP_CREATE_CLASS,
Attribute::AcpEnable => ATTR_ACP_ENABLE,
Attribute::AcpModifyClass => ATTR_ACP_MODIFY_CLASS,
Attribute::AcpModifyPresentClass => ATTR_ACP_MODIFY_PRESENT_CLASS,
Attribute::AcpModifyRemoveClass => ATTR_ACP_MODIFY_REMOVE_CLASS,
Attribute::AcpModifyPresentAttr => ATTR_ACP_MODIFY_PRESENTATTR,
Attribute::AcpModifyRemovedAttr => ATTR_ACP_MODIFY_REMOVEDATTR,
Attribute::AcpReceiver => ATTR_ACP_RECEIVER,
@ -440,6 +444,8 @@ impl Attribute {
ATTR_ACP_CREATE_CLASS => Attribute::AcpCreateClass,
ATTR_ACP_ENABLE => Attribute::AcpEnable,
ATTR_ACP_MODIFY_CLASS => Attribute::AcpModifyClass,
ATTR_ACP_MODIFY_PRESENT_CLASS => Attribute::AcpModifyPresentClass,
ATTR_ACP_MODIFY_REMOVE_CLASS => Attribute::AcpModifyRemoveClass,
ATTR_ACP_MODIFY_PRESENTATTR => Attribute::AcpModifyPresentAttr,
ATTR_ACP_MODIFY_REMOVEDATTR => Attribute::AcpModifyRemovedAttr,
ATTR_ACP_RECEIVER => Attribute::AcpReceiver,

View file

@ -62,6 +62,8 @@ pub const ATTR_ACP_CREATE_ATTR: &str = "acp_create_attr";
pub const ATTR_ACP_CREATE_CLASS: &str = "acp_create_class";
pub const ATTR_ACP_ENABLE: &str = "acp_enable";
pub const ATTR_ACP_MODIFY_CLASS: &str = "acp_modify_class";
pub const ATTR_ACP_MODIFY_PRESENT_CLASS: &str = "acp_modify_present_class";
pub const ATTR_ACP_MODIFY_REMOVE_CLASS: &str = "acp_modify_remove_class";
pub const ATTR_ACP_MODIFY_PRESENTATTR: &str = "acp_modify_presentattr";
pub const ATTR_ACP_MODIFY_REMOVEDATTR: &str = "acp_modify_removedattr";
pub const ATTR_ACP_RECEIVER_GROUP: &str = "acp_receiver_group";

View file

@ -33,7 +33,7 @@ pub enum ScimAttributeEffectiveAccess {
/// All attributes on the entry have this permission granted
Grant,
/// All attributes on the entry have this permission denied
Denied,
Deny,
/// The following attributes on the entry have this permission granted
Allow(BTreeSet<Attribute>),
}
@ -43,7 +43,7 @@ impl ScimAttributeEffectiveAccess {
pub fn check(&self, attr: &Attribute) -> bool {
match self {
Self::Grant => true,
Self::Denied => false,
Self::Deny => false,
Self::Allow(set) => set.contains(attr),
}
}

257
pykanidm/poetry.lock generated
View file

@ -741,14 +741,14 @@ dev = ["flake8", "markdown", "twine", "wheel"]
[[package]]
name = "griffe"
version = "1.2.0"
version = "1.7.1"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "griffe-1.2.0-py3-none-any.whl", hash = "sha256:a8b2fcb1ecdc5a412e646b0b4375eb20a5d2eac3a11dd8c10c56967a4097663c"},
{file = "griffe-1.2.0.tar.gz", hash = "sha256:1c9f6ef7455930f3f9b0c4145a961c90385d1e2cbc496f7796fbff560ec60d31"},
{file = "griffe-1.7.1-py3-none-any.whl", hash = "sha256:37a7f15233937d723ddc969fa4117fdd03988885c16938dc43bccdfe8fa4d02d"},
{file = "griffe-1.7.1.tar.gz", hash = "sha256:464730d0e95d0afd038e699a5f7276d7438d0712db0c489a17e761f70e011507"},
]
[package.dependencies]
@ -1051,14 +1051,14 @@ pyyaml = ">=5.1"
[[package]]
name = "mkdocs-material"
version = "9.6.9"
version = "9.6.10"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_material-9.6.9-py3-none-any.whl", hash = "sha256:6e61b7fb623ce2aa4622056592b155a9eea56ff3487d0835075360be45a4c8d1"},
{file = "mkdocs_material-9.6.9.tar.gz", hash = "sha256:a4872139715a1f27b2aa3f3dc31a9794b7bbf36333c0ba4607cf04786c94f89c"},
{file = "mkdocs_material-9.6.10-py3-none-any.whl", hash = "sha256:36168548df4e2ddeb9a334ddae4ab9c388ccfea4dd50ffee657d22b93dcb1c3e"},
{file = "mkdocs_material-9.6.10.tar.gz", hash = "sha256:25a453c1f24f34fcf1f53680c03d2c1421b52ce5247f4468153c87a70cd5f1fc"},
]
[package.dependencies]
@ -1120,18 +1120,18 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
[[package]]
name = "mkdocstrings-python"
version = "1.16.7"
version = "1.16.8"
description = "A Python handler for mkdocstrings."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "mkdocstrings_python-1.16.7-py3-none-any.whl", hash = "sha256:a5589a5be247a28ba651287f83630c69524042f8055d93b5c203d804a3409333"},
{file = "mkdocstrings_python-1.16.7.tar.gz", hash = "sha256:cdfc1a99fe5f6f0d90446a364ef7cac12014a4ef46114b2677a58cec84007117"},
{file = "mkdocstrings_python-1.16.8-py3-none-any.whl", hash = "sha256:211b7aaf776cd45578ecb531e5ad0d3a35a8be9101a6bfa10de38a69af9d8fd8"},
{file = "mkdocstrings_python-1.16.8.tar.gz", hash = "sha256:9453ccae69be103810c1cf6435ce71c8f714ae37fef4d87d16aa92a7c800fe1d"},
]
[package.dependencies]
griffe = ">=0.49"
griffe = ">=1.6.2"
mkdocs-autorefs = ">=1.4"
mkdocstrings = ">=0.28.3"
typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
@ -1530,20 +1530,21 @@ files = [
[[package]]
name = "pydantic"
version = "2.10.6"
version = "2.11.1"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
{file = "pydantic-2.11.1-py3-none-any.whl", hash = "sha256:5b6c415eee9f8123a14d859be0c84363fec6b1feb6b688d6435801230b56e0b8"},
{file = "pydantic-2.11.1.tar.gz", hash = "sha256:442557d2910e75c991c39f4b4ab18963d57b9b55122c8b2a9cd176d8c29ce968"},
]
[package.dependencies]
annotated-types = ">=0.6.0"
pydantic-core = "2.27.2"
pydantic-core = "2.33.0"
typing-extensions = ">=4.12.2"
typing-inspection = ">=0.4.0"
[package.extras]
email = ["email-validator (>=2.0.0)"]
@ -1551,112 +1552,111 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows
[[package]]
name = "pydantic-core"
version = "2.27.2"
version = "2.33.0"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"},
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"},
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"},
{file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"},
{file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"},
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"},
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"},
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"},
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"},
{file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"},
{file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"},
{file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"},
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"},
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"},
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"},
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"},
{file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"},
{file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"},
{file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"},
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"},
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"},
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"},
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"},
{file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"},
{file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"},
{file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"},
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"},
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"},
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"},
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"},
{file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"},
{file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"},
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"},
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"},
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"},
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"},
{file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"},
{file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"},
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"},
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"},
{file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},
{file = "pydantic_core-2.33.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71dffba8fe9ddff628c68f3abd845e91b028361d43c5f8e7b3f8b91d7d85413e"},
{file = "pydantic_core-2.33.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:abaeec1be6ed535a5d7ffc2e6c390083c425832b20efd621562fbb5bff6dc518"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759871f00e26ad3709efc773ac37b4d571de065f9dfb1778012908bcc36b3a73"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dcfebee69cd5e1c0b76a17e17e347c84b00acebb8dd8edb22d4a03e88e82a207"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b1262b912435a501fa04cd213720609e2cefa723a07c92017d18693e69bf00b"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4726f1f3f42d6a25678c67da3f0b10f148f5655813c5aca54b0d1742ba821b8f"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e790954b5093dff1e3a9a2523fddc4e79722d6f07993b4cd5547825c3cbf97b5"},
{file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34e7fb3abe375b5c4e64fab75733d605dda0f59827752debc99c17cb2d5f3276"},
{file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ecb158fb9b9091b515213bed3061eb7deb1d3b4e02327c27a0ea714ff46b0760"},
{file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:4d9149e7528af8bbd76cc055967e6e04617dcb2a2afdaa3dea899406c5521faa"},
{file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e81a295adccf73477220e15ff79235ca9dcbcee4be459eb9d4ce9a2763b8386c"},
{file = "pydantic_core-2.33.0-cp310-cp310-win32.whl", hash = "sha256:f22dab23cdbce2005f26a8f0c71698457861f97fc6318c75814a50c75e87d025"},
{file = "pydantic_core-2.33.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cb2390355ba084c1ad49485d18449b4242da344dea3e0fe10babd1f0db7dcfc"},
{file = "pydantic_core-2.33.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a608a75846804271cf9c83e40bbb4dab2ac614d33c6fd5b0c6187f53f5c593ef"},
{file = "pydantic_core-2.33.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e1c69aa459f5609dec2fa0652d495353accf3eda5bdb18782bc5a2ae45c9273a"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9ec80eb5a5f45a2211793f1c4aeddff0c3761d1c70d684965c1807e923a588b"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e925819a98318d17251776bd3d6aa9f3ff77b965762155bdad15d1a9265c4cfd"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bf68bb859799e9cec3d9dd8323c40c00a254aabb56fe08f907e437005932f2b"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b2ea72dea0825949a045fa4071f6d5b3d7620d2a208335207793cf29c5a182d"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1583539533160186ac546b49f5cde9ffc928062c96920f58bd95de32ffd7bffd"},
{file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23c3e77bf8a7317612e5c26a3b084c7edeb9552d645742a54a5867635b4f2453"},
{file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a7a7f2a3f628d2f7ef11cb6188bcf0b9e1558151d511b974dfea10a49afe192b"},
{file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:f1fb026c575e16f673c61c7b86144517705865173f3d0907040ac30c4f9f5915"},
{file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:635702b2fed997e0ac256b2cfbdb4dd0bf7c56b5d8fba8ef03489c03b3eb40e2"},
{file = "pydantic_core-2.33.0-cp311-cp311-win32.whl", hash = "sha256:07b4ced28fccae3f00626eaa0c4001aa9ec140a29501770a88dbbb0966019a86"},
{file = "pydantic_core-2.33.0-cp311-cp311-win_amd64.whl", hash = "sha256:4927564be53239a87770a5f86bdc272b8d1fbb87ab7783ad70255b4ab01aa25b"},
{file = "pydantic_core-2.33.0-cp311-cp311-win_arm64.whl", hash = "sha256:69297418ad644d521ea3e1aa2e14a2a422726167e9ad22b89e8f1130d68e1e9a"},
{file = "pydantic_core-2.33.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6c32a40712e3662bebe524abe8abb757f2fa2000028d64cc5a1006016c06af43"},
{file = "pydantic_core-2.33.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ec86b5baa36f0a0bfb37db86c7d52652f8e8aa076ab745ef7725784183c3fdd"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4deac83a8cc1d09e40683be0bc6d1fa4cde8df0a9bf0cda5693f9b0569ac01b6"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:175ab598fb457a9aee63206a1993874badf3ed9a456e0654273e56f00747bbd6"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f36afd0d56a6c42cf4e8465b6441cf546ed69d3a4ec92724cc9c8c61bd6ecf4"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a98257451164666afafc7cbf5fb00d613e33f7e7ebb322fbcd99345695a9a61"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecc6d02d69b54a2eb83ebcc6f29df04957f734bcf309d346b4f83354d8376862"},
{file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a69b7596c6603afd049ce7f3835bcf57dd3892fc7279f0ddf987bebed8caa5a"},
{file = "pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea30239c148b6ef41364c6f51d103c2988965b643d62e10b233b5efdca8c0099"},
{file = "pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:abfa44cf2f7f7d7a199be6c6ec141c9024063205545aa09304349781b9a125e6"},
{file = "pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20d4275f3c4659d92048c70797e5fdc396c6e4446caf517ba5cad2db60cd39d3"},
{file = "pydantic_core-2.33.0-cp312-cp312-win32.whl", hash = "sha256:918f2013d7eadea1d88d1a35fd4a1e16aaf90343eb446f91cb091ce7f9b431a2"},
{file = "pydantic_core-2.33.0-cp312-cp312-win_amd64.whl", hash = "sha256:aec79acc183865bad120b0190afac467c20b15289050648b876b07777e67ea48"},
{file = "pydantic_core-2.33.0-cp312-cp312-win_arm64.whl", hash = "sha256:5461934e895968655225dfa8b3be79e7e927e95d4bd6c2d40edd2fa7052e71b6"},
{file = "pydantic_core-2.33.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f00e8b59e1fc8f09d05594aa7d2b726f1b277ca6155fc84c0396db1b373c4555"},
{file = "pydantic_core-2.33.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a73be93ecef45786d7d95b0c5e9b294faf35629d03d5b145b09b81258c7cd6d"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff48a55be9da6930254565ff5238d71d5e9cd8c5487a191cb85df3bdb8c77365"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a4ea04195638dcd8c53dadb545d70badba51735b1594810e9768c2c0b4a5da"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41d698dcbe12b60661f0632b543dbb119e6ba088103b364ff65e951610cb7ce0"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae62032ef513fe6281ef0009e30838a01057b832dc265da32c10469622613885"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f225f3a3995dbbc26affc191d0443c6c4aa71b83358fd4c2b7d63e2f6f0336f9"},
{file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5bdd36b362f419c78d09630cbaebc64913f66f62bda6d42d5fbb08da8cc4f181"},
{file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2a0147c0bef783fd9abc9f016d66edb6cac466dc54a17ec5f5ada08ff65caf5d"},
{file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c860773a0f205926172c6644c394e02c25421dc9a456deff16f64c0e299487d3"},
{file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:138d31e3f90087f42aa6286fb640f3c7a8eb7bdae829418265e7e7474bd2574b"},
{file = "pydantic_core-2.33.0-cp313-cp313-win32.whl", hash = "sha256:d20cbb9d3e95114325780f3cfe990f3ecae24de7a2d75f978783878cce2ad585"},
{file = "pydantic_core-2.33.0-cp313-cp313-win_amd64.whl", hash = "sha256:ca1103d70306489e3d006b0f79db8ca5dd3c977f6f13b2c59ff745249431a606"},
{file = "pydantic_core-2.33.0-cp313-cp313-win_arm64.whl", hash = "sha256:6291797cad239285275558e0a27872da735b05c75d5237bbade8736f80e4c225"},
{file = "pydantic_core-2.33.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7b79af799630af263eca9ec87db519426d8c9b3be35016eddad1832bac812d87"},
{file = "pydantic_core-2.33.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eabf946a4739b5237f4f56d77fa6668263bc466d06a8036c055587c130a46f7b"},
{file = "pydantic_core-2.33.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8a1d581e8cdbb857b0e0e81df98603376c1a5c34dc5e54039dcc00f043df81e7"},
{file = "pydantic_core-2.33.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7c9c84749f5787781c1c45bb99f433402e484e515b40675a5d121ea14711cf61"},
{file = "pydantic_core-2.33.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64672fa888595a959cfeff957a654e947e65bbe1d7d82f550417cbd6898a1d6b"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bc7367c0961dec292244ef2549afa396e72e28cc24706210bd44d947582c59"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ce72d46eb201ca43994303025bd54d8a35a3fc2a3495fac653d6eb7205ce04f4"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14229c1504287533dbf6b1fc56f752ce2b4e9694022ae7509631ce346158de11"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:085d8985b1c1e48ef271e98a658f562f29d89bda98bf120502283efbc87313eb"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31860fbda80d8f6828e84b4a4d129fd9c4535996b8249cfb8c720dc2a1a00bb8"},
{file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f200b2f20856b5a6c3a35f0d4e344019f805e363416e609e9b47c552d35fd5ea"},
{file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f72914cfd1d0176e58ddc05c7a47674ef4222c8253bf70322923e73e14a4ac3"},
{file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:91301a0980a1d4530d4ba7e6a739ca1a6b31341252cb709948e0aca0860ce0ae"},
{file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7419241e17c7fbe5074ba79143d5523270e04f86f1b3a0dff8df490f84c8273a"},
{file = "pydantic_core-2.33.0-cp39-cp39-win32.whl", hash = "sha256:7a25493320203005d2a4dac76d1b7d953cb49bce6d459d9ae38e30dd9f29bc9c"},
{file = "pydantic_core-2.33.0-cp39-cp39-win_amd64.whl", hash = "sha256:82a4eba92b7ca8af1b7d5ef5f3d9647eee94d1f74d21ca7c21e3a2b92e008358"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2762c568596332fdab56b07060c8ab8362c56cf2a339ee54e491cd503612c50"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bf637300ff35d4f59c006fff201c510b2b5e745b07125458a5389af3c0dff8c"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c151ce3d59ed56ebd7ce9ce5986a409a85db697d25fc232f8e81f195aa39a1"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee65f0cc652261744fd07f2c6e6901c914aa6c5ff4dcfaf1136bc394d0dd26b"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:024d136ae44d233e6322027bbf356712b3940bee816e6c948ce4b90f18471b3d"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e37f10f6d4bc67c58fbd727108ae1d8b92b397355e68519f1e4a7babb1473442"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:502ed542e0d958bd12e7c3e9a015bce57deaf50eaa8c2e1c439b512cb9db1e3a"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:715c62af74c236bf386825c0fdfa08d092ab0f191eb5b4580d11c3189af9d330"},
{file = "pydantic_core-2.33.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bccc06fa0372151f37f6b69834181aa9eb57cf8665ed36405fb45fbf6cac3bae"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d8dc9f63a26f7259b57f46a7aab5af86b2ad6fbe48487500bb1f4b27e051e4c"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:30369e54d6d0113d2aa5aee7a90d17f225c13d87902ace8fcd7bbf99b19124db"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb479354c62067afa62f53bb387827bee2f75c9c79ef25eef6ab84d4b1ae3b"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0310524c833d91403c960b8a3cf9f46c282eadd6afd276c8c5edc617bd705dc9"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eddb18a00bbb855325db27b4c2a89a4ba491cd6a0bd6d852b225172a1f54b36c"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ade5dbcf8d9ef8f4b28e682d0b29f3008df9842bb5ac48ac2c17bc55771cc976"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2c0afd34f928383e3fd25740f2050dbac9d077e7ba5adbaa2227f4d4f3c8da5c"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7da333f21cd9df51d5731513a6d39319892947604924ddf2e24a4612975fb936"},
{file = "pydantic_core-2.33.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4b6d77c75a57f041c5ee915ff0b0bb58eabb78728b69ed967bc5b780e8f701b8"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba95691cf25f63df53c1d342413b41bd7762d9acb425df8858d7efa616c0870e"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f1ab031feb8676f6bd7c85abec86e2935850bf19b84432c64e3e239bffeb1ec"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c1151827eef98b83d49b6ca6065575876a02d2211f259fb1a6b7757bd24dd8"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66d931ea2c1464b738ace44b7334ab32a2fd50be023d863935eb00f42be1778"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0bcf0bab28995d483f6c8d7db25e0d05c3efa5cebfd7f56474359e7137f39856"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:89670d7a0045acb52be0566df5bc8b114ac967c662c06cf5e0c606e4aadc964b"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:b716294e721d8060908dbebe32639b01bfe61b15f9f57bcc18ca9a0e00d9520b"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fc53e05c16697ff0c1c7c2b98e45e131d4bfb78068fffff92a82d169cbb4c7b7"},
{file = "pydantic_core-2.33.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:68504959253303d3ae9406b634997a2123a0b0c1da86459abbd0ffc921695eac"},
{file = "pydantic_core-2.33.0.tar.gz", hash = "sha256:40eb8af662ba409c3cbf4a8150ad32ae73514cd7cb1f1a2113af39763dd616b3"},
]
[package.dependencies]
@ -2161,14 +2161,14 @@ files = [
[[package]]
name = "types-requests"
version = "2.32.0.20250306"
version = "2.32.0.20250328"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b"},
{file = "types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1"},
{file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"},
{file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"},
]
[package.dependencies]
@ -2198,6 +2198,21 @@ files = [
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "typing-inspection"
version = "0.4.0"
description = "Runtime typing introspection tools"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"},
{file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"},
]
[package.dependencies]
typing-extensions = ">=4.12.0"
[[package]]
name = "urllib3"
version = "2.2.2"

View file

@ -25,7 +25,7 @@ def recover_account(username: str) -> str:
"recover-account",
username,
"--config",
"../../examples/insecure_server.toml",
"./insecure_server.toml",
"--output",
"json",
]

View file

@ -44,7 +44,7 @@ fi
# defaults
KANIDM_CONFIG_FILE="../../examples/insecure_server.toml"
KANIDM_CONFIG_FILE="./insecure_server.toml"
KANIDM_URL="$(rg origin "${KANIDM_CONFIG_FILE}" | awk '{print $NF}' | tr -d '"')"
KANIDM_CA_PATH="/tmp/kanidm/ca.pem"
@ -83,7 +83,7 @@ if [ "${REMOVE_TEST_DB}" -eq 1 ]; then
rm /tmp/kanidm/kanidm.db || true
fi
export KANIDM_CONFIG="../../examples/insecure_server.toml"
export KANIDM_CONFIG="./insecure_server.toml"
IDM_ADMIN_USER="idm_admin@localhost"
echo "Resetting the idm_admin user..."

View file

@ -25,7 +25,7 @@ if [ ! -f "run_insecure_dev_server.sh" ]; then
exit 1
fi
export KANIDM_CONFIG="../../examples/insecure_server.toml"
export KANIDM_CONFIG="./insecure_server.toml"
mkdir -p /tmp/kanidm/client_ca
@ -48,7 +48,7 @@ fi
ATTEMPT=0
KANIDM_CONFIG_FILE="../../examples/insecure_server.toml"
KANIDM_CONFIG_FILE="./insecure_server.toml"
KANIDM_URL="$(rg origin "${KANIDM_CONFIG_FILE}" | awk '{print $NF}' | tr -d '"')"
KANIDM_CA_PATH="/tmp/kanidm/ca.pem"

View file

@ -191,7 +191,7 @@ impl QueryServerReadV1 {
pub async fn handle_online_backup(
&self,
msg: OnlineBackupEvent,
outpath: &str,
outpath: &Path,
versions: usize,
) -> Result<(), OperationError> {
trace!(eventid = ?msg.eventid, "Begin online backup event");
@ -200,12 +200,12 @@ impl QueryServerReadV1 {
#[allow(clippy::unwrap_used)]
let timestamp = now.format(&Rfc3339).unwrap();
let dest_file = format!("{}/backup-{}.json", outpath, timestamp);
let dest_file = outpath.join(format!("backup-{}.json", timestamp));
if Path::new(&dest_file).exists() {
if dest_file.exists() {
error!(
"Online backup file {} already exists, will not overwrite it.",
dest_file
dest_file.display()
);
return Err(OperationError::InvalidState);
}
@ -218,10 +218,14 @@ impl QueryServerReadV1 {
.get_be_txn()
.backup(&dest_file)
.map(|()| {
info!("Online backup created {} successfully", dest_file);
info!("Online backup created {} successfully", dest_file.display());
})
.map_err(|e| {
error!("Online backup failed to create {}: {:?}", dest_file, e);
error!(
"Online backup failed to create {}: {:?}",
dest_file.display(),
e
);
OperationError::InvalidState
})?;
}
@ -267,7 +271,11 @@ impl QueryServerReadV1 {
}
}
Err(e) => {
error!("Online backup cleanup error read dir {}: {}", outpath, e);
error!(
"Online backup cleanup error read dir {}: {}",
outpath.display(),
e
);
return Err(OperationError::InvalidState);
}
}

File diff suppressed because it is too large Load diff

View file

@ -112,19 +112,19 @@ impl IntervalActor {
if !op.exists() {
info!(
"Online backup output folder '{}' does not exist, trying to create it.",
outpath
outpath.display()
);
fs::create_dir_all(&outpath).map_err(|e| {
error!(
"Online backup failed to create output directory '{}': {}",
outpath.clone(),
outpath.display(),
e
)
})?;
}
if !op.is_dir() {
error!("Online backup output '{}' is not a directory or we are missing permissions to access it.", outpath);
error!("Online backup output '{}' is not a directory or we are missing permissions to access it.", outpath.display());
return Err(());
}
@ -148,7 +148,7 @@ impl IntervalActor {
if let Err(e) = server
.handle_online_backup(
OnlineBackupEvent::new(),
outpath.clone().as_str(),
&outpath,
versions,
)
.await

View file

@ -36,9 +36,10 @@ mod ldaps;
mod repl;
mod utils;
use std::fmt::{Display, Formatter};
use std::sync::Arc;
use crate::actors::{QueryServerReadV1, QueryServerWriteV1};
use crate::admin::AdminActor;
use crate::config::{Configuration, ServerRole};
use crate::interval::IntervalActor;
use crate::utils::touch_file_or_quit;
use compact_jwt::{JwsHs256Signer, JwsSigner};
use kanidm_proto::internal::OperationError;
@ -50,17 +51,14 @@ use kanidmd_lib::status::StatusActor;
use kanidmd_lib::value::CredentialType;
#[cfg(not(target_family = "windows"))]
use libc::umask;
use std::fmt::{Display, Formatter};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::sync::Notify;
use tokio::task;
use crate::actors::{QueryServerReadV1, QueryServerWriteV1};
use crate::admin::AdminActor;
use crate::config::{Configuration, ServerRole};
use crate::interval::IntervalActor;
use tokio::sync::mpsc;
// === internal setup helpers
fn setup_backend(config: &Configuration, schema: &Schema) -> Result<Backend, OperationError> {
@ -80,7 +78,7 @@ fn setup_backend_vacuum(
let pool_size: u32 = config.threads as u32;
let cfg = BackendConfig::new(
config.db_path.as_str(),
config.db_path.as_deref(),
pool_size,
config.db_fs_type.unwrap_or_default(),
config.db_arc_size,
@ -335,7 +333,7 @@ pub fn dbscan_restore_quarantined_core(config: &Configuration, id: u64) {
};
}
pub fn backup_server_core(config: &Configuration, dst_path: &str) {
pub fn backup_server_core(config: &Configuration, dst_path: &Path) {
let schema = match Schema::new() {
Ok(s) => s,
Err(e) => {
@ -371,8 +369,11 @@ pub fn backup_server_core(config: &Configuration, dst_path: &str) {
// Let the txn abort, even on success.
}
pub async fn restore_server_core(config: &Configuration, dst_path: &str) {
touch_file_or_quit(config.db_path.as_str());
pub async fn restore_server_core(config: &Configuration, dst_path: &Path) {
// If it's an in memory database, we don't need to touch anything
if let Some(db_path) = config.db_path.as_ref() {
touch_file_or_quit(db_path);
}
// First, we provide the in-memory schema so that core attrs are indexed correctly.
let schema = match Schema::new() {
@ -1011,7 +1012,7 @@ pub async fn create_server_core(
let tls_accepter_reload_task_notify = tls_acceptor_reload_notify.clone();
let tls_config = config.tls_config.clone();
let ldap_configured = config.ldapaddress.is_some();
let ldap_configured = config.ldapbindaddress.is_some();
let (ldap_tls_acceptor_reload_tx, ldap_tls_acceptor_reload_rx) = mpsc::channel(1);
let (http_tls_acceptor_reload_tx, http_tls_acceptor_reload_rx) = mpsc::channel(1);
@ -1076,7 +1077,7 @@ pub async fn create_server_core(
};
// If we have been requested to init LDAP, configure it now.
let maybe_ldap_acceptor_handle = match &config.ldapaddress {
let maybe_ldap_acceptor_handle = match &config.ldapbindaddress {
Some(la) => {
let opt_ldap_ssl_acceptor = maybe_tls_acceptor.clone();

View file

@ -1,32 +1,39 @@
use filetime::FileTime;
use std::fs::File;
use std::io::ErrorKind;
use std::path::PathBuf;
use std::path::Path;
use std::time::SystemTime;
pub fn touch_file_or_quit(file_path: &str) {
pub fn touch_file_or_quit<P: AsRef<Path>>(file_path: P) {
/*
Attempt to touch the file file_path, will quit the application if it fails for any reason.
Will also create a new file if it doesn't already exist.
*/
if PathBuf::from(file_path).exists() {
let file_path: &Path = file_path.as_ref();
if file_path.exists() {
let t = FileTime::from_system_time(SystemTime::now());
match filetime::set_file_times(file_path, t, t) {
Ok(_) => debug!(
"Successfully touched existing file {}, can continue",
file_path
file_path.display()
),
Err(e) => {
match e.kind() {
ErrorKind::PermissionDenied => {
// we bail here because you won't be able to write them back...
error!("Permission denied writing to {}, quitting.", file_path)
error!(
"Permission denied writing to {}, quitting.",
file_path.display()
)
}
_ => {
error!(
"Failed to write to {} due to error: {:?} ... quitting.",
file_path, e
file_path.display(),
e
)
}
}
@ -35,11 +42,12 @@ pub fn touch_file_or_quit(file_path: &str) {
}
} else {
match File::create(file_path) {
Ok(_) => debug!("Successfully touched new file {}", file_path),
Ok(_) => debug!("Successfully touched new file {}", file_path.display()),
Err(e) => {
error!(
"Failed to write to {} due to error: {:?} ... quitting.",
file_path, e
file_path.display(),
e
);
std::process::exit(1);
}

View file

@ -1,3 +1,4 @@
version = "2"
bindaddress = "[::]:8443"
ldapbindaddress = "127.0.0.1:3636"

View file

@ -22,7 +22,7 @@ fi
mkdir -p "${KANI_TMP}"/client_ca
CONFIG_FILE=${CONFIG_FILE:="${SCRIPT_DIR}/../../examples/insecure_server.toml"}
CONFIG_FILE=${CONFIG_FILE:="${SCRIPT_DIR}/insecure_server.toml"}
if [ ! -f "${CONFIG_FILE}" ]; then
echo "Couldn't find configuration file at ${CONFIG_FILE}, please ensure you're running this script from its base directory (${SCRIPT_DIR})."

View file

@ -37,7 +37,7 @@ use kanidmd_core::admin::{
AdminTaskRequest, AdminTaskResponse, ClientCodec, ProtoDomainInfo,
ProtoDomainUpgradeCheckReport, ProtoDomainUpgradeCheckStatus,
};
use kanidmd_core::config::{Configuration, ServerConfig};
use kanidmd_core::config::{CliConfig, Configuration, EnvironmentConfig, ServerConfigUntagged};
use kanidmd_core::{
backup_server_core, cert_generate_core, create_server_core, dbscan_get_id2entry_core,
dbscan_list_id2entry_core, dbscan_list_index_analysis_core, dbscan_list_index_core,
@ -379,17 +379,13 @@ fn check_file_ownership(opt: &KanidmdParser) -> Result<(), ExitCode> {
}
// We have to do this because we can't use tracing until we've started the logging pipeline, and we can't start the logging pipeline until the tokio runtime's doing its thing.
async fn start_daemon(
opt: KanidmdParser,
mut config: Configuration,
sconfig: ServerConfig,
) -> ExitCode {
async fn start_daemon(opt: KanidmdParser, config: Configuration) -> ExitCode {
// if we have a server config and it has an OTEL URL, then we'll start the logging pipeline now.
// TODO: only send to stderr when we're not in a TTY
let sub = match sketching::otel::start_logging_pipeline(
&sconfig.otel_grpc_url,
sconfig.log_level.unwrap_or_default(),
&config.otel_grpc_url,
config.log_level,
"kanidmd",
) {
Err(err) => {
@ -423,8 +419,8 @@ async fn start_daemon(
return err;
};
if let Some(db_path) = sconfig.db_path.as_ref() {
let db_pathbuf = PathBuf::from(db_path.as_str());
if let Some(db_path) = config.db_path.as_ref() {
let db_pathbuf = db_path.to_path_buf();
// We can't check the db_path permissions because it may not exist yet!
if let Some(db_parent_path) = db_pathbuf.parent() {
if !db_parent_path.exists() {
@ -464,33 +460,11 @@ async fn start_daemon(
warn!("WARNING: DB folder {} has 'everyone' permission bits in the mode. This could be a security risk ...", db_par_path_buf.to_str().unwrap_or("invalid file path"));
}
}
config.update_db_path(db_path);
} else {
error!("No db_path set in configuration, server startup will FAIL!");
return ExitCode::FAILURE;
}
if let Some(origin) = sconfig.origin.clone() {
config.update_origin(&origin);
} else {
error!("No origin set in configuration, server startup will FAIL!");
return ExitCode::FAILURE;
}
if let Some(domain) = sconfig.domain.clone() {
config.update_domain(&domain);
} else {
error!("No domain set in configuration, server startup will FAIL!");
return ExitCode::FAILURE;
}
config.update_db_arc_size(sconfig.get_db_arc_size());
config.update_role(sconfig.role);
config.update_output_mode(opt.commands.commonopt().output_mode.to_owned().into());
config.update_trust_x_forward_for(sconfig.trust_x_forward_for);
config.update_admin_bind_path(&sconfig.adminbindpath);
config.update_replication_config(sconfig.repl_config.clone());
match &opt.commands {
// we aren't going to touch the DB so we can carry on
KanidmdOpt::ShowReplicationCertificate { .. }
@ -501,19 +475,15 @@ async fn start_daemon(
_ => {
// Okay - Lets now create our lock and go.
#[allow(clippy::expect_used)]
let klock_path = match sconfig.db_path.clone() {
Some(val) => format!("{}.klock", val),
None => std::env::temp_dir()
.join("kanidmd.klock")
.to_str()
.expect("Unable to create klock path, this is a critical error!")
.to_string(),
let klock_path = match config.db_path.clone() {
Some(val) => val.with_extension("klock"),
None => std::env::temp_dir().join("kanidmd.klock"),
};
let flock = match File::create(&klock_path) {
Ok(flock) => flock,
Err(e) => {
error!("ERROR: Refusing to start - unable to create kanidmd exclusive lock at {} - {:?}", klock_path, e);
error!("ERROR: Refusing to start - unable to create kanidmd exclusive lock at {} - {:?}", klock_path.display(), e);
return ExitCode::FAILURE;
}
};
@ -521,7 +491,7 @@ async fn start_daemon(
match flock.try_lock_exclusive() {
Ok(()) => debug!("Acquired kanidm exclusive lock"),
Err(e) => {
error!("ERROR: Refusing to start - unable to lock kanidmd exclusive lock at {} - {:?}", klock_path, e);
error!("ERROR: Refusing to start - unable to lock kanidmd exclusive lock at {} - {:?}", klock_path.display(), e);
error!("Is another kanidmd process running?");
return ExitCode::FAILURE;
}
@ -529,7 +499,7 @@ async fn start_daemon(
}
}
kanidm_main(sconfig, config, opt).await
kanidm_main(config, opt).await
}
fn main() -> ExitCode {
@ -556,10 +526,6 @@ fn main() -> ExitCode {
return ExitCode::SUCCESS;
};
//we set up a list of these so we can set the log config THEN log out the errors.
let mut config_error: Vec<String> = Vec::new();
let mut config = Configuration::new();
if env!("KANIDM_SERVER_CONFIG_PATH").is_empty() {
println!("CRITICAL: Kanidmd was not built correctly and is missing a valid KANIDM_SERVER_CONFIG_PATH value");
return ExitCode::FAILURE;
@ -581,49 +547,56 @@ fn main() -> ExitCode {
}
};
let sconfig = match ServerConfig::new(maybe_config_path) {
Ok(c) => Some(c),
Err(e) => {
config_error.push(format!("Config Parse failure {:?}", e));
let maybe_sconfig = if let Some(config_path) = maybe_config_path {
match ServerConfigUntagged::new(config_path) {
Ok(c) => Some(c),
Err(err) => {
eprintln!("ERROR: Configuration Parse Failure: {:?}", err);
return ExitCode::FAILURE;
}
}
} else {
eprintln!("WARNING: No configuration path was provided, relying on environment variables.");
None
};
let envconfig = match EnvironmentConfig::new() {
Ok(ec) => ec,
Err(err) => {
eprintln!("ERROR: Environment Configuration Parse Failure: {:?}", err);
return ExitCode::FAILURE;
}
};
// Get information on the windows username
#[cfg(target_family = "windows")]
get_user_details_windows();
let cli_config = CliConfig {
output_mode: Some(opt.commands.commonopt().output_mode.to_owned().into()),
};
if !config_error.is_empty() {
println!("There were errors on startup, which prevent the server from starting:");
for e in config_error {
println!(" - {}", e);
}
let is_server = matches!(&opt.commands, KanidmdOpt::Server(_));
let config = Configuration::build()
.add_env_config(envconfig)
.add_opt_toml_config(maybe_sconfig)
// We always set threads to 1 unless it's the main server.
.add_cli_config(cli_config)
.is_server_mode(is_server)
.finish();
let Some(config) = config else {
eprintln!(
"ERROR: Unable to build server configuration from provided configuration inputs."
);
return ExitCode::FAILURE;
}
let sconfig = match sconfig {
Some(val) => val,
None => {
println!("Somehow you got an empty ServerConfig after error checking? Cannot start!");
return ExitCode::FAILURE;
}
};
// ===========================================================================
// Config ready
// We always set threads to 1 unless it's the main server.
if matches!(&opt.commands, KanidmdOpt::Server(_)) {
// If not updated, will default to maximum
if let Some(threads) = sconfig.thread_count {
config.update_threads_count(threads);
}
} else {
config.update_threads_count(1);
};
// Get information on the windows username
#[cfg(target_family = "windows")]
get_user_details_windows();
// Start the runtime
let maybe_rt = tokio::runtime::Builder::new_multi_thread()
.worker_threads(config.threads)
.enable_all()
@ -643,16 +616,12 @@ fn main() -> ExitCode {
}
};
rt.block_on(start_daemon(opt, config, sconfig))
rt.block_on(start_daemon(opt, config))
}
/// Build and execute the main server. The ServerConfig are the configuration options
/// that we are processing into the config for the main server.
async fn kanidm_main(
sconfig: ServerConfig,
mut config: Configuration,
opt: KanidmdParser,
) -> ExitCode {
async fn kanidm_main(config: Configuration, opt: KanidmdParser) -> ExitCode {
match &opt.commands {
KanidmdOpt::Server(_sopt) | KanidmdOpt::ConfigTest(_sopt) => {
let config_test = matches!(&opt.commands, KanidmdOpt::ConfigTest(_));
@ -662,88 +631,90 @@ async fn kanidm_main(
info!("Running in server mode ...");
};
// configuration options that only relate to server mode
config.update_config_for_server_mode(&sconfig);
if let Some(i_str) = &(sconfig.tls_chain) {
let i_path = PathBuf::from(i_str.as_str());
let i_meta = match metadata(&i_path) {
Ok(m) => m,
Err(e) => {
error!(
"Unable to read metadata for TLS chain file '{}' - {:?}",
&i_path.to_str().unwrap_or("invalid file path"),
e
);
let diag = kanidm_lib_file_permissions::diagnose_path(&i_path);
info!(%diag);
return ExitCode::FAILURE;
// Verify the TLS configs.
if let Some(tls_config) = config.tls_config.as_ref() {
{
let i_meta = match metadata(&tls_config.chain) {
Ok(m) => m,
Err(e) => {
error!(
"Unable to read metadata for TLS chain file '{}' - {:?}",
tls_config.chain.display(),
e
);
let diag =
kanidm_lib_file_permissions::diagnose_path(&tls_config.chain);
info!(%diag);
return ExitCode::FAILURE;
}
};
if !kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("permissions on {} may not be secure. Should be readonly to running uid. This could be a security risk ...", tls_config.chain.display());
}
};
if !kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("permissions on {} may not be secure. Should be readonly to running uid. This could be a security risk ...", i_str);
}
}
if let Some(i_str) = &(sconfig.tls_key) {
let i_path = PathBuf::from(i_str.as_str());
let i_meta = match metadata(&i_path) {
Ok(m) => m,
Err(e) => {
error!(
"Unable to read metadata for TLS key file '{}' - {:?}",
&i_path.to_str().unwrap_or("invalid file path"),
e
);
let diag = kanidm_lib_file_permissions::diagnose_path(&i_path);
info!(%diag);
return ExitCode::FAILURE;
{
let i_meta = match metadata(&tls_config.key) {
Ok(m) => m,
Err(e) => {
error!(
"Unable to read metadata for TLS key file '{}' - {:?}",
tls_config.key.display(),
e
);
let diag = kanidm_lib_file_permissions::diagnose_path(&tls_config.key);
info!(%diag);
return ExitCode::FAILURE;
}
};
if !kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("permissions on {} may not be secure. Should be readonly to running uid. This could be a security risk ...", tls_config.key.display());
}
#[cfg(not(target_os = "windows"))]
if i_meta.mode() & 0o007 != 0 {
warn!("WARNING: {} has 'everyone' permission bits in the mode. This could be a security risk ...", tls_config.key.display());
}
};
if !kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("permissions on {} may not be secure. Should be readonly to running uid. This could be a security risk ...", i_str);
}
#[cfg(not(target_os = "windows"))]
if i_meta.mode() & 0o007 != 0 {
warn!("WARNING: {} has 'everyone' permission bits in the mode. This could be a security risk ...", i_str);
}
}
if let Some(ca_dir) = &(sconfig.tls_client_ca) {
// check that the TLS client CA config option is what we expect
let ca_dir_path = PathBuf::from(&ca_dir);
if !ca_dir_path.exists() {
error!(
"TLS CA folder {} does not exist, server startup will FAIL!",
ca_dir
);
let diag = kanidm_lib_file_permissions::diagnose_path(&ca_dir_path);
info!(%diag);
}
let i_meta = match metadata(&ca_dir_path) {
Ok(m) => m,
Err(e) => {
error!("Unable to read metadata for '{}' - {:?}", ca_dir, e);
if let Some(ca_dir) = tls_config.client_ca.as_ref() {
// check that the TLS client CA config option is what we expect
let ca_dir_path = PathBuf::from(&ca_dir);
if !ca_dir_path.exists() {
error!(
"TLS CA folder {} does not exist, server startup will FAIL!",
ca_dir.display()
);
let diag = kanidm_lib_file_permissions::diagnose_path(&ca_dir_path);
info!(%diag);
}
let i_meta = match metadata(&ca_dir_path) {
Ok(m) => m,
Err(e) => {
error!(
"Unable to read metadata for '{}' - {:?}",
ca_dir.display(),
e
);
let diag = kanidm_lib_file_permissions::diagnose_path(&ca_dir_path);
info!(%diag);
return ExitCode::FAILURE;
}
};
if !i_meta.is_dir() {
error!(
"ERROR: Refusing to run - TLS Client CA folder {} may not be a directory",
ca_dir.display()
);
return ExitCode::FAILURE;
}
};
if !i_meta.is_dir() {
error!(
"ERROR: Refusing to run - TLS Client CA folder {} may not be a directory",
ca_dir
);
return ExitCode::FAILURE;
}
if kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("WARNING: TLS Client CA folder permissions on {} indicate it may not be RW. This could cause the server start up to fail!", ca_dir);
}
#[cfg(not(target_os = "windows"))]
if i_meta.mode() & 0o007 != 0 {
warn!("WARNING: TLS Client CA folder {} has 'everyone' permission bits in the mode. This could be a security risk ...", ca_dir);
if kanidm_lib_file_permissions::readonly(&i_meta) {
warn!("WARNING: TLS Client CA folder permissions on {} indicate it may not be RW. This could cause the server start up to fail!", ca_dir.display());
}
#[cfg(not(target_os = "windows"))]
if i_meta.mode() & 0o007 != 0 {
warn!("WARNING: TLS Client CA folder {} has 'everyone' permission bits in the mode. This could be a security risk ...", ca_dir.display());
}
}
}
@ -753,14 +724,6 @@ async fn kanidm_main(
#[cfg(target_os = "linux")]
{
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Ready]);
// Undocumented systemd feature - all messages should have a monotonic usec sent
// with them. In some cases like "reloading" messages, it is undocumented but
// failure to send this message causes the reload to fail.
if let Ok(monotonic_usec) = sd_notify::NotifyState::monotonic_usec_now() {
let _ = sd_notify::notify(true, &[monotonic_usec]);
} else {
error!("CRITICAL!!! Unable to access clock monotonic time. SYSTEMD WILL KILL US.");
};
let _ = sd_notify::notify(
true,
&[sd_notify::NotifyState::Status("Started Kanidm 🦀")],
@ -774,86 +737,80 @@ async fn kanidm_main(
{
let mut listener = sctx.subscribe();
tokio::select! {
Ok(()) = tokio::signal::ctrl_c() => {
break
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::terminate();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
break
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::alarm();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::hangup();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Reload TLS certificates
// systemd has a special reload handler for this.
#[cfg(target_os = "linux")]
{
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Reloading]);
// CRITICAL - if you do not send a monotonic usec message after a reloading
// message, your service WILL BE KILLED.
if let Ok(monotonic_usec) = sd_notify::NotifyState::monotonic_usec_now() {
let _ =
sd_notify::notify(true, &[monotonic_usec]);
} else {
error!("CRITICAL!!! Unable to access clock monotonic time. SYSTEMD WILL KILL US.");
};
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Status("Reloading ...")]);
}
Ok(()) = tokio::signal::ctrl_c() => {
break
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::terminate();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
break
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::alarm();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::hangup();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Reload TLS certificates
// systemd has a special reload handler for this.
#[cfg(target_os = "linux")]
{
if let Ok(monotonic_usec) = sd_notify::NotifyState::monotonic_usec_now() {
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Reloading, monotonic_usec]);
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Status("Reloading ...")]);
} else {
error!("CRITICAL!!! Unable to access clock monotonic time. SYSTEMD WILL KILL US.");
};
}
sctx.tls_acceptor_reload().await;
sctx.tls_acceptor_reload().await;
// Systemd freaks out if you send the ready state too fast after the
// reload state and can kill Kanidmd as a result.
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
// Systemd freaks out if you send the ready state too fast after the
// reload state and can kill Kanidmd as a result.
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
#[cfg(target_os = "linux")]
{
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Ready]);
if let Ok(monotonic_usec) = sd_notify::NotifyState::monotonic_usec_now() {
let _ =
sd_notify::notify(true, &[monotonic_usec]);
} else {
error!("CRITICAL!!! Unable to access clock monotonic time. SYSTEMD WILL KILL US.");
};
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Status("Reload Success")]);
}
#[cfg(target_os = "linux")]
{
if let Ok(monotonic_usec) = sd_notify::NotifyState::monotonic_usec_now() {
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Ready, monotonic_usec]);
let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Status("Reload Success")]);
} else {
error!("CRITICAL!!! Unable to access clock monotonic time. SYSTEMD WILL KILL US.");
};
}
info!("Reload complete");
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::user_defined1();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::user_defined2();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
// we got a message on the broadcast from somewhere else
Ok(msg) = async move {
listener.recv().await
} => {
debug!("Main loop received message: {:?}", msg);
break
}
}
info!("Reload complete");
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::user_defined1();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
Some(()) = async move {
let sigterm = tokio::signal::unix::SignalKind::user_defined2();
#[allow(clippy::unwrap_used)]
tokio::signal::unix::signal(sigterm).unwrap().recv().await
} => {
// Ignore
}
// we got a message on the broadcast from somewhere else
Ok(msg) = async move {
listener.recv().await
} => {
debug!("Main loop received message: {:?}", msg);
break
}
}
}
#[cfg(target_family = "windows")]
{
@ -880,34 +837,19 @@ async fn kanidm_main(
}
KanidmdOpt::CertGenerate(_sopt) => {
info!("Running in certificate generate mode ...");
config.update_config_for_server_mode(&sconfig);
cert_generate_core(&config);
}
KanidmdOpt::Database {
commands: DbCommands::Backup(bopt),
} => {
info!("Running in backup mode ...");
let p = match bopt.path.to_str() {
Some(p) => p,
None => {
error!("Invalid backup path");
return ExitCode::FAILURE;
}
};
backup_server_core(&config, p);
backup_server_core(&config, &bopt.path);
}
KanidmdOpt::Database {
commands: DbCommands::Restore(ropt),
} => {
info!("Running in restore mode ...");
let p = match ropt.path.to_str() {
Some(p) => p,
None => {
error!("Invalid restore path");
return ExitCode::FAILURE;
}
};
restore_server_core(&config, p).await;
restore_server_core(&config, &ropt.path).await;
}
KanidmdOpt::Database {
commands: DbCommands::Verify(_vopt),
@ -1088,8 +1030,6 @@ async fn kanidm_main(
vacuum_server_core(&config);
}
KanidmdOpt::HealthCheck(sopt) => {
config.update_config_for_server_mode(&sconfig);
debug!("{sopt:?}");
let healthcheck_url = match &sopt.check_origin {
@ -1110,12 +1050,15 @@ async fn kanidm_main(
.danger_accept_invalid_hostnames(!sopt.verify_tls)
.https_only(true);
client = match &sconfig.tls_chain {
client = match &config.tls_config {
None => client,
Some(ca_cert) => {
debug!("Trying to load {} to build a CA cert path", ca_cert);
Some(tls_config) => {
debug!(
"Trying to load {} to build a CA cert path",
tls_config.chain.display()
);
// if the ca_cert file exists, then we'll use it
let ca_cert_path = PathBuf::from(ca_cert);
let ca_cert_path = tls_config.chain.clone();
match ca_cert_path.exists() {
true => {
let mut cert_buf = Vec::new();
@ -1148,7 +1091,10 @@ async fn kanidm_main(
client
}
false => {
warn!("Couldn't find ca cert {} but carrying on...", ca_cert);
warn!(
"Couldn't find ca cert {} but carrying on...",
tls_config.chain.display()
);
client
}
}

View file

@ -1,27 +1,21 @@
use std::collections::{BTreeMap, BTreeSet, VecDeque};
use std::convert::{TryFrom, TryInto};
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use super::keystorage::{KeyHandle, KeyHandleId};
// use crate::valueset;
use hashbrown::HashMap;
use idlset::v2::IDLBitRange;
use kanidm_proto::internal::{ConsistencyError, OperationError};
use rusqlite::vtab::array::Array;
use rusqlite::{Connection, OpenFlags, OptionalExtension};
use uuid::Uuid;
use crate::be::dbentry::DbIdentSpn;
use crate::be::dbvalue::DbCidV1;
use crate::be::{BackendConfig, IdList, IdRawEntry, IdxKey, IdxSlope};
use crate::entry::{Entry, EntryCommitted, EntrySealed};
use crate::prelude::*;
use crate::value::{IndexType, Value};
// use uuid::Uuid;
use hashbrown::HashMap;
use idlset::v2::IDLBitRange;
use kanidm_proto::internal::{ConsistencyError, OperationError};
use rusqlite::vtab::array::Array;
use rusqlite::{Connection, OpenFlags, OptionalExtension};
use std::collections::{BTreeMap, BTreeSet, VecDeque};
use std::convert::{TryFrom, TryInto};
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use uuid::Uuid;
const DBV_ID2ENTRY: &str = "id2entry";
const DBV_INDEXV: &str = "indexv";
@ -1712,7 +1706,7 @@ impl IdlSqliteWriteTransaction {
impl IdlSqlite {
pub fn new(cfg: &BackendConfig, vacuum: bool) -> Result<Self, OperationError> {
if cfg.path.is_empty() {
if cfg.path.as_os_str().is_empty() {
debug_assert_eq!(cfg.pool_size, 1);
}
// If provided, set the page size to match the tuning we want. By default we use 4096. The VACUUM
@ -1734,8 +1728,7 @@ impl IdlSqlite {
// Initial setup routines.
{
let vconn =
Connection::open_with_flags(cfg.path.as_str(), flags).map_err(sqlite_error)?;
let vconn = Connection::open_with_flags(&cfg.path, flags).map_err(sqlite_error)?;
vconn
.execute_batch(
@ -1764,8 +1757,7 @@ impl IdlSqlite {
);
*/
let vconn =
Connection::open_with_flags(cfg.path.as_str(), flags).map_err(sqlite_error)?;
let vconn = Connection::open_with_flags(&cfg.path, flags).map_err(sqlite_error)?;
vconn
.execute_batch("PRAGMA wal_checkpoint(TRUNCATE);")
@ -1786,8 +1778,7 @@ impl IdlSqlite {
OperationError::SqliteError
})?;
let vconn =
Connection::open_with_flags(cfg.path.as_str(), flags).map_err(sqlite_error)?;
let vconn = Connection::open_with_flags(&cfg.path, flags).map_err(sqlite_error)?;
vconn
.pragma_update(None, "page_size", cfg.fstype as u32)
@ -1821,7 +1812,7 @@ impl IdlSqlite {
.map(|i| {
trace!("Opening Connection {}", i);
let conn =
Connection::open_with_flags(cfg.path.as_str(), flags).map_err(sqlite_error);
Connection::open_with_flags(&cfg.path, flags).map_err(sqlite_error);
match conn {
Ok(conn) => {
// We need to set the cachesize at this point as well.

View file

@ -4,20 +4,6 @@
//! is to persist content safely to disk, load that content, and execute queries
//! utilising indexes in the most effective way possible.
use std::collections::BTreeMap;
use std::fs;
use std::ops::DerefMut;
use std::sync::Arc;
use std::time::Duration;
use concread::cowcell::*;
use hashbrown::{HashMap as Map, HashSet};
use idlset::v2::IDLBitRange;
use idlset::AndNot;
use kanidm_proto::internal::{ConsistencyError, OperationError};
use tracing::{trace, trace_span};
use uuid::Uuid;
use crate::be::dbentry::{DbBackup, DbEntry};
use crate::be::dbrepl::DbReplMeta;
use crate::entry::Entry;
@ -31,6 +17,19 @@ use crate::repl::ruv::{
};
use crate::utils::trigraph_iter;
use crate::value::{IndexType, Value};
use concread::cowcell::*;
use hashbrown::{HashMap as Map, HashSet};
use idlset::v2::IDLBitRange;
use idlset::AndNot;
use kanidm_proto::internal::{ConsistencyError, OperationError};
use std::collections::BTreeMap;
use std::fs;
use std::ops::DerefMut;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
use tracing::{trace, trace_span};
use uuid::Uuid;
pub(crate) mod dbentry;
pub(crate) mod dbrepl;
@ -132,7 +131,7 @@ impl IdxMeta {
#[derive(Clone)]
pub struct BackendConfig {
path: String,
path: PathBuf,
pool_size: u32,
db_name: &'static str,
fstype: FsType,
@ -141,10 +140,16 @@ pub struct BackendConfig {
}
impl BackendConfig {
pub fn new(path: &str, pool_size: u32, fstype: FsType, arcsize: Option<usize>) -> Self {
pub fn new(
path: Option<&Path>,
pool_size: u32,
fstype: FsType,
arcsize: Option<usize>,
) -> Self {
BackendConfig {
pool_size,
path: path.to_string(),
// This means if path is None, that "" implies an sqlite in memory/ram only database.
path: path.unwrap_or_else(|| Path::new("")).to_path_buf(),
db_name: "main",
fstype,
arcsize,
@ -154,7 +159,7 @@ impl BackendConfig {
pub(crate) fn new_test(db_name: &'static str) -> Self {
BackendConfig {
pool_size: 1,
path: "".to_string(),
path: PathBuf::from(""),
db_name,
fstype: FsType::Generic,
arcsize: Some(2048),
@ -936,7 +941,7 @@ pub trait BackendTransaction {
self.get_ruv().verify(&entries, results);
}
fn backup(&mut self, dst_path: &str) -> Result<(), OperationError> {
fn backup(&mut self, dst_path: &Path) -> Result<(), OperationError> {
let repl_meta = self.get_ruv().to_db_backup_ruv();
// load all entries into RAM, may need to change this later
@ -1808,7 +1813,7 @@ impl<'a> BackendWriteTransaction<'a> {
Ok(slope)
}
pub fn restore(&mut self, src_path: &str) -> Result<(), OperationError> {
pub fn restore(&mut self, src_path: &Path) -> Result<(), OperationError> {
let serialized_string = fs::read_to_string(src_path).map_err(|e| {
admin_error!("fs::read_to_string {:?}", e);
OperationError::FsError
@ -2121,7 +2126,7 @@ impl Backend {
debug!(db_tickets = ?cfg.pool_size, profile = %env!("KANIDM_PROFILE_NAME"), cpu_flags = %env!("KANIDM_CPU_FLAGS"));
// If in memory, reduce pool to 1
if cfg.path.is_empty() {
if cfg.path.as_os_str().is_empty() {
cfg.pool_size = 1;
}
@ -2207,13 +2212,6 @@ impl Backend {
#[cfg(test)]
mod tests {
use std::fs;
use std::iter::FromIterator;
use std::sync::Arc;
use std::time::Duration;
use idlset::v2::IDLBitRange;
use super::super::entry::{Entry, EntryInit, EntryNew};
use super::Limits;
use super::{
@ -2223,6 +2221,12 @@ mod tests {
use crate::prelude::*;
use crate::repl::cid::Cid;
use crate::value::{IndexType, PartialValue, Value};
use idlset::v2::IDLBitRange;
use std::fs;
use std::iter::FromIterator;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
lazy_static! {
static ref CID_ZERO: Cid = Cid::new_zero();
@ -2597,11 +2601,9 @@ mod tests {
#[test]
fn test_be_backup_restore() {
let db_backup_file_name = format!(
"{}/.backup_test.json",
option_env!("OUT_DIR").unwrap_or("/tmp")
);
eprintln!(" ⚠️ {db_backup_file_name}");
let db_backup_file_name =
Path::new(option_env!("OUT_DIR").unwrap_or("/tmp")).join(".backup_test.json");
eprintln!(" ⚠️ {}", db_backup_file_name.display());
run_test!(|be: &mut BackendWriteTransaction| {
// Important! Need db metadata setup!
be.reset_db_s_uuid().unwrap();
@ -2656,11 +2658,9 @@ mod tests {
#[test]
fn test_be_backup_restore_tampered() {
let db_backup_file_name = format!(
"{}/.backup2_test.json",
option_env!("OUT_DIR").unwrap_or("/tmp")
);
eprintln!(" ⚠️ {db_backup_file_name}");
let db_backup_file_name =
Path::new(option_env!("OUT_DIR").unwrap_or("/tmp")).join(".backup2_test.json");
eprintln!(" ⚠️ {}", db_backup_file_name.display());
run_test!(|be: &mut BackendWriteTransaction| {
// Important! Need db metadata setup!
be.reset_db_s_uuid().unwrap();

View file

@ -330,6 +330,10 @@ pub const UUID_SCHEMA_ATTR_DOMAIN_ALLOW_EASTER_EGGS: Uuid =
pub const UUID_SCHEMA_ATTR_LDAP_MAXIMUM_QUERYABLE_ATTRIBUTES: Uuid =
uuid!("00000000-0000-0000-0000-ffff00000187");
pub const UUID_SCHEMA_ATTR_INDEXED: Uuid = uuid!("00000000-0000-0000-0000-ffff00000188");
pub const UUID_SCHEMA_ATTR_ACP_MODIFY_PRESENT_CLASS: Uuid =
uuid!("00000000-0000-0000-0000-ffff00000189");
pub const UUID_SCHEMA_ATTR_ACP_MODIFY_REMOVE_CLASS: Uuid =
uuid!("00000000-0000-0000-0000-ffff00000190");
// System and domain infos
// I'd like to strongly criticise william of the past for making poor choices about these allocations.

View file

@ -599,19 +599,19 @@ impl IdmServerProxyWriteTransaction<'_> {
}
let eperm_search_primary_cred = match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PrimaryCredential),
};
let eperm_mod_primary_cred = match &eperm.modify_pres {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PrimaryCredential),
};
let eperm_rem_primary_cred = match &eperm.modify_rem {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PrimaryCredential),
};
@ -620,19 +620,19 @@ impl IdmServerProxyWriteTransaction<'_> {
eperm_search_primary_cred && eperm_mod_primary_cred && eperm_rem_primary_cred;
let eperm_search_passkeys = match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PassKeys),
};
let eperm_mod_passkeys = match &eperm.modify_pres {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PassKeys),
};
let eperm_rem_passkeys = match &eperm.modify_rem {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::PassKeys),
};
@ -640,19 +640,19 @@ impl IdmServerProxyWriteTransaction<'_> {
let passkeys_can_edit = eperm_search_passkeys && eperm_mod_passkeys && eperm_rem_passkeys;
let eperm_search_attested_passkeys = match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::AttestedPasskeys),
};
let eperm_mod_attested_passkeys = match &eperm.modify_pres {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::AttestedPasskeys),
};
let eperm_rem_attested_passkeys = match &eperm.modify_rem {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::AttestedPasskeys),
};
@ -662,19 +662,19 @@ impl IdmServerProxyWriteTransaction<'_> {
&& eperm_rem_attested_passkeys;
let eperm_search_unixcred = match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::UnixPassword),
};
let eperm_mod_unixcred = match &eperm.modify_pres {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::UnixPassword),
};
let eperm_rem_unixcred = match &eperm.modify_rem {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::UnixPassword),
};
@ -685,19 +685,19 @@ impl IdmServerProxyWriteTransaction<'_> {
&& eperm_rem_unixcred;
let eperm_search_sshpubkey = match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::SshPublicKey),
};
let eperm_mod_sshpubkey = match &eperm.modify_pres {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::SshPublicKey),
};
let eperm_rem_sshpubkey = match &eperm.modify_rem {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::SshPublicKey),
};
@ -726,7 +726,7 @@ impl IdmServerProxyWriteTransaction<'_> {
})?;
match &eperm.search {
Access::Denied => false,
Access::Deny => false,
Access::Grant => true,
Access::Allow(attrs) => attrs.contains(&Attribute::SyncCredentialPortal),
}

View file

@ -72,6 +72,8 @@ pub struct BuiltinAcp {
modify_present_attrs: Vec<Attribute>,
modify_removed_attrs: Vec<Attribute>,
modify_classes: Vec<EntryClass>,
modify_present_classes: Vec<EntryClass>,
modify_remove_classes: Vec<EntryClass>,
create_classes: Vec<EntryClass>,
create_attrs: Vec<Attribute>,
}
@ -159,9 +161,19 @@ impl From<BuiltinAcp> for EntryInitNew {
value.modify_removed_attrs.into_iter().for_each(|attr| {
entry.add_ava(Attribute::AcpModifyRemovedAttr, Value::from(attr));
});
value.modify_classes.into_iter().for_each(|class| {
entry.add_ava(Attribute::AcpModifyClass, Value::from(class));
});
value.modify_present_classes.into_iter().for_each(|class| {
entry.add_ava(Attribute::AcpModifyPresentClass, Value::from(class));
});
value.modify_remove_classes.into_iter().for_each(|class| {
entry.add_ava(Attribute::AcpModifyRemoveClass, Value::from(class));
});
value.create_classes.into_iter().for_each(|class| {
entry.add_ava(Attribute::AcpCreateClass, Value::from(class));
});
@ -214,7 +226,7 @@ lazy_static! {
ATTR_RECYCLED.to_string()
)),
modify_removed_attrs: vec![Attribute::Class],
modify_classes: vec![EntryClass::Recycled],
modify_remove_classes: vec![EntryClass::Recycled],
..Default::default()
};
}
@ -425,6 +437,7 @@ lazy_static! {
EntryClass::AccessControlCreate,
EntryClass::AccessControlDelete,
],
..Default::default()
};
}

View file

@ -695,7 +695,6 @@ mod tests {
let e = entry_init!(
(Attribute::Class, EntryClass::Person.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson")),
(Attribute::DisplayName, Value::new_iname("testperson")),
(
@ -726,7 +725,6 @@ mod tests {
let e = entry_init!(
(Attribute::Class, EntryClass::Person.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson")),
(Attribute::DisplayName, Value::new_iname("testperson")),
(

View file

@ -22,7 +22,6 @@ mod jwskeygen;
mod keyobject;
mod memberof;
mod namehistory;
mod protected;
mod refint;
mod session;
mod spn;
@ -44,6 +43,7 @@ trait Plugin {
Err(OperationError::InvalidState)
}
#[allow(dead_code)]
fn pre_create(
_qs: &mut QueryServerWriteTransaction,
// List of what we will commit that is valid?
@ -243,13 +243,13 @@ impl Plugins {
attrunique::AttrUnique::pre_create_transform(qs, cand, ce)
}
#[instrument(level = "debug", name = "plugins::run_pre_create", skip_all)]
#[instrument(level = "trace", name = "plugins::run_pre_create", skip_all)]
pub fn run_pre_create(
qs: &mut QueryServerWriteTransaction,
cand: &[Entry<EntrySealed, EntryNew>],
ce: &CreateEvent,
_qs: &mut QueryServerWriteTransaction,
_cand: &[Entry<EntrySealed, EntryNew>],
_ce: &CreateEvent,
) -> Result<(), OperationError> {
protected::Protected::pre_create(qs, cand, ce)
Ok(())
}
#[instrument(level = "debug", name = "plugins::run_post_create", skip_all)]
@ -269,7 +269,6 @@ impl Plugins {
cand: &mut Vec<Entry<EntryInvalid, EntryCommitted>>,
me: &ModifyEvent,
) -> Result<(), OperationError> {
protected::Protected::pre_modify(qs, pre_cand, cand, me)?;
base::Base::pre_modify(qs, pre_cand, cand, me)?;
valuedeny::ValueDeny::pre_modify(qs, pre_cand, cand, me)?;
cred_import::CredImport::pre_modify(qs, pre_cand, cand, me)?;
@ -305,7 +304,6 @@ impl Plugins {
cand: &mut Vec<Entry<EntryInvalid, EntryCommitted>>,
me: &BatchModifyEvent,
) -> Result<(), OperationError> {
protected::Protected::pre_batch_modify(qs, pre_cand, cand, me)?;
base::Base::pre_batch_modify(qs, pre_cand, cand, me)?;
valuedeny::ValueDeny::pre_batch_modify(qs, pre_cand, cand, me)?;
cred_import::CredImport::pre_batch_modify(qs, pre_cand, cand, me)?;
@ -340,7 +338,6 @@ impl Plugins {
cand: &mut Vec<Entry<EntryInvalid, EntryCommitted>>,
de: &DeleteEvent,
) -> Result<(), OperationError> {
protected::Protected::pre_delete(qs, cand, de)?;
memberof::MemberOf::pre_delete(qs, cand, de)
}

View file

@ -1,690 +0,0 @@
// System protected objects. Items matching specific requirements
// may only have certain modifications performed.
use hashbrown::HashSet;
use std::sync::Arc;
use crate::event::{CreateEvent, DeleteEvent, ModifyEvent};
use crate::modify::Modify;
use crate::plugins::Plugin;
use crate::prelude::*;
pub struct Protected {}
// Here is the declaration of all the attrs that can be altered by
// a call on a system object. We trust they are allowed because
// schema will have checked this, and we don't allow class changes!
lazy_static! {
static ref ALLOWED_ATTRS: HashSet<Attribute> = {
let attrs = vec![
// Allow modification of some schema class types to allow local extension
// of schema types.
Attribute::Must,
Attribute::May,
// modification of some domain info types for local configuratiomn.
Attribute::DomainSsid,
Attribute::DomainLdapBasedn,
Attribute::LdapMaxQueryableAttrs,
Attribute::LdapAllowUnixPwBind,
Attribute::FernetPrivateKeyStr,
Attribute::Es256PrivateKeyDer,
Attribute::KeyActionRevoke,
Attribute::KeyActionRotate,
Attribute::IdVerificationEcKey,
Attribute::BadlistPassword,
Attribute::DeniedName,
Attribute::DomainDisplayName,
Attribute::Image,
// modification of account policy values for dyngroup.
Attribute::AuthSessionExpiry,
Attribute::AuthPasswordMinimumLength,
Attribute::CredentialTypeMinimum,
Attribute::PrivilegeExpiry,
Attribute::WebauthnAttestationCaList,
Attribute::LimitSearchMaxResults,
Attribute::LimitSearchMaxFilterTest,
Attribute::AllowPrimaryCredFallback,
];
let mut m = HashSet::with_capacity(attrs.len());
m.extend(attrs);
m
};
static ref PROTECTED_ENTRYCLASSES: Vec<EntryClass> =
vec![
EntryClass::System,
EntryClass::DomainInfo,
EntryClass::SystemInfo,
EntryClass::SystemConfig,
EntryClass::DynGroup,
EntryClass::SyncObject,
EntryClass::Tombstone,
EntryClass::Recycled,
];
}
impl Plugin for Protected {
fn id() -> &'static str {
"plugin_protected"
}
#[instrument(level = "debug", name = "protected_pre_create", skip_all)]
fn pre_create(
_qs: &mut QueryServerWriteTransaction,
// List of what we will commit that is valid?
cand: &[Entry<EntrySealed, EntryNew>],
ce: &CreateEvent,
) -> Result<(), OperationError> {
if ce.ident.is_internal() {
trace!("Internal operation, not enforcing system object protection");
return Ok(());
}
cand.iter().try_fold((), |(), cand| {
if PROTECTED_ENTRYCLASSES
.iter()
.any(|c| cand.attribute_equality(Attribute::Class, &c.to_partialvalue()))
{
trace!("Rejecting operation during pre_create check");
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})
}
#[instrument(level = "debug", name = "protected_pre_modify", skip_all)]
fn pre_modify(
_qs: &mut QueryServerWriteTransaction,
_pre_cand: &[Arc<EntrySealedCommitted>],
cand: &mut Vec<EntryInvalidCommitted>,
me: &ModifyEvent,
) -> Result<(), OperationError> {
if me.ident.is_internal() {
trace!("Internal operation, not enforcing system object protection");
return Ok(());
}
// Prevent adding class: system, domain_info, tombstone, or recycled.
me.modlist.iter().try_fold((), |(), m| match m {
Modify::Present(a, v) => {
if a == Attribute::Class.as_ref()
&& PROTECTED_ENTRYCLASSES.iter().any(|c| v == &c.to_value())
{
trace!("Rejecting operation during pre_modify check");
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
}
_ => Ok(()),
})?;
// HARD block mods on tombstone or recycle. We soft block on the rest as they may
// have some allowed attrs.
cand.iter().try_fold((), |(), cand| {
if cand.attribute_equality(Attribute::Class, &EntryClass::Tombstone.into())
|| cand.attribute_equality(Attribute::Class, &EntryClass::Recycled.into())
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})?;
// if class: system, check the mods are "allowed"
let system_pres = cand.iter().any(|c| {
// We don't need to check for domain info here because domain_info has a class
// system also. We just need to block it from being created.
c.attribute_equality(Attribute::Class, &EntryClass::System.into())
});
trace!("class: system -> {}", system_pres);
// No system types being altered, return.
if !system_pres {
return Ok(());
}
// Something altered is system, check if it's allowed.
me.modlist.into_iter().try_fold((), |(), m| {
// Already hit an error, move on.
let a = match m {
Modify::Present(a, _)
| Modify::Removed(a, _)
| Modify::Set(a, _)
| Modify::Purged(a) => Some(a),
Modify::Assert(_, _) => None,
};
if let Some(attr) = a {
match ALLOWED_ATTRS.contains(attr) {
true => Ok(()),
false => {
trace!("If you're getting this, you need to modify the ALLOWED_ATTRS list");
Err(OperationError::SystemProtectedObject)
}
}
} else {
// Was not a mod needing checking
Ok(())
}
})
}
#[instrument(level = "debug", name = "protected_pre_batch_modify", skip_all)]
fn pre_batch_modify(
_qs: &mut QueryServerWriteTransaction,
_pre_cand: &[Arc<EntrySealedCommitted>],
cand: &mut Vec<EntryInvalidCommitted>,
me: &BatchModifyEvent,
) -> Result<(), OperationError> {
if me.ident.is_internal() {
trace!("Internal operation, not enforcing system object protection");
return Ok(());
}
me.modset
.values()
.flat_map(|ml| ml.iter())
.try_fold((), |(), m| match m {
Modify::Present(a, v) => {
if a == Attribute::Class.as_ref()
&& PROTECTED_ENTRYCLASSES.iter().any(|c| v == &c.to_value())
{
trace!("Rejecting operation during pre_batch_modify check");
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
}
_ => Ok(()),
})?;
// HARD block mods on tombstone or recycle. We soft block on the rest as they may
// have some allowed attrs.
cand.iter().try_fold((), |(), cand| {
if cand.attribute_equality(Attribute::Class, &EntryClass::Tombstone.into())
|| cand.attribute_equality(Attribute::Class, &EntryClass::Recycled.into())
{
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})?;
// if class: system, check the mods are "allowed"
let system_pres = cand.iter().any(|c| {
// We don't need to check for domain info here because domain_info has a class
// system also. We just need to block it from being created.
c.attribute_equality(Attribute::Class, &EntryClass::System.into())
});
trace!("{}: system -> {}", Attribute::Class, system_pres);
// No system types being altered, return.
if !system_pres {
return Ok(());
}
// Something altered is system, check if it's allowed.
me.modset
.values()
.flat_map(|ml| ml.iter())
.try_fold((), |(), m| {
// Already hit an error, move on.
let a = match m {
Modify::Present(a, _) | Modify::Removed(a, _) | Modify::Set(a, _) | Modify::Purged(a) => Some(a),
Modify::Assert(_, _) => None,
};
if let Some(attr) = a {
match ALLOWED_ATTRS.contains(attr) {
true => Ok(()),
false => {
trace!("Rejecting operation during pre_batch_modify check, if you're getting this check ALLOWED_ATTRS");
Err(OperationError::SystemProtectedObject)
},
}
} else {
// Was not a mod needing checking
Ok(())
}
})
}
#[instrument(level = "debug", name = "protected_pre_delete", skip_all)]
fn pre_delete(
_qs: &mut QueryServerWriteTransaction,
// Should these be EntrySealed
cand: &mut Vec<Entry<EntryInvalid, EntryCommitted>>,
de: &DeleteEvent,
) -> Result<(), OperationError> {
if de.ident.is_internal() {
trace!("Internal operation, not enforcing system object protection");
return Ok(());
}
cand.iter().try_fold((), |(), cand| {
if PROTECTED_ENTRYCLASSES
.iter()
.any(|c| cand.attribute_equality(Attribute::Class, &c.to_partialvalue()))
{
trace!("Rejecting operation during pre_delete check");
Err(OperationError::SystemProtectedObject)
} else {
Ok(())
}
})
}
}
#[cfg(test)]
mod tests {
use crate::prelude::*;
use std::sync::Arc;
const UUID_TEST_ACCOUNT: Uuid = uuid::uuid!("cc8e95b4-c24f-4d68-ba54-8bed76f63930");
const UUID_TEST_GROUP: Uuid = uuid::uuid!("81ec1640-3637-4a2f-8a52-874fa3c3c92f");
const UUID_TEST_ACP: Uuid = uuid::uuid!("acae81d6-5ea7-4bd8-8f7f-fcec4c0dd647");
lazy_static! {
pub static ref TEST_ACCOUNT: EntryInitNew = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::ServiceAccount.to_value()),
(Attribute::Class, EntryClass::MemberOf.to_value()),
(Attribute::Name, Value::new_iname("test_account_1")),
(Attribute::DisplayName, Value::new_utf8s("test_account_1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT)),
(Attribute::MemberOf, Value::Refer(UUID_TEST_GROUP))
);
pub static ref TEST_GROUP: EntryInitNew = entry_init!(
(Attribute::Class, EntryClass::Group.to_value()),
(Attribute::Name, Value::new_iname("test_group_a")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_GROUP)),
(Attribute::Member, Value::Refer(UUID_TEST_ACCOUNT))
);
pub static ref ALLOW_ALL: EntryInitNew = entry_init!(
(Attribute::Class, EntryClass::Object.to_value()),
(
Attribute::Class,
EntryClass::AccessControlProfile.to_value()
),
(
Attribute::Class,
EntryClass::AccessControlTargetScope.to_value()
),
(
Attribute::Class,
EntryClass::AccessControlReceiverGroup.to_value()
),
(Attribute::Class, EntryClass::AccessControlModify.to_value()),
(Attribute::Class, EntryClass::AccessControlCreate.to_value()),
(Attribute::Class, EntryClass::AccessControlDelete.to_value()),
(Attribute::Class, EntryClass::AccessControlSearch.to_value()),
(
Attribute::Name,
Value::new_iname("idm_admins_acp_allow_all_test")
),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACP)),
(Attribute::AcpReceiverGroup, Value::Refer(UUID_TEST_GROUP)),
(
Attribute::AcpTargetScope,
Value::new_json_filter_s("{\"pres\":\"class\"}").expect("filter")
),
(Attribute::AcpSearchAttr, Value::from(Attribute::Name)),
(Attribute::AcpSearchAttr, Value::from(Attribute::Class)),
(Attribute::AcpSearchAttr, Value::from(Attribute::Uuid)),
(Attribute::AcpSearchAttr, Value::new_iutf8("classname")),
(
Attribute::AcpSearchAttr,
Value::new_iutf8(Attribute::AttributeName.as_ref())
),
(Attribute::AcpModifyClass, EntryClass::System.to_value()),
(Attribute::AcpModifyClass, Value::new_iutf8("domain_info")),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::Class)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::DisplayName)
),
(Attribute::AcpModifyRemovedAttr, Value::from(Attribute::May)),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::Must)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::DomainName)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::DomainDisplayName)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::DomainUuid)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::DomainSsid)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::FernetPrivateKeyStr)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::Es256PrivateKeyDer)
),
(
Attribute::AcpModifyRemovedAttr,
Value::from(Attribute::PrivateCookieKey)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::Class)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::DisplayName)
),
(Attribute::AcpModifyPresentAttr, Value::from(Attribute::May)),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::Must)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::DomainName)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::DomainDisplayName)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::DomainUuid)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::DomainSsid)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::FernetPrivateKeyStr)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::Es256PrivateKeyDer)
),
(
Attribute::AcpModifyPresentAttr,
Value::from(Attribute::PrivateCookieKey)
),
(Attribute::AcpCreateClass, EntryClass::Object.to_value()),
(Attribute::AcpCreateClass, EntryClass::Account.to_value()),
(Attribute::AcpCreateClass, EntryClass::Person.to_value()),
(Attribute::AcpCreateClass, EntryClass::System.to_value()),
(Attribute::AcpCreateClass, EntryClass::DomainInfo.to_value()),
(Attribute::AcpCreateAttr, Value::from(Attribute::Name)),
(Attribute::AcpCreateAttr, EntryClass::Class.to_value(),),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::Description),
),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::DisplayName),
),
(Attribute::AcpCreateAttr, Value::from(Attribute::DomainName),),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::DomainDisplayName)
),
(Attribute::AcpCreateAttr, Value::from(Attribute::DomainUuid)),
(Attribute::AcpCreateAttr, Value::from(Attribute::DomainSsid)),
(Attribute::AcpCreateAttr, Value::from(Attribute::Uuid)),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::FernetPrivateKeyStr)
),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::Es256PrivateKeyDer)
),
(
Attribute::AcpCreateAttr,
Value::from(Attribute::PrivateCookieKey)
),
(Attribute::AcpCreateAttr, Value::from(Attribute::Version))
);
pub static ref PRELOAD: Vec<EntryInitNew> =
vec![TEST_ACCOUNT.clone(), TEST_GROUP.clone(), ALLOW_ALL.clone()];
pub static ref E_TEST_ACCOUNT: Arc<EntrySealedCommitted> =
Arc::new(TEST_ACCOUNT.clone().into_sealed_committed());
}
#[test]
fn test_pre_create_deny() {
// Test creating with class: system is rejected.
let e = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::Person.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson")),
(
Attribute::DisplayName,
Value::Utf8("testperson".to_string())
)
);
let create = vec![e];
let preload = PRELOAD.clone();
run_create_test!(
Err(OperationError::SystemProtectedObject),
preload,
create,
Some(E_TEST_ACCOUNT.clone()),
|_| {}
);
}
#[test]
fn test_pre_modify_system_deny() {
// Test modify of class to a system is denied
let e = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::Person.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson")),
(
Attribute::DisplayName,
Value::Utf8("testperson".to_string())
)
);
let mut preload = PRELOAD.clone();
preload.push(e);
run_modify_test!(
Err(OperationError::SystemProtectedObject),
preload,
filter!(f_eq(Attribute::Name, PartialValue::new_iname("testperson"))),
modlist!([
m_purge(Attribute::DisplayName),
m_pres(Attribute::DisplayName, &Value::new_utf8s("system test")),
]),
Some(E_TEST_ACCOUNT.clone()),
|_| {},
|_| {}
);
}
#[test]
fn test_pre_modify_class_add_deny() {
// Show that adding a system class is denied
// TODO: replace this with a `SchemaClass` object
let e = entry_init!(
(Attribute::Class, EntryClass::Object.to_value()),
(Attribute::Class, EntryClass::ClassType.to_value()),
(Attribute::ClassName, Value::new_iutf8("testclass")),
(
Attribute::Uuid,
Value::Uuid(uuid::uuid!("66c68b2f-d02c-4243-8013-7946e40fe321"))
),
(
Attribute::Description,
Value::Utf8("class test".to_string())
)
);
let mut preload = PRELOAD.clone();
preload.push(e);
run_modify_test!(
Ok(()),
preload,
filter!(f_eq(
Attribute::ClassName,
PartialValue::new_iutf8("testclass")
)),
modlist!([
m_pres(Attribute::May, &Value::from(Attribute::Name)),
m_pres(Attribute::Must, &Value::from(Attribute::Name)),
]),
Some(E_TEST_ACCOUNT.clone()),
|_| {},
|_| {}
);
}
#[test]
fn test_pre_delete_deny() {
// Test deleting with class: system is rejected.
let e = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::Person.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson")),
(
Attribute::DisplayName,
Value::Utf8("testperson".to_string())
)
);
let mut preload = PRELOAD.clone();
preload.push(e);
run_delete_test!(
Err(OperationError::SystemProtectedObject),
preload,
filter!(f_eq(Attribute::Name, PartialValue::new_iname("testperson"))),
Some(E_TEST_ACCOUNT.clone()),
|_| {}
);
}
#[test]
fn test_modify_domain() {
// Can edit *my* domain_ssid and domain_name
// Show that adding a system class is denied
let e = entry_init!(
(Attribute::Class, EntryClass::DomainInfo.to_value()),
(Attribute::Name, Value::new_iname("domain_example.net.au")),
(Attribute::Uuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(
Attribute::Description,
Value::new_utf8s("Demonstration of a remote domain's info being created for uuid generation in test_modify_domain")
),
(Attribute::DomainUuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(Attribute::DomainName, Value::new_iname("example.net.au")),
(Attribute::DomainDisplayName, Value::Utf8("example.net.au".to_string())),
(Attribute::DomainSsid, Value::Utf8("Example_Wifi".to_string())),
(Attribute::Version, Value::Uint32(1))
);
let mut preload = PRELOAD.clone();
preload.push(e);
run_modify_test!(
Ok(()),
preload,
filter!(f_eq(
Attribute::Name,
PartialValue::new_iname("domain_example.net.au")
)),
modlist!([
m_purge(Attribute::DomainSsid),
m_pres(Attribute::DomainSsid, &Value::new_utf8s("NewExampleWifi")),
]),
Some(E_TEST_ACCOUNT.clone()),
|_| {},
|_| {}
);
}
#[test]
fn test_ext_create_domain() {
// can not add a domain_info type - note the lack of class: system
let e = entry_init!(
(Attribute::Class, EntryClass::DomainInfo.to_value()),
(Attribute::Name, Value::new_iname("domain_example.net.au")),
(Attribute::Uuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(
Attribute::Description,
Value::new_utf8s("Demonstration of a remote domain's info being created for uuid generation in test_modify_domain")
),
(Attribute::DomainUuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(Attribute::DomainName, Value::new_iname("example.net.au")),
(Attribute::DomainDisplayName, Value::Utf8("example.net.au".to_string())),
(Attribute::DomainSsid, Value::Utf8("Example_Wifi".to_string())),
(Attribute::Version, Value::Uint32(1))
);
let create = vec![e];
let preload = PRELOAD.clone();
run_create_test!(
Err(OperationError::SystemProtectedObject),
preload,
create,
Some(E_TEST_ACCOUNT.clone()),
|_| {}
);
}
#[test]
fn test_delete_domain() {
// On the real thing we have a class: system, but to prove the point ...
let e = entry_init!(
(Attribute::Class, EntryClass::DomainInfo.to_value()),
(Attribute::Name, Value::new_iname("domain_example.net.au")),
(Attribute::Uuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(
Attribute::Description,
Value::new_utf8s("Demonstration of a remote domain's info being created for uuid generation in test_modify_domain")
),
(Attribute::DomainUuid, Value::Uuid(uuid::uuid!("96fd1112-28bc-48ae-9dda-5acb4719aaba"))),
(Attribute::DomainName, Value::new_iname("example.net.au")),
(Attribute::DomainDisplayName, Value::Utf8("example.net.au".to_string())),
(Attribute::DomainSsid, Value::Utf8("Example_Wifi".to_string())),
(Attribute::Version, Value::Uint32(1))
);
let mut preload = PRELOAD.clone();
preload.push(e);
run_delete_test!(
Err(OperationError::SystemProtectedObject),
preload,
filter!(f_eq(
Attribute::Name,
PartialValue::new_iname("domain_example.net.au")
)),
Some(E_TEST_ACCOUNT.clone()),
|_| {}
);
}
}

View file

@ -1366,6 +1366,36 @@ impl SchemaWriteTransaction<'_> {
syntax: SyntaxType::Utf8StringInsensitive,
},
);
self.attributes.insert(
Attribute::AcpModifyPresentClass,
SchemaAttribute {
name: Attribute::AcpModifyPresentClass,
uuid: UUID_SCHEMA_ATTR_ACP_MODIFY_PRESENT_CLASS,
description: String::from("The set of class values that could be asserted or added to an entry. Only applies to modify::present operations on class."),
multivalue: true,
unique: false,
phantom: false,
sync_allowed: false,
replicated: Replicated::True,
indexed: false,
syntax: SyntaxType::Utf8StringInsensitive,
},
);
self.attributes.insert(
Attribute::AcpModifyRemoveClass,
SchemaAttribute {
name: Attribute::AcpModifyRemoveClass,
uuid: UUID_SCHEMA_ATTR_ACP_MODIFY_REMOVE_CLASS,
description: String::from("The set of class values that could be asserted or added to an entry. Only applies to modify::remove operations on class."),
multivalue: true,
unique: false,
phantom: false,
sync_allowed: false,
replicated: Replicated::True,
indexed: false,
syntax: SyntaxType::Utf8StringInsensitive,
},
);
self.attributes.insert(
Attribute::EntryManagedBy,
SchemaAttribute {
@ -2069,6 +2099,8 @@ impl SchemaWriteTransaction<'_> {
Attribute::AcpModifyRemovedAttr,
Attribute::AcpModifyPresentAttr,
Attribute::AcpModifyClass,
Attribute::AcpModifyPresentClass,
Attribute::AcpModifyRemoveClass,
],
..Default::default()
},

View file

@ -1,16 +1,17 @@
use super::profiles::{
AccessControlCreateResolved, AccessControlReceiverCondition, AccessControlTargetCondition,
};
use super::protected::PROTECTED_ENTRY_CLASSES;
use crate::prelude::*;
use std::collections::BTreeSet;
pub(super) enum CreateResult {
Denied,
Deny,
Grant,
}
enum IResult {
Denied,
Deny,
Grant,
Ignore,
}
@ -25,25 +26,25 @@ pub(super) fn apply_create_access<'a>(
// This module can never yield a grant.
match protected_filter_entry(ident, entry) {
IResult::Denied => denied = true,
IResult::Deny => denied = true,
IResult::Grant | IResult::Ignore => {}
}
match create_filter_entry(ident, related_acp, entry) {
IResult::Denied => denied = true,
IResult::Deny => denied = true,
IResult::Grant => grant = true,
IResult::Ignore => {}
}
if denied {
// Something explicitly said no.
CreateResult::Denied
CreateResult::Deny
} else if grant {
// Something said yes
CreateResult::Grant
} else {
// Nothing said yes.
CreateResult::Denied
CreateResult::Deny
}
}
@ -60,7 +61,7 @@ fn create_filter_entry<'a>(
}
IdentType::Synch(_) => {
security_critical!("Blocking sync check");
return IResult::Denied;
return IResult::Deny;
}
IdentType::User(_) => {}
};
@ -69,7 +70,7 @@ fn create_filter_entry<'a>(
match ident.access_scope() {
AccessScope::ReadOnly | AccessScope::Synchronise => {
security_access!("denied ❌ - identity access scope is not permitted to create");
return IResult::Denied;
return IResult::Deny;
}
AccessScope::ReadWrite => {
// As you were
@ -96,7 +97,7 @@ fn create_filter_entry<'a>(
Some(s) => s.collect(),
None => {
admin_error!("Class set failed to build - corrupted entry?");
return IResult::Denied;
return IResult::Deny;
}
};
@ -173,22 +174,22 @@ fn protected_filter_entry(ident: &Identity, entry: &Entry<EntryInit, EntryNew>)
}
IdentType::Synch(_) => {
security_access!("sync agreements may not directly create entities");
IResult::Denied
IResult::Deny
}
IdentType::User(_) => {
// Now check things ...
// For now we just block create on sync object
if let Some(classes) = entry.get_ava_set(Attribute::Class) {
if classes.contains(&EntryClass::SyncObject.into()) {
// Block the mod
security_access!("attempt to create with protected class type");
IResult::Denied
} else {
if let Some(classes) = entry.get_ava_as_iutf8(Attribute::Class) {
if classes.is_disjoint(&PROTECTED_ENTRY_CLASSES) {
// It's different, go ahead
IResult::Ignore
} else {
// Block the mod, something is present
security_access!("attempt to create with protected class type");
IResult::Deny
}
} else {
// Nothing to check.
// Nothing to check - this entry will fail to create anyway because it has
// no classes
IResult::Ignore
}
}

View file

@ -1,16 +1,17 @@
use super::profiles::{
AccessControlDeleteResolved, AccessControlReceiverCondition, AccessControlTargetCondition,
};
use super::protected::PROTECTED_ENTRY_CLASSES;
use crate::prelude::*;
use std::sync::Arc;
pub(super) enum DeleteResult {
Denied,
Deny,
Grant,
}
enum IResult {
Denied,
Deny,
Grant,
Ignore,
}
@ -24,25 +25,25 @@ pub(super) fn apply_delete_access<'a>(
let mut grant = false;
match protected_filter_entry(ident, entry) {
IResult::Denied => denied = true,
IResult::Deny => denied = true,
IResult::Grant | IResult::Ignore => {}
}
match delete_filter_entry(ident, related_acp, entry) {
IResult::Denied => denied = true,
IResult::Deny => denied = true,
IResult::Grant => grant = true,
IResult::Ignore => {}
}
if denied {
// Something explicitly said no.
DeleteResult::Denied
DeleteResult::Deny
} else if grant {
// Something said yes
DeleteResult::Grant
} else {
// Nothing said yes.
DeleteResult::Denied
DeleteResult::Deny
}
}
@ -59,7 +60,7 @@ fn delete_filter_entry<'a>(
}
IdentType::Synch(_) => {
security_critical!("Blocking sync check");
return IResult::Denied;
return IResult::Deny;
}
IdentType::User(_) => {}
};
@ -68,7 +69,7 @@ fn delete_filter_entry<'a>(
match ident.access_scope() {
AccessScope::ReadOnly | AccessScope::Synchronise => {
security_access!("denied ❌ - identity access scope is not permitted to delete");
return IResult::Denied;
return IResult::Deny;
}
AccessScope::ReadWrite => {
// As you were
@ -152,28 +153,30 @@ fn protected_filter_entry(ident: &Identity, entry: &Arc<EntrySealedCommitted>) -
}
IdentType::Synch(_) => {
security_access!("sync agreements may not directly delete entities");
IResult::Denied
IResult::Deny
}
IdentType::User(_) => {
// Now check things ...
// For now we just block create on sync object
if let Some(classes) = entry.get_ava_set(Attribute::Class) {
if classes.contains(&EntryClass::SyncObject.into()) {
// Block the mod
security_access!("attempt to delete with protected class type");
return IResult::Denied;
}
};
// Prevent deletion of entries that exist in the system controlled entry range.
if entry.get_uuid() <= UUID_ANONYMOUS {
security_access!("attempt to delete system builtin entry");
return IResult::Denied;
return IResult::Deny;
}
// Checks exhausted, no more input from us
IResult::Ignore
// Prevent deleting some protected types.
if let Some(classes) = entry.get_ava_as_iutf8(Attribute::Class) {
if classes.is_disjoint(&PROTECTED_ENTRY_CLASSES) {
// It's different, go ahead
IResult::Ignore
} else {
// Block the mod, something is present
security_access!("attempt to create with protected class type");
IResult::Deny
}
} else {
// Nothing to check - this entry will fail to create anyway because it has
// no classes
IResult::Ignore
}
}
}
}

View file

@ -50,12 +50,13 @@ mod create;
mod delete;
mod modify;
pub mod profiles;
mod protected;
mod search;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Access {
Grant,
Denied,
Deny,
Allow(BTreeSet<Attribute>),
}
@ -63,7 +64,7 @@ impl From<&Access> for ScimAttributeEffectiveAccess {
fn from(value: &Access) -> Self {
match value {
Access::Grant => Self::Grant,
Access::Denied => Self::Denied,
Access::Deny => Self::Deny,
Access::Allow(set) => Self::Allow(set.clone()),
}
}
@ -72,7 +73,7 @@ impl From<&Access> for ScimAttributeEffectiveAccess {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AccessClass {
Grant,
Denied,
Deny,
Allow(BTreeSet<AttrString>),
}
@ -86,12 +87,22 @@ pub struct AccessEffectivePermission {
pub search: Access,
pub modify_pres: Access,
pub modify_rem: Access,
pub modify_class: AccessClass,
pub modify_pres_class: AccessClass,
pub modify_rem_class: AccessClass,
}
pub enum AccessResult {
pub enum AccessBasicResult {
// Deny this operation unconditionally.
Denied,
Deny,
// Unbounded allow, provided no deny state exists.
Grant,
// This module makes no decisions about this entry.
Ignore,
}
pub enum AccessSrchResult {
// Deny this operation unconditionally.
Deny,
// Unbounded allow, provided no deny state exists.
Grant,
// This module makes no decisions about this entry.
@ -99,24 +110,37 @@ pub enum AccessResult {
// Limit the allowed attr set to this - this doesn't
// allow anything, it constrains what might be allowed
// by a later module.
Constrain(BTreeSet<Attribute>),
// Allow these attributes within constraints.
Allow(BTreeSet<Attribute>),
/*
Constrain {
attr: BTreeSet<Attribute>,
},
*/
Allow { attr: BTreeSet<Attribute> },
}
#[allow(dead_code)]
pub enum AccessResultClass<'a> {
pub enum AccessModResult<'a> {
// Deny this operation unconditionally.
Denied,
// Unbounded allow, provided no denied exists.
Grant,
Deny,
// Unbounded allow, provided no deny state exists.
// Grant,
// This module makes no decisions about this entry.
Ignore,
// Limit the allowed attr set to this - this doesn't
// allow anything, it constrains what might be allowed.
Constrain(BTreeSet<&'a str>),
// Allow these attributes within constraints.
Allow(BTreeSet<&'a str>),
// allow anything, it constrains what might be allowed
// by a later module.
Constrain {
pres_attr: BTreeSet<Attribute>,
rem_attr: BTreeSet<Attribute>,
pres_cls: Option<BTreeSet<&'a str>>,
rem_cls: Option<BTreeSet<&'a str>>,
},
// Allow these modifications within constraints.
Allow {
pres_attr: BTreeSet<Attribute>,
rem_attr: BTreeSet<Attribute>,
pres_class: BTreeSet<&'a str>,
rem_class: BTreeSet<&'a str>,
},
}
// =========================================================================
@ -303,7 +327,7 @@ pub trait AccessControlsTransaction<'a> {
.into_iter()
.filter(|e| {
match apply_search_access(ident, related_acp.as_slice(), e) {
SearchResult::Denied => false,
SearchResult::Deny => false,
SearchResult::Grant => true,
SearchResult::Allow(allowed_attrs) => {
// The allow set constrained.
@ -401,7 +425,7 @@ pub trait AccessControlsTransaction<'a> {
.into_iter()
.filter_map(|entry| {
match apply_search_access(&se.ident, &search_related_acp, &entry) {
SearchResult::Denied => {
SearchResult::Deny => {
None
}
SearchResult::Grant => {
@ -536,7 +560,8 @@ pub trait AccessControlsTransaction<'a> {
// Build the set of classes that we to work on, only in terms of "addition". To remove
// I think we have no limit, but ... william of the future may find a problem with this
// policy.
let mut requested_classes: BTreeSet<&str> = Default::default();
let mut requested_pres_classes: BTreeSet<&str> = Default::default();
let mut requested_rem_classes: BTreeSet<&str> = Default::default();
for modify in me.modlist.iter() {
match modify {
@ -548,27 +573,33 @@ pub trait AccessControlsTransaction<'a> {
// existence, and second, we would have failed the mod at schema checking
// earlier in the process as these were not correctly type. As a result
// we can trust these to be correct here and not to be "None".
requested_classes.extend(v.to_str())
requested_pres_classes.extend(v.to_str())
}
}
Modify::Removed(a, v) => {
if a == Attribute::Class.as_ref() {
requested_classes.extend(v.to_str())
requested_rem_classes.extend(v.to_str())
}
}
Modify::Set(a, v) => {
if a == Attribute::Class.as_ref() {
// flatten to remove the option down to an iterator
requested_classes.extend(v.as_iutf8_iter().into_iter().flatten())
// This is a reasonably complex case - we actually have to contemplate
// the difference between what exists and what doesn't, but that's per-entry.
//
// for now, we treat this as both pres and rem, but I think that ultimately
// to fix this we need to make all modifies apply in terms of "batch mod"
requested_pres_classes.extend(v.as_iutf8_iter().into_iter().flatten());
requested_rem_classes.extend(v.as_iutf8_iter().into_iter().flatten());
}
}
_ => {}
}
}
debug!(?requested_pres, "Requested present set");
debug!(?requested_rem, "Requested remove set");
debug!(?requested_classes, "Requested class set");
debug!(?requested_pres, "Requested present attribute set");
debug!(?requested_rem, "Requested remove attribute set");
debug!(?requested_pres_classes, "Requested present class set");
debug!(?requested_rem_classes, "Requested remove class set");
let sync_agmts = self.get_sync_agreements();
@ -576,9 +607,16 @@ pub trait AccessControlsTransaction<'a> {
debug!(entry_id = %e.get_display_id());
match apply_modify_access(&me.ident, related_acp.as_slice(), sync_agmts, e) {
ModifyResult::Denied => false,
ModifyResult::Deny => false,
ModifyResult::Grant => true,
ModifyResult::Allow { pres, rem, cls } => {
ModifyResult::Allow {
pres,
rem,
pres_cls,
rem_cls,
} => {
let mut decision = true;
if !requested_pres.is_subset(&pres) {
security_error!("requested_pres is not a subset of allowed");
security_error!(
@ -586,23 +624,41 @@ pub trait AccessControlsTransaction<'a> {
requested_pres,
pres
);
false
} else if !requested_rem.is_subset(&rem) {
decision = false
};
if !requested_rem.is_subset(&rem) {
security_error!("requested_rem is not a subset of allowed");
security_error!("requested_rem: {:?} !⊆ allowed: {:?}", requested_rem, rem);
false
} else if !requested_classes.is_subset(&cls) {
security_error!("requested_classes is not a subset of allowed");
decision = false;
};
if !requested_pres_classes.is_subset(&pres_cls) {
security_error!("requested_pres_classes is not a subset of allowed");
security_error!(
"requested_classes: {:?} !⊆ allowed: {:?}",
requested_classes,
cls
"requested_pres_classes: {:?} !⊆ allowed: {:?}",
requested_pres_classes,
pres_cls
);
false
} else {
decision = false;
};
if !requested_rem_classes.is_subset(&rem_cls) {
security_error!("requested_rem_classes is not a subset of allowed");
security_error!(
"requested_rem_classes: {:?} !⊆ allowed: {:?}",
requested_rem_classes,
rem_cls
);
decision = false;
}
if decision {
debug!("passed pres, rem, classes check.");
true
} // if acc == false
}
// Yield the result
decision
}
}
});
@ -668,47 +724,55 @@ pub trait AccessControlsTransaction<'a> {
})
.collect();
// Build the set of classes that we to work on, only in terms of "addition". To remove
// I think we have no limit, but ... william of the future may find a problem with this
// policy.
let requested_classes: BTreeSet<&str> = modlist
.iter()
.filter_map(|m| match m {
let mut requested_pres_classes: BTreeSet<&str> = Default::default();
let mut requested_rem_classes: BTreeSet<&str> = Default::default();
for modify in modlist.iter() {
match modify {
Modify::Present(a, v) => {
if a == Attribute::Class.as_ref() {
// Here we have an option<&str> which could mean there is a risk of
// a malicious entity attempting to trick us by masking class mods
// in non-iutf8 types. However, the server first won't respect their
// existence, and second, we would have failed the mod at schema checking
// earlier in the process as these were not correctly type. As a result
// we can trust these to be correct here and not to be "None".
v.to_str()
} else {
None
requested_pres_classes.extend(v.to_str())
}
}
Modify::Removed(a, v) => {
if a == Attribute::Class.as_ref() {
v.to_str()
} else {
None
requested_rem_classes.extend(v.to_str())
}
}
_ => None,
})
.collect();
Modify::Set(a, v) => {
if a == Attribute::Class.as_ref() {
// This is a reasonably complex case - we actually have to contemplate
// the difference between what exists and what doesn't, but that's per-entry.
//
// for now, we treat this as both pres and rem, but I think that ultimately
// to fix this we need to make all modifies apply in terms of "batch mod"
requested_pres_classes.extend(v.as_iutf8_iter().into_iter().flatten());
requested_rem_classes.extend(v.as_iutf8_iter().into_iter().flatten());
}
}
_ => {}
}
}
debug!(?requested_pres, "Requested present set");
debug!(?requested_rem, "Requested remove set");
debug!(?requested_classes, "Requested class set");
debug!(?requested_pres_classes, "Requested present class set");
debug!(?requested_rem_classes, "Requested remove class set");
debug!(entry_id = %e.get_display_id());
let sync_agmts = self.get_sync_agreements();
match apply_modify_access(&me.ident, related_acp.as_slice(), sync_agmts, e) {
ModifyResult::Denied => false,
ModifyResult::Deny => false,
ModifyResult::Grant => true,
ModifyResult::Allow { pres, rem, cls } => {
ModifyResult::Allow {
pres,
rem,
pres_cls,
rem_cls,
} => {
let mut decision = true;
if !requested_pres.is_subset(&pres) {
security_error!("requested_pres is not a subset of allowed");
security_error!(
@ -716,23 +780,41 @@ pub trait AccessControlsTransaction<'a> {
requested_pres,
pres
);
false
} else if !requested_rem.is_subset(&rem) {
decision = false
};
if !requested_rem.is_subset(&rem) {
security_error!("requested_rem is not a subset of allowed");
security_error!("requested_rem: {:?} !⊆ allowed: {:?}", requested_rem, rem);
false
} else if !requested_classes.is_subset(&cls) {
security_error!("requested_classes is not a subset of allowed");
decision = false;
};
if !requested_pres_classes.is_subset(&pres_cls) {
security_error!("requested_pres_classes is not a subset of allowed");
security_error!(
"requested_classes: {:?} !⊆ allowed: {:?}",
requested_classes,
cls
requested_pres_classes,
pres_cls
);
false
} else {
security_access!("passed pres, rem, classes check.");
true
} // if acc == false
decision = false;
};
if !requested_rem_classes.is_subset(&rem_cls) {
security_error!("requested_rem_classes is not a subset of allowed");
security_error!(
"requested_classes: {:?} !⊆ allowed: {:?}",
requested_rem_classes,
rem_cls
);
decision = false;
}
if decision {
debug!("passed pres, rem, classes check.");
}
// Yield the result
decision
}
}
});
@ -780,7 +862,7 @@ pub trait AccessControlsTransaction<'a> {
// For each entry
let r = entries.iter().all(|e| {
match apply_create_access(&ce.ident, related_acp.as_slice(), e) {
CreateResult::Denied => false,
CreateResult::Deny => false,
CreateResult::Grant => true,
}
});
@ -836,7 +918,7 @@ pub trait AccessControlsTransaction<'a> {
// For each entry
let r = entries.iter().all(|e| {
match apply_delete_access(&de.ident, related_acp.as_slice(), e) {
DeleteResult::Denied => false,
DeleteResult::Deny => false,
DeleteResult::Grant => true,
}
});
@ -925,7 +1007,7 @@ pub trait AccessControlsTransaction<'a> {
) -> AccessEffectivePermission {
// == search ==
let search_effective = match apply_search_access(ident, search_related_acp, entry) {
SearchResult::Denied => Access::Denied,
SearchResult::Deny => Access::Deny,
SearchResult::Grant => Access::Grant,
SearchResult::Allow(allowed_attrs) => {
// Bound by requested attrs?
@ -934,14 +1016,30 @@ pub trait AccessControlsTransaction<'a> {
};
// == modify ==
let (modify_pres, modify_rem, modify_class) =
let (modify_pres, modify_rem, modify_pres_class, modify_rem_class) =
match apply_modify_access(ident, modify_related_acp, sync_agmts, entry) {
ModifyResult::Denied => (Access::Denied, Access::Denied, AccessClass::Denied),
ModifyResult::Grant => (Access::Grant, Access::Grant, AccessClass::Grant),
ModifyResult::Allow { pres, rem, cls } => (
ModifyResult::Deny => (
Access::Deny,
Access::Deny,
AccessClass::Deny,
AccessClass::Deny,
),
ModifyResult::Grant => (
Access::Grant,
Access::Grant,
AccessClass::Grant,
AccessClass::Grant,
),
ModifyResult::Allow {
pres,
rem,
pres_cls,
rem_cls,
} => (
Access::Allow(pres.into_iter().collect()),
Access::Allow(rem.into_iter().collect()),
AccessClass::Allow(cls.into_iter().map(|s| s.into()).collect()),
AccessClass::Allow(pres_cls.into_iter().map(|s| s.into()).collect()),
AccessClass::Allow(rem_cls.into_iter().map(|s| s.into()).collect()),
),
};
@ -949,7 +1047,7 @@ pub trait AccessControlsTransaction<'a> {
let delete_status = apply_delete_access(ident, delete_related_acp, entry);
let delete = match delete_status {
DeleteResult::Denied => false,
DeleteResult::Deny => false,
DeleteResult::Grant => true,
};
@ -960,7 +1058,8 @@ pub trait AccessControlsTransaction<'a> {
search: search_effective,
modify_pres,
modify_rem,
modify_class,
modify_pres_class,
modify_rem_class,
}
}
}
@ -2166,6 +2265,8 @@ mod tests {
"name class",
// And the class allowed is account
EntryClass::Account.into(),
// And the class allowed is account
EntryClass::Account.into(),
);
// Allow member, class is group. IE not account
let acp_deny = AccessControlModify::from_raw(
@ -2182,8 +2283,8 @@ mod tests {
"member class",
// Allow rem name and class
"member class",
// And the class allowed is account
"group",
EntryClass::Group.into(),
EntryClass::Group.into(),
);
// Does not have a pres or rem class in attrs
let acp_no_class = AccessControlModify::from_raw(
@ -2201,7 +2302,8 @@ mod tests {
// Allow rem name and class
"name class",
// And the class allowed is NOT an account ...
"group",
EntryClass::Group.into(),
EntryClass::Group.into(),
);
// Test allowed pres
@ -2287,6 +2389,7 @@ mod tests {
"name class",
// And the class allowed is account
EntryClass::Account.into(),
EntryClass::Account.into(),
);
test_acp_modify!(&me_pres_ro, vec![acp_allow.clone()], &r_set, false);
@ -2614,7 +2717,8 @@ mod tests {
search: Access::Allow(btreeset![Attribute::Name]),
modify_pres: Access::Allow(BTreeSet::new()),
modify_rem: Access::Allow(BTreeSet::new()),
modify_class: AccessClass::Allow(BTreeSet::new()),
modify_pres_class: AccessClass::Allow(BTreeSet::new()),
modify_rem_class: AccessClass::Allow(BTreeSet::new()),
}]
)
}
@ -2647,6 +2751,7 @@ mod tests {
Attribute::Name.as_ref(),
Attribute::Name.as_ref(),
EntryClass::Object.into(),
EntryClass::Object.into(),
)],
&r_set,
vec![AccessEffectivePermission {
@ -2656,7 +2761,8 @@ mod tests {
search: Access::Allow(BTreeSet::new()),
modify_pres: Access::Allow(btreeset![Attribute::Name]),
modify_rem: Access::Allow(btreeset![Attribute::Name]),
modify_class: AccessClass::Allow(btreeset![EntryClass::Object.into()]),
modify_pres_class: AccessClass::Allow(btreeset![EntryClass::Object.into()]),
modify_rem_class: AccessClass::Allow(btreeset![EntryClass::Object.into()]),
}]
)
}
@ -2796,6 +2902,7 @@ mod tests {
&format!("{} {}", Attribute::UserAuthTokenSession, Attribute::Name),
// And the class allowed is account, we don't use it though.
EntryClass::Account.into(),
EntryClass::Account.into(),
);
// NOTE! Syntax doesn't matter here, we just need to assert if the attr exists
@ -3296,6 +3403,7 @@ mod tests {
"name class",
// And the class allowed is account
EntryClass::Account.into(),
EntryClass::Account.into(),
);
// Test allowed pres
@ -3424,4 +3532,185 @@ mod tests {
// Finally test it!
test_acp_search_reduce!(&se_anon_ro, vec![acp], r_set, ex_anon_some);
}
#[test]
fn test_access_protected_deny_create() {
sketching::test_init();
let ev1 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
);
let r1_set = vec![ev1];
let ev2 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
);
let r2_set = vec![ev2];
let ce_admin = CreateEvent::new_impersonate_identity(
Identity::from_impersonate_entry_readwrite(E_TEST_ACCOUNT_1.clone()),
vec![],
);
let acp = AccessControlCreate::from_raw(
"test_create",
Uuid::new_v4(),
// Apply to admin
UUID_TEST_GROUP_1,
// To create matching filter testperson
// Can this be empty?
filter_valid!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
// classes
EntryClass::Account.into(),
// attrs
"class name uuid",
);
// Test allowed to create
test_acp_create!(&ce_admin, vec![acp.clone()], &r1_set, true);
// Test reject create (not allowed attr)
test_acp_create!(&ce_admin, vec![acp.clone()], &r2_set, false);
}
#[test]
fn test_access_protected_deny_delete() {
sketching::test_init();
let ev1 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
)
.into_sealed_committed();
let r1_set = vec![Arc::new(ev1)];
let ev2 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
)
.into_sealed_committed();
let r2_set = vec![Arc::new(ev2)];
let de = DeleteEvent::new_impersonate_entry(
E_TEST_ACCOUNT_1.clone(),
filter_all!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
);
let acp = AccessControlDelete::from_raw(
"test_delete",
Uuid::new_v4(),
// Apply to admin
UUID_TEST_GROUP_1,
// To delete testperson
filter_valid!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
);
// Test allowed to delete
test_acp_delete!(&de, vec![acp.clone()], &r1_set, true);
// Test not allowed to delete
test_acp_delete!(&de, vec![acp.clone()], &r2_set, false);
}
#[test]
fn test_access_protected_deny_modify() {
sketching::test_init();
let ev1 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
)
.into_sealed_committed();
let r1_set = vec![Arc::new(ev1)];
let ev2 = entry_init!(
(Attribute::Class, EntryClass::Account.to_value()),
(Attribute::Class, EntryClass::System.to_value()),
(Attribute::Name, Value::new_iname("testperson1")),
(Attribute::Uuid, Value::Uuid(UUID_TEST_ACCOUNT_1))
)
.into_sealed_committed();
let r2_set = vec![Arc::new(ev2)];
// Allow name and class, class is account
let acp_allow = AccessControlModify::from_raw(
"test_modify_allow",
Uuid::new_v4(),
// Apply to admin
UUID_TEST_GROUP_1,
// To modify testperson
filter_valid!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
// Allow pres disp name and class
"displayname class",
// Allow rem disp name and class
"displayname class",
// And the classes allowed to add/rem are as such
"system recycled",
"system recycled",
);
let me_pres = ModifyEvent::new_impersonate_entry(
E_TEST_ACCOUNT_1.clone(),
filter_all!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
modlist!([m_pres(Attribute::DisplayName, &Value::new_utf8s("value"))]),
);
// Test allowed pres
test_acp_modify!(&me_pres, vec![acp_allow.clone()], &r1_set, true);
// Test not allowed pres (due to system class)
test_acp_modify!(&me_pres, vec![acp_allow.clone()], &r2_set, false);
// Test that we can not remove class::system
let me_rem_sys = ModifyEvent::new_impersonate_entry(
E_TEST_ACCOUNT_1.clone(),
filter_all!(f_eq(
Attribute::Class,
PartialValue::new_iname("testperson1")
)),
modlist!([m_remove(
Attribute::Class,
&EntryClass::System.to_partialvalue()
)]),
);
test_acp_modify!(&me_rem_sys, vec![acp_allow.clone()], &r2_set, false);
// Ensure that we can't add recycled.
let me_pres = ModifyEvent::new_impersonate_entry(
E_TEST_ACCOUNT_1.clone(),
filter_all!(f_eq(
Attribute::Name,
PartialValue::new_iname("testperson1")
)),
modlist!([m_pres(Attribute::Class, &EntryClass::Recycled.to_value())]),
);
test_acp_modify!(&me_pres, vec![acp_allow.clone()], &r1_set, false);
}
}

View file

@ -1,21 +1,25 @@
use crate::prelude::*;
use hashbrown::HashMap;
use std::collections::BTreeSet;
use super::profiles::{
AccessControlModify, AccessControlModifyResolved, AccessControlReceiverCondition,
AccessControlTargetCondition,
};
use super::{AccessResult, AccessResultClass};
use super::protected::{
LOCKED_ENTRY_CLASSES, PROTECTED_MOD_ENTRY_CLASSES, PROTECTED_MOD_PRES_ENTRY_CLASSES,
PROTECTED_MOD_REM_ENTRY_CLASSES,
};
use super::{AccessBasicResult, AccessModResult};
use crate::prelude::*;
use hashbrown::HashMap;
use std::collections::BTreeSet;
use std::sync::Arc;
pub(super) enum ModifyResult<'a> {
Denied,
Deny,
Grant,
Allow {
pres: BTreeSet<Attribute>,
rem: BTreeSet<Attribute>,
cls: BTreeSet<&'a str>,
pres_cls: BTreeSet<&'a str>,
rem_cls: BTreeSet<&'a str>,
},
}
@ -27,12 +31,17 @@ pub(super) fn apply_modify_access<'a>(
) -> ModifyResult<'a> {
let mut denied = false;
let mut grant = false;
let mut constrain_pres = BTreeSet::default();
let mut allow_pres = BTreeSet::default();
let mut constrain_rem = BTreeSet::default();
let mut allow_rem = BTreeSet::default();
let mut constrain_cls = BTreeSet::default();
let mut allow_cls = BTreeSet::default();
let mut constrain_pres_cls = BTreeSet::default();
let mut allow_pres_cls = BTreeSet::default();
let mut constrain_rem_cls = BTreeSet::default();
let mut allow_rem_cls = BTreeSet::default();
// Some useful references.
// - needed for checking entry manager conditions.
@ -43,28 +52,53 @@ pub(super) fn apply_modify_access<'a>(
// kind of being three operations all in one.
match modify_ident_test(ident) {
AccessResult::Denied => denied = true,
AccessResult::Grant => grant = true,
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain_pres.append(&mut set),
AccessResult::Allow(mut set) => allow_pres.append(&mut set),
AccessBasicResult::Deny => denied = true,
AccessBasicResult::Grant => grant = true,
AccessBasicResult::Ignore => {}
}
// Check with protected if we should proceed.
match modify_protected_attrs(ident, entry) {
AccessModResult::Deny => denied = true,
AccessModResult::Constrain {
mut pres_attr,
mut rem_attr,
pres_cls,
rem_cls,
} => {
constrain_rem.append(&mut rem_attr);
constrain_pres.append(&mut pres_attr);
if let Some(mut pres_cls) = pres_cls {
constrain_pres_cls.append(&mut pres_cls);
}
if let Some(mut rem_cls) = rem_cls {
constrain_rem_cls.append(&mut rem_cls);
}
}
// Can't grant.
// AccessModResult::Grant |
// Can't allow
AccessModResult::Allow { .. } | AccessModResult::Ignore => {}
}
if !grant && !denied {
// Check with protected if we should proceed.
// If it's a sync entry, constrain it.
match modify_sync_constrain(ident, entry, sync_agreements) {
AccessResult::Denied => denied = true,
AccessResult::Constrain(mut set) => {
constrain_rem.extend(set.iter().cloned());
constrain_pres.append(&mut set)
AccessModResult::Deny => denied = true,
AccessModResult::Constrain {
mut pres_attr,
mut rem_attr,
..
} => {
constrain_rem.append(&mut rem_attr);
constrain_pres.append(&mut pres_attr);
}
// Can't grant.
AccessResult::Grant |
// AccessModResult::Grant |
// Can't allow
AccessResult::Allow(_) |
AccessResult::Ignore => {}
AccessModResult::Allow { .. } | AccessModResult::Ignore => {}
}
// Setup the acp's here
@ -122,35 +156,27 @@ pub(super) fn apply_modify_access<'a>(
.collect();
match modify_pres_test(scoped_acp.as_slice()) {
AccessResult::Denied => denied = true,
AccessModResult::Deny => denied = true,
// Can never return a unilateral grant.
AccessResult::Grant => {}
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain_pres.append(&mut set),
AccessResult::Allow(mut set) => allow_pres.append(&mut set),
}
match modify_rem_test(scoped_acp.as_slice()) {
AccessResult::Denied => denied = true,
// Can never return a unilateral grant.
AccessResult::Grant => {}
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain_rem.append(&mut set),
AccessResult::Allow(mut set) => allow_rem.append(&mut set),
}
match modify_cls_test(scoped_acp.as_slice()) {
AccessResultClass::Denied => denied = true,
// Can never return a unilateral grant.
AccessResultClass::Grant => {}
AccessResultClass::Ignore => {}
AccessResultClass::Constrain(mut set) => constrain_cls.append(&mut set),
AccessResultClass::Allow(mut set) => allow_cls.append(&mut set),
// AccessModResult::Grant => {}
AccessModResult::Ignore => {}
AccessModResult::Constrain { .. } => {}
AccessModResult::Allow {
mut pres_attr,
mut rem_attr,
mut pres_class,
mut rem_class,
} => {
allow_pres.append(&mut pres_attr);
allow_rem.append(&mut rem_attr);
allow_pres_cls.append(&mut pres_class);
allow_rem_cls.append(&mut rem_class);
}
}
}
if denied {
ModifyResult::Denied
ModifyResult::Deny
} else if grant {
ModifyResult::Grant
} else {
@ -168,31 +194,48 @@ pub(super) fn apply_modify_access<'a>(
allow_rem
};
let allowed_cls = if !constrain_cls.is_empty() {
let mut allowed_pres_cls = if !constrain_pres_cls.is_empty() {
// bit_and
&constrain_cls & &allow_cls
&constrain_pres_cls & &allow_pres_cls
} else {
allow_cls
allow_pres_cls
};
let mut allowed_rem_cls = if !constrain_rem_cls.is_empty() {
// bit_and
&constrain_rem_cls & &allow_rem_cls
} else {
allow_rem_cls
};
// Deny these classes from being part of any addition or removal to an entry
for protected_cls in PROTECTED_MOD_PRES_ENTRY_CLASSES.iter() {
allowed_pres_cls.remove(protected_cls.as_str());
}
for protected_cls in PROTECTED_MOD_REM_ENTRY_CLASSES.iter() {
allowed_rem_cls.remove(protected_cls.as_str());
}
ModifyResult::Allow {
pres: allowed_pres,
rem: allowed_rem,
cls: allowed_cls,
pres_cls: allowed_pres_cls,
rem_cls: allowed_rem_cls,
}
}
}
fn modify_ident_test(ident: &Identity) -> AccessResult {
fn modify_ident_test(ident: &Identity) -> AccessBasicResult {
match &ident.origin {
IdentType::Internal => {
trace!("Internal operation, bypassing access check");
// No need to check ACS
return AccessResult::Grant;
return AccessBasicResult::Grant;
}
IdentType::Synch(_) => {
security_critical!("Blocking sync check");
return AccessResult::Denied;
return AccessBasicResult::Deny;
}
IdentType::User(_) => {}
};
@ -201,53 +244,56 @@ fn modify_ident_test(ident: &Identity) -> AccessResult {
match ident.access_scope() {
AccessScope::ReadOnly | AccessScope::Synchronise => {
security_access!("denied ❌ - identity access scope is not permitted to modify");
return AccessResult::Denied;
return AccessBasicResult::Deny;
}
AccessScope::ReadWrite => {
// As you were
}
};
AccessResult::Ignore
AccessBasicResult::Ignore
}
fn modify_pres_test(scoped_acp: &[&AccessControlModify]) -> AccessResult {
let allowed_pres: BTreeSet<Attribute> = scoped_acp
fn modify_pres_test<'a>(scoped_acp: &[&'a AccessControlModify]) -> AccessModResult<'a> {
let pres_attr: BTreeSet<Attribute> = scoped_acp
.iter()
.flat_map(|acp| acp.presattrs.iter().cloned())
.collect();
AccessResult::Allow(allowed_pres)
}
fn modify_rem_test(scoped_acp: &[&AccessControlModify]) -> AccessResult {
let allowed_rem: BTreeSet<Attribute> = scoped_acp
let rem_attr: BTreeSet<Attribute> = scoped_acp
.iter()
.flat_map(|acp| acp.remattrs.iter().cloned())
.collect();
AccessResult::Allow(allowed_rem)
}
// TODO: Should this be reverted to the Str borrow method? Or do we try to change
// to EntryClass?
fn modify_cls_test<'a>(scoped_acp: &[&'a AccessControlModify]) -> AccessResultClass<'a> {
let allowed_classes: BTreeSet<&'a str> = scoped_acp
let pres_class: BTreeSet<&'a str> = scoped_acp
.iter()
.flat_map(|acp| acp.classes.iter().map(|s| s.as_str()))
.flat_map(|acp| acp.pres_classes.iter().map(|s| s.as_str()))
.collect();
AccessResultClass::Allow(allowed_classes)
let rem_class: BTreeSet<&'a str> = scoped_acp
.iter()
.flat_map(|acp| acp.rem_classes.iter().map(|s| s.as_str()))
.collect();
AccessModResult::Allow {
pres_attr,
rem_attr,
pres_class,
rem_class,
}
}
fn modify_sync_constrain(
fn modify_sync_constrain<'a>(
ident: &Identity,
entry: &Arc<EntrySealedCommitted>,
sync_agreements: &HashMap<Uuid, BTreeSet<Attribute>>,
) -> AccessResult {
) -> AccessModResult<'a> {
match &ident.origin {
IdentType::Internal => AccessResult::Ignore,
IdentType::Internal => AccessModResult::Ignore,
IdentType::Synch(_) => {
// Allowed to mod sync objects. Later we'll probably need to check the limits of what
// it can do if we go that way.
AccessResult::Ignore
AccessModResult::Ignore
}
IdentType::User(_) => {
// We need to meet these conditions.
@ -259,7 +305,7 @@ fn modify_sync_constrain(
.unwrap_or(false);
if !is_sync {
return AccessResult::Ignore;
return AccessModResult::Ignore;
}
if let Some(sync_uuid) = entry.get_ava_single_refer(Attribute::SyncParentUuid) {
@ -274,11 +320,115 @@ fn modify_sync_constrain(
set.extend(sync_yield_authority.iter().cloned())
}
AccessResult::Constrain(set)
AccessModResult::Constrain {
pres_attr: set.clone(),
rem_attr: set,
pres_cls: None,
rem_cls: None,
}
} else {
warn!(entry = ?entry.get_uuid(), "sync_parent_uuid not found on sync object, preventing all access");
AccessResult::Denied
AccessModResult::Deny
}
}
}
}
/// Verify if the modification runs into limits that are defined by our protection rules.
fn modify_protected_attrs<'a>(
ident: &Identity,
entry: &Arc<EntrySealedCommitted>,
) -> AccessModResult<'a> {
match &ident.origin {
IdentType::Internal | IdentType::Synch(_) => {
// We don't constraint or influence these.
AccessModResult::Ignore
}
IdentType::User(_) => {
if let Some(classes) = entry.get_ava_as_iutf8(Attribute::Class) {
if classes.is_disjoint(&PROTECTED_MOD_ENTRY_CLASSES) {
// Not protected, go ahead
AccessModResult::Ignore
} else {
// Okay, the entry is protected, apply the full ruleset.
modify_protected_entry_attrs(classes)
}
} else {
// Nothing to check - this entry will fail to modify anyway because it has
// no classes
AccessModResult::Ignore
}
}
}
}
fn modify_protected_entry_attrs<'a>(classes: &BTreeSet<String>) -> AccessModResult<'a> {
// This is where the majority of the logic is - this contains the modification
// rules as they apply.
// First check for the hard-deny rules.
if !classes.is_disjoint(&LOCKED_ENTRY_CLASSES) {
// Hard deny attribute modifications to these types.
return AccessModResult::Deny;
}
let mut constrain_attrs = BTreeSet::default();
// Allows removal of the recycled class specifically on recycled entries.
if classes.contains(EntryClass::Recycled.into()) {
constrain_attrs.extend([Attribute::Class]);
}
if classes.contains(EntryClass::ClassType.into()) {
constrain_attrs.extend([Attribute::May, Attribute::Must]);
}
if classes.contains(EntryClass::SystemConfig.into()) {
constrain_attrs.extend([Attribute::BadlistPassword]);
}
// Allow domain settings.
if classes.contains(EntryClass::DomainInfo.into()) {
constrain_attrs.extend([
Attribute::DomainSsid,
Attribute::DomainLdapBasedn,
Attribute::LdapMaxQueryableAttrs,
Attribute::LdapAllowUnixPwBind,
Attribute::FernetPrivateKeyStr,
Attribute::Es256PrivateKeyDer,
Attribute::KeyActionRevoke,
Attribute::KeyActionRotate,
Attribute::IdVerificationEcKey,
Attribute::DeniedName,
Attribute::DomainDisplayName,
Attribute::Image,
]);
}
// Allow account policy related attributes to be changed on dyngroup
if classes.contains(EntryClass::DynGroup.into()) {
constrain_attrs.extend([
Attribute::AuthSessionExpiry,
Attribute::AuthPasswordMinimumLength,
Attribute::CredentialTypeMinimum,
Attribute::PrivilegeExpiry,
Attribute::WebauthnAttestationCaList,
Attribute::LimitSearchMaxResults,
Attribute::LimitSearchMaxFilterTest,
Attribute::AllowPrimaryCredFallback,
]);
}
// If we don't constrain the attributes at all, we have to deny the change
// from proceeding.
if constrain_attrs.is_empty() {
AccessModResult::Deny
} else {
AccessModResult::Constrain {
pres_attr: constrain_attrs.clone(),
rem_attr: constrain_attrs,
pres_cls: None,
rem_cls: None,
}
}
}

View file

@ -266,9 +266,10 @@ pub struct AccessControlModifyResolved<'a> {
#[derive(Debug, Clone)]
pub struct AccessControlModify {
pub acp: AccessControlProfile,
pub classes: Vec<AttrString>,
pub presattrs: Vec<Attribute>,
pub remattrs: Vec<Attribute>,
pub pres_classes: Vec<AttrString>,
pub rem_classes: Vec<AttrString>,
}
impl AccessControlModify {
@ -293,14 +294,25 @@ impl AccessControlModify {
.map(|i| i.map(Attribute::from).collect())
.unwrap_or_default();
let classes = value
let classes: Vec<AttrString> = value
.get_ava_iter_iutf8(Attribute::AcpModifyClass)
.map(|i| i.map(AttrString::from).collect())
.unwrap_or_default();
let pres_classes = value
.get_ava_iter_iutf8(Attribute::AcpModifyPresentClass)
.map(|i| i.map(AttrString::from).collect())
.unwrap_or_else(|| classes.clone());
let rem_classes = value
.get_ava_iter_iutf8(Attribute::AcpModifyRemoveClass)
.map(|i| i.map(AttrString::from).collect())
.unwrap_or_else(|| classes);
Ok(AccessControlModify {
acp: AccessControlProfile::try_from(qs, value)?,
classes,
pres_classes,
rem_classes,
presattrs,
remattrs,
})
@ -316,7 +328,8 @@ impl AccessControlModify {
targetscope: Filter<FilterValid>,
presattrs: &str,
remattrs: &str,
classes: &str,
pres_classes: &str,
rem_classes: &str,
) -> Self {
AccessControlModify {
acp: AccessControlProfile {
@ -325,7 +338,14 @@ impl AccessControlModify {
receiver: AccessControlReceiver::Group(btreeset!(receiver)),
target: AccessControlTarget::Scope(targetscope),
},
classes: classes.split_whitespace().map(AttrString::from).collect(),
pres_classes: pres_classes
.split_whitespace()
.map(AttrString::from)
.collect(),
rem_classes: rem_classes
.split_whitespace()
.map(AttrString::from)
.collect(),
presattrs: presattrs.split_whitespace().map(Attribute::from).collect(),
remattrs: remattrs.split_whitespace().map(Attribute::from).collect(),
}
@ -340,7 +360,8 @@ impl AccessControlModify {
target: AccessControlTarget,
presattrs: &str,
remattrs: &str,
classes: &str,
pres_classes: &str,
rem_classes: &str,
) -> Self {
AccessControlModify {
acp: AccessControlProfile {
@ -349,7 +370,14 @@ impl AccessControlModify {
receiver: AccessControlReceiver::EntryManager,
target,
},
classes: classes.split_whitespace().map(AttrString::from).collect(),
pres_classes: pres_classes
.split_whitespace()
.map(AttrString::from)
.collect(),
rem_classes: rem_classes
.split_whitespace()
.map(AttrString::from)
.collect(),
presattrs: presattrs.split_whitespace().map(Attribute::from).collect(),
remattrs: remattrs.split_whitespace().map(Attribute::from).collect(),
}

View file

@ -0,0 +1,83 @@
use crate::prelude::EntryClass;
use std::collections::BTreeSet;
use std::sync::LazyLock;
/// These entry classes may not be created or deleted, and may invoke some protection rules
/// if on an entry.
pub static PROTECTED_ENTRY_CLASSES: LazyLock<BTreeSet<String>> = LazyLock::new(|| {
let classes = vec![
EntryClass::System,
EntryClass::DomainInfo,
EntryClass::SystemInfo,
EntryClass::SystemConfig,
EntryClass::DynGroup,
EntryClass::SyncObject,
EntryClass::Tombstone,
EntryClass::Recycled,
];
BTreeSet::from_iter(classes.into_iter().map(|ec| ec.into()))
});
/// Entries with these classes are protected from modifications - not that
/// sync object is not present here as there are separate rules for that in
/// the modification access module.
///
/// Recycled is also not protected here as it needs to be able to be removed
/// by a recycle bin admin.
pub static PROTECTED_MOD_ENTRY_CLASSES: LazyLock<BTreeSet<String>> = LazyLock::new(|| {
let classes = vec![
EntryClass::System,
EntryClass::DomainInfo,
EntryClass::SystemInfo,
EntryClass::SystemConfig,
EntryClass::DynGroup,
// EntryClass::SyncObject,
EntryClass::Tombstone,
EntryClass::Recycled,
];
BTreeSet::from_iter(classes.into_iter().map(|ec| ec.into()))
});
/// These classes may NOT be added to ANY ENTRY
pub static PROTECTED_MOD_PRES_ENTRY_CLASSES: LazyLock<BTreeSet<String>> = LazyLock::new(|| {
let classes = vec![
EntryClass::System,
EntryClass::DomainInfo,
EntryClass::SystemInfo,
EntryClass::SystemConfig,
EntryClass::DynGroup,
EntryClass::SyncObject,
EntryClass::Tombstone,
EntryClass::Recycled,
];
BTreeSet::from_iter(classes.into_iter().map(|ec| ec.into()))
});
/// These classes may NOT be removed from ANY ENTRY
pub static PROTECTED_MOD_REM_ENTRY_CLASSES: LazyLock<BTreeSet<String>> = LazyLock::new(|| {
let classes = vec![
EntryClass::System,
EntryClass::DomainInfo,
EntryClass::SystemInfo,
EntryClass::SystemConfig,
EntryClass::DynGroup,
EntryClass::SyncObject,
EntryClass::Tombstone,
// EntryClass::Recycled,
];
BTreeSet::from_iter(classes.into_iter().map(|ec| ec.into()))
});
/// Entries with these classes may not be modified under any circumstance.
pub static LOCKED_ENTRY_CLASSES: LazyLock<BTreeSet<String>> = LazyLock::new(|| {
let classes = vec![
EntryClass::Tombstone,
// EntryClass::Recycled,
];
BTreeSet::from_iter(classes.into_iter().map(|ec| ec.into()))
});

View file

@ -4,11 +4,11 @@ use std::collections::BTreeSet;
use super::profiles::{
AccessControlReceiverCondition, AccessControlSearchResolved, AccessControlTargetCondition,
};
use super::AccessResult;
use super::AccessSrchResult;
use std::sync::Arc;
pub(super) enum SearchResult {
Denied,
Deny,
Grant,
Allow(BTreeSet<Attribute>),
}
@ -23,32 +23,32 @@ pub(super) fn apply_search_access(
// that.
let mut denied = false;
let mut grant = false;
let mut constrain = BTreeSet::default();
let constrain = BTreeSet::default();
let mut allow = BTreeSet::default();
// The access control profile
match search_filter_entry(ident, related_acp, entry) {
AccessResult::Denied => denied = true,
AccessResult::Grant => grant = true,
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain.append(&mut set),
AccessResult::Allow(mut set) => allow.append(&mut set),
AccessSrchResult::Deny => denied = true,
AccessSrchResult::Grant => grant = true,
AccessSrchResult::Ignore => {}
// AccessSrchResult::Constrain { mut attr } => constrain.append(&mut attr),
AccessSrchResult::Allow { mut attr } => allow.append(&mut attr),
};
match search_oauth2_filter_entry(ident, entry) {
AccessResult::Denied => denied = true,
AccessResult::Grant => grant = true,
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain.append(&mut set),
AccessResult::Allow(mut set) => allow.append(&mut set),
AccessSrchResult::Deny => denied = true,
AccessSrchResult::Grant => grant = true,
AccessSrchResult::Ignore => {}
// AccessSrchResult::Constrain { mut attr } => constrain.append(&mut attr),
AccessSrchResult::Allow { mut attr } => allow.append(&mut attr),
};
match search_sync_account_filter_entry(ident, entry) {
AccessResult::Denied => denied = true,
AccessResult::Grant => grant = true,
AccessResult::Ignore => {}
AccessResult::Constrain(mut set) => constrain.append(&mut set),
AccessResult::Allow(mut set) => allow.append(&mut set),
AccessSrchResult::Deny => denied = true,
AccessSrchResult::Grant => grant = true,
AccessSrchResult::Ignore => {}
// AccessSrchResult::Constrain{ mut attr } => constrain.append(&mut attr),
AccessSrchResult::Allow { mut attr } => allow.append(&mut attr),
};
// We'll add more modules later.
@ -56,7 +56,7 @@ pub(super) fn apply_search_access(
// Now finalise the decision.
if denied {
SearchResult::Denied
SearchResult::Deny
} else if grant {
SearchResult::Grant
} else {
@ -74,17 +74,17 @@ fn search_filter_entry(
ident: &Identity,
related_acp: &[AccessControlSearchResolved],
entry: &Arc<EntrySealedCommitted>,
) -> AccessResult {
) -> AccessSrchResult {
// If this is an internal search, return our working set.
match &ident.origin {
IdentType::Internal => {
trace!(uuid = ?entry.get_display_id(), "Internal operation, bypassing access check");
// No need to check ACS
return AccessResult::Grant;
return AccessSrchResult::Grant;
}
IdentType::Synch(_) => {
security_debug!(uuid = ?entry.get_display_id(), "Blocking sync check");
return AccessResult::Denied;
return AccessSrchResult::Deny;
}
IdentType::User(_) => {}
};
@ -95,7 +95,7 @@ fn search_filter_entry(
security_debug!(
"denied ❌ - identity access scope 'Synchronise' is not permitted to search"
);
return AccessResult::Denied;
return AccessSrchResult::Deny;
}
AccessScope::ReadOnly | AccessScope::ReadWrite => {
// As you were
@ -161,16 +161,21 @@ fn search_filter_entry(
.flatten()
.collect();
AccessResult::Allow(allowed_attrs)
AccessSrchResult::Allow {
attr: allowed_attrs,
}
}
fn search_oauth2_filter_entry(ident: &Identity, entry: &Arc<EntrySealedCommitted>) -> AccessResult {
fn search_oauth2_filter_entry(
ident: &Identity,
entry: &Arc<EntrySealedCommitted>,
) -> AccessSrchResult {
match &ident.origin {
IdentType::Internal | IdentType::Synch(_) => AccessResult::Ignore,
IdentType::Internal | IdentType::Synch(_) => AccessSrchResult::Ignore,
IdentType::User(iuser) => {
if iuser.entry.get_uuid() == UUID_ANONYMOUS {
debug!("Anonymous can't access OAuth2 entries, ignoring");
return AccessResult::Ignore;
return AccessSrchResult::Ignore;
}
let contains_o2_rs = entry
@ -190,16 +195,18 @@ fn search_oauth2_filter_entry(ident: &Identity, entry: &Arc<EntrySealedCommitted
if contains_o2_rs && contains_o2_scope_member {
security_debug!(entry = ?entry.get_uuid(), ident = ?iuser.entry.get_uuid2rdn(), "ident is a memberof a group granted an oauth2 scope by this entry");
return AccessResult::Allow(btreeset!(
Attribute::Class,
Attribute::DisplayName,
Attribute::Uuid,
Attribute::Name,
Attribute::OAuth2RsOriginLanding,
Attribute::Image
));
return AccessSrchResult::Allow {
attr: btreeset!(
Attribute::Class,
Attribute::DisplayName,
Attribute::Uuid,
Attribute::Name,
Attribute::OAuth2RsOriginLanding,
Attribute::Image
),
};
}
AccessResult::Ignore
AccessSrchResult::Ignore
}
}
}
@ -207,9 +214,9 @@ fn search_oauth2_filter_entry(ident: &Identity, entry: &Arc<EntrySealedCommitted
fn search_sync_account_filter_entry(
ident: &Identity,
entry: &Arc<EntrySealedCommitted>,
) -> AccessResult {
) -> AccessSrchResult {
match &ident.origin {
IdentType::Internal | IdentType::Synch(_) => AccessResult::Ignore,
IdentType::Internal | IdentType::Synch(_) => AccessSrchResult::Ignore,
IdentType::User(iuser) => {
// Is the user a synced object?
let is_user_sync_account = iuser
@ -244,16 +251,18 @@ fn search_sync_account_filter_entry(
// We finally got here!
security_debug!(entry = ?entry.get_uuid(), ident = ?iuser.entry.get_uuid2rdn(), "ident is a synchronised account from this sync account");
return AccessResult::Allow(btreeset!(
Attribute::Class,
Attribute::Uuid,
Attribute::SyncCredentialPortal
));
return AccessSrchResult::Allow {
attr: btreeset!(
Attribute::Class,
Attribute::Uuid,
Attribute::SyncCredentialPortal
),
};
}
}
}
// Fall through
AccessResult::Ignore
AccessSrchResult::Ignore
}
}
}

View file

@ -63,7 +63,7 @@ fn parse_attributes(
"ldap" => {
flags.ldap = true;
field_modifications.extend(quote! {
ldapaddress: Some("on".to_string()),})
ldapbindaddress: Some("on".to_string()),})
}
_ => {
let field_name = p.value().left.to_token_stream(); // here we can use to_token_stream as we know we're iterating over ExprAssigns

View file

@ -51,7 +51,7 @@ kanidm_build_profiles = { workspace = true }
compact_jwt = { workspace = true }
escargot = "0.5.13"
# used for webdriver testing
fantoccini = { version = "0.21.4" }
fantoccini = { version = "0.21.5" }
futures = { workspace = true }
ldap3_client = { workspace = true }
oauth2_ext = { workspace = true, default-features = false, features = [
@ -64,7 +64,7 @@ tokio-openssl = { workspace = true }
kanidm_lib_crypto = { workspace = true }
uuid = { workspace = true }
webauthn-authenticator-rs = { workspace = true }
jsonschema = "0.29.0"
jsonschema = "0.29.1"
[package.metadata.cargo-machete]
ignored = ["escargot", "futures", "kanidm_build_profiles"]

View file

@ -84,9 +84,9 @@ pub async fn setup_async_test(mut config: Configuration) -> AsyncTestEnvironment
let addr = format!("http://localhost:{}", port);
let ldap_url = if config.ldapaddress.is_some() {
let ldap_url = if config.ldapbindaddress.is_some() {
let ldapport = port_loop();
config.ldapaddress = Some(format!("127.0.0.1:{}", ldapport));
config.ldapbindaddress = Some(format!("127.0.0.1:{}", ldapport));
Url::parse(&format!("ldap://127.0.0.1:{}", ldapport))
.inspect_err(|err| error!(?err, "ldap address setup"))
.ok()

View file

@ -121,6 +121,7 @@ pub enum ClientRequest {
NssGroups,
NssGroupByGid(u32),
NssGroupByName(String),
NssGroupsByMember(String),
PamAuthenticateInit {
account_id: String,
info: PamServiceInfo,
@ -144,6 +145,7 @@ impl ClientRequest {
ClientRequest::NssGroups => "NssGroups".to_string(),
ClientRequest::NssGroupByGid(id) => format!("NssGroupByGid({})", id),
ClientRequest::NssGroupByName(id) => format!("NssGroupByName({})", id),
ClientRequest::NssGroupsByMember(id) => format!("NssGroupsByMember({})", id),
ClientRequest::PamAuthenticateInit { account_id, info } => format!(
"PamAuthenticateInit{{ account_id={} tty={} pam_secvice{} rhost={} }}",
account_id,

View file

@ -285,6 +285,42 @@ pub fn get_group_entry_by_name(name: String, req_options: RequestOptions) -> Res
}
}
pub fn get_group_entries_by_member(member: String, req_options: RequestOptions) -> Response<Vec<Group>> {
match req_options.connect_to_daemon() {
Source::Daemon(mut daemon_client) => {
let req = ClientRequest::NssGroupsByMember(member);
daemon_client
.call_and_wait(&req, None)
.map(|r| match r {
ClientResponse::NssGroups(l) => {
l.into_iter().map(group_from_nssgroup).collect()
}
_ => Vec::new(),
})
.map(Response::Success)
.unwrap_or_else(|_| Response::Success(vec![]))
}
Source::Fallback { users: _, groups } => {
if groups.is_empty() {
return Response::Unavail;
}
let membergroups = groups
.into_iter()
.filter_map(|etcgroup| {
if etcgroup.members.contains(&member) {
Some(group_from_etcgroup(etcgroup))
} else {
None
}
})
.collect();
Response::Success(membergroups)
}
}
}
fn passwd_from_etcuser(etc: EtcUser) -> Passwd {
Passwd {
name: etc.name,

View file

@ -3,6 +3,7 @@ use kanidm_unix_common::constants::DEFAULT_CONFIG_PATH;
use libnss::group::{Group, GroupHooks};
use libnss::interop::Response;
use libnss::passwd::{Passwd, PasswdHooks};
use libnss::initgroups::{InitgroupsHooks};
struct KanidmPasswd;
libnss_passwd_hooks!(kanidm, KanidmPasswd);
@ -61,3 +62,16 @@ impl GroupHooks for KanidmGroup {
core::get_group_entry_by_name(name, req_opt)
}
}
struct KanidmInitgroups;
libnss_initgroups_hooks!(kanidm, KanidmInitgroups);
impl InitgroupsHooks for KanidmInitgroups {
fn get_entries_by_user(user: String) -> Response<Vec<Group>> {
let req_opt = RequestOptions::Main {
config_path: DEFAULT_CONFIG_PATH,
};
core::get_group_entries_by_member(user, req_opt)
}
}

View file

@ -317,6 +317,14 @@ async fn handle_client(
error!("unable to load group, returning empty.");
ClientResponse::NssGroup(None)
}),
ClientRequest::NssGroupsByMember(account_id) => cachelayer
.get_nssgroups_member_name(account_id.as_str())
.await
.map(ClientResponse::NssGroups)
.unwrap_or_else(|_| {
error!("unable to enum groups");
ClientResponse::NssGroups(Vec::new())
}),
ClientRequest::PamAuthenticateInit { account_id, info } => {
match &pam_auth_session_state {
Some(_auth_session) => {

View file

@ -792,6 +792,37 @@ impl DbTxn<'_> {
}
}
pub fn get_user_groups(&mut self, a_uuid: Uuid) -> Result<Vec<GroupToken>, CacheError> {
let mut stmt = self
.conn
.prepare("SELECT group_t.token FROM (group_t, memberof_t) WHERE group_t.uuid = memberof_t.g_uuid AND memberof_t.a_uuid = :a_uuid")
.map_err(|e| {
self.sqlite_error("select prepare", &e)
})?;
let data_iter = stmt
.query_map([a_uuid.as_hyphenated().to_string()], |row| row.get(0))
.map_err(|e| self.sqlite_error("query_map", &e))?;
let data: Result<Vec<Vec<u8>>, _> = data_iter
.map(|v| v.map_err(|e| self.sqlite_error("map", &e)))
.collect();
let data = data?;
Ok(data
.iter()
.filter_map(|token| {
// token convert with json.
// trace!("{:?}", token);
serde_json::from_slice(token.as_slice())
.map_err(|e| {
error!("json error -> {:?}", e);
})
.ok()
})
.collect())
}
pub fn get_group_members(&mut self, g_uuid: Uuid) -> Result<Vec<UserToken>, CacheError> {
let mut stmt = self
.conn

View file

@ -621,6 +621,17 @@ impl Resolver {
})
}
async fn get_usergroups(&self, g_uuid: Uuid) -> Vec<String> {
let mut dbtxn = self.db.write().await;
dbtxn
.get_user_groups(g_uuid)
.unwrap_or_else(|_| Vec::new())
.into_iter()
.map(|gt| self.token_gidattr(&gt))
.collect()
}
async fn get_groupmembers(&self, g_uuid: Uuid) -> Vec<String> {
let mut dbtxn = self.db.write().await;
@ -781,6 +792,17 @@ impl Resolver {
Ok(r)
}
pub async fn get_nssgroups_member_name(&self, account_id: &str) -> Result<Vec<NssGroup>, ()> {
if let Some(nss_user) = self.get_nssaccount(&account_id).await {
Ok(self.get_usergroups(nss_user).await
.into_iter()
.map(|g| self.token_gidattr(&g))
.collect())
} else {
Ok(Vec::new())
}
}
async fn get_nssgroup(&self, grp_id: Id) -> Result<Option<NssGroup>, ()> {
if let Some(mut nss_group) = self.system_provider.get_nssgroup(&grp_id).await {
debug!("system provider satisfied request");

View file

@ -70,7 +70,7 @@ async fn setup_test(fix_fn: Fixture) -> (Resolver, KanidmClient) {
});
// Setup the config ...
let mut config = Configuration::new();
let mut config = Configuration::new_for_test();
config.address = format!("127.0.0.1:{}", port);
config.integration_test_config = Some(int_config);
config.role = ServerRole::WriteReplicaNoUI;