kanidm/kanidm_rlm_python/mods-available/cache

# -*- text -*-
#
# $Id: 8bd4730cf570fdfedc9c516dc6974eab39981600 $
#
# A module to cache attributes. The idea is that you can look
# up information in a database, and then cache it. Repeated
# requests for the same information will then have the cached
# values added to the request.
#
# The module can cache a fixed set of attributes per key.
# It can be listed in "authorize", "post-auth", "pre-proxy"
# and "post-proxy".
#
# If you want different things cached for authorize and post-auth,
# you will need to define two instances of the "cache" module.
#
# The module returns "ok" if it found or created a cache entry.
# The module returns "updated" if it merged a cached entry.
# The module returns "noop" if it did nothing.
# The module returns "fail" on error.
#
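# As an illustrative sketch only (not part of the module configuration), these
# return codes could be acted on from unlang, assuming the default instance
# name "cache" is listed in the "authorize" section:
#
#   authorize {
#       cache
#       if (ok || updated) {
#           # cached attributes have been added to the request
#       }
#   }
#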
cache {
    # The backend datastore used to store the cache entries.
    # Current datastores are
    #   rlm_cache_rbtree    - An in memory, non persistent rbtree based datastore.
    #                         Useful for caching data locally.
    #   rlm_cache_memcached - A non persistent "webscale" distributed datastore.
    #                         Useful if the cached data need to be shared between
    #                         a cluster of RADIUS servers.
    driver = "rlm_cache_rbtree"

    #
    # Some drivers accept specific options. To set them, a
    # config section with the same name as the driver should be added
    # to the cache instance.
    #
    # Driver specific options are:
    #
    #   memcached {
    #       # Memcached configuration options, as documented here:
    #       # http://docs.libmemcached.org/libmemcached_configuration.html#memcached
    #       options = "--SERVER=localhost"
    #
    #       pool {
    #           start = ${thread[pool].start_servers}
    #           min = ${thread[pool].min_spare_servers}
    #           max = ${thread[pool].max_servers}
    #           spare = ${thread[pool].max_spare_servers}
    #           uses = 0
    #           lifetime = 0
    #           idle_timeout = 60
    #       }
    #   }

    # The key used to index the cache. It is dynamically expanded
    # at run time.
    key = "%{User-Name}"
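
    # As an illustrative sketch only (not one of this file's defaults), the key
    # can combine multiple expansions, for example to keep separate cache
    # entries per user and per NAS:
    #
    #   key = "%{User-Name}:%{NAS-IP-Address}"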

    # The TTL of cache entries, in seconds. Entries older than this
    # will be expired.
    #
    # This value should be between 10 and 86400.
    ttl = 10

    # If yes, the following attributes will be added to the request:
    #   * &request:Cache-Entry-Hits - The number of times this entry
    #     has been retrieved.
    #
    # Note: Not supported by the rlm_cache_memcached module.
    add_stats = no
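
    # As an illustrative sketch only (assuming "add_stats = yes" and the
    # rlm_cache_rbtree driver), the hit counter could then be checked from
    # unlang, for example:
    #
    #   if (&request:Cache-Entry-Hits > 5) {
    #       # this key has been looked up more than five times
    #   }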

    #
    # The list of attributes to cache for a particular key.
    #
    # Each key gets the same set of cached attributes. The attributes
    # are dynamically expanded at run time.
    #
    # The semantics of this construct are identical to an unlang
    # update block, except the left hand side of the expression
    # represents the cache entry. See "man unlang" for more information
    # on update blocks.
    #
    # Note: Only request, reply, control and session-state lists
    # are available in cache entries. Attempting to store attributes
    # in other lists will raise an error during config validation.
    #
    update {
        # <list>:<attribute> <op> <value>

        # Cache all instances of Reply-Message in the reply list
        &reply:Reply-Message += &reply:Reply-Message[*]

        # Add our own to show when the cache was last updated
        &reply:Reply-Message += "Cache last updated at %t"

        &reply:Class := "%{randstr:ssssssssssssssssssssssssssssssss}"
    }

    #
    # This module supports a number of runtime configuration parameters
    # represented by attributes in the &control: list.
    #
    # &control:Cache-TTL - Sets the TTL of an entry to be created, or
    # modifies the TTL of an existing entry.
    #   - Setting a Cache-TTL of > 0 means set the TTL of the entry to
    #     the new value (and reset the expiry timer).
    #   - Setting a Cache-TTL of < 0 means expire the existing entry
    #     (without merging) and create a new one with TTL set to
    #     value * -1.
    #   - Setting a Cache-TTL of 0 means expire the existing entry
    #     (without merging) and don't create a new one.
    #
    # &control:Cache-Status-Only - If present and set to 'yes' will
    # prevent a new entry from being created, and existing entries from
    # being merged. It will also alter the module's return codes.
    #   - The module will return "ok" if a cache entry was found.
    #   - The module will return "notfound" if no cache entry was found.
    #
    # &control:Cache-Read-Only - If present and set to 'yes' will
    # prevent a new entry from being created, but will allow existing
    # entries to be merged. It will also alter the module's return codes.
    #   - The module will return "updated" if a cache entry was found.
    #   - The module will return "notfound" if no cache entry was found.
    #
    # &control:Cache-Merge - If present and set to 'yes' will merge new
    # cache entries into the current request. Useful if results
    # of execs or expansions are stored directly in the cache.
    #
    # All runtime configuration attributes will be removed from the
    # &control: list after the cache module is called.
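    #
    # As an illustrative sketch only (not part of the defaults above), the
    # control attributes could be set from unlang before calling the module,
    # for example to check for an existing entry without creating or merging
    # one:
    #
    #   update {
    #       &control:Cache-Status-Only := 'yes'
    #   }
    #   cache
    #   if (notfound) {
    #       # no entry existed for this key
    #   }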
}