use std::convert::TryFrom;
use std::fmt;
use std::time::Duration;

use crate::unix_config::TpmPolicy;
use kanidm_lib_crypto::CryptoPolicy;
use kanidm_lib_crypto::DbPasswordV1;
use kanidm_lib_crypto::Password;
use kanidm_proto::v1::{UnixGroupToken, UnixUserToken};
use libc::umask;
use rusqlite::Connection;
use tokio::sync::{Mutex, MutexGuard};

use crate::cache::Id;

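/// The cache database handle. This wraps the single sqlite [`Connection`]
/// behind an async [`Mutex`], together with the password hashing policy and
/// the optional TPM binding configuration resolved at startup.
///
/// A minimal usage sketch (the path and the surrounding async runtime are
/// illustrative assumptions, not part of this module):
///
/// ```ignore
/// let db = Db::new("/var/cache/kanidm-unixd/kanidm.cache.db", &TpmPolicy::default())?;
/// let dbtxn = db.write().await;
/// dbtxn.migrate()?;
/// dbtxn.commit()?;
/// ```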
pub struct Db {
    conn: Mutex<Connection>,
    crypto_policy: CryptoPolicy,
    require_tpm: Option<tpm::TpmConfig>,
}

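/// A single write transaction over the cache database. Constructing a `DbTxn`
/// issues `BEGIN TRANSACTION` immediately; the transaction must be finished
/// with [`DbTxn::commit`], otherwise it is rolled back when the guard is
/// dropped.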
pub struct DbTxn<'a> {
    conn: MutexGuard<'a, Connection>,
    committed: bool,
    crypto_policy: &'a CryptoPolicy,
    require_tpm: Option<&'a tpm::TpmConfig>,
}

impl Db {
    pub fn new(path: &str, tpm_policy: &TpmPolicy) -> Result<Self, ()> {
        let before = unsafe { umask(0o0027) };
        let conn = Connection::open(path).map_err(|e| {
            error!(err = ?e, "rusqlite error");
        })?;
        let _ = unsafe { umask(before) };
        // We only build a single thread. If we need more than one, we'll
        // need to re-do this to account for path = "" for debug.
        let crypto_policy = CryptoPolicy::time_target(Duration::from_millis(250));

        debug!("Configured {:?}", crypto_policy);

        // Test if we have a tpm context.
        let require_tpm = match tpm_policy {
            TpmPolicy::Ignore => None,
            TpmPolicy::IfPossible(tcti_str) => Db::tpm_setup_context(tcti_str, &conn).ok(),
            TpmPolicy::Required(tcti_str) => Some(Db::tpm_setup_context(tcti_str, &conn)?),
        };

        Ok(Db {
            conn: Mutex::new(conn),
            crypto_policy,
            require_tpm,
        })
    }

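    /// Take the write transaction for this cache. The connection is held behind
    /// an async mutex, so callers queue here; the returned `DbTxn` begins a
    /// sqlite transaction immediately and rolls it back on drop unless
    /// `commit()` is called.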
    #[allow(clippy::expect_used)]
    pub async fn write(&self) -> DbTxn<'_> {
        let conn = self.conn.lock().await;
        DbTxn::new(conn, &self.crypto_policy, self.require_tpm.as_ref())
    }
}

impl fmt::Debug for Db {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Db {{}}")
    }
}

impl<'a> DbTxn<'a> {
    pub fn new(
        conn: MutexGuard<'a, Connection>,
        crypto_policy: &'a CryptoPolicy,
        require_tpm: Option<&'a tpm::TpmConfig>,
    ) -> Self {
        // Start the transaction
        // debug!("Starting db WR txn ...");
        #[allow(clippy::expect_used)]
        conn.execute("BEGIN TRANSACTION", [])
            .expect("Unable to begin transaction!");
        DbTxn {
            committed: false,
            conn,
            crypto_policy,
            require_tpm,
        }
    }

    /// This handles an error coming back from an sqlite event and dumps more information from it
    fn sqlite_error(&self, msg: &str, error: &rusqlite::Error) {
        error!(
            "sqlite {} error: {:?} db_path={:?}",
            msg,
            error,
            &self.conn.path()
        );
    }

    /// This handles an error coming back from an sqlite transaction and dumps a load of information from it
    fn sqlite_transaction_error(&self, error: &rusqlite::Error, _stmt: &rusqlite::Statement) {
        error!(
            "sqlite transaction error={:?} db_path={:?}",
            error,
            &self.conn.path(),
        );
        // TODO: one day figure out if there's an easy way to dump the transaction without the token...
    }

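    /// Create the cache schema if it does not already exist: `account_t` and
    /// `group_t` hold the serialised tokens keyed by uuid with unique
    /// name/spn/gidnumber columns, and `memberof_t` is the join table linking
    /// accounts to groups with `ON DELETE CASCADE` in both directions. Also
    /// switches the database to WAL journal mode.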
    pub fn migrate(&self) -> Result<(), ()> {
        self.conn.set_prepared_statement_cache_capacity(16);
        self.conn
            .prepare("PRAGMA journal_mode=WAL;")
            .and_then(|mut wal_stmt| wal_stmt.query([]).map(|_| ()))
            .map_err(|e| {
                self.sqlite_error("journal_mode pragma", &e);
            })?;

        // Set up two tables - one for accounts, one for groups - and correctly
        // index the columns. The account table has an optional password hash field.
        self.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS account_t (
                    uuid TEXT PRIMARY KEY,
                    name TEXT NOT NULL UNIQUE,
                    spn TEXT NOT NULL UNIQUE,
                    gidnumber INTEGER NOT NULL UNIQUE,
                    password BLOB,
                    token BLOB NOT NULL,
                    expiry NUMERIC NOT NULL
                )
                ",
                [],
            )
            .map_err(|e| {
                self.sqlite_error("account_t create", &e);
            })?;

        self.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS group_t (
                    uuid TEXT PRIMARY KEY,
                    name TEXT NOT NULL UNIQUE,
                    spn TEXT NOT NULL UNIQUE,
                    gidnumber INTEGER NOT NULL UNIQUE,
                    token BLOB NOT NULL,
                    expiry NUMERIC NOT NULL
                )
                ",
                [],
            )
            .map_err(|e| {
                self.sqlite_error("group_t create", &e);
            })?;

        self.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS memberof_t (
                    g_uuid TEXT,
                    a_uuid TEXT,
                    FOREIGN KEY(g_uuid) REFERENCES group_t(uuid) ON DELETE CASCADE,
                    FOREIGN KEY(a_uuid) REFERENCES account_t(uuid) ON DELETE CASCADE
                )
                ",
                [],
            )
            .map_err(|e| {
                self.sqlite_error("memberof_t create", &e);
            })?;

        Ok(())
    }

    pub fn commit(mut self) -> Result<(), ()> {
        // debug!("Committing BE txn");
        if self.committed {
            error!("Invalid state, SQL transaction was already committed!");
            return Err(());
        }
        self.committed = true;

        self.conn
            .execute("COMMIT TRANSACTION", [])
            .map(|_| ())
            .map_err(|e| {
                self.sqlite_error("commit", &e);
            })
    }

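    /// Mark every cached account and group as expired (expiry = 0) so the next
    /// lookup refreshes them from the server, without discarding the cached data.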
    pub fn invalidate(&self) -> Result<(), ()> {
        self.conn
            .execute("UPDATE group_t SET expiry = 0", [])
            .map_err(|e| {
                self.sqlite_error("update group_t", &e);
            })?;

        self.conn
            .execute("UPDATE account_t SET expiry = 0", [])
            .map_err(|e| {
                self.sqlite_error("update account_t", &e);
            })?;

        Ok(())
    }

    pub fn clear_cache(&self) -> Result<(), ()> {
        self.conn.execute("DELETE FROM group_t", []).map_err(|e| {
            self.sqlite_error("delete group_t", &e);
        })?;

        self.conn
            .execute("DELETE FROM account_t", [])
            .map_err(|e| {
                self.sqlite_error("delete account_t", &e);
            })?;

        Ok(())
    }

    fn get_account_data_name(&self, account_id: &str) -> Result<Vec<(Vec<u8>, i64)>, ()> {
        let mut stmt = self.conn
            .prepare(
                "SELECT token, expiry FROM account_t WHERE uuid = :account_id OR name = :account_id OR spn = :account_id"
            )
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        // Makes tuple (token, expiry)
        let data_iter = stmt
            .query_map([account_id], |row| Ok((row.get(0)?, row.get(1)?)))
            .map_err(|e| {
                self.sqlite_error("query_map failure", &e);
            })?;
        let data: Result<Vec<(Vec<u8>, i64)>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map failure", &e);
                })
            })
            .collect();
        data
    }

    fn get_account_data_gid(&self, gid: u32) -> Result<Vec<(Vec<u8>, i64)>, ()> {
        let mut stmt = self
            .conn
            .prepare("SELECT token, expiry FROM account_t WHERE gidnumber = :gid")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        // Makes tuple (token, expiry)
        let data_iter = stmt
            .query_map(params![gid], |row| Ok((row.get(0)?, row.get(1)?)))
            .map_err(|e| {
                self.sqlite_error("query_map", &e);
            })?;
        let data: Result<Vec<(Vec<u8>, i64)>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();
        data
    }

    pub fn get_account(&self, account_id: &Id) -> Result<Option<(UnixUserToken, u64)>, ()> {
        let data = match account_id {
            Id::Name(n) => self.get_account_data_name(n.as_str()),
            Id::Gid(g) => self.get_account_data_gid(*g),
        }?;

        // Assert only one result?
        if data.len() >= 2 {
            error!("invalid db state, multiple entries matched query?");
            return Err(());
        }

        if let Some((token, expiry)) = data.first() {
            // token convert with json.
            // If this errors, we specifically return Ok(None) because that triggers
            // the cache to refetch the token.
            match serde_json::from_slice(token.as_slice()) {
                Ok(t) => {
                    let e = u64::try_from(*expiry).map_err(|e| {
                        error!("u64 convert error -> {:?}", e);
                    })?;
                    Ok(Some((t, e)))
                }
                Err(e) => {
                    warn!("recoverable - json error -> {:?}", e);
                    Ok(None)
                }
            }
        } else {
            Ok(None)
        }
    }

    pub fn get_accounts(&self) -> Result<Vec<UnixUserToken>, ()> {
        let mut stmt = self
            .conn
            .prepare("SELECT token FROM account_t")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        let data_iter = stmt.query_map([], |row| row.get(0)).map_err(|e| {
            self.sqlite_error("query_map", &e);
        })?;
        let data: Result<Vec<Vec<u8>>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();

        let data = data?;

        Ok(data
            .iter()
            // We filter map here so that anything invalid is skipped.
            .filter_map(|token| {
                // token convert with json.
                serde_json::from_slice(token.as_slice())
                    .map_err(|e| {
                        warn!("get_accounts json error -> {:?}", e);
                    })
                    .ok()
            })
            .collect())
    }

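    /// Insert or refresh a cached account token. Because sqlite's
    /// `INSERT OR REPLACE INTO` would null the stored password hash, the upsert
    /// is done manually: purge any row that conflicts on name/spn/gidnumber but
    /// has a different uuid, try an `UPDATE` keyed on uuid, and only `INSERT`
    /// if nothing was updated. The account's group memberships in `memberof_t`
    /// are then rewritten from scratch.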
    pub fn update_account(&self, account: &UnixUserToken, expire: u64) -> Result<(), ()> {
        let data = serde_json::to_vec(account).map_err(|e| {
            error!("update_account json error -> {:?}", e);
        })?;
        let expire = i64::try_from(expire).map_err(|e| {
            error!("update_account i64 conversion error -> {:?}", e);
        })?;

        // This is needed because sqlite's 'insert or replace into' will null the password
        // field if present, and an upsert MUST match the exact conflicting column, so we
        // have to manually manage the update or insert :( :(

        // Find anything conflicting and purge it.
        self.conn.execute("DELETE FROM account_t WHERE NOT uuid = :uuid AND (name = :name OR spn = :spn OR gidnumber = :gidnumber)",
            named_params!{
                ":uuid": &account.uuid,
                ":name": &account.name,
                ":spn": &account.spn,
                ":gidnumber": &account.gidnumber,
            }
            )
            .map_err(|e| {
                self.sqlite_error("delete account_t duplicate", &e);
            })
            .map(|_| ())?;

        let updated = self.conn.execute(
                "UPDATE account_t SET name=:name, spn=:spn, gidnumber=:gidnumber, token=:token, expiry=:expiry WHERE uuid = :uuid",
            named_params!{
                ":uuid": &account.uuid,
                ":name": &account.name,
                ":spn": &account.spn,
                ":gidnumber": &account.gidnumber,
                ":token": &data,
                ":expiry": &expire,
            }
            )
            .map_err(|e| {
                self.sqlite_error("update account_t", &e);
            })?;

        if updated == 0 {
            let mut stmt = self.conn
                .prepare("INSERT INTO account_t (uuid, name, spn, gidnumber, token, expiry) VALUES (:uuid, :name, :spn, :gidnumber, :token, :expiry) ON CONFLICT(uuid) DO UPDATE SET name=excluded.name, spn=excluded.spn, gidnumber=excluded.gidnumber, token=excluded.token, expiry=excluded.expiry")
                .map_err(|e| {
                    self.sqlite_error("prepare", &e);
                })?;

            stmt.execute(named_params! {
                ":uuid": &account.uuid,
                ":name": &account.name,
                ":spn": &account.spn,
                ":gidnumber": &account.gidnumber,
                ":token": &data,
                ":expiry": &expire,
            })
            .map(|r| {
                debug!("insert -> {:?}", r);
            })
            .map_err(|error| {
                self.sqlite_transaction_error(&error, &stmt);
            })?;
        }

        // Now, we have to update the group memberships.

        // First remove everything that already exists:
        let mut stmt = self
            .conn
            .prepare("DELETE FROM memberof_t WHERE a_uuid = :a_uuid")
            .map_err(|e| {
                self.sqlite_error("prepare", &e);
            })?;
        stmt.execute([&account.uuid])
            .map(|r| {
                debug!("delete memberships -> {:?}", r);
            })
            .map_err(|error| {
                self.sqlite_transaction_error(&error, &stmt);
            })?;

        let mut stmt = self
            .conn
            .prepare("INSERT INTO memberof_t (a_uuid, g_uuid) VALUES (:a_uuid, :g_uuid)")
            .map_err(|e| {
                self.sqlite_error("prepare", &e);
            })?;
        // Now for each group, add the relation.
        account.groups.iter().try_for_each(|g| {
            stmt.execute(named_params! {
                ":a_uuid": &account.uuid,
                ":g_uuid": &g.uuid,
            })
            .map(|r| {
                debug!("insert membership -> {:?}", r);
            })
            .map_err(|error| {
                self.sqlite_transaction_error(&error, &stmt);
            })
        })
    }

    pub fn delete_account(&self, a_uuid: &str) -> Result<(), ()> {
        self.conn
            .execute(
                "DELETE FROM account_t WHERE uuid = :a_uuid",
                params![a_uuid],
            )
            .map(|_| ())
            .map_err(|e| {
                self.sqlite_error("delete account_t", &e);
            })
    }

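    /// Hash and store a credential for offline authentication. When a TPM
    /// binding is configured the hash is produced through the TPM-held key;
    /// otherwise a software `Password` hash under the crypto policy is used.
    /// In a build without the `tpm` feature but with a binding configured,
    /// this is a no-op.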
    pub fn update_account_password(&self, a_uuid: &str, cred: &str) -> Result<(), ()> {
        #[allow(unused_variables)]
        let pw = if let Some(tpm_conf) = self.require_tpm {
            // Do nothing.
            #[cfg(not(feature = "tpm"))]
            return Ok(());

            #[cfg(feature = "tpm")]
            let pw = Db::tpm_new(self.crypto_policy, cred, tpm_conf)?;
            #[cfg(feature = "tpm")]
            pw
        } else {
            Password::new(self.crypto_policy, cred).map_err(|e| {
                error!("password error -> {:?}", e);
            })?
        };

        let dbpw = pw.to_dbpasswordv1();
        let data = serde_json::to_vec(&dbpw).map_err(|e| {
            error!("json error -> {:?}", e);
        })?;

        self.conn
            .execute(
                "UPDATE account_t SET password = :data WHERE uuid = :a_uuid",
                named_params! {
                    ":a_uuid": &a_uuid,
                    ":data": &data,
                },
            )
            .map_err(|e| {
                self.sqlite_error("update account_t password", &e);
            })
            .map(|_| ())
    }

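    /// Verify a credential against the cached password hash for the account.
    /// Returns `Ok(false)` when no hash is cached, and uses the TPM verification
    /// path when a TPM binding is configured (always failing in a non-`tpm`
    /// build where a binding is present).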
    pub fn check_account_password(&self, a_uuid: &str, cred: &str) -> Result<bool, ()> {
        #[cfg(not(feature = "tpm"))]
        if self.require_tpm.is_some() {
            return Ok(false);
        }

        let mut stmt = self
            .conn
            .prepare("SELECT password FROM account_t WHERE uuid = :a_uuid AND password IS NOT NULL")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        // Get the stored password blob, if any.
        let data_iter = stmt.query_map([a_uuid], |row| row.get(0)).map_err(|e| {
            self.sqlite_error("query_map", &e);
        })?;
        let data: Result<Vec<Vec<u8>>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();

        let data = data?;

        if data.is_empty() {
            info!("No cached password, failing authentication");
            return Ok(false);
        }

        if data.len() >= 2 {
            error!("invalid db state, multiple entries matched query?");
            return Err(());
        }

        let pw = data.first().map(|raw| {
            // Map the option from data.first.
            let dbpw: DbPasswordV1 = serde_json::from_slice(raw.as_slice()).map_err(|e| {
                error!("json error -> {:?}", e);
            })?;
            Password::try_from(dbpw)
        });

        let pw = match pw {
            Some(Ok(p)) => p,
            _ => return Ok(false),
        };

        #[allow(unused_variables)]
        if let Some(tpm_conf) = self.require_tpm {
            #[cfg(feature = "tpm")]
            let r = Db::tpm_verify(pw, cred, tpm_conf);

            // Do nothing.
            #[cfg(not(feature = "tpm"))]
            let r = Ok(false);

            r
        } else {
            pw.verify(cred).map_err(|e| {
                error!("password error -> {:?}", e);
            })
        }
    }

    fn get_group_data_name(&self, grp_id: &str) -> Result<Vec<(Vec<u8>, i64)>, ()> {
        let mut stmt = self.conn
            .prepare(
                "SELECT token, expiry FROM group_t WHERE uuid = :grp_id OR name = :grp_id OR spn = :grp_id"
            )
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        // Makes tuple (token, expiry)
        let data_iter = stmt
            .query_map([grp_id], |row| Ok((row.get(0)?, row.get(1)?)))
            .map_err(|e| {
                self.sqlite_error("query_map", &e);
            })?;
        let data: Result<Vec<(Vec<u8>, i64)>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();
        data
    }

    fn get_group_data_gid(&self, gid: u32) -> Result<Vec<(Vec<u8>, i64)>, ()> {
        let mut stmt = self
            .conn
            .prepare("SELECT token, expiry FROM group_t WHERE gidnumber = :gid")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        // Makes tuple (token, expiry)
        let data_iter = stmt
            .query_map(params![gid], |row| Ok((row.get(0)?, row.get(1)?)))
            .map_err(|e| {
                self.sqlite_error("query_map", &e);
            })?;
        let data: Result<Vec<(Vec<u8>, i64)>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();
        data
    }

    pub fn get_group(&self, grp_id: &Id) -> Result<Option<(UnixGroupToken, u64)>, ()> {
        let data = match grp_id {
            Id::Name(n) => self.get_group_data_name(n.as_str()),
            Id::Gid(g) => self.get_group_data_gid(*g),
        }?;

        // Assert only one result?
        if data.len() >= 2 {
            error!("invalid db state, multiple entries matched query?");
            return Err(());
        }

        if let Some((token, expiry)) = data.first() {
            // token convert with json.
            // If this errors, we specifically return Ok(None) because that triggers
            // the cache to refetch the token.
            match serde_json::from_slice(token.as_slice()) {
                Ok(t) => {
                    let e = u64::try_from(*expiry).map_err(|e| {
                        error!("u64 convert error -> {:?}", e);
                    })?;
                    Ok(Some((t, e)))
                }
                Err(e) => {
                    warn!("recoverable - json error -> {:?}", e);
                    Ok(None)
                }
            }
        } else {
            Ok(None)
        }
    }

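    /// Return the tokens of all accounts that are members of the group with the
    /// given uuid, by joining `account_t` against the `memberof_t` relation.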
    pub fn get_group_members(&self, g_uuid: &str) -> Result<Vec<UnixUserToken>, ()> {
        let mut stmt = self
            .conn
            .prepare("SELECT account_t.token FROM (account_t, memberof_t) WHERE account_t.uuid = memberof_t.a_uuid AND memberof_t.g_uuid = :g_uuid")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        let data_iter = stmt.query_map([g_uuid], |row| row.get(0)).map_err(|e| {
            self.sqlite_error("query_map", &e);
        })?;
        let data: Result<Vec<Vec<u8>>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();

        let data = data?;

        data.iter()
            .map(|token| {
                // token convert with json.
                // debug!("{:?}", token);
                serde_json::from_slice(token.as_slice()).map_err(|e| {
                    error!("json error -> {:?}", e);
                })
            })
            .collect()
    }

    pub fn get_groups(&self) -> Result<Vec<UnixGroupToken>, ()> {
        let mut stmt = self
            .conn
            .prepare("SELECT token FROM group_t")
            .map_err(|e| {
                self.sqlite_error("select prepare", &e);
            })?;

        let data_iter = stmt.query_map([], |row| row.get(0)).map_err(|e| {
            self.sqlite_error("query_map", &e);
        })?;
        let data: Result<Vec<Vec<u8>>, _> = data_iter
            .map(|v| {
                v.map_err(|e| {
                    self.sqlite_error("map", &e);
                })
            })
            .collect();

        let data = data?;

        Ok(data
            .iter()
            .filter_map(|token| {
                // token convert with json.
                // debug!("{:?}", token);
                serde_json::from_slice(token.as_slice())
                    .map_err(|e| {
                        error!("json error -> {:?}", e);
                    })
                    .ok()
            })
            .collect())
    }

    pub fn update_group(&self, grp: &UnixGroupToken, expire: u64) -> Result<(), ()> {
        let data = serde_json::to_vec(grp).map_err(|e| {
            error!("json error -> {:?}", e);
        })?;
        let expire = i64::try_from(expire).map_err(|e| {
            error!("i64 convert error -> {:?}", e);
        })?;

        let mut stmt = self.conn
            .prepare("INSERT OR REPLACE INTO group_t (uuid, name, spn, gidnumber, token, expiry) VALUES (:uuid, :name, :spn, :gidnumber, :token, :expiry)")
            .map_err(|e| {
                self.sqlite_error("prepare", &e);
            })?;

        stmt.execute(named_params! {
            ":uuid": &grp.uuid,
            ":name": &grp.name,
            ":spn": &grp.spn,
            ":gidnumber": &grp.gidnumber,
            ":token": &data,
            ":expiry": &expire,
        })
        .map(|r| {
            debug!("insert -> {:?}", r);
        })
        .map_err(|e| {
            self.sqlite_error("execute", &e);
        })
    }

    pub fn delete_group(&self, g_uuid: &str) -> Result<(), ()> {
        self.conn
            .execute("DELETE FROM group_t WHERE uuid = :g_uuid", [g_uuid])
            .map(|_| ())
            .map_err(|e| {
                self.sqlite_error("delete group_t", &e);
            })
    }
}

impl<'a> fmt::Debug for DbTxn<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "DbTxn {{}}")
    }
}

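// Any transaction that is dropped without an explicit commit() is rolled back,
// so a failed or abandoned write never leaves partial state in the cache.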
impl<'a> Drop for DbTxn<'a> {
    // Abort
    fn drop(&mut self) {
        if !self.committed {
            // debug!("Aborting BE WR txn");
            #[allow(clippy::expect_used)]
            self.conn
                .execute("ROLLBACK TRANSACTION", [])
                .expect("Unable to rollback transaction! Can not proceed!!!");
        }
    }
}

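/// Stub tpm module used when the `tpm` feature is disabled: it keeps the same
/// `TpmConfig` / `tpm_setup_context` names so the callers above still compile,
/// but setting up a context always fails.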
#[cfg(not(feature = "tpm"))]
pub(crate) mod tpm {
    use super::Db;

    use rusqlite::Connection;

    pub struct TpmConfig {}

    impl Db {
        pub fn tpm_setup_context(_tcti_str: &str, _conn: &Connection) -> Result<TpmConfig, ()> {
            warn!("tpm feature is not available in this build");
            Err(())
        }
    }
}

#[cfg(feature = "tpm")]
pub(crate) mod tpm {
    use super::Db;

    use rusqlite::{Connection, OptionalExtension};

    use kanidm_lib_crypto::{CryptoError, CryptoPolicy, Password, TpmError};
    use tss_esapi::{utils::TpmsContext, Context, TctiNameConf};

    use std::str::FromStr;

    pub struct TpmConfig {
        tcti: TctiNameConf,
        ctx: TpmsContext,
    }

    impl Db {
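        /// Prepare the TPM binding for this cache: parse the TCTI string, open a
        /// TPM context, and load the saved key context blob from the `config_t`
        /// table, creating and persisting a fresh key context if none exists.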
        pub fn tpm_setup_context(tcti_str: &str, conn: &Connection) -> Result<TpmConfig, ()> {
            let tcti = TctiNameConf::from_str(tcti_str).map_err(|e| {
                error!(tpm_err = ?e, "Failed to parse tcti name");
            })?;

            let mut context = Context::new(tcti.clone()).map_err(|e| {
                error!(tpm_err = ?e, "Failed to create tpm context");
            })?;

            conn.execute(
                "CREATE TABLE IF NOT EXISTS config_t (
                    key TEXT PRIMARY KEY,
                    value TEXT NOT NULL
                )
                ",
                [],
            )
            .map_err(|e| {
                error!(sqlite_err = ?e, "create config_t");
            })?;

            // Try and get the db context.
            let ctx_data: Option<Vec<u8>> = conn
                .query_row(
                    "SELECT value FROM config_t WHERE key='tpm2_ctx'",
                    [],
                    |row| row.get(0),
                )
                .optional()
                .map_err(|e| {
                    error!(sqlite_err = ?e, "Failed to load tpm2_ctx");
                })
                .unwrap_or(None);

            trace!(ctx_data_present = %ctx_data.is_some());

            let ex_ctx = if let Some(ctx_data) = ctx_data {
                // Test loading, blank it out if it fails.
                // deserialise
                let maybe_ctx: TpmsContext =
                    serde_json::from_slice(ctx_data.as_slice()).map_err(|e| {
                        warn!("json error -> {:?}", e);
                    })?;

                // can it load?
                context
                    .execute_with_nullauth_session(|ctx| ctx.context_load(maybe_ctx.clone()))
                    .map_err(|e| {
                        error!(tpm_err = ?e, "Failed to load tpm context");
                    })?;

                Some(maybe_ctx)
            } else {
                None
            };

            let ctx = if let Some(existing_ctx) = ex_ctx {
                existing_ctx
            } else {
                // Need to regenerate for some reason
                info!("Creating new tpm ctx key");
                context
                    .execute_with_nullauth_session(|ctx| {
                        let key = Password::prepare_tpm_key(ctx)?;

                        ctx.context_save(key.into()).map_err(|e| e.into())
                    })
                    .map_err(|e: CryptoError| {
                        error!(tpm_err = ?e, "Failed to create tpm key");
                    })?
            };

            // Serialise it out.
            let data = serde_json::to_vec(&ctx).map_err(|e| {
                error!("json error -> {:?}", e);
            })?;

            // Update the tpm ctx str
            conn.execute(
                "INSERT OR REPLACE INTO config_t (key, value) VALUES ('tpm2_ctx', :data)",
                named_params! {
                    ":data": &data,
                },
            )
            .map_err(|e| {
                error!(sqlite_err = ?e, "update config_t tpm_ctx");
            })
            .map(|_| ())?;

            info!("tpm binding configured");

            Ok(TpmConfig { tcti, ctx })
        }

        pub fn tpm_new(
            policy: &CryptoPolicy,
            cred: &str,
            tpm_conf: &TpmConfig,
        ) -> Result<Password, ()> {
            let mut context = Context::new(tpm_conf.tcti.clone()).map_err(|e| {
                error!(tpm_err = ?e, "Failed to create tpm context");
            })?;

            context
                .execute_with_nullauth_session(|ctx| {
                    let key = ctx.context_load(tpm_conf.ctx.clone()).map_err(|e| {
                        error!(tpm_err = ?e, "Failed to load tpm context");
                        <TpmError as Into<CryptoError>>::into(e)
                    })?;

                    Password::new_argon2id_tpm(policy, cred, ctx, key)
                })
                .map_err(|e: CryptoError| {
                    error!(tpm_err = ?e, "Failed to create tpm bound password");
                })
        }

        pub fn tpm_verify(pw: Password, cred: &str, tpm_conf: &TpmConfig) -> Result<bool, ()> {
            let mut context = Context::new(tpm_conf.tcti.clone()).map_err(|e| {
                error!(tpm_err = ?e, "Failed to create tpm context");
            })?;

            context
                .execute_with_nullauth_session(|ctx| {
                    let key = ctx.context_load(tpm_conf.ctx.clone()).map_err(|e| {
                        error!(tpm_err = ?e, "Failed to load tpm context");
                        <TpmError as Into<CryptoError>>::into(e)
                    })?;

                    pw.verify_ctx(cred, Some((ctx, key)))
                })
                .map_err(|e: CryptoError| {
                    error!(tpm_err = ?e, "Failed to verify tpm bound password");
                })
        }
    }
}

#[cfg(test)]
mod tests {
    use kanidm_proto::v1::{UnixGroupToken, UnixUserToken};

    use super::Db;
    use crate::cache::Id;
    use crate::unix_config::TpmPolicy;

    const TESTACCOUNT1_PASSWORD_A: &str = "password a for account1 test";
    const TESTACCOUNT1_PASSWORD_B: &str = "password b for account1 test";

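    // These tests open the database with an empty path, which sqlite treats as a
    // private temporary database, so each test gets its own isolated cache.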
    #[tokio::test]
    async fn test_cache_db_account_basic() {
        sketching::test_init();
        let db = Db::new("", &TpmPolicy::default()).expect("failed to create.");
        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let mut ut1 = UnixUserToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            displayname: "Test User".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
            shell: None,
            groups: Vec::new(),
            sshkeys: vec!["key-a".to_string()],
            valid: true,
        };

        let id_name = Id::Name("testuser".to_string());
        let id_name2 = Id::Name("testuser2".to_string());
        let id_spn = Id::Name("testuser@example.com".to_string());
        let id_spn2 = Id::Name("testuser2@example.com".to_string());
        let id_uuid = Id::Name("0302b99c-f0f6-41ab-9492-852692b0fd16".to_string());
        let id_gid = Id::Gid(2000);

        // test finding no account
        let r1 = dbtxn.get_account(&id_name).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_account(&id_spn).unwrap();
        assert!(r2.is_none());
        let r3 = dbtxn.get_account(&id_uuid).unwrap();
        assert!(r3.is_none());
        let r4 = dbtxn.get_account(&id_gid).unwrap();
        assert!(r4.is_none());

        // test adding an account
        dbtxn.update_account(&ut1, 0).unwrap();

        // test we can get it.
        let r1 = dbtxn.get_account(&id_name).unwrap();
        assert!(r1.is_some());
        let r2 = dbtxn.get_account(&id_spn).unwrap();
        assert!(r2.is_some());
        let r3 = dbtxn.get_account(&id_uuid).unwrap();
        assert!(r3.is_some());
        let r4 = dbtxn.get_account(&id_gid).unwrap();
        assert!(r4.is_some());

        // test adding an account that was renamed
        ut1.name = "testuser2".to_string();
        ut1.spn = "testuser2@example.com".to_string();
        dbtxn.update_account(&ut1, 0).unwrap();

        // get the account
        let r1 = dbtxn.get_account(&id_name).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_account(&id_spn).unwrap();
        assert!(r2.is_none());
        let r1 = dbtxn.get_account(&id_name2).unwrap();
        assert!(r1.is_some());
        let r2 = dbtxn.get_account(&id_spn2).unwrap();
        assert!(r2.is_some());
        let r3 = dbtxn.get_account(&id_uuid).unwrap();
        assert!(r3.is_some());
        let r4 = dbtxn.get_account(&id_gid).unwrap();
        assert!(r4.is_some());

        // Clear cache
        assert!(dbtxn.clear_cache().is_ok());

        // should be nothing
        let r1 = dbtxn.get_account(&id_name2).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_account(&id_spn2).unwrap();
        assert!(r2.is_none());
        let r3 = dbtxn.get_account(&id_uuid).unwrap();
        assert!(r3.is_none());
        let r4 = dbtxn.get_account(&id_gid).unwrap();
        assert!(r4.is_none());

        assert!(dbtxn.commit().is_ok());
    }

    #[tokio::test]
    async fn test_cache_db_group_basic() {
        sketching::test_init();
        let db = Db::new("", &TpmPolicy::default()).expect("failed to create.");
        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let mut gt1 = UnixGroupToken {
            name: "testgroup".to_string(),
            spn: "testgroup@example.com".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
        };

        let id_name = Id::Name("testgroup".to_string());
        let id_name2 = Id::Name("testgroup2".to_string());
        let id_spn = Id::Name("testgroup@example.com".to_string());
        let id_spn2 = Id::Name("testgroup2@example.com".to_string());
        let id_uuid = Id::Name("0302b99c-f0f6-41ab-9492-852692b0fd16".to_string());
        let id_gid = Id::Gid(2000);

        // test finding no group
        let r1 = dbtxn.get_group(&id_name).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_group(&id_spn).unwrap();
        assert!(r2.is_none());
        let r3 = dbtxn.get_group(&id_uuid).unwrap();
        assert!(r3.is_none());
        let r4 = dbtxn.get_group(&id_gid).unwrap();
        assert!(r4.is_none());

        // test adding a group
        dbtxn.update_group(&gt1, 0).unwrap();
        let r1 = dbtxn.get_group(&id_name).unwrap();
        assert!(r1.is_some());
        let r2 = dbtxn.get_group(&id_spn).unwrap();
        assert!(r2.is_some());
        let r3 = dbtxn.get_group(&id_uuid).unwrap();
        assert!(r3.is_some());
        let r4 = dbtxn.get_group(&id_gid).unwrap();
        assert!(r4.is_some());

        // add a group via update
        gt1.name = "testgroup2".to_string();
        gt1.spn = "testgroup2@example.com".to_string();
        dbtxn.update_group(&gt1, 0).unwrap();
        let r1 = dbtxn.get_group(&id_name).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_group(&id_spn).unwrap();
        assert!(r2.is_none());
        let r1 = dbtxn.get_group(&id_name2).unwrap();
        assert!(r1.is_some());
        let r2 = dbtxn.get_group(&id_spn2).unwrap();
        assert!(r2.is_some());
        let r3 = dbtxn.get_group(&id_uuid).unwrap();
        assert!(r3.is_some());
        let r4 = dbtxn.get_group(&id_gid).unwrap();
        assert!(r4.is_some());

        // clear cache
        assert!(dbtxn.clear_cache().is_ok());

        // should be nothing.
        let r1 = dbtxn.get_group(&id_name2).unwrap();
        assert!(r1.is_none());
        let r2 = dbtxn.get_group(&id_spn2).unwrap();
        assert!(r2.is_none());
        let r3 = dbtxn.get_group(&id_uuid).unwrap();
        assert!(r3.is_none());
        let r4 = dbtxn.get_group(&id_gid).unwrap();
        assert!(r4.is_none());

        assert!(dbtxn.commit().is_ok());
    }

    #[tokio::test]
    async fn test_cache_db_account_group_update() {
        sketching::test_init();
        let db = Db::new("", &TpmPolicy::default()).expect("failed to create.");
        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let gt1 = UnixGroupToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
        };

        let gt2 = UnixGroupToken {
            name: "testgroup".to_string(),
            spn: "testgroup@example.com".to_string(),
            gidnumber: 2001,
            uuid: "b500be97-8552-42a5-aca0-668bc5625705".to_string(),
        };

        let mut ut1 = UnixUserToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            displayname: "Test User".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
            shell: None,
            groups: vec![gt1.clone(), gt2],
            sshkeys: vec!["key-a".to_string()],
            valid: true,
        };

        // First, add the groups.
        ut1.groups.iter().for_each(|g| {
            dbtxn.update_group(g, 0).unwrap();
        });

        // Then add the account.
        dbtxn.update_account(&ut1, 0).unwrap();

        // Now, get the memberships of the two groups.
        let m1 = dbtxn
            .get_group_members("0302b99c-f0f6-41ab-9492-852692b0fd16")
            .unwrap();
        let m2 = dbtxn
            .get_group_members("b500be97-8552-42a5-aca0-668bc5625705")
            .unwrap();
        assert!(m1[0].name == "testuser");
        assert!(m2[0].name == "testuser");

        // Now alter testuser, remove gt2, update.
        ut1.groups = vec![gt1];
        dbtxn.update_account(&ut1, 0).unwrap();

        // Check that the memberships have updated correctly.
        let m1 = dbtxn
            .get_group_members("0302b99c-f0f6-41ab-9492-852692b0fd16")
            .unwrap();
        let m2 = dbtxn
            .get_group_members("b500be97-8552-42a5-aca0-668bc5625705")
            .unwrap();
        assert!(m1[0].name == "testuser");
        assert!(m2.is_empty());

        assert!(dbtxn.commit().is_ok());
    }

    #[tokio::test]
    async fn test_cache_db_account_password() {
        sketching::test_init();

        #[cfg(feature = "tpm")]
        let tpm_policy = TpmPolicy::Required("device:/dev/tpmrm0".to_string());

        #[cfg(not(feature = "tpm"))]
        let tpm_policy = TpmPolicy::default();

        let db = Db::new("", &tpm_policy).expect("failed to create.");

        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let uuid1 = "0302b99c-f0f6-41ab-9492-852692b0fd16";
        let mut ut1 = UnixUserToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            displayname: "Test User".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
            shell: None,
            groups: Vec::new(),
            sshkeys: vec!["key-a".to_string()],
            valid: true,
        };

        // Test that with no account, is false
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_A) == Ok(false));
        // test adding an account
        dbtxn.update_account(&ut1, 0).unwrap();
        // check with no password is false.
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_A) == Ok(false));
        // update the pw
        assert!(dbtxn
            .update_account_password(uuid1, TESTACCOUNT1_PASSWORD_A)
            .is_ok());
        // Check it now works.
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_A) == Ok(true));
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_B) == Ok(false));
        // Update the pw
        assert!(dbtxn
            .update_account_password(uuid1, TESTACCOUNT1_PASSWORD_B)
            .is_ok());
        // Check it matches.
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_A) == Ok(false));
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_B) == Ok(true));

        // Check that updating the account does not break the password.
        ut1.displayname = "Test User Update".to_string();
        dbtxn.update_account(&ut1, 0).unwrap();
        assert!(dbtxn.check_account_password(uuid1, TESTACCOUNT1_PASSWORD_B) == Ok(true));

        assert!(dbtxn.commit().is_ok());
    }

    #[tokio::test]
    async fn test_cache_db_group_rename_duplicate() {
        sketching::test_init();
        let db = Db::new("", &TpmPolicy::default()).expect("failed to create.");
        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let mut gt1 = UnixGroupToken {
            name: "testgroup".to_string(),
            spn: "testgroup@example.com".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
        };

        let gt2 = UnixGroupToken {
            name: "testgroup".to_string(),
            spn: "testgroup@example.com".to_string(),
            gidnumber: 2001,
            uuid: "799123b2-3802-4b19-b0b8-1ffae2aa9a4b".to_string(),
        };

        let id_name = Id::Name("testgroup".to_string());
        let id_name2 = Id::Name("testgroup2".to_string());

        // test finding no group
        let r1 = dbtxn.get_group(&id_name).unwrap();
        assert!(r1.is_none());

        // test adding a group
        dbtxn.update_group(&gt1, 0).unwrap();
        let r0 = dbtxn.get_group(&id_name).unwrap();
        assert!(r0.unwrap().0.uuid == "0302b99c-f0f6-41ab-9492-852692b0fd16");

        // Do the "rename" of gt1 which is what would allow gt2 to be valid.
        gt1.name = "testgroup2".to_string();
        gt1.spn = "testgroup2@example.com".to_string();
        // Now, add gt2 which dups on gt1 name/spn.
        dbtxn.update_group(&gt2, 0).unwrap();
        let r2 = dbtxn.get_group(&id_name).unwrap();
        assert!(r2.unwrap().0.uuid == "799123b2-3802-4b19-b0b8-1ffae2aa9a4b");
        let r3 = dbtxn.get_group(&id_name2).unwrap();
        assert!(r3.is_none());

        // Now finally update gt1
        dbtxn.update_group(&gt1, 0).unwrap();

        // Both now coexist
        let r4 = dbtxn.get_group(&id_name).unwrap();
        assert!(r4.unwrap().0.uuid == "799123b2-3802-4b19-b0b8-1ffae2aa9a4b");
        let r5 = dbtxn.get_group(&id_name2).unwrap();
        assert!(r5.unwrap().0.uuid == "0302b99c-f0f6-41ab-9492-852692b0fd16");

        assert!(dbtxn.commit().is_ok());
    }

    #[tokio::test]
    async fn test_cache_db_account_rename_duplicate() {
        sketching::test_init();
        let db = Db::new("", &TpmPolicy::default()).expect("failed to create.");
        let dbtxn = db.write().await;
        assert!(dbtxn.migrate().is_ok());

        let mut ut1 = UnixUserToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            displayname: "Test User".to_string(),
            gidnumber: 2000,
            uuid: "0302b99c-f0f6-41ab-9492-852692b0fd16".to_string(),
            shell: None,
            groups: Vec::new(),
            sshkeys: vec!["key-a".to_string()],
            valid: true,
        };

        let ut2 = UnixUserToken {
            name: "testuser".to_string(),
            spn: "testuser@example.com".to_string(),
            displayname: "Test User".to_string(),
            gidnumber: 2001,
            uuid: "799123b2-3802-4b19-b0b8-1ffae2aa9a4b".to_string(),
            shell: None,
            groups: Vec::new(),
            sshkeys: vec!["key-a".to_string()],
            valid: true,
        };

        let id_name = Id::Name("testuser".to_string());
        let id_name2 = Id::Name("testuser2".to_string());

        // test finding no account
        let r1 = dbtxn.get_account(&id_name).unwrap();
        assert!(r1.is_none());

        // test adding an account
        dbtxn.update_account(&ut1, 0).unwrap();
        let r0 = dbtxn.get_account(&id_name).unwrap();
        assert!(r0.unwrap().0.uuid == "0302b99c-f0f6-41ab-9492-852692b0fd16");

        // Do the "rename" of ut1 which is what would allow ut2 to be valid.
        ut1.name = "testuser2".to_string();
        ut1.spn = "testuser2@example.com".to_string();
        // Now, add ut2 which dups on ut1 name/spn.
        dbtxn.update_account(&ut2, 0).unwrap();
        let r2 = dbtxn.get_account(&id_name).unwrap();
        assert!(r2.unwrap().0.uuid == "799123b2-3802-4b19-b0b8-1ffae2aa9a4b");
        let r3 = dbtxn.get_account(&id_name2).unwrap();
        assert!(r3.is_none());

        // Now finally update ut1
        dbtxn.update_account(&ut1, 0).unwrap();

        // Both now coexist
        let r4 = dbtxn.get_account(&id_name).unwrap();
        assert!(r4.unwrap().0.uuid == "799123b2-3802-4b19-b0b8-1ffae2aa9a4b");
        let r5 = dbtxn.get_account(&id_name2).unwrap();
        assert!(r5.unwrap().0.uuid == "0302b99c-f0f6-41ab-9492-852692b0fd16");

        assert!(dbtxn.commit().is_ok());
    }
}
|