2021-06-02 01:42:40 +02:00
|
|
|
//! These contain the server "cores". These are able to startup the server
|
|
|
|
//! (bootstrap) to a running state and then execute tasks. This is where modules
|
|
|
|
//! are logically ordered based on their depenedncies for execution. Some of these
|
|
|
|
//! are task-only i.e. reindexing, and some of these launch the server into a
|
|
|
|
//! fully operational state (https, ldap, etc).
|
|
|
|
//!
|
|
|
|
//! Generally, this is the "entry point" where the server begins to run, and
|
|
|
|
//! the entry point for all client traffic which is then directed to the
|
2021-07-24 03:12:35 +02:00
|
|
|
//! various `actors`.
|
2021-06-02 01:42:40 +02:00
|
|
|
|
2022-02-20 03:43:38 +01:00
|
|
|
#![deny(warnings)]
|
|
|
|
#![warn(unused_extern_crates)]
|
|
|
|
#![deny(clippy::todo)]
|
|
|
|
#![deny(clippy::unimplemented)]
|
|
|
|
#![deny(clippy::unwrap_used)]
|
|
|
|
#![deny(clippy::expect_used)]
|
|
|
|
#![deny(clippy::panic)]
|
|
|
|
#![deny(clippy::unreachable)]
|
|
|
|
#![deny(clippy::await_holding_lock)]
|
|
|
|
#![deny(clippy::needless_pass_by_value)]
|
|
|
|
#![deny(clippy::trivially_copy_pass_by_ref)]
|
|
|
|
|
|
|
|
#[macro_use]
|
|
|
|
extern crate tracing;
|
|
|
|
#[macro_use]
|
2022-10-05 01:48:48 +02:00
|
|
|
extern crate kanidmd_lib;
|
2022-02-20 03:43:38 +01:00
|
|
|
|
2022-10-05 01:48:48 +02:00
|
|
|
pub mod actors;
|
|
|
|
pub mod config;
|
|
|
|
mod crypto;
|
2022-07-03 03:17:46 +02:00
|
|
|
pub mod https;
|
2022-10-05 01:48:48 +02:00
|
|
|
mod interval;
|
2020-06-10 04:07:43 +02:00
|
|
|
mod ldaps;
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2022-10-01 08:08:51 +02:00
|
|
|
use std::sync::Arc;
|
|
|
|
|
2022-06-26 10:02:16 +02:00
|
|
|
use compact_jwt::JwsSigner;
|
2022-06-28 01:22:31 +02:00
|
|
|
use kanidm_proto::messages::{AccountChangeMessage, MessageStatus};
|
2020-03-07 01:49:39 +01:00
|
|
|
use kanidm_proto::v1::OperationError;
|
2022-10-05 01:48:48 +02:00
|
|
|
use kanidmd_lib::be::{Backend, BackendConfig, BackendTransaction, FsType};
|
2022-12-28 08:52:25 +01:00
|
|
|
use kanidmd_lib::idm::ldap::LdapServer;
|
2022-10-05 01:48:48 +02:00
|
|
|
use kanidmd_lib::idm::server::{IdmServer, IdmServerDelayed};
|
|
|
|
use kanidmd_lib::prelude::*;
|
|
|
|
use kanidmd_lib::schema::Schema;
|
|
|
|
use kanidmd_lib::status::StatusActor;
|
|
|
|
use kanidmd_lib::utils::{duration_from_epoch_now, touch_file_or_quit};
|
2022-10-01 08:08:51 +02:00
|
|
|
#[cfg(not(target_family = "windows"))]
|
|
|
|
use libc::umask;
|
2020-05-07 06:08:06 +02:00
|
|
|
|
2022-11-23 11:10:43 +01:00
|
|
|
use tokio::sync::broadcast;
|
|
|
|
|
2022-10-05 01:48:48 +02:00
|
|
|
use crate::actors::v1_read::QueryServerReadV1;
|
|
|
|
use crate::actors::v1_write::QueryServerWriteV1;
|
|
|
|
use crate::config::Configuration;
|
|
|
|
use crate::crypto::setup_tls;
|
|
|
|
use crate::interval::IntervalActor;
|
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// === internal setup helpers
|
|
|
|
|
2020-06-24 13:17:46 +02:00
|
|
|
/// Construct a `Backend` from the supplied configuration and schema,
/// without performing a vacuum pass on open.
///
/// Thin wrapper over `setup_backend_vacuum` with `vacuum = false`.
fn setup_backend(config: &Configuration, schema: &Schema) -> Result<Backend, OperationError> {
    setup_backend_vacuum(config, schema, false)
}
|
|
|
|
|
|
|
|
fn setup_backend_vacuum(
|
|
|
|
config: &Configuration,
|
|
|
|
schema: &Schema,
|
|
|
|
vacuum: bool,
|
|
|
|
) -> Result<Backend, OperationError> {
|
2020-06-24 13:17:46 +02:00
|
|
|
// Limit the scope of the schema txn.
|
2020-09-06 00:44:35 +02:00
|
|
|
// let schema_txn = task::block_on(schema.write());
|
2020-06-24 13:17:46 +02:00
|
|
|
let schema_txn = schema.write();
|
|
|
|
let idxmeta = schema_txn.reload_idxmeta();
|
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
let pool_size: u32 = config.threads as u32;
|
2020-08-04 08:52:57 +02:00
|
|
|
let fstype: FsType = if config
|
|
|
|
.db_fs_type
|
|
|
|
.as_ref()
|
|
|
|
.map(|s| s == "zfs")
|
|
|
|
.unwrap_or(false)
|
|
|
|
{
|
2021-05-21 08:35:09 +02:00
|
|
|
FsType::Zfs
|
2020-08-04 08:52:57 +02:00
|
|
|
} else {
|
|
|
|
FsType::Generic
|
|
|
|
};
|
|
|
|
|
2021-04-14 01:56:40 +02:00
|
|
|
let cfg = BackendConfig::new(
|
2020-08-04 08:52:57 +02:00
|
|
|
config.db_path.as_str(),
|
|
|
|
pool_size,
|
|
|
|
fstype,
|
2021-04-14 01:56:40 +02:00
|
|
|
config.db_arc_size,
|
2020-08-04 08:52:57 +02:00
|
|
|
);
|
2021-04-14 01:56:40 +02:00
|
|
|
|
2021-12-16 01:13:03 +01:00
|
|
|
Backend::new(cfg, idxmeta, vacuum)
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// TODO #54: We could move most of the be/schema/qs setup and startup
|
|
|
|
// outside of this call, then pass in "what we need" in a cloneable
|
2023-01-10 04:50:53 +01:00
|
|
|
// form, this way we could have separate Idm vs Qs threads, and dedicated
|
2020-03-07 01:49:39 +01:00
|
|
|
// threads for write vs read
|
2022-10-24 01:50:31 +02:00
|
|
|
async fn setup_qs_idms(
|
2020-03-07 01:49:39 +01:00
|
|
|
be: Backend,
|
2020-06-24 13:17:46 +02:00
|
|
|
schema: Schema,
|
2020-12-02 02:12:07 +01:00
|
|
|
config: &Configuration,
|
2020-08-24 04:15:21 +02:00
|
|
|
) -> Result<(QueryServer, IdmServer, IdmServerDelayed), OperationError> {
|
2020-03-07 01:49:39 +01:00
|
|
|
// Create a query_server implementation
|
2022-02-15 07:17:43 +01:00
|
|
|
let query_server = QueryServer::new(be, schema, config.domain.clone());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// TODO #62: Should the IDM parts be broken out to the IdmServer?
|
|
|
|
// What's important about this initial setup here is that it also triggers
|
|
|
|
// the schema and acp reload, so they are now configured correctly!
|
|
|
|
// Initialise the schema core.
|
|
|
|
//
|
|
|
|
// Now search for the schema itself, and validate that the system
|
|
|
|
// in memory matches the BE on disk, and that it's syntactically correct.
|
|
|
|
// Write it out if changes are needed.
|
2022-10-24 01:50:31 +02:00
|
|
|
query_server
|
|
|
|
.initialise_helper(duration_from_epoch_now())
|
|
|
|
.await?;
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// We generate a SINGLE idms only!
|
|
|
|
|
2021-12-16 01:13:03 +01:00
|
|
|
let (idms, idms_delayed) = IdmServer::new(query_server.clone(), &config.origin)?;
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2020-08-24 04:15:21 +02:00
|
|
|
Ok((query_server, idms, idms_delayed))
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
async fn setup_qs(
|
2022-02-15 07:17:43 +01:00
|
|
|
be: Backend,
|
|
|
|
schema: Schema,
|
|
|
|
config: &Configuration,
|
|
|
|
) -> Result<QueryServer, OperationError> {
|
|
|
|
// Create a query_server implementation
|
|
|
|
let query_server = QueryServer::new(be, schema, config.domain.clone());
|
|
|
|
|
|
|
|
// TODO #62: Should the IDM parts be broken out to the IdmServer?
|
|
|
|
// What's important about this initial setup here is that it also triggers
|
|
|
|
// the schema and acp reload, so they are now configured correctly!
|
|
|
|
// Initialise the schema core.
|
|
|
|
//
|
|
|
|
// Now search for the schema itself, and validate that the system
|
|
|
|
// in memory matches the BE on disk, and that it's syntactically correct.
|
|
|
|
// Write it out if changes are needed.
|
2022-10-24 01:50:31 +02:00
|
|
|
query_server
|
|
|
|
.initialise_helper(duration_from_epoch_now())
|
|
|
|
.await?;
|
2022-02-15 07:17:43 +01:00
|
|
|
|
|
|
|
Ok(query_server)
|
|
|
|
}
|
|
|
|
|
2021-07-01 06:51:25 +02:00
|
|
|
/// Shared setup for the dbscan_* entry points: build the in-memory schema
/// and open the backend. Exits the process if the schema cannot be built,
/// and `return`s from the *enclosing function* if the backend fails.
macro_rules! dbscan_setup_be {
    (
        $config:expr
    ) => {{
        // Schema construction failure is unrecoverable for a CLI task.
        let schema = Schema::new().unwrap_or_else(|e| {
            error!("Failed to setup in memory schema: {:?}", e);
            std::process::exit(1);
        });

        // Must stay a match: the `return` needs to exit the caller, which
        // a closure cannot do.
        match setup_backend($config, &schema) {
            Ok(be) => be,
            Err(e) => {
                error!("Failed to setup BE: {:?}", e);
                return;
            }
        }
    }};
}
|
|
|
|
|
|
|
|
pub fn dbscan_list_indexes_core(config: &Configuration) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let be = dbscan_setup_be!(config);
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_rotxn = be.read();
|
2021-07-01 06:51:25 +02:00
|
|
|
|
2021-08-26 03:48:03 +02:00
|
|
|
match be_rotxn.list_indexes() {
|
2021-07-01 06:51:25 +02:00
|
|
|
Ok(mut idx_list) => {
|
|
|
|
idx_list.sort_unstable();
|
|
|
|
idx_list.iter().for_each(|idx_name| {
|
|
|
|
println!("{}", idx_name);
|
|
|
|
})
|
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to retrieve index list: {:?}", e);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn dbscan_list_id2entry_core(config: &Configuration) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let be = dbscan_setup_be!(config);
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_rotxn = be.read();
|
2021-07-01 06:51:25 +02:00
|
|
|
|
2021-08-26 03:48:03 +02:00
|
|
|
match be_rotxn.list_id2entry() {
|
2021-07-01 06:51:25 +02:00
|
|
|
Ok(mut id_list) => {
|
|
|
|
id_list.sort_unstable_by_key(|k| k.0);
|
|
|
|
id_list.iter().for_each(|(id, value)| {
|
|
|
|
println!("{:>8}: {}", id, value);
|
|
|
|
})
|
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to retrieve id2entry list: {:?}", e);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Placeholder for index analysis. Currently only opens the backend to
/// validate the database can be accessed.
pub fn dbscan_list_index_analysis_core(config: &Configuration) {
    let _be = dbscan_setup_be!(config);
    // TBD in after slopes merge.
}
|
|
|
|
|
|
|
|
pub fn dbscan_list_index_core(config: &Configuration, index_name: &str) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let be = dbscan_setup_be!(config);
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_rotxn = be.read();
|
2021-07-01 06:51:25 +02:00
|
|
|
|
2021-08-26 03:48:03 +02:00
|
|
|
match be_rotxn.list_index_content(index_name) {
|
2021-07-01 06:51:25 +02:00
|
|
|
Ok(mut idx_list) => {
|
|
|
|
idx_list.sort_unstable_by(|a, b| a.0.cmp(&b.0));
|
|
|
|
idx_list.iter().for_each(|(key, value)| {
|
|
|
|
println!("{:>50}: {:?}", key, value);
|
|
|
|
})
|
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to retrieve index list: {:?}", e);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn dbscan_get_id2entry_core(config: &Configuration, id: u64) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let be = dbscan_setup_be!(config);
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_rotxn = be.read();
|
2021-07-01 06:51:25 +02:00
|
|
|
|
2021-08-26 03:48:03 +02:00
|
|
|
match be_rotxn.get_id2entry(id) {
|
2021-07-01 06:51:25 +02:00
|
|
|
Ok((id, value)) => println!("{:>8}: {}", id, value),
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to retrieve id2entry value: {:?}", e);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2020-08-04 04:58:11 +02:00
|
|
|
pub fn backup_server_core(config: &Configuration, dst_path: &str) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_ro_txn = be.read();
|
2021-09-21 04:42:00 +02:00
|
|
|
let r = be_ro_txn.backup(dst_path);
|
2020-03-07 01:49:39 +01:00
|
|
|
match r {
|
|
|
|
Ok(_) => info!("Backup success!"),
|
|
|
|
Err(e) => {
|
|
|
|
error!("Backup failed: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
// Let the txn abort, even on success.
|
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
pub async fn restore_server_core(config: &Configuration, dst_path: &str) {
|
2021-09-15 00:24:37 +02:00
|
|
|
touch_file_or_quit(config.db_path.as_str());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// First, we provide the in-memory schema so that core attrs are indexed correctly.
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema) {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
2021-07-08 02:09:15 +02:00
|
|
|
error!("Failed to setup backend: {:?}", e);
|
2020-06-24 13:17:46 +02:00
|
|
|
return;
|
|
|
|
}
|
2020-06-10 04:07:43 +02:00
|
|
|
};
|
2020-06-24 13:17:46 +02:00
|
|
|
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_wr_txn = be.write();
|
2021-09-21 04:42:00 +02:00
|
|
|
let r = be_wr_txn.restore(dst_path).and_then(|_| be_wr_txn.commit());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
if r.is_err() {
|
|
|
|
error!("Failed to restore database: {:?}", r);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
2020-06-07 01:53:10 +02:00
|
|
|
info!("Database loaded successfully");
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
info!("Attempting to init query server ...");
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
let (qs, _idms, _idms_delayed) = match setup_qs_idms(be, schema, config).await {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to setup query server or idm server -> {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
info!("Success!");
|
|
|
|
|
|
|
|
info!("Start reindex phase ...");
|
|
|
|
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut qs_write = qs.write(duration_from_epoch_now()).await;
|
2021-09-21 04:42:00 +02:00
|
|
|
let r = qs_write.reindex().and_then(|_| qs_write.commit());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
match r {
|
|
|
|
Ok(_) => info!("Reindex Success!"),
|
|
|
|
Err(e) => {
|
|
|
|
error!("Restore failed: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
2020-06-07 01:53:10 +02:00
|
|
|
|
|
|
|
info!("✅ Restore Success!");
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
pub async fn reindex_server_core(config: &Configuration) {
|
2020-06-24 13:17:46 +02:00
|
|
|
eprintln!("Start Index Phase 1 ...");
|
2020-03-07 01:49:39 +01:00
|
|
|
// First, we provide the in-memory schema so that core attrs are indexed correctly.
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Failed to setup in memory schema: {:?}", e);
|
2020-03-07 01:49:39 +01:00
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema) {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
2020-06-10 04:07:43 +02:00
|
|
|
};
|
2020-06-24 13:17:46 +02:00
|
|
|
|
|
|
|
// Reindex only the core schema attributes to bootstrap the process.
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut be_wr_txn = be.write();
|
2021-09-21 04:42:00 +02:00
|
|
|
let r = be_wr_txn.reindex().and_then(|_| be_wr_txn.commit());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// Now that's done, setup a minimal qs and reindex from that.
|
|
|
|
if r.is_err() {
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Failed to reindex database: {:?}", r);
|
2020-03-07 01:49:39 +01:00
|
|
|
std::process::exit(1);
|
|
|
|
}
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Index Phase 1 Success!");
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Attempting to init query server ...");
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
let (qs, _idms, _idms_delayed) = match setup_qs_idms(be, schema, config).await {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to setup query server or idm server -> {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Init Query Server Success!");
|
|
|
|
|
|
|
|
eprintln!("Start Index Phase 2 ...");
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2022-12-28 08:52:25 +01:00
|
|
|
let mut qs_write = qs.write(duration_from_epoch_now()).await;
|
2021-09-21 04:42:00 +02:00
|
|
|
let r = qs_write.reindex().and_then(|_| qs_write.commit());
|
2020-06-05 06:01:20 +02:00
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
match r {
|
2020-06-18 02:30:42 +02:00
|
|
|
Ok(_) => eprintln!("Index Phase 2 Success!"),
|
2020-03-07 01:49:39 +01:00
|
|
|
Err(e) => {
|
2020-06-18 02:30:42 +02:00
|
|
|
eprintln!("Reindex failed: {:?}", e);
|
2020-03-07 01:49:39 +01:00
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2021-02-21 06:04:58 +01:00
|
|
|
pub fn vacuum_server_core(config: &Configuration) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2021-02-21 06:04:58 +01:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
eprintln!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// The schema doesn't matter here. Vacuum is run as part of db open to avoid
|
|
|
|
// locking.
|
2021-09-15 00:24:37 +02:00
|
|
|
let r = setup_backend_vacuum(config, &schema, true);
|
2021-02-21 06:04:58 +01:00
|
|
|
|
|
|
|
match r {
|
|
|
|
Ok(_) => eprintln!("Vacuum Success!"),
|
|
|
|
Err(e) => {
|
|
|
|
eprintln!("Vacuum failed: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
pub async fn domain_rename_core(config: &Configuration) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
eprintln!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// Start the backend.
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
2022-02-15 07:17:43 +01:00
|
|
|
|
2022-07-30 14:10:24 +02:00
|
|
|
// Setup the qs, and perform any migrations and changes we may have.
|
2022-10-24 01:50:31 +02:00
|
|
|
let qs = match setup_qs(be, schema, config).await {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
2022-02-15 07:17:43 +01:00
|
|
|
error!("Unable to setup query server -> {:?}", e);
|
2020-03-07 01:49:39 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2022-02-15 07:17:43 +01:00
|
|
|
let new_domain_name = config.domain.as_str();
|
|
|
|
|
2021-12-08 01:02:04 +01:00
|
|
|
// make sure we're actually changing the domain name...
|
2022-10-24 01:50:31 +02:00
|
|
|
match qs.read().await.get_db_domain_name() {
|
2021-12-08 01:02:04 +01:00
|
|
|
Ok(old_domain_name) => {
|
2022-02-15 07:17:43 +01:00
|
|
|
admin_info!(?old_domain_name, ?new_domain_name);
|
2021-12-08 01:02:04 +01:00
|
|
|
if &old_domain_name == &new_domain_name {
|
|
|
|
admin_info!("Domain name not changing, stopping.");
|
|
|
|
return;
|
|
|
|
}
|
2022-07-07 05:03:08 +02:00
|
|
|
admin_debug!(
|
|
|
|
"Domain name is changing from {:?} to {:?}",
|
|
|
|
old_domain_name,
|
|
|
|
new_domain_name
|
|
|
|
);
|
2021-12-08 01:02:04 +01:00
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
admin_error!("Failed to query domain name, quitting! -> {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
let mut qs_write = qs.write(duration_from_epoch_now()).await;
|
2022-08-01 07:52:01 +02:00
|
|
|
let r = qs_write
|
|
|
|
.domain_rename(new_domain_name)
|
|
|
|
.and_then(|_| qs_write.commit());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
match r {
|
|
|
|
Ok(_) => info!("Domain Rename Success!"),
|
|
|
|
Err(e) => {
|
2023-01-10 04:50:53 +01:00
|
|
|
error!("Domain Rename Failed - Rollback has occurred: {:?}", e);
|
2020-03-07 01:49:39 +01:00
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
pub async fn verify_server_core(config: &Configuration) {
|
2020-03-07 01:49:39 +01:00
|
|
|
// setup the qs - without initialise!
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema_mem = match Schema::new() {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(sc) => sc,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
2020-06-24 13:17:46 +02:00
|
|
|
// Setup the be
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema_mem) {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
2022-02-15 07:17:43 +01:00
|
|
|
let server = QueryServer::new(be, schema_mem, config.domain.clone());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// Run verifications.
|
2022-10-24 01:50:31 +02:00
|
|
|
let r = server.verify().await;
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
if r.is_empty() {
|
2021-07-01 06:51:25 +02:00
|
|
|
eprintln!("Verification passed!");
|
2020-03-07 01:49:39 +01:00
|
|
|
std::process::exit(0);
|
|
|
|
} else {
|
|
|
|
for er in r {
|
|
|
|
error!("{:?}", er);
|
|
|
|
}
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Now add IDM server verifications?
|
|
|
|
}
|
|
|
|
|
2022-10-24 01:50:31 +02:00
|
|
|
pub async fn recover_account_core(config: &Configuration, name: &str) {
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
eprintln!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// Start the backend.
|
2021-09-15 00:24:37 +02:00
|
|
|
let be = match setup_backend(config, &schema) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE: {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
// setup the qs - *with* init of the migrations and schema.
|
2022-10-24 01:50:31 +02:00
|
|
|
let (_qs, idms, _idms_delayed) = match setup_qs_idms(be, schema, config).await {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to setup query server or idm server -> {:?}", e);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// Run the password change.
|
2022-10-24 01:50:31 +02:00
|
|
|
let mut idms_prox_write = idms.proxy_write(duration_from_epoch_now()).await;
|
2021-09-25 03:24:00 +02:00
|
|
|
let new_pw = match idms_prox_write.recover_account(name, None) {
|
2021-09-21 04:42:00 +02:00
|
|
|
Ok(new_pw) => match idms_prox_write.commit() {
|
2021-09-25 03:24:00 +02:00
|
|
|
Ok(_) => new_pw,
|
2020-08-04 04:58:11 +02:00
|
|
|
Err(e) => {
|
2023-01-10 04:50:53 +01:00
|
|
|
error!("A critical error during commit occurred {:?}", e);
|
2020-08-04 04:58:11 +02:00
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
},
|
2020-03-07 01:49:39 +01:00
|
|
|
Err(e) => {
|
|
|
|
error!("Error during password reset -> {:?}", e);
|
|
|
|
// abort the txn
|
|
|
|
std::mem::drop(idms_prox_write);
|
|
|
|
std::process::exit(1);
|
|
|
|
}
|
|
|
|
};
|
2022-06-28 01:22:31 +02:00
|
|
|
println!(
|
|
|
|
"{}",
|
|
|
|
AccountChangeMessage {
|
|
|
|
output_mode: config.output_mode,
|
|
|
|
status: MessageStatus::Success,
|
|
|
|
src_user: String::from("command-line invocation"),
|
|
|
|
dest_user: name.to_string(),
|
|
|
|
result: new_pw,
|
2022-11-09 22:42:03 +01:00
|
|
|
action: String::from("recover_account password"),
|
2022-06-26 10:02:16 +02:00
|
|
|
}
|
2022-06-28 01:22:31 +02:00
|
|
|
);
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
|
2022-11-23 11:10:43 +01:00
|
|
|
/// Commands that may be broadcast to every long-running server task.
#[derive(Clone, Debug)]
pub enum CoreAction {
    // Request that each task stop so the server can exit cleanly.
    Shutdown,
}
|
|
|
|
|
|
|
|
/// Handle to a running server core. Used to broadcast a shutdown request
/// and to await the spawned tasks during an orderly stop.
pub struct CoreHandle {
    // Set to true once `shutdown` has completed; inspected in `Drop`.
    clean_shutdown: bool,
    // Broadcast side of the `CoreAction` channel every task subscribes to.
    tx: broadcast::Sender<CoreAction>,

    // Join handles for all spawned tasks, awaited during shutdown.
    handles: Vec<tokio::task::JoinHandle<()>>,
    // interval_handle: tokio::task::JoinHandle<()>,
}
|
|
|
|
|
|
|
|
impl CoreHandle {
|
|
|
|
pub async fn shutdown(&mut self) {
|
|
|
|
if let Err(_) = self.tx.send(CoreAction::Shutdown) {
|
|
|
|
eprintln!("No receivers acked shutdown request. Treating as unclean.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Wait on the handles.
|
|
|
|
while let Some(handle) = self.handles.pop() {
|
|
|
|
if let Err(_) = handle.await {
|
|
|
|
eprintln!("A task failed to join");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
self.clean_shutdown = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Drop for CoreHandle {
    // Warn loudly if the handle is dropped without `shutdown` having
    // completed - spawned tasks may still be running at this point.
    fn drop(&mut self) {
        if !self.clean_shutdown {
            eprintln!("⚠️ UNCLEAN SHUTDOWN OCCURRED ⚠️ ");
        }
        // Can't enable yet until we clean up unix_int cache layer test
        // debug_assert!(self.clean_shutdown);
    }
}
|
|
|
|
|
|
|
|
pub async fn create_server_core(
|
|
|
|
config: Configuration,
|
|
|
|
config_test: bool,
|
|
|
|
) -> Result<CoreHandle, ()> {
|
2020-03-07 01:49:39 +01:00
|
|
|
// Until this point, we probably want to write to the log macro fns.
|
2022-11-23 11:10:43 +01:00
|
|
|
let (broadcast_tx, mut broadcast_rx) = broadcast::channel(4);
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
if config.integration_test_config.is_some() {
|
|
|
|
warn!("RUNNING IN INTEGRATION TEST MODE.");
|
|
|
|
warn!("IF YOU SEE THIS IN PRODUCTION YOU MUST CONTACT SUPPORT IMMEDIATELY.");
|
2022-09-21 05:36:58 +02:00
|
|
|
} else if config.tls_config.is_none() {
|
|
|
|
// TLS is great! We won't run without it.
|
|
|
|
error!("Running without TLS is not supported! Quitting!");
|
|
|
|
return Err({});
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
|
2022-02-15 07:17:43 +01:00
|
|
|
info!(
|
|
|
|
"Starting kanidm with configuration: {} {}",
|
|
|
|
if config_test { "TEST" } else { "" },
|
|
|
|
config
|
|
|
|
);
|
2020-07-28 08:55:58 +02:00
|
|
|
// Setup umask, so that every we touch or create is secure.
|
2022-07-06 02:53:43 +02:00
|
|
|
#[cfg(not(target_family = "windows"))]
|
|
|
|
unsafe {
|
|
|
|
umask(0o0027)
|
|
|
|
};
|
2020-07-28 08:55:58 +02:00
|
|
|
|
2020-09-06 00:44:35 +02:00
|
|
|
// Similar, create a stats task which aggregates statistics from the
|
2020-06-24 13:17:46 +02:00
|
|
|
// server as they come in.
|
2022-10-05 01:48:48 +02:00
|
|
|
let status_ref = StatusActor::start();
|
2020-05-07 06:08:06 +02:00
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// Setup TLS (if any)
|
2020-09-06 00:44:35 +02:00
|
|
|
let _opt_tls_params = match setup_tls(&config) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(opt_tls_params) => opt_tls_params,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to configure TLS parameters -> {:?}", e);
|
2020-06-05 06:01:20 +02:00
|
|
|
return Err(());
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-09-21 04:42:00 +02:00
|
|
|
let schema = match Schema::new() {
|
2020-06-24 13:17:46 +02:00
|
|
|
Ok(s) => s,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup in memory schema: {:?}", e);
|
|
|
|
return Err(());
|
|
|
|
}
|
|
|
|
};
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// Setup the be for the qs.
|
2020-06-24 13:17:46 +02:00
|
|
|
let be = match setup_backend(&config, &schema) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(be) => be,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to setup BE -> {:?}", e);
|
2020-06-05 06:01:20 +02:00
|
|
|
return Err(());
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
};
|
|
|
|
// Start the IDM server.
|
2022-10-24 01:50:31 +02:00
|
|
|
let (_qs, idms, mut idms_delayed) = match setup_qs_idms(be, schema, &config).await {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to setup query server or idm server -> {:?}", e);
|
2020-06-05 06:01:20 +02:00
|
|
|
return Err(());
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
};
|
2020-06-10 04:07:43 +02:00
|
|
|
|
2021-05-26 08:11:00 +02:00
|
|
|
// Extract any configuration from the IDMS that we may need.
|
|
|
|
// For now we just do this per run, but we need to extract this from the db later.
|
2022-03-14 08:29:04 +01:00
|
|
|
let jws_signer = match JwsSigner::generate_hs256() {
|
2021-05-26 08:11:00 +02:00
|
|
|
Ok(k) => k,
|
|
|
|
Err(e) => {
|
2022-03-14 08:29:04 +01:00
|
|
|
error!("Unable to setup jws signer -> {:?}", e);
|
2021-05-26 08:11:00 +02:00
|
|
|
return Err(());
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// Any pre-start tasks here.
|
|
|
|
match &config.integration_test_config {
|
|
|
|
Some(itc) => {
|
2022-10-24 01:50:31 +02:00
|
|
|
let mut idms_prox_write = idms.proxy_write(duration_from_epoch_now()).await;
|
2021-09-21 04:42:00 +02:00
|
|
|
match idms_prox_write.recover_account("admin", Some(&itc.admin_password)) {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(_) => {}
|
|
|
|
Err(e) => {
|
|
|
|
error!(
|
|
|
|
"Unable to configure INTERGATION TEST admin account -> {:?}",
|
|
|
|
e
|
|
|
|
);
|
2020-06-05 06:01:20 +02:00
|
|
|
return Err(());
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
};
|
2021-09-21 04:42:00 +02:00
|
|
|
match idms_prox_write.commit() {
|
2020-03-07 01:49:39 +01:00
|
|
|
Ok(_) => {}
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to commit INTERGATION TEST setup -> {:?}", e);
|
2020-06-05 06:01:20 +02:00
|
|
|
return Err(());
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
2020-06-10 04:07:43 +02:00
|
|
|
|
2021-09-21 04:42:00 +02:00
|
|
|
let ldap = match LdapServer::new(&idms) {
|
2020-06-10 04:07:43 +02:00
|
|
|
Ok(l) => l,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to start LdapServer -> {:?}", e);
|
|
|
|
return Err(());
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// Arc the idms and ldap
|
2020-03-07 01:49:39 +01:00
|
|
|
let idms_arc = Arc::new(idms);
|
2020-06-10 04:07:43 +02:00
|
|
|
let ldap_arc = Arc::new(ldap);
|
2020-03-07 01:49:39 +01:00
|
|
|
|
|
|
|
// Pass it to the actor for threading.
|
|
|
|
// Start the read query server with the given be path: future config
|
2022-10-05 01:48:48 +02:00
|
|
|
let server_read_ref = QueryServerReadV1::start_static(idms_arc.clone(), ldap_arc.clone());
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2020-09-06 00:44:35 +02:00
|
|
|
// Create the server async write entry point.
|
2022-10-05 01:48:48 +02:00
|
|
|
let server_write_ref = QueryServerWriteV1::start_static(idms_arc.clone());
|
2020-09-06 00:44:35 +02:00
|
|
|
|
2022-11-23 11:10:43 +01:00
|
|
|
let delayed_handle = tokio::spawn(async move {
|
2022-10-05 01:48:48 +02:00
|
|
|
loop {
|
2022-11-23 11:10:43 +01:00
|
|
|
tokio::select! {
|
|
|
|
Ok(action) = broadcast_rx.recv() => {
|
|
|
|
match action {
|
|
|
|
CoreAction::Shutdown => break,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
delayed = idms_delayed.next() => {
|
|
|
|
match delayed {
|
|
|
|
Some(da) => server_write_ref.handle_delayedaction(da).await,
|
|
|
|
// Channel has closed, stop the task.
|
|
|
|
None => break,
|
|
|
|
}
|
|
|
|
}
|
2022-10-05 01:48:48 +02:00
|
|
|
}
|
|
|
|
}
|
2022-11-23 11:10:43 +01:00
|
|
|
info!("Stopped DelayedActionActor");
|
2020-09-06 00:44:35 +02:00
|
|
|
});
|
2020-08-24 04:15:21 +02:00
|
|
|
|
2020-03-07 01:49:39 +01:00
|
|
|
// Setup timed events associated to the write thread
|
2022-11-23 11:10:43 +01:00
|
|
|
let interval_handle = IntervalActor::start(server_write_ref, broadcast_tx.subscribe());
|
2021-07-31 09:13:46 +02:00
|
|
|
// Setup timed events associated to the read thread
|
2022-11-23 11:10:43 +01:00
|
|
|
let maybe_backup_handle = match &config.online_backup {
|
2021-07-31 09:13:46 +02:00
|
|
|
Some(cfg) => {
|
2022-11-23 11:10:43 +01:00
|
|
|
let handle =
|
|
|
|
IntervalActor::start_online_backup(server_read_ref, cfg, broadcast_tx.subscribe())?;
|
|
|
|
Some(handle)
|
2021-07-31 09:13:46 +02:00
|
|
|
}
|
|
|
|
None => {
|
|
|
|
debug!("Online backup not requested, skipping");
|
2022-11-23 11:10:43 +01:00
|
|
|
None
|
2021-07-31 09:13:46 +02:00
|
|
|
}
|
|
|
|
};
|
2020-03-07 01:49:39 +01:00
|
|
|
|
2020-06-10 04:07:43 +02:00
|
|
|
// If we have been requested to init LDAP, configure it now.
|
2022-11-23 11:10:43 +01:00
|
|
|
let maybe_ldap_acceptor_handle = match &config.ldapaddress {
|
2020-06-10 04:07:43 +02:00
|
|
|
Some(la) => {
|
|
|
|
let opt_ldap_tls_params = match setup_tls(&config) {
|
|
|
|
Ok(t) => t,
|
|
|
|
Err(e) => {
|
|
|
|
error!("Failed to configure LDAP TLS parameters -> {:?}", e);
|
|
|
|
return Err(());
|
|
|
|
}
|
|
|
|
};
|
2022-02-15 07:17:43 +01:00
|
|
|
if !config_test {
|
|
|
|
// ⚠️ only start the sockets and listeners in non-config-test modes.
|
2022-11-23 11:10:43 +01:00
|
|
|
let h = ldaps::create_ldap_server(
|
|
|
|
la.as_str(),
|
|
|
|
opt_ldap_tls_params,
|
|
|
|
server_read_ref,
|
|
|
|
broadcast_tx.subscribe(),
|
|
|
|
)
|
|
|
|
.await?;
|
|
|
|
Some(h)
|
|
|
|
} else {
|
|
|
|
None
|
2022-02-15 07:17:43 +01:00
|
|
|
}
|
2020-06-10 04:07:43 +02:00
|
|
|
}
|
|
|
|
None => {
|
|
|
|
debug!("LDAP not requested, skipping");
|
2022-11-23 11:10:43 +01:00
|
|
|
None
|
2020-06-10 04:07:43 +02:00
|
|
|
}
|
2022-11-23 11:10:43 +01:00
|
|
|
};
|
2020-06-10 04:07:43 +02:00
|
|
|
|
2020-09-06 00:44:35 +02:00
|
|
|
// TODO: Remove these when we go to auth bearer!
|
2020-03-07 01:49:39 +01:00
|
|
|
// Copy the max size
|
2020-09-06 00:44:35 +02:00
|
|
|
let _secure_cookies = config.secure_cookies;
|
2020-03-07 01:49:39 +01:00
|
|
|
// domain will come from the qs now!
|
|
|
|
let cookie_key: [u8; 32] = config.cookie_key;
|
|
|
|
|
2022-11-23 11:10:43 +01:00
|
|
|
let maybe_http_acceptor_handle = if config_test {
|
2022-02-15 07:17:43 +01:00
|
|
|
admin_info!("this config rocks! 🪨 ");
|
2022-11-23 11:10:43 +01:00
|
|
|
None
|
2022-02-15 07:17:43 +01:00
|
|
|
} else {
|
|
|
|
// ⚠️ only start the sockets and listeners in non-config-test modes.
|
2022-11-23 11:10:43 +01:00
|
|
|
let h = self::https::create_https_server(
|
2022-02-15 07:17:43 +01:00
|
|
|
config.address,
|
2022-12-15 07:09:09 +01:00
|
|
|
config.domain,
|
2022-02-15 07:17:43 +01:00
|
|
|
config.tls_config.as_ref(),
|
|
|
|
config.role,
|
2022-10-10 13:18:57 +02:00
|
|
|
config.trust_x_forward_for,
|
2022-02-15 07:17:43 +01:00
|
|
|
&cookie_key,
|
2022-03-14 08:29:04 +01:00
|
|
|
jws_signer,
|
2022-02-15 07:17:43 +01:00
|
|
|
status_ref,
|
|
|
|
server_write_ref,
|
|
|
|
server_read_ref,
|
2022-11-23 11:10:43 +01:00
|
|
|
broadcast_tx.subscribe(),
|
2022-02-15 07:17:43 +01:00
|
|
|
)?;
|
|
|
|
|
|
|
|
admin_info!("ready to rock! 🪨 ");
|
2022-11-23 11:10:43 +01:00
|
|
|
Some(h)
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut handles = vec![interval_handle, delayed_handle];
|
|
|
|
|
|
|
|
if let Some(backup_handle) = maybe_backup_handle {
|
|
|
|
handles.push(backup_handle)
|
|
|
|
}
|
|
|
|
|
|
|
|
if let Some(ldap_handle) = maybe_ldap_acceptor_handle {
|
|
|
|
handles.push(ldap_handle)
|
|
|
|
}
|
|
|
|
|
|
|
|
if let Some(http_handle) = maybe_http_acceptor_handle {
|
|
|
|
handles.push(http_handle)
|
2022-02-15 07:17:43 +01:00
|
|
|
}
|
2020-06-05 06:01:20 +02:00
|
|
|
|
2022-11-23 11:10:43 +01:00
|
|
|
Ok(CoreHandle {
|
|
|
|
clean_shutdown: false,
|
|
|
|
tx: broadcast_tx,
|
|
|
|
handles,
|
|
|
|
})
|
2020-03-07 01:49:39 +01:00
|
|
|
}
|