use include-based file finding for much faster discovery

This commit is contained in:
mbecker20
2025-08-10 15:37:12 -07:00
parent 7f9b7aa6cf
commit 1f078a0432
10 changed files with 502 additions and 417 deletions

1
.kminclude Normal file
View File

@@ -0,0 +1 @@
.dev

View File

@@ -20,24 +20,33 @@ pub fn cli_args() -> &'static CliArgs {
CLI_ARGS.get_or_init(CliArgs::parse)
}
pub fn cli_config() -> &'static CliConfig {
static CLI_CONFIG: OnceLock<CliConfig> = OnceLock::new();
CLI_CONFIG.get_or_init(|| {
let env: Env = match envy::from_env()
pub fn cli_env() -> &'static Env {
static CLI_ARGS: OnceLock<Env> = OnceLock::new();
CLI_ARGS.get_or_init(|| {
match envy::from_env()
.context("Failed to parse Komodo CLI environment")
{
Ok(env) => env,
Err(e) => {
panic!("{e:?}");
}
};
}
})
}
pub fn cli_config() -> &'static CliConfig {
static CLI_CONFIG: OnceLock<CliConfig> = OnceLock::new();
CLI_CONFIG.get_or_init(|| {
let args = cli_args();
let env = cli_env().clone();
let config_paths = args
.config_path
.clone()
.unwrap_or(env.komodo_cli_config_paths);
let debug_startup =
args.debug_startup.unwrap_or(env.komodo_cli_debug_startup);
if env.komodo_cli_debug_startup {
if debug_startup {
println!(
"{}: Komodo CLI version: {}",
"DEBUG".cyan(),
@@ -58,26 +67,25 @@ pub fn cli_config() -> &'static CliConfig {
.iter()
.map(String::as_str)
.collect::<Vec<_>>();
if env.komodo_cli_debug_startup {
if debug_startup {
println!(
"{}: {}: {config_keywords:?}",
"DEBUG".cyan(),
"Config File Keywords".dimmed(),
);
}
let mut unparsed_config = config::parse_config_paths::<
serde_json::Map<String, serde_json::Value>,
>(
&config_paths
let mut unparsed_config = (config::ConfigLoader {
paths: &config_paths
.iter()
.map(PathBuf::as_path)
.collect::<Vec<_>>(),
&config_keywords,
".kmignore",
env.komodo_cli_merge_nested_config,
env.komodo_cli_extend_config_arrays,
env.komodo_cli_debug_startup,
)
match_wildcards: &config_keywords,
include_file_name: ".kminclude",
merge_nested: env.komodo_cli_merge_nested_config,
extend_array: env.komodo_cli_extend_config_arrays,
debug_print: debug_startup,
})
.load::<serde_json::Map<String, serde_json::Value>>()
.expect("failed at parsing config from paths");
let init_parsed_config = serde_json::from_value::<CliConfig>(
serde_json::Value::Object(unparsed_config.clone()),
@@ -174,7 +182,7 @@ pub fn cli_config() -> &'static CliConfig {
.context("Failed to parse final config")
.unwrap();
let config_profile = if config.config_profile.is_empty() {
String::from("Default")
String::from("None")
} else {
config.config_profile
};

View File

@@ -12,11 +12,14 @@ mod config;
async fn app() -> anyhow::Result<()> {
dotenvy::dotenv().ok();
logger::init(&config::cli_config().cli_logging)?;
let args = config::cli_args();
let env = config::cli_env();
let debug_load =
args.debug_startup.unwrap_or(env.komodo_cli_debug_startup);
match &config::cli_args().command {
match &args.command {
args::Command::Config {
all_profiles,
debug,
unsanitized,
} => {
let mut config = if *unsanitized {
@@ -27,7 +30,7 @@ async fn app() -> anyhow::Result<()> {
if !*all_profiles {
config.profile = Default::default();
}
if *debug {
if debug_load {
println!("\n{config:#?}");
} else {
println!(

View File

@@ -2,6 +2,7 @@ use std::{path::PathBuf, sync::OnceLock};
use anyhow::Context;
use colored::Colorize;
use config::ConfigLoader;
use environment_file::{
maybe_read_item_from_file, maybe_read_list_from_file,
};
@@ -42,17 +43,17 @@ pub fn core_config() -> &'static CoreConfig {
"INFO".green(),
"Config File Keywords".dimmed(),
);
config::parse_config_paths::<CoreConfig>(
&env.komodo_config_paths
(ConfigLoader {
paths: &env.komodo_config_paths
.iter()
.map(PathBuf::as_path)
.collect::<Vec<_>>(),
&config_keywords,
".kcoreignore",
env.komodo_merge_nested_config,
env.komodo_extend_config_arrays,
env.komodo_config_debug,
)
match_wildcards: &config_keywords,
include_file_name: ".kcoreinclude",
merge_nested: env.komodo_merge_nested_config,
extend_array: env.komodo_extend_config_arrays,
debug_print: env.komodo_config_debug,
}).load::<CoreConfig>()
.expect("Failed at parsing config from paths")
};

View File

@@ -2,6 +2,7 @@ use std::{path::PathBuf, sync::OnceLock};
use clap::Parser;
use colored::Colorize;
use config::ConfigLoader;
use environment_file::maybe_read_list_from_file;
use komodo_client::entities::{
config::periphery::{CliArgs, Env, PeripheryConfig},
@@ -25,32 +26,33 @@ pub fn periphery_config() -> &'static PeripheryConfig {
);
PeripheryConfig::default()
} else {
config::parse_config_paths::<PeripheryConfig>(
&config_paths
(ConfigLoader {
paths: &config_paths
.iter()
.map(PathBuf::as_path)
.collect::<Vec<_>>(),
&args
match_wildcards: &args
.config_keyword
.unwrap_or(env.periphery_config_keywords)
.iter()
.map(String::as_str)
.collect::<Vec<_>>(),
".peripheryignore",
args
include_file_name: ".peripheryignore",
merge_nested: args
.merge_nested_config
.unwrap_or(env.periphery_merge_nested_config),
args
extend_array: args
.extend_config_arrays
.unwrap_or(env.periphery_extend_config_arrays),
args
debug_print: args
.log_level
.map(|level| {
level == tracing::Level::DEBUG
|| level == tracing::Level::TRACE
})
.unwrap_or_default(),
)
})
.load()
.expect("failed at parsing config from paths")
};

View File

@@ -30,6 +30,10 @@ pub struct CliArgs {
/// Can use multiple times to match multiple patterns independently.
#[arg(long, short = 'm')]
pub config_keyword: Option<Vec<String>>,
/// Whether to debug print on configuration load (on startup)
#[arg(action, alias = "debug", short = 'd')]
pub debug_startup: Option<bool>,
}
#[derive(Debug, Clone, clap::Subcommand)]
@@ -40,9 +44,7 @@ pub enum Command {
/// Whether to print the additional profiles picked up
#[arg(long, short = 'a', default_value_t = false)]
all_profiles: bool,
/// Whether to debug print the config
#[arg(long, short = 'd', default_value_t = false)]
debug: bool,
/// Whether to print unsanitized config,
/// including sensitive credentials.
#[arg(long, action)]

View File

@@ -0,0 +1,57 @@
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
/// Accumulates the set of paths referenced by `.include`-style files,
/// deduplicated and canonicalized.
pub struct IncludesLoader {
  includes: HashSet<PathBuf>,
  include_file_name: &'static str,
}

impl IncludesLoader {
  /// Creates an empty loader which will look for files
  /// named `include_file_name` inside folders.
  pub fn new(include_file_name: &'static str) -> Self {
    Self {
      includes: HashSet::new(),
      include_file_name,
    }
  }

  /// Creates a loader and immediately reads the include file
  /// (if any) found directly inside `path`.
  pub fn init(path: &Path, include_file_name: &'static str) -> Self {
    let mut loader = Self::new(include_file_name);
    loader.load_more(path);
    loader
  }

  /// Consumes the loader, returning all collected include paths.
  pub fn finish(self) -> HashSet<PathBuf> {
    self.includes
  }

  /// Reads the include file directly inside `folder` (if present)
  /// and adds every path it lists. Lines are trimmed; empty lines
  /// and `#` comments (full-line or trailing) are skipped. Entries
  /// are resolved relative to `folder`; entries that fail to
  /// canonicalize (e.g. nonexistent paths) are silently dropped.
  pub fn load_more(&mut self, folder: &Path) {
    if !folder.is_dir() {
      return;
    }
    let Ok(folder) = folder.canonicalize() else {
      return;
    };
    let Ok(contents) =
      std::fs::read_to_string(folder.join(self.include_file_name))
    else {
      return;
    };
    let entries = contents
      .lines()
      // Strip trailing `# ...` comments, then surrounding whitespace.
      .map(|line| {
        let line =
          line.split_once('#').map_or(line, |(before, _)| before);
        line.trim()
      })
      // Nothing left after comment stripping -> skip the line.
      .filter(|line| !line.is_empty())
      // Resolve relative to the folder; drop entries that don't resolve.
      .filter_map(|line| folder.join(line).canonicalize().ok());
    self.includes.extend(entries);
  }
}

View File

@@ -4,397 +4,115 @@
//! It supports interpolating in environment variables (only '${VAR}' syntax),
//! as well as merging together multiple files into a final configuration object.
use std::{
collections::HashSet,
fs::File,
io::Read,
path::{Path, PathBuf},
};
use std::path::Path;
use colored::Colorize;
use indexmap::IndexSet;
use serde::{Serialize, de::DeserializeOwned};
use serde::de::DeserializeOwned;
mod error;
mod includes;
mod load;
mod merge;
pub use error::Error;
pub use merge::{merge_config, merge_objects};
pub type Result<T> = ::core::result::Result<T, Error>;
/// parse paths that are either directories or files
pub fn parse_config_paths<T: DeserializeOwned>(
paths: &[&Path],
match_wildcards: &[&str],
ignore_file_name: &str,
merge_nested: bool,
extend_array: bool,
debug_print: bool,
) -> Result<T> {
let mut wildcards = Vec::with_capacity(match_wildcards.len());
for &wc in match_wildcards {
match wildcard::Wildcard::new(wc.as_bytes()) {
Ok(wc) => wildcards.push(wc),
Err(e) => {
eprintln!(
"{}: Keyword '{}' is invalid wildcard | {e:?}",
"ERROR".red(),
wc.bold(),
);
}
}
}
let mut all_files = IndexSet::new();
for &path in paths {
let Ok(metadata) = std::fs::metadata(path) else {
continue;
};
if metadata.is_dir() {
// Collect ignore paths
let mut ignores = HashSet::new();
add_ignores(path, ignore_file_name, &mut ignores);
if debug_print && !ignores.is_empty() {
println!(
"{}: {}: {ignores:?}",
"DEBUG".cyan(),
format_args!(
"{} {path:?} {}",
"Config Path".dimmed(),
"Ignores".dimmed()
),
);
}
let mut files = Vec::new();
add_files(&mut files, path, &wildcards, &ignores);
files.sort_by(|(a_index, a_path), (b_index, b_path)| {
a_index.cmp(b_index).then(a_path.cmp(b_path))
});
all_files.extend(files.into_iter().map(|(_, path)| path));
} else if metadata.is_file() {
let path = path.to_path_buf();
// If the same path comes up again later on, it should be removed and
// reinserted so it maintains higher priority.
all_files.shift_remove(&path);
all_files.insert(path);
}
}
if debug_print {
println!(
"{}: {}: {all_files:?}",
"DEBUG".cyan(),
"Found Files".dimmed()
);
}
parse_config_files(
&all_files.into_iter().collect::<Vec<_>>(),
merge_nested,
extend_array,
)
/// Set the configuration for loading config files.
pub struct ConfigLoader<'outer, 'inner> {
  /// Paths to either files or directories
  /// to include in the final configuration.
  ///
  /// Paths coming later in the array (higher index) will override
  /// configuration in earlier paths.
  pub paths: &'outer [&'inner Path],
  /// Wildcard patterns to match file names in given directories.
  ///
  /// Patterns coming later in the array (higher index) will override
  /// configuration added by earlier patterns, however this is
  /// only relevant for an individual `path`. Later `paths`
  /// will still have higher priority.
  pub match_wildcards: &'outer [&'inner str],
  /// The file name of the include file to search for
  /// in each directory (e.g. `.kminclude`).
  pub include_file_name: &'static str,
  /// Whether to merge nested config objects.
  /// Otherwise, the object will be replaced at
  /// the top-level key by the highest priority config file
  /// in which it is specified.
  pub merge_nested: bool,
  /// Whether to extend arrays in configuration files.
  /// Otherwise, the array will be replaced at
  /// the top-level key by the highest priority config file
  /// in which it is specified.
  pub extend_array: bool,
  /// Print some extra information on configuration load.
  ///
  /// Note. This is different than application level log level.
  pub debug_print: bool,
}
/// Returns true when `path` should be skipped during directory
/// traversal: either it ends with one of the always-skipped folder
/// names, or it was listed in a collected ignore file.
fn ignore_dir(path: &Path, ignores: &HashSet<PathBuf>) -> bool {
  // Folders that never contain user config files.
  const SKIP_ALWAYS: &[&str] = &["target", "node_modules", ".git"];
  if ignores.contains(path) {
    return true;
  }
  SKIP_ALWAYS.iter().any(|name| path.ends_with(name))
}
/// Recursively walks `folder`, appending every file whose name
/// matches one of the `wildcards` (or every file, when `wildcards`
/// is empty) to `files`, tagged with the index of the first matching
/// wildcard so callers can sort by keyword priority.
/// Folders in `ignores` (or matching the built-in skip list in
/// `ignore_dir`) are not descended into. Paths that fail to
/// canonicalize are skipped silently.
fn add_files(
  // stores index of matching keyword as well as path
  files: &mut Vec<(usize, PathBuf)>,
  folder: &Path,
  wildcards: &[wildcard::Wildcard],
  ignores: &HashSet<PathBuf>,
) {
  // Canonicalize so the folder compares equal to the
  // canonicalized entries stored in `ignores`.
  let Ok(folder) = folder.canonicalize() else {
    return;
  };
  if ignores.contains(&folder) {
    return;
  }
  let Ok(read_dir) = std::fs::read_dir(folder) else {
    return;
  };
  for dir_entry in read_dir.flatten() {
    let path = dir_entry.path();
    if ignore_dir(&path, ignores) {
      continue;
    }
    let Ok(metadata) = dir_entry.metadata() else {
      continue;
    };
    if metadata.is_file() {
      // BASE CASE
      let file_name = dir_entry.file_name();
      // Skip files with non-UTF-8 names, which can't be wildcard-matched.
      let Some(file_name) = file_name.to_str() else {
        continue;
      };
      // Ensure file name matches a wildcard keyword
      let index = if wildcards.is_empty() {
        // No wildcards configured: accept every file at top priority.
        0
      } else if let Some(index) = wildcards
        .iter()
        .position(|wc| wc.is_match(file_name.as_bytes()))
      {
        index
      } else {
        continue;
      };
      let Ok(path) = path.canonicalize() else {
        continue;
      };
      files.push((index, path));
    } else if metadata.is_dir() {
      // RECURSIVE CASE
      add_files(files, &dir_entry.path(), wildcards, ignores);
    }
  }
}
/// Recursively collects into `ignores` every path listed in files
/// named `ignore_file_name`, starting at `folder` and descending
/// into non-ignored subdirectories. Entries are resolved relative
/// to the folder containing the ignore file and canonicalized;
/// entries that fail to canonicalize are dropped silently.
fn add_ignores(
  folder: &Path,
  ignore_file_name: &str,
  ignores: &mut HashSet<PathBuf>,
) {
  let Ok(folder) = folder.canonicalize() else {
    return;
  };
  // Folder already marked ignored: nothing to collect here.
  if ignores.contains(&folder) {
    return;
  }
  // Add any ignores in this folder
  if let Ok(ignore) =
    std::fs::read_to_string(folder.join(ignore_file_name))
  {
    ignores.extend(
      ignore
        .split('\n')
        .map(|line| line.trim())
        // Ignore empty / commented out lines
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        // Remove end of line comments
        .map(|line| {
          line.split_once('#').map(|res| res.0.trim()).unwrap_or(line)
        })
        .flat_map(|line| folder.join(line).canonicalize()),
    );
  };
  // The ignore file may have listed the folder itself (e.g. ".");
  // in that case do not descend any further.
  if ignores.contains(&folder) {
    return;
  }
  // Then check any sub directories
  let Ok(entries) = std::fs::read_dir(folder) else {
    return;
  };
  for entry in entries.flatten() {
    let Ok(path) = entry.path().canonicalize() else {
      continue;
    };
    if ignore_dir(&path, ignores) {
      continue;
    }
    let Ok(metadata) = entry.metadata() else {
      continue;
    };
    if !metadata.is_dir() {
      continue;
    }
    add_ignores(&path, ignore_file_name, ignores);
  }
}
/// parses multiple config files
pub fn parse_config_files<T: DeserializeOwned>(
files: &[PathBuf],
merge_nested: bool,
extend_array: bool,
) -> Result<T> {
let mut target = serde_json::Map::new();
for file in files {
let source = match parse_config_file(file) {
Ok(source) => source,
Err(e) => {
eprintln!("{}: {e}", "WARN".yellow());
continue;
}
};
target = match merge_objects(
target.clone(),
source,
impl ConfigLoader<'_, '_> {
pub fn load<T: DeserializeOwned>(self) -> Result<T> {
let ConfigLoader {
paths,
match_wildcards,
include_file_name,
merge_nested,
extend_array,
) {
Ok(target) => target,
Err(e) => {
eprint!("{}: {e}", "WARN".yellow());
target
debug_print,
} = self;
let mut wildcards = Vec::with_capacity(match_wildcards.len());
for &wc in match_wildcards {
match wildcard::Wildcard::new(wc.as_bytes()) {
Ok(wc) => wildcards.push(wc),
Err(e) => {
eprintln!(
"{}: Keyword '{}' is invalid wildcard | {e:?}",
"ERROR".red(),
wc.bold(),
);
}
}
};
}
let mut all_files = IndexSet::new();
for &path in paths {
let Ok(metadata) = std::fs::metadata(path) else {
continue;
};
if metadata.is_dir() {
let mut files = Vec::new();
load::load_config_files(
&mut files,
path,
&wildcards,
include_file_name,
debug_print,
);
files.sort_by(|(a_index, a_path), (b_index, b_path)| {
a_index.cmp(b_index).then(a_path.cmp(b_path))
});
all_files.extend(files.into_iter().map(|(_, path)| path));
} else if metadata.is_file() {
let path = path.to_path_buf();
// If the same path comes up again later on, it should be removed and
// reinserted so it maintains higher priority.
all_files.shift_remove(&path);
all_files.insert(path);
}
}
if debug_print {
println!(
"{}: {}: {all_files:?}",
"DEBUG".cyan(),
"Found Files".dimmed()
);
}
load::load_parse_config_files(
&all_files.into_iter().collect::<Vec<_>>(),
merge_nested,
extend_array,
)
}
serde_json::from_value(serde_json::Value::Object(target))
.map_err(|e| Error::ParseFinalJson { e })
}
/// Parses a single config file into `T`.
///
/// The format is chosen by file extension: `.toml`, `.yaml` / `.yml`,
/// or `.json`. Environment variables written as '${VAR}' are
/// interpolated into the raw contents before parsing.
///
/// # Errors
/// Returns an error if the file cannot be opened or read, if the
/// extension is unsupported, or if the contents fail to parse.
pub fn parse_config_file<T: DeserializeOwned>(
  file: &Path,
) -> Result<T> {
  let mut file_handle =
    File::open(file).map_err(|e| Error::FileOpen {
      e,
      path: file.to_path_buf(),
    })?;
  let mut contents = String::new();
  file_handle.read_to_string(&mut contents).map_err(|e| {
    Error::ReadFileContents {
      e,
      path: file.to_path_buf(),
    }
  })?;
  let contents = interpolate_env(&contents);
  let config = match file.extension().and_then(|e| e.to_str()) {
    Some("toml") => {
      toml::from_str(&contents).map_err(|e| Error::ParseToml {
        e,
        path: file.to_path_buf(),
      })?
    }
    Some("yaml") | Some("yml") => serde_yaml_ng::from_str(&contents)
      .map_err(|e| Error::ParseYaml {
        e,
        path: file.to_path_buf(),
      })?,
    Some("json") => {
      // Parse the interpolated string, not the file handle: the
      // handle is already at EOF after read_to_string above, so
      // from_reader would always fail — and re-reading the file
      // would skip env interpolation.
      serde_json::from_str(&contents).map_err(|e| {
        Error::ParseJson {
          e,
          path: file.to_path_buf(),
        }
      })?
    }
    Some(_) | None => {
      return Err(Error::UnsupportedFileType {
        path: file.to_path_buf(),
      });
    }
  };
  Ok(config)
}
/// - Object is serde_json::Map<String, serde_json::Value>.
/// - Source will override target.
/// - Will recurse when field is object if merge_nested = true, otherwise object will be replaced.
/// - Will extend when field is array if extend_array = true, otherwise array will be replaced.
/// - Will return error when types on source and target fields do not match.
fn merge_objects(
  mut target: serde_json::Map<String, serde_json::Value>,
  source: serde_json::Map<String, serde_json::Value>,
  merge_nested: bool,
  extend_array: bool,
) -> Result<serde_json::Map<String, serde_json::Value>> {
  for (key, value) in source {
    // Key not yet in target: take the source value as-is.
    let Some(curr) = target.remove(&key) else {
      target.insert(key, value);
      continue;
    };
    match curr {
      serde_json::Value::Object(target_obj) => {
        if !merge_nested {
          // Replace the whole object with the higher priority one.
          target.insert(key, value);
          continue;
        }
        match value {
          serde_json::Value::Object(source_obj) => {
            // Deep-merge the two objects recursively.
            target.insert(
              key,
              serde_json::Value::Object(merge_objects(
                target_obj,
                source_obj,
                merge_nested,
                extend_array,
              )?),
            );
          }
          _ => {
            // Object in target, non-object in source: type conflict.
            return Err(Error::ObjectFieldTypeMismatch {
              key,
              value: Box::new(value),
            });
          }
        }
      }
      serde_json::Value::Array(mut target_arr) => {
        if !extend_array {
          // Replace the whole array with the higher priority one.
          target.insert(key, value);
          continue;
        }
        match value {
          serde_json::Value::Array(source_arr) => {
            // Append source elements after the existing ones.
            target_arr.extend(source_arr);
            target.insert(key, serde_json::Value::Array(target_arr));
          }
          _ => {
            // Array in target, non-array in source: type conflict.
            return Err(Error::ArrayFieldTypeMismatch {
              key,
              value: Box::new(value),
            });
          }
        }
      }
      _ => {
        // Scalars and nulls: source simply overrides target.
        target.insert(key, value);
      }
    }
  }
  Ok(target)
}
/// Source will override target.
///
/// Both values are serialized to JSON objects, merged field-by-field
/// (see `merge_objects` for the `merge_nested` / `extend_array`
/// semantics), and the result deserialized back into `T`.
///
/// Errors if either value does not serialize to a JSON object, or if
/// the merged object fails to deserialize back into `T`.
pub fn merge_config<T: Serialize + DeserializeOwned>(
  target: T,
  source: T,
  merge_nested: bool,
  extend_array: bool,
) -> Result<T> {
  let serde_json::Value::Object(target) =
    serde_json::to_value(target)
      .map_err(|e| Error::SerializeJson { e })?
  else {
    return Err(Error::ValueIsNotObject);
  };
  let serde_json::Value::Object(source) =
    serde_json::to_value(source)
      .map_err(|e| Error::SerializeJson { e })?
  else {
    return Err(Error::ValueIsNotObject);
  };
  let object =
    merge_objects(target, source, merge_nested, extend_array)?;
  serde_json::from_value(serde_json::Value::Object(object))
    .map_err(|e| Error::ParseFinalJson { e })
}
/// Only supports '${VAR}' syntax.
///
/// Unset variables are replaced with the empty string. Two passes
/// are run so that a variable whose value itself contains '${OTHER}'
/// is expanded one further time (but no deeper).
fn interpolate_env(input: &str) -> String {
  let re = regex::Regex::new(r"\$\{([A-Za-z0-9_]+)\}").unwrap();
  let first_pass = re
    .replace_all(input, |caps: &regex::Captures| {
      let var_name = &caps[1];
      std::env::var(var_name).unwrap_or_default()
    })
    .into_owned();
  // Do it twice in case any env vars expand again to env vars
  re.replace_all(&first_pass, |caps: &regex::Captures| {
    let var_name = &caps[1];
    std::env::var(var_name).unwrap_or_default()
  })
  .into_owned()
}

197
lib/config/src/load.rs Normal file
View File

@@ -0,0 +1,197 @@
use std::{
  collections::HashSet,
  fs::File,
  io::Read,
  path::{Path, PathBuf},
};
use colored::Colorize;
use serde::de::DeserializeOwned;
use crate::{
Error, Result, includes::IncludesLoader, merge::merge_objects,
};
pub fn load_config_files(
// stores index of matching keyword as well as path
files: &mut Vec<(usize, PathBuf)>,
path: &Path,
keywords: &[wildcard::Wildcard],
include_file_name: &'static str,
debug_print: bool,
) {
// File base case.
if path.is_file() {
files.push((0, path.to_path_buf()));
return;
}
if !path.is_dir() {
return;
}
let Ok(folder) = path.canonicalize() else {
return;
};
let Ok(read_dir) = std::fs::read_dir(&folder) else {
return;
};
// Collect any config files in the current dir.
for dir_entry in read_dir.flatten() {
let path = dir_entry.path();
let Ok(metadata) = dir_entry.metadata() else {
continue;
};
if metadata.is_file() {
let file_name = dir_entry.file_name();
let Some(file_name) = file_name.to_str() else {
continue;
};
// Ensure file name matches a wildcard keyword
let index = if keywords.is_empty() {
0
} else if let Some(index) = keywords
.iter()
.position(|wc| wc.is_match(file_name.as_bytes()))
{
// actual config keyword matches will have higher priority than
// when files are added via the base case.
index + 1
} else {
continue;
};
let Ok(path) = path.canonicalize() else {
continue;
};
files.push((index, path));
}
}
// Collect any paths specified in 'includes'
let includes =
IncludesLoader::init(&folder, include_file_name).finish();
if includes.is_empty() {
return;
}
if debug_print {
println!(
"{}: {}: {includes:?}",
"DEBUG".cyan(),
format_args!(
"{} {path:?} {}",
"Config Path".dimmed(),
"Includes".dimmed()
),
);
}
// Add these paths as well recursively.
for path in includes {
load_config_files(
files,
&path,
keywords,
include_file_name,
debug_print,
);
}
}
/// Loads and merges multiple config files into a single `T`.
///
/// Files earlier in `files` form the base; each later file is merged
/// on top (later files override). Files that fail to load or merge
/// are skipped with a warning on stderr rather than aborting.
///
/// # Errors
/// Returns an error only if the final merged object fails to
/// deserialize into `T`.
pub fn load_parse_config_files<T: DeserializeOwned>(
  files: &[PathBuf],
  merge_nested: bool,
  extend_array: bool,
) -> Result<T> {
  let mut target = serde_json::Map::new();
  for file in files {
    let source = match load_parse_config_file(file) {
      Ok(source) => source,
      Err(e) => {
        eprintln!("{}: {e}", "WARN".yellow());
        continue;
      }
    };
    // Clone so the accumulated config survives a failed merge.
    target = match merge_objects(
      target.clone(),
      source,
      merge_nested,
      extend_array,
    ) {
      Ok(target) => target,
      Err(e) => {
        // eprintln (not eprint) so consecutive warnings on stderr
        // don't run together on one line.
        eprintln!("{}: {e}", "WARN".yellow());
        target
      }
    };
  }
  serde_json::from_value(serde_json::Value::Object(target))
    .map_err(|e| Error::ParseFinalJson { e })
}
/// Loads and parses a single config file into `T`.
///
/// The format is chosen by file extension: `.toml`, `.yaml` / `.yml`,
/// or `.json`. Environment variables written as '${VAR}' are
/// interpolated into the raw contents before parsing.
///
/// # Errors
/// Returns an error if the file cannot be opened or read, if the
/// extension is unsupported, or if the contents fail to parse.
pub fn load_parse_config_file<T: DeserializeOwned>(
  file: &Path,
) -> Result<T> {
  let mut file_handle =
    File::open(file).map_err(|e| Error::FileOpen {
      e,
      path: file.to_path_buf(),
    })?;
  let mut contents = String::new();
  file_handle.read_to_string(&mut contents).map_err(|e| {
    Error::ReadFileContents {
      e,
      path: file.to_path_buf(),
    }
  })?;
  // Interpolate environment variables matching `${VAR}` syntax (not `$VAR` to avoid edge cases).
  let contents = interpolate_env(&contents);
  let config = match file.extension().and_then(|e| e.to_str()) {
    Some("toml") => {
      toml::from_str(&contents).map_err(|e| Error::ParseToml {
        e,
        path: file.to_path_buf(),
      })?
    }
    Some("yaml") | Some("yml") => serde_yaml_ng::from_str(&contents)
      .map_err(|e| Error::ParseYaml {
        e,
        path: file.to_path_buf(),
      })?,
    Some("json") => {
      // Parse the interpolated string, not the file handle: the
      // handle is already at EOF after read_to_string above, so
      // from_reader would always fail — and re-reading the file
      // would skip env interpolation.
      serde_json::from_str(&contents).map_err(|e| {
        Error::ParseJson {
          e,
          path: file.to_path_buf(),
        }
      })?
    }
    Some(_) | None => {
      return Err(Error::UnsupportedFileType {
        path: file.to_path_buf(),
      });
    }
  };
  Ok(config)
}
/// Only supports '${VAR}' syntax.
///
/// Unset variables are replaced with the empty string. Two passes
/// are run so that a variable whose value itself contains '${OTHER}'
/// is expanded one further time (but no deeper).
fn interpolate_env(input: &str) -> String {
  let re = regex::Regex::new(r"\$\{([A-Za-z0-9_]+)\}").unwrap();
  let expand_once = |text: &str| {
    re.replace_all(text, |caps: &regex::Captures| {
      std::env::var(&caps[1]).unwrap_or_default()
    })
    .into_owned()
  };
  // Do it twice in case any env vars expand again to env vars
  expand_once(&expand_once(input))
}

96
lib/config/src/merge.rs Normal file
View File

@@ -0,0 +1,96 @@
use serde::{Serialize, de::DeserializeOwned};
use crate::{Error, Result};
/// - Object is serde_json::Map<String, serde_json::Value>.
/// - Source will overide target.
/// - Will recurse when field is object if merge_object = true, otherwise object will be replaced.
/// - Will extend when field is array if extend_array = true, otherwise array will be replaced.
/// - Will return error when types on source and target fields do not match.
pub fn merge_objects(
mut target: serde_json::Map<String, serde_json::Value>,
source: serde_json::Map<String, serde_json::Value>,
merge_nested: bool,
extend_array: bool,
) -> Result<serde_json::Map<String, serde_json::Value>> {
for (key, value) in source {
let Some(curr) = target.remove(&key) else {
target.insert(key, value);
continue;
};
match curr {
serde_json::Value::Object(target_obj) => {
if !merge_nested {
target.insert(key, value);
continue;
}
match value {
serde_json::Value::Object(source_obj) => {
target.insert(
key,
serde_json::Value::Object(merge_objects(
target_obj,
source_obj,
merge_nested,
extend_array,
)?),
);
}
_ => {
return Err(Error::ObjectFieldTypeMismatch {
key,
value: Box::new(value),
});
}
}
}
serde_json::Value::Array(mut target_arr) => {
if !extend_array {
target.insert(key, value);
continue;
}
match value {
serde_json::Value::Array(source_arr) => {
target_arr.extend(source_arr);
target.insert(key, serde_json::Value::Array(target_arr));
}
_ => {
return Err(Error::ArrayFieldTypeMismatch {
key,
value: Box::new(value),
});
}
}
}
_ => {
target.insert(key, value);
}
}
}
Ok(target)
}
/// Source will overide target
pub fn merge_config<T: Serialize + DeserializeOwned>(
target: T,
source: T,
merge_nested: bool,
extend_array: bool,
) -> Result<T> {
let serde_json::Value::Object(target) =
serde_json::to_value(target)
.map_err(|e| Error::SerializeJson { e })?
else {
return Err(Error::ValueIsNotObject);
};
let serde_json::Value::Object(source) =
serde_json::to_value(source)
.map_err(|e| Error::SerializeJson { e })?
else {
return Err(Error::ValueIsNotObject);
};
let object =
merge_objects(target, source, merge_nested, extend_array)?;
serde_json::from_value(serde_json::Value::Object(object))
.map_err(|e| Error::ParseFinalJson { e })
}