From 78db60cc857e712b7d9d507566dcbbf934189abd Mon Sep 17 00:00:00 2001
From: bconn98
Date: Sat, 2 Mar 2024 21:00:25 -0500
Subject: [PATCH] chore: add config tests

---
 Cargo.toml            |   1 +
 src/config/file.rs    | 104 +++++++++++++++++++-
 src/config/raw.rs     |  91 ++++++++++++++++--
 src/config/runtime.rs | 215 ++++++++++++++++++++++++++++++++++++++++--
 test_cfgs/test.json   |  51 ++++++++++
 test_cfgs/test.toml   |  39 ++++++++
 test_cfgs/test.yml    |  36 +++++++
 7 files changed, 521 insertions(+), 16 deletions(-)
 create mode 100644 test_cfgs/test.json
 create mode 100644 test_cfgs/test.toml
 create mode 100644 test_cfgs/test.yml

diff --git a/Cargo.toml b/Cargo.toml
index 8f7134ca..9f648269 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -88,6 +88,7 @@ streaming-stats = "0.2.3"
 humantime = "2.1"
 tempfile = "3.8"
 mock_instant = "0.3"
+serde_test = "1.0.176"
 
 [[example]]
 name = "json_logger"
diff --git a/src/config/file.rs b/src/config/file.rs
index 4355c0f8..05d950c0 100644
--- a/src/config/file.rs
+++ b/src/config/file.rs
@@ -92,7 +92,7 @@ pub enum FormatError {
     UnknownFormat,
 }
 
-#[derive(Debug)]
+#[derive(Debug, PartialEq)]
 enum Format {
     #[cfg(feature = "yaml_format")]
     Yaml,
@@ -230,3 +230,105 @@ impl ConfigReloader {
         Ok(rate)
     }
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_from_path_all_formats() {
+        #[cfg(feature = "yaml_format")]
+        {
+            assert_eq!(
+                Format::from_path(Path::new("test.yml")).unwrap(),
+                Format::Yaml
+            );
+            assert_eq!(
+                Format::from_path(Path::new("test.yaml")).unwrap(),
+                Format::Yaml
+            );
+        }
+
+        #[cfg(not(feature = "yaml_format"))]
+        assert!(Format::from_path(Path::new("test.yml")).is_err());
+
+        #[cfg(feature = "json_format")]
+        assert_eq!(
+            Format::from_path(Path::new("test.json")).unwrap(),
+            Format::Json
+        );
+        #[cfg(not(feature = "json_format"))]
+        assert!(Format::from_path(Path::new("test.json")).is_err());
+
+        #[cfg(feature = "toml_format")]
+        assert_eq!(
+            Format::from_path(Path::new("test.toml")).unwrap(),
+            Format::Toml
+        );
+        #[cfg(not(feature = "toml_format"))]
+        assert!(Format::from_path(Path::new("test.toml")).is_err());
+
+        // Unsupported Type
+        assert!(Format::from_path(Path::new("test.ini")).is_err());
+
+        // UnknownFormat
+        assert!(Format::from_path(Path::new("test")).is_err());
+    }
+
+    #[test]
+    fn test_format_parse_all_formats() {
+        #[cfg(feature = "yaml_format")]
+        {
+            let cfg = read_config(Path::new("./test_cfgs/test.yml")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Yaml.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+
+        #[cfg(feature = "json_format")]
+        {
+            let cfg = read_config(Path::new("./test_cfgs/test.json")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Json.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+
+        #[cfg(feature = "toml_format")]
+        {
+            let cfg = read_config(Path::new("./test_cfgs/test.toml")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Toml.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+    }
+
+    #[test]
+    fn test_load_cfg_all_formats() {
+        #[cfg(feature = "yaml_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.yml"), Deserializers::default()).is_ok()
+        );
+
+        #[cfg(feature = "json_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.json"), Deserializers::default()).is_ok()
+        );
+
+        #[cfg(feature = "toml_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.toml"), Deserializers::default()).is_ok()
+        );
+    }
+}
diff --git a/src/config/raw.rs b/src/config/raw.rs
index 9868b495..2c2b64c8 100644
--- a/src/config/raw.rs
+++ b/src/config/raw.rs
@@ -462,13 +462,17 @@ fn logger_additive_default() -> bool {
 #[cfg(test)]
 #[allow(unused_imports)]
 mod test {
-    use std::fs;
+    use crate::filter::FilterConfig;
 
     use super::*;
+    use anyhow::Error;
+    use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};
+    use serde_value::{DeserializerError::UnknownField, Value};
+    use std::{collections::BTreeMap, fs, vec};
 
     #[test]
     #[cfg(all(feature = "yaml_format", feature = "threshold_filter"))]
-    fn full_deserialize() {
+    fn test_yaml_deserialize() {
         let cfg = r#"
 refresh_rate: 60 seconds
 
@@ -498,14 +502,44 @@ loggers:
 "#;
         let config = ::serde_yaml::from_str::<RawConfig>(cfg).unwrap();
         let errors = config.appenders_lossy(&Deserializers::new()).1;
-        println!("{:?}", errors);
         assert!(errors.is_empty());
+        assert_eq!(config.refresh_rate().unwrap(), Duration::new(60, 0));
     }
 
     #[test]
-    #[cfg(feature = "yaml_format")]
-    fn empty() {
-        ::serde_yaml::from_str::<RawConfig>("{}").unwrap();
+    #[cfg(all(feature = "yaml_format", feature = "threshold_filter"))]
+    fn test_bad_filter_and_appender_yaml() {
+        let cfg = r#"
+refresh_rate: 60 seconds
+
+appenders:
+  console:
+    kind: console
+    filters:
+      - kind: threshold
+        leve: debug
+  baz:
+    kind: file
+    pah: /tmp/baz.log
+    encoder:
+      pattern: "%m"
+
+root:
+  appenders:
+    - console
+  level: info
+
+loggers:
+  foo::bar::baz:
+    level: warn
+    appenders:
+      - baz
+    additive: false
+"#;
+        let config = ::serde_yaml::from_str::<RawConfig>(cfg).unwrap();
+        let errors = config.appenders_lossy(&Deserializers::new()).1;
+        assert_eq!(errors.0.len(), 2);
+        // TODO look for a way to check the errors
     }
 
     #[cfg(windows)]
@@ -517,7 +551,7 @@ loggers:
     #[test]
     #[cfg(feature = "yaml_format")]
-    fn readme_sample_file_is_ok() {
+    fn test_readme_sample_file_is_ok() {
         let readme = fs::read_to_string("./README.md").expect("README file exists");
         let sample_file = &readme[readme
             .find("log4rs.yaml:")
             .expect("Sample file exists")
@@ -533,4 +567,47 @@ loggers:
         assert!(config.is_ok());
         assert!(config::create_raw_config(config.unwrap()).is_ok());
     }
+
+    #[test]
+    #[cfg(feature = "yaml_format")]
+    fn test_empty_cfg() {
+        ::serde_yaml::from_str::<RawConfig>("{}").unwrap();
+    }
+
+    #[test]
+    fn test_appender_errors() {
+        let errs = AppenderErrors { 0: vec![] };
+
+        assert!(errs.is_empty());
+
+        let mut errs = AppenderErrors {
+            0: vec![DeserializingConfigError::Appender(
+                "example".to_owned(),
+                anyhow!("test"),
+            )],
+        };
+
+        // Reports to stderr
+        errs.handle();
+    }
+
+    #[test]
+    fn test_duration_deserialize() {
+        let duration = Duration::new(5, 0);
+
+        assert_de_tokens(
+            &duration,
+            &[
+                Token::Struct {
+                    name: "Duration",
+                    len: 2,
+                },
+                Token::Str("secs"),
+                Token::U64(5),
+                Token::Str("nanos"),
+                Token::U64(0),
+                Token::StructEnd,
+            ],
+        );
+    }
 }
diff --git a/src/config/runtime.rs b/src/config/runtime.rs
index 3d04947e..2119d82b 100644
--- a/src/config/runtime.rs
+++ b/src/config/runtime.rs
@@ -420,7 +420,7 @@ fn check_logger_name(name: &str) -> Result<(), ConfigError> {
 }
 
 /// Errors encountered when validating a log4rs `Config`.
-#[derive(Debug, Error)]
+#[derive(Debug, Error, PartialEq)]
 #[error("Configuration errors: {0:#?}")]
 pub struct ConfigErrors(Vec<ConfigError>);
 
@@ -442,7 +442,7 @@ impl ConfigErrors {
 }
 
 /// An error validating a log4rs `Config`.
-#[derive(Debug, Error)]
+#[derive(Debug, Error, PartialEq)]
 pub enum ConfigError {
     /// Multiple appenders were registered with the same name.
     #[error("Duplicate appender name `{0}`")]
@@ -467,8 +467,17 @@ pub enum ConfigError {
 
 #[cfg(test)]
 mod test {
+    use super::*;
+
+    #[cfg(feature = "console_appender")]
+    use crate::append::console::ConsoleAppender;
+
+    #[cfg(all(feature = "threshold_filter", feature = "console_appender"))]
+    use crate::filter::threshold::ThresholdFilter;
+    use log::LevelFilter;
+
     #[test]
-    fn check_logger_name() {
+    fn test_check_logger_name() {
         let tests = [
             ("", false),
             ("asdf", true),
@@ -481,11 +490,201 @@ mod test {
         ];
 
         for &(ref name, expected) in &tests {
-            assert!(
-                expected == super::check_logger_name(name).is_ok(),
-                "{}",
-                name
-            );
+            assert!(expected == check_logger_name(name).is_ok(), "{}", name);
         }
     }
+
+    #[test]
+    #[cfg(feature = "console_appender")]
+    fn test_default_appender() {
+        let stdout = ConsoleAppender::builder().build();
+        let appender = Appender::builder().build("stdout", Box::new(stdout));
+
+        assert_eq!(appender.name(), "stdout");
+        assert!(appender.filters().is_empty());
+
+        // Nothing to test on this right now
+        let _appender = appender.appender();
+    }
+
+    #[test]
+    #[cfg(all(feature = "threshold_filter", feature = "console_appender"))]
+    fn test_appender_with_filters() {
+        let stdout = ConsoleAppender::builder().build();
+
+        let filter = ThresholdFilter::new(LevelFilter::Warn);
+
+        let filters: Vec<Box<dyn Filter>> = vec![
+            Box::new(ThresholdFilter::new(LevelFilter::Trace)),
+            Box::new(ThresholdFilter::new(LevelFilter::Debug)),
+            Box::new(ThresholdFilter::new(LevelFilter::Info)),
+        ];
+
+        let appender = Appender::builder()
+            .filters(filters)
+            .filter(Box::new(filter))
+            .build("stdout", Box::new(stdout));
+
+        assert_eq!(appender.name(), "stdout");
+        assert!(!appender.filters().is_empty());
+        assert_eq!(appender.filters().len(), 4);
+
+        // Nothing to test on this right now
+        let _appender = appender.appender();
+    }
+
+    #[test]
+    fn test_default_root() {
+        let mut root = Root::builder().build(LevelFilter::Debug);
+
+        // Test level set by builder and is accessible
+        assert_eq!(LevelFilter::Debug, root.level());
+
+        // Test no appenders added to builder
+        assert!(root.appenders().is_empty());
+
+        // Test level set after root created and is accessible
+        root.set_level(LevelFilter::Warn);
+        assert_ne!(LevelFilter::Debug, root.level());
+        assert_eq!(LevelFilter::Warn, root.level());
+    }
+
+    #[test]
+    fn test_root_with_appenders() {
+        let appenders = vec!["stdout", "stderr"];
+
+        let mut root = Root::builder()
+            .appender("file")
+            .appenders(appenders)
+            .build(LevelFilter::Debug);
+
+        // Test level set by builder and is accessible
+        assert_eq!(LevelFilter::Debug, root.level());
+
+        // Test appenders were added to builder
+        assert_eq!(root.appenders().len(), 3);
+
+        // Test level set after root created and is accessible
+        root.set_level(LevelFilter::Warn);
+        assert_ne!(LevelFilter::Debug, root.level());
+        assert_eq!(LevelFilter::Warn, root.level());
+    }
+
+    #[test]
+    fn test_default_config() {
+        let root = Root::builder().build(LevelFilter::Debug);
+        let cfg = Config::builder().build(root);
+
+        assert!(cfg.is_ok());
+
+        let mut cfg = cfg.unwrap();
+        assert!(cfg.appenders().is_empty());
+        assert!(cfg.loggers().is_empty());
+
+        // No test, just coverage
+        let _ = cfg.root();
+        let _ = cfg.root_mut();
+    }
+
+    #[test]
+    #[cfg(feature = "console_appender")]
+    fn test_populated_config() {
+        let root = Root::builder().build(LevelFilter::Debug);
+        let logger = Logger::builder().build("stdout", LevelFilter::Warn);
+        let appender =
+            Appender::builder().build("stdout0", Box::new(ConsoleAppender::builder().build()));
+
+        let loggers = vec![
+            Logger::builder().build("stdout0", LevelFilter::Trace),
+            Logger::builder().build("stdout1", LevelFilter::Debug),
+            Logger::builder().build("stdout2", LevelFilter::Info),
+        ];
+
+        let appenders = vec![
+            Appender::builder().build("stdout1", Box::new(ConsoleAppender::builder().build())),
+            Appender::builder().build("stderr", Box::new(ConsoleAppender::builder().build())),
+        ];
+
+        let cfg = Config::builder()
+            .logger(logger)
+            .loggers(loggers)
+            .appender(appender)
+            .appenders(appenders)
+            .build(root);
+
+        let cfg = cfg.unwrap();
+        assert_eq!(cfg.appenders().len(), 3);
+        assert_eq!(cfg.loggers().len(), 4);
+    }
+
+    #[test]
+    fn test_duplicate_logger_name() {
+        let root = Root::builder().build(LevelFilter::Debug);
+        let loggers = vec![
+            Logger::builder().build("stdout", LevelFilter::Trace),
+            Logger::builder().build("stdout", LevelFilter::Debug),
+        ];
+
+        let cfg = Config::builder().loggers(loggers).build(root);
+
+        let error = ConfigErrors {
+            0: vec![ConfigError::DuplicateLoggerName("stdout".to_owned())],
+        };
+
+        assert_eq!(cfg.unwrap_err(), error);
+    }
+
+    #[test]
+    #[cfg(feature = "console_appender")]
+    fn test_duplicate_appender_name() {
+        let root = Root::builder().build(LevelFilter::Debug);
+
+        let appenders = vec![
+            Appender::builder().build("stdout", Box::new(ConsoleAppender::builder().build())),
+            Appender::builder().build("stdout", Box::new(ConsoleAppender::builder().build())),
+        ];
+
+        let cfg = Config::builder().appenders(appenders).build(root);
+
+        let error = ConfigErrors {
+            0: vec![ConfigError::DuplicateAppenderName("stdout".to_owned())],
+        };
+
+        assert_eq!(cfg.unwrap_err(), error);
+    }
+
+    #[test]
+    fn test_nonexist_appender() {
+        let root = Root::builder().appender("file").build(LevelFilter::Debug);
+
+        let logger = Logger::builder()
+            .appender("stdout")
+            .build("stdout", LevelFilter::Trace);
+
+        let cfg = Config::builder().logger(logger).build(root);
+
+        let error = ConfigErrors {
+            0: vec![
+                ConfigError::NonexistentAppender("file".to_owned()),
+                ConfigError::NonexistentAppender("stdout".to_owned()),
+            ],
+        };
+
+        assert_eq!(cfg.unwrap_err(), error);
+    }
+
+    #[test]
+    fn test_invalid_logger_name() {
+        let root = Root::builder().build(LevelFilter::Debug);
+
+        let logger = Logger::builder().build("", LevelFilter::Trace);
+
+        let cfg = Config::builder().logger(logger).build(root);
+
+        let error = ConfigErrors {
+            0: vec![ConfigError::InvalidLoggerName("".to_owned())],
+        };
+
+        assert_eq!(cfg.unwrap_err(), error);
+    }
 }
diff --git a/test_cfgs/test.json b/test_cfgs/test.json
new file mode 100644
index 00000000..b93f08c9
--- /dev/null
+++ b/test_cfgs/test.json
@@ -0,0 +1,51 @@
+{
+  "refresh_rate": "5 seconds",
+  "appenders": {
+    "stdout": {
+      "kind": "console",
+      "encoder": {
+        "pattern": "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}"
+      },
+      "filters": [
+        {
+          "kind": "threshold",
+          "level": "info"
+        }
+      ]
+    },
+    "file": {
+      "kind": "file",
+      "path": "log/file.log",
+      "encoder": {
+        "pattern": "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+      }
+    },
+    "rollingfile": {
+      "kind": "rolling_file",
+      "path": "log/rolling_file.log",
+      "encoder": {
+        "pattern": "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+      },
+      "policy": {
+        "trigger": {
+          "kind": "time",
+          "interval": "1 minute"
+        },
+        "roller": {
+          "kind": "fixed_window",
+          "pattern": "log/old-rolling_file-{}.log",
+          "base": 0,
+          "count": 2
+        }
+      }
+    }
+  },
+  "root": {
+    "level": "info",
+    "appenders": [
+      "stdout",
+      "file",
+      "rollingfile"
+    ]
+  }
+}
diff --git a/test_cfgs/test.toml b/test_cfgs/test.toml
new file mode 100644
index 00000000..dc1f10da
--- /dev/null
+++ b/test_cfgs/test.toml
@@ -0,0 +1,39 @@
+refresh_rate = "5 seconds"
+
+[appenders.stdout]
+kind = "console"
+
+  [appenders.stdout.encoder]
+  pattern = "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}"
+
+  [[appenders.stdout.filters]]
+  kind = "threshold"
+  level = "info"
+
+[appenders.file]
+kind = "file"
+path = "log/file.log"
+
+  [appenders.file.encoder]
+  pattern = "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+
+[appenders.rollingfile]
+kind = "rolling_file"
+path = "log/rolling_file.log"
+
+  [appenders.rollingfile.encoder]
+  pattern = "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+
+[appenders.rollingfile.policy.trigger]
+kind = "time"
+interval = "1 minute"
+
+[appenders.rollingfile.policy.roller]
+kind = "fixed_window"
+pattern = "log/old-rolling_file-{}.log"
+base = 0
+count = 2
+
+[root]
+level = "info"
+appenders = [ "stdout", "file", "rollingfile" ]
diff --git a/test_cfgs/test.yml b/test_cfgs/test.yml
new file mode 100644
index 00000000..47d18ccd
--- /dev/null
+++ b/test_cfgs/test.yml
@@ -0,0 +1,36 @@
+refresh_rate: 5 seconds
+
+appenders:
+  stdout:
+    kind: console
+    encoder:
+      pattern: "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}"
+    filters:
+      - kind: threshold
+        level: info
+  file:
+    kind: file
+    path: "log/file.log"
+    encoder:
+      pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+  rollingfile:
+    kind: rolling_file
+    path: "log/rolling_file.log"
+    encoder:
+      pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
+    policy:
+      trigger:
+        kind: time
+        interval: 1 minute
+      roller:
+        kind: fixed_window
+        pattern: "log/old-rolling_file-{}.log"
+        base: 0
+        count: 2
+
+root:
+  level: info
+  appenders:
+    - stdout
+    - file
+    - rollingfile