Commit
test: continuing
bconn98 committed Feb 25, 2024
1 parent 43973fb commit 540d260
Showing 4 changed files with 274 additions and 11 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/coverage.yml
@@ -21,7 +21,7 @@ jobs:
options: --security-opt seccomp=unconfined
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v3

- name: Generate code coverage
run: |
@@ -30,6 +30,6 @@ jobs:
USER: "test-user"

- name: Upload to codecov.io
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
132 changes: 129 additions & 3 deletions src/append/rolling_file/mod.rs
@@ -375,9 +375,50 @@ mod test {
use super::*;
use crate::append::rolling_file::policy::Policy;

#[cfg(feature = "config_parsing")]
use serde_test::{assert_de_tokens, Token};

#[test]
#[cfg(feature = "config_parsing")]
fn test_policy_deser() {
use super::*;
use serde_value::Value;
use std::collections::BTreeMap;

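// An explicit `kind` with no further fields should deserialize to an empty config map.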
assert_de_tokens(
&Policy {
kind: "compound".to_owned(),
config: Value::Map(BTreeMap::new()),
},
&[
Token::Struct {
name: "Policy",
len: 1,
},
Token::Str("kind"),
Token::Str("compound"),
Token::StructEnd,
],
);

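// Omitting every field entirely should still yield the "compound" policy.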
assert_de_tokens(
&Policy {
kind: "compound".to_owned(),
config: Value::Map(BTreeMap::new()),
},
&[
Token::Struct {
name: "Policy",
len: 0,
},
Token::StructEnd,
],
);
}

#[test]
#[cfg(feature = "yaml_format")]
- fn deserialize() {
+ fn test_deserialize_appenders() {
use crate::config::{Deserializers, RawConfig};

let dir = tempfile::tempdir().unwrap();
@@ -430,7 +471,7 @@ appenders:
}

#[test]
- fn append() {
+ fn test_append() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("append.log");
RollingFileAppender::builder()
@@ -453,11 +494,12 @@ appenders:
}

#[test]
- fn truncate() {
+ fn test_truncate() {
let dir = tempfile::tempdir().unwrap();
let path = dir.path().join("truncate.log");
RollingFileAppender::builder()
.append(false)
+ .encoder(Box::new(PatternEncoder::new("{m}{n}")))
.build(&path, Box::new(NopPolicy))
.unwrap();
assert!(path.exists());
@@ -474,4 +516,88 @@ appenders:
.unwrap();
assert_eq!(contents, b"");
}

#[test]
fn test_logfile() {
let file = tempfile::tempdir().unwrap();
let file = file.path().join("foo.log");
let _ = std::fs::File::create(&file).unwrap();
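// Construct a LogFile by hand so its accessors can be exercised directly.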
let mut logfile = LogFile {
writer: &mut None,
path: file.as_path(),
len: 0,
};

assert_eq!(logfile.path(), file.as_path());
assert_eq!(logfile.len_estimate(), 0);

// No assertions to make here: the writer becomes inaccessible, but there's
// no getter to verify it
logfile.roll();
}

#[test]
#[cfg(feature = "config_parsing")]
fn test_deserializer() {
use super::*;
use crate::config::Deserializers;
use serde_value::Value;
use std::collections::BTreeMap;

let file = tempfile::tempdir().unwrap();
let file = file.path().join("foo.log");

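// Hand-built equivalent of the YAML config: a compound policy with a 1mb
// size trigger and a fixed_window roller keeping five files.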
let append_cfg = RollingFileAppenderConfig {
path: file.as_os_str().to_str().unwrap().to_owned(),
append: Some(true),
encoder: Some(EncoderConfig {
kind: "pattern".to_owned(),
config: Value::Map(BTreeMap::new()),
}),
policy: Policy {
kind: "compound".to_owned(),
config: Value::Map({
let mut map = BTreeMap::new();
map.insert(
Value::String("trigger".to_owned()),
Value::Map({
let mut map = BTreeMap::new();
map.insert(
Value::String("kind".to_owned()),
Value::String("size".to_owned()),
);
map.insert(
Value::String("limit".to_owned()),
Value::String("1mb".to_owned()),
);
map
}),
);
map.insert(
Value::String("roller".to_owned()),
Value::Map({
let mut map = BTreeMap::new();
map.insert(
Value::String("kind".to_owned()),
Value::String("fixed_window".to_owned()),
);
map.insert(Value::String("base".to_owned()), Value::I32(1));
map.insert(Value::String("count".to_owned()), Value::I32(5));
map.insert(
Value::String("pattern".to_owned()),
Value::String("logs/test.{}.log".to_owned()),
);
map
}),
);
map
}),
},
};

let deserializer = RollingFileAppenderDeserializer;

let res = deserializer.deserialize(append_cfg, &Deserializers::default());
assert!(res.is_ok());
}
}
120 changes: 120 additions & 0 deletions src/append/rolling_file/policy/compound/mod.rs
@@ -159,3 +159,123 @@ impl Deserialize for CompoundPolicyDeserializer {
Ok(Box::new(CompoundPolicy::new(trigger, roller)))
}
}

#[cfg(test)]
mod test {
use self::{roll::delete::DeleteRoller, trigger::size::SizeTrigger};

use super::*;

#[cfg(feature = "config_parsing")]
use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};

#[test]
#[cfg(feature = "config_parsing")]
fn test_trigger_deser() {
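// Fields beyond `kind`, such as `limit`, are collected into the trigger's config map.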
assert_de_tokens(
&Trigger {
kind: "size".to_owned(),
config: Value::Map({
let mut map = BTreeMap::new();
map.insert(Value::String("limit".to_owned()), Value::U64(1024));
map
}),
},
&[
Token::Struct {
name: "Trigger",
len: 2,
},
Token::Str("kind"),
Token::Str("size"),
Token::Str("limit"),
Token::U64(1024),
Token::StructEnd,
],
);

assert_de_tokens_error::<Trigger>(
&[
Token::Struct {
name: "Trigger",
len: 1,
},
Token::Str("knd"),
Token::Str("size"),
Token::StructEnd,
],
"missing field `kind`",
);
}

#[test]
#[cfg(feature = "config_parsing")]
fn test_roller_deser() {
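// A delete roller takes no extra configuration, so its config map stays empty.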
assert_de_tokens(
&Roller {
kind: "delete".to_owned(),
config: Value::Map(BTreeMap::new()),
},
&[
Token::Struct {
name: "Roller",
len: 1,
},
Token::Str("kind"),
Token::Str("delete"),
Token::StructEnd,
],
);

assert_de_tokens_error::<Roller>(
&[
Token::Struct {
name: "Roller",
len: 1,
},
Token::Str("knd"),
Token::Str("delete"),
Token::StructEnd,
],
"missing field `kind`",
);
}

#[test]
fn test_pre_process() {
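// A size-triggered compound policy is evaluated after the write, not before.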
let trigger = SizeTrigger::new(1024);
let roller = DeleteRoller::new();
let policy = CompoundPolicy::new(Box::new(trigger), Box::new(roller));

assert!(!policy.is_pre_process());
}

#[test]
fn test_process() {
let trigger = SizeTrigger::new(1024);
let roller = DeleteRoller::new();
let policy = CompoundPolicy::new(Box::new(trigger), Box::new(roller));

// Below the 1024-byte limit, the policy should not roll the file
let file = tempfile::tempdir().unwrap();
let file = file.path().join("foo.log");
let _ = std::fs::File::create(&file).unwrap();
let mut logfile = LogFile {
writer: &mut None,
path: file.as_path(),
len: 0,
};
assert!(policy.process(&mut logfile).is_ok());

// At 2048 bytes, over the limit, the policy should roll the file
let file = tempfile::tempdir().unwrap();
let file = file.path().join("foo2.log");
let _ = std::fs::File::create(&file).unwrap();
let mut logfile = LogFile {
writer: &mut None,
path: file.as_path(),
len: 2048,
};
assert!(policy.process(&mut logfile).is_ok());
}
}
29 changes: 23 additions & 6 deletions src/append/rolling_file/policy/compound/trigger/size.rs
@@ -165,7 +165,7 @@ mod test {
static BYTE_MULTIPLIER: u64 = 1024;

#[test]
- fn pre_process() {
+ fn test_pre_process() {
let trigger = SizeTrigger::new(2048);
assert!(!trigger.is_pre_process());
}
@@ -250,6 +250,23 @@ mod test {
);
}

#[test]
#[cfg(feature = "config_parsing")]
fn test_float_deserialize() {
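// Fractional byte counts are rejected; the size deserializer expects an integral size.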
assert_de_tokens_error::<SizeTriggerConfig>(
&[
Token::Struct {
name: "SizeTriggerConfig",
len: 1,
},
Token::Str("limit"),
Token::F32(2.0),
Token::StructEnd,
],
"invalid type: floating point `2.0`, expected a size",
);
}

#[test]
#[cfg(feature = "config_parsing")]
fn test_str_deserialize() {
@@ -317,7 +334,7 @@ mod test {

#[test]
#[cfg(feature = "config_parsing")]
- fn byte_deserialize() {
+ fn test_byte_deserialize() {
let trigger = SizeTriggerConfig {
limit: BYTE_MULTIPLIER,
};
Expand Down Expand Up @@ -351,7 +368,7 @@ mod test {

#[test]
#[cfg(feature = "config_parsing")]
- fn kilobyte_deserialize() {
+ fn test_kilobyte_deserialize() {
let trigger = SizeTriggerConfig {
limit: BYTE_MULTIPLIER,
};
Expand Down Expand Up @@ -409,7 +426,7 @@ mod test {

#[test]
#[cfg(feature = "config_parsing")]
- fn megabyte_deserialize() {
+ fn test_megabyte_deserialize() {
// Test mb unit
let trigger = SizeTriggerConfig {
limit: BYTE_MULTIPLIER.pow(2),
Expand Down Expand Up @@ -466,7 +483,7 @@ mod test {

#[test]
#[cfg(feature = "config_parsing")]
- fn gigabyte_deserialize() {
+ fn test_gigabyte_deserialize() {
// Test gb unit
let trigger = SizeTriggerConfig {
limit: BYTE_MULTIPLIER.pow(3),
Expand Down Expand Up @@ -523,7 +540,7 @@ mod test {

#[test]
#[cfg(feature = "config_parsing")]
- fn terabyte_deserialize() {
+ fn test_terabyte_deserialize() {
// Test tb unit
let trigger = SizeTriggerConfig {
limit: BYTE_MULTIPLIER.pow(4),
