From 6c6ace0fcd136c8b623f7b56d0f8cc0b5099a767 Mon Sep 17 00:00:00 2001 From: Dirreck Date: Fri, 2 Feb 2024 13:51:04 +0800 Subject: [PATCH] add trigger kind "time" in rolling_file (#296) * feat(triger): Add "time" triger * feat(trigger): use pre_process for time trigger * test(time): add test case of time trigger. * feat(triger): Add "time" triger like log4j * Update docs/Configuration.md Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * Update src/append/rolling_file/policy/compound/trigger/time.rs Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * Update src/append/rolling_file/policy/compound/trigger/time.rs Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * Update src/append/rolling_file/policy/compound/trigger/time.rs Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * Update src/append/rolling_file/policy/compound/trigger/time.rs Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * fix nix Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> * Update Configuration.md and time.rs Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> --------- Co-authored-by: Bryan Conn <30739012+bconn98@users.noreply.github.com> --- Cargo.toml | 4 + docs/Configuration.md | 60 ++- examples/compile_time_config.rs | 4 +- examples/sample_config.yml | 41 +- src/append/rolling_file/mod.rs | 54 +- .../rolling_file/policy/compound/mod.rs | 4 + .../policy/compound/trigger/mod.rs | 8 + .../policy/compound/trigger/size.rs | 15 + .../policy/compound/trigger/time.rs | 496 ++++++++++++++++++ src/append/rolling_file/policy/mod.rs | 2 + src/config/raw.rs | 8 + src/lib.rs | 1 + 12 files changed, 663 insertions(+), 34 deletions(-) create mode 100644 src/append/rolling_file/policy/compound/trigger/time.rs diff --git a/Cargo.toml b/Cargo.toml index f2f488e1..705f12b0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ compound_policy = [] delete_roller = [] fixed_window_roller = [] size_trigger = [] +time_trigger = ["rand"] json_encoder = ["serde", "serde_json", "chrono", "log-mdc", "log/serde", "thread-id"] pattern_encoder = ["chrono", "log-mdc", "thread-id"] ansi_writer = [] @@ -41,6 +42,7 @@ all_components = [ "delete_roller", "fixed_window_roller", "size_trigger", + "time_trigger", "json_encoder", "pattern_encoder", "threshold_filter" @@ -68,6 +70,7 @@ serde_json = { version = "1.0", optional = true } serde_yaml = { version = "0.9", optional = true } toml = { version = "0.8", optional = true } parking_lot = { version = "0.12.0", optional = true } +rand = { version = "0.8", optional = true} thiserror = "1.0.15" anyhow = "1.0.28" derivative = "2.2" @@ -84,6 +87,7 @@ lazy_static = "1.4" streaming-stats = "0.2.3" humantime = "2.1" tempfile = "3.8" +mock_instant = "0.3" [[example]] name = "json_logger" diff --git a/docs/Configuration.md b/docs/Configuration.md index 6c23c9f2..5ec2ce65 100644 --- a/docs/Configuration.md +++ b/docs/Configuration.md @@ -168,13 +168,15 @@ my_rolling_appender: pattern: "logs/test.{}.log" ``` -The new component is the _policy_ field. A policy must have `kind` like most +The new component is the _policy_ field. A policy must have the _kind_ field like most other components, the default (and only supported) policy is `kind: compound`. -The _trigger_ field is used to dictate when the log file should be rolled. The -only supported trigger is `kind: size`. 
There is a required field `limit`
-which defines the maximum file size prior to a rolling of the file. The limit
-field requires one of the following units in bytes, case does not matter:
+The _trigger_ field is used to dictate when the log file should be rolled. It
+supports two types: `size`, and `time`.
+
+For `size`, it requires a _limit_ field. The _limit_ field is a string which defines the maximum file size
+prior to a rolling of the file. The limit field requires one of the following
+units in bytes, case does not matter:
 
 - b
 - kb/kib
@@ -190,6 +192,47 @@ trigger:
   limit: 10 mb
 ```
 
+For `time`, it has three fields: _interval_, _modulate_ and _max_random_delay_.
+
+The _interval_ field is a string which defines how often the file should be
+rolled. The interval field supports the following units (`second` is used if the
+unit is not specified), case does not matter:
+
+- second[s]
+- minute[s]
+- hour[s]
+- day[s]
+- week[s]
+- month[s]
+- year[s]
+
+> Note: `log4j` treats `Sunday` as the first day of the week, but `log4rs` treats
+> `Monday` as the first day of the week, which follows the `chrono` crate
+> and the `ISO 8601` standard. So when using `week`, the log file will be rolled
+> on `Monday` instead of `Sunday`.
+
+The _modulate_ field is an optional boolean. It indicates whether the interval should
+be adjusted to cause the next rollover to occur on the interval boundary. For example,
+if the interval is 4 hours and the current hour is 3 am, when true, the first rollover
+will occur at 4 am and the next ones will occur at 8 am, noon, 4 pm, etc. The default
+value is false.
+
+The _max_random_delay_ field is an optional integer. It indicates the maximum number
+of seconds to randomly delay a rollover. By default, this is 0, which indicates no
+delay. This setting is useful on servers where multiple applications are configured
+to roll over log files at the same time, as it spreads the load of doing so across
+time.
+
+i.e.
+
+```yml
+trigger:
+  kind: time
+  interval: 1 day
+  modulate: false
+  max_random_delay: 0
+```
+
 The _roller_ field supports two types: delete, and fixed_window. The delete
 roller does not take any other configuration fields. The fixed_window roller
 supports three fields: pattern, base, and count. The most current log file will
@@ -202,7 +245,7 @@ that if the file extension of the pattern is `.gz` and the `gzip` Cargo feature
 is enabled, the archive files will be gzip-compressed.
 
 > Note: This pattern field is only used for archived files. The `path` field
-of the higher level `rolling_file` will be used for the active log file.
+> of the higher level `rolling_file` will be used for the active log file.
 
 The _base_ field is the starting index used to name rolling files.
 
@@ -210,6 +253,11 @@ The _count_ field is the exclusive maximum index used to name rolling files.
 However, be warned that the roller renames every file when a log rolls over.
 Having a large count value can negatively impact performance.
 
+> Note: If you use the `kind: time` trigger, the log file will be rolled before it
+> gets written, which ensures that the logs are rolled at the correct boundary
+> instead of leaving a single line of logs in the previous log file. However,
+> this may cause a substantial slowdown if the `background` feature is not enabled.
+
 i.e.
```yml diff --git a/examples/compile_time_config.rs b/examples/compile_time_config.rs index bf484188..0f478552 100644 --- a/examples/compile_time_config.rs +++ b/examples/compile_time_config.rs @@ -7,7 +7,7 @@ fn main() { let config = serde_yaml::from_str(config_str).unwrap(); log4rs::init_raw_config(config).unwrap(); - info!("Goes to console"); - error!("Goes to console"); + info!("Goes to console, file and rolling file"); + error!("Goes to console, file and rolling file"); trace!("Doesn't go to console as it is filtered out"); } diff --git a/examples/sample_config.yml b/examples/sample_config.yml index 4a0d69cd..84bb9133 100644 --- a/examples/sample_config.yml +++ b/examples/sample_config.yml @@ -1,12 +1,33 @@ appenders: - stdout: - kind: console - encoder: - pattern: "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}" - filters: - - kind: threshold - level: info + stdout: + kind: console + encoder: + pattern: "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}" + filters: + - kind: threshold + level: info + file: + kind: file + path: "log/file.log" + encoder: + pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + rollingfile: + kind: rolling_file + path: "log/rolling_file.log" + encoder: + pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + policy: + trigger: + kind: time + interval: 1 minute + roller: + kind: fixed_window + pattern: "log/old-rolling_file-{}.log" + base: 0 + count: 2 root: - level: info - appenders: - - stdout + level: info + appenders: + - stdout + - file + - rollingfile diff --git a/src/append/rolling_file/mod.rs b/src/append/rolling_file/mod.rs index e14e4ad8..9e6d35ee 100644 --- a/src/append/rolling_file/mod.rs +++ b/src/append/rolling_file/mod.rs @@ -167,22 +167,41 @@ impl Append for RollingFileAppender { // TODO(eas): Perhaps this is better as a concurrent queue? let mut writer = self.writer.lock(); - let len = { - let writer = self.get_writer(&mut writer)?; - self.encoder.encode(writer, record)?; - writer.flush()?; - writer.len - }; + let is_pre_process = self.policy.is_pre_process(); + let log_writer = self.get_writer(&mut writer)?; - let mut file = LogFile { - writer: &mut writer, - path: &self.path, - len, - }; + if is_pre_process { + let len = log_writer.len; + + let mut file = LogFile { + writer: &mut writer, + path: &self.path, + len, + }; + + // TODO(eas): Idea: make this optionally return a future, and if so, we initialize a queue for + // data that comes in while we are processing the file rotation. + + self.policy.process(&mut file)?; + + let log_writer_new = self.get_writer(&mut writer)?; + self.encoder.encode(log_writer_new, record)?; + log_writer_new.flush()?; + } else { + self.encoder.encode(log_writer, record)?; + log_writer.flush()?; + let len = log_writer.len; - // TODO(eas): Idea: make this optionally return a future, and if so, we initialize a queue for - // data that comes in while we are processing the file rotation. 
- self.policy.process(&mut file) + let mut file = LogFile { + writer: &mut writer, + path: &self.path, + len, + }; + + self.policy.process(&mut file)?; + } + + Ok(()) } fn flush(&self) {} @@ -371,8 +390,8 @@ appenders: path: {0}/foo.log policy: trigger: - kind: size - limit: 1024 + kind: time + interval: 2 minutes roller: kind: delete bar: @@ -405,6 +424,9 @@ appenders: fn process(&self, _: &mut LogFile) -> anyhow::Result<()> { Ok(()) } + fn is_pre_process(&self) -> bool { + false + } } #[test] diff --git a/src/append/rolling_file/policy/compound/mod.rs b/src/append/rolling_file/policy/compound/mod.rs index 9fe7b233..484af19c 100644 --- a/src/append/rolling_file/policy/compound/mod.rs +++ b/src/append/rolling_file/policy/compound/mod.rs @@ -107,6 +107,10 @@ impl Policy for CompoundPolicy { } Ok(()) } + + fn is_pre_process(&self) -> bool { + self.trigger.is_pre_process() + } } /// A deserializer for the `CompoundPolicyDeserializer`. diff --git a/src/append/rolling_file/policy/compound/trigger/mod.rs b/src/append/rolling_file/policy/compound/trigger/mod.rs index 76e67e74..3d61e0da 100644 --- a/src/append/rolling_file/policy/compound/trigger/mod.rs +++ b/src/append/rolling_file/policy/compound/trigger/mod.rs @@ -9,10 +9,18 @@ use crate::config::Deserializable; #[cfg(feature = "size_trigger")] pub mod size; +#[cfg(feature = "time_trigger")] +pub mod time; + /// A trait which identifies if the active log file should be rolled over. pub trait Trigger: fmt::Debug + Send + Sync + 'static { /// Determines if the active log file should be rolled over. fn trigger(&self, file: &LogFile) -> anyhow::Result; + + /// Sets the is_pre_process flag for log files. + /// + /// Defaults to true for time triggers and false for size triggers + fn is_pre_process(&self) -> bool; } #[cfg(feature = "config_parsing")] diff --git a/src/append/rolling_file/policy/compound/trigger/size.rs b/src/append/rolling_file/policy/compound/trigger/size.rs index 511ea981..d399cb2f 100644 --- a/src/append/rolling_file/policy/compound/trigger/size.rs +++ b/src/append/rolling_file/policy/compound/trigger/size.rs @@ -117,6 +117,10 @@ impl Trigger for SizeTrigger { fn trigger(&self, file: &LogFile) -> anyhow::Result { Ok(file.len_estimate() > self.limit) } + + fn is_pre_process(&self) -> bool { + false + } } /// A deserializer for the `SizeTrigger`. @@ -149,3 +153,14 @@ impl Deserialize for SizeTriggerDeserializer { Ok(Box::new(SizeTrigger::new(config.limit))) } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn pre_process() { + let trigger = SizeTrigger::new(2048); + assert!(!trigger.is_pre_process()); + } +} diff --git a/src/append/rolling_file/policy/compound/trigger/time.rs b/src/append/rolling_file/policy/compound/trigger/time.rs new file mode 100644 index 00000000..4568a524 --- /dev/null +++ b/src/append/rolling_file/policy/compound/trigger/time.rs @@ -0,0 +1,496 @@ +//! The time trigger. +//! +//! Requires the `time_trigger` feature. + +#[cfg(test)] +use chrono::NaiveDateTime; +use chrono::{DateTime, Datelike, Duration, Local, TimeZone, Timelike}; +#[cfg(test)] +use mock_instant::{SystemTime, UNIX_EPOCH}; +use rand::Rng; +#[cfg(feature = "config_parsing")] +use serde::de; +#[cfg(feature = "config_parsing")] +use std::fmt; +use std::sync::RwLock; + +use crate::append::rolling_file::{policy::compound::trigger::Trigger, LogFile}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; + +#[cfg(feature = "config_parsing")] +/// Configuration for the time trigger. 
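+///
+/// Deserialized from the `trigger` map of a compound policy when `kind: time`
+/// is used; the available fields (`interval`, `modulate`, `max_random_delay`)
+/// are described in `docs/Configuration.md`.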
+#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] +#[serde(deny_unknown_fields)] +pub struct TimeTriggerConfig { + interval: TimeTriggerInterval, + #[serde(default)] + modulate: bool, + #[serde(default)] + max_random_delay: u64, +} + +#[cfg(not(feature = "config_parsing"))] +/// Configuration for the time trigger. +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] +pub struct TimeTriggerConfig { + interval: TimeTriggerInterval, + modulate: bool, + max_random_delay: u64, +} + +/// A trigger which rolls the log once it has passed a certain time. +#[derive(Debug)] +pub struct TimeTrigger { + config: TimeTriggerConfig, + next_roll_time: RwLock>, +} + +/// The TimeTrigger supports the following units (case insensitive): +/// "second", "seconds", "minute", "minutes", "hour", "hours", "day", "days", "week", "weeks", "month", "months", "year", "years". The unit defaults to +/// second if not specified. +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] +pub enum TimeTriggerInterval { + /// TimeTriger in second(s). + Second(i64), + /// TimeTriger in minute(s). + Minute(i64), + /// TimeTriger in hour(s). + Hour(i64), + /// TimeTriger in day(s). + Day(i64), + /// TimeTriger in week(s). + Week(i64), + /// TimeTriger in month(s). + Month(i64), + /// TimeTriger in year(s). + Year(i64), +} + +impl Default for TimeTriggerInterval { + fn default() -> Self { + TimeTriggerInterval::Second(1) + } +} + +#[cfg(feature = "config_parsing")] +impl<'de> serde::Deserialize<'de> for TimeTriggerInterval { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + struct V; + + impl<'de2> de::Visitor<'de2> for V { + type Value = TimeTriggerInterval; + + fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.write_str("a time") + } + + fn visit_u64(self, v: u64) -> Result + where + E: de::Error, + { + Ok(TimeTriggerInterval::Second(v as i64)) + } + + fn visit_i64(self, v: i64) -> Result + where + E: de::Error, + { + if v < 0 { + return Err(E::invalid_value( + de::Unexpected::Signed(v), + &"a non-negative number", + )); + } + + Ok(TimeTriggerInterval::Second(v)) + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + let (number, unit) = match v.find(|c: char| !c.is_ascii_digit()) { + Some(n) => (v[..n].trim(), Some(v[n..].trim())), + None => (v.trim(), None), + }; + + let number = match number.parse::() { + Ok(n) => { + if n < 0 { + return Err(E::invalid_value( + de::Unexpected::Signed(n), + &"a non-negative number", + )); + } + n + } + Err(_) => { + return Err(E::invalid_value(de::Unexpected::Str(number), &"a number")) + } + }; + + let unit = match unit { + Some(u) => u, + None => return Ok(TimeTriggerInterval::Second(number)), + }; + + let result = if unit.eq_ignore_ascii_case("second") + || unit.eq_ignore_ascii_case("seconds") + { + Some(TimeTriggerInterval::Second(number)) + } else if unit.eq_ignore_ascii_case("minute") + || unit.eq_ignore_ascii_case("minutes") + { + Some(TimeTriggerInterval::Minute(number)) + } else if unit.eq_ignore_ascii_case("hour") || unit.eq_ignore_ascii_case("hours") { + Some(TimeTriggerInterval::Hour(number)) + } else if unit.eq_ignore_ascii_case("day") || unit.eq_ignore_ascii_case("days") { + Some(TimeTriggerInterval::Day(number)) + } else if unit.eq_ignore_ascii_case("week") || unit.eq_ignore_ascii_case("weeks") { + Some(TimeTriggerInterval::Week(number)) + } else if unit.eq_ignore_ascii_case("month") || unit.eq_ignore_ascii_case("months") + { + Some(TimeTriggerInterval::Month(number)) + } else if 
unit.eq_ignore_ascii_case("year") || unit.eq_ignore_ascii_case("years") { + Some(TimeTriggerInterval::Year(number)) + } else { + return Err(E::invalid_value(de::Unexpected::Str(unit), &"a valid unit")); + }; + + match result { + Some(n) => Ok(n), + None => Err(E::invalid_value(de::Unexpected::Str(v), &"a time")), + } + } + } + + d.deserialize_any(V) + } +} + +impl TimeTrigger { + /// Returns a new trigger which rolls the log once it has passed the + /// specified time. + pub fn new(config: TimeTriggerConfig) -> TimeTrigger { + #[cfg(test)] + let current = { + let now: std::time::Duration = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time before Unix epoch"); + NaiveDateTime::from_timestamp_opt(now.as_secs() as i64, now.subsec_nanos()) + .unwrap() + .and_local_timezone(Local) + .unwrap() + }; + + #[cfg(not(test))] + let current = Local::now(); + let next_time = TimeTrigger::get_next_time(current, config.interval, config.modulate); + let next_roll_time = if config.max_random_delay > 0 { + let random_delay = rand::thread_rng().gen_range(0..config.max_random_delay); + next_time + Duration::seconds(random_delay as i64) + } else { + next_time + }; + + TimeTrigger { + config, + next_roll_time: RwLock::new(next_roll_time), + } + } + + fn get_next_time( + current: DateTime, + interval: TimeTriggerInterval, + modulate: bool, + ) -> DateTime { + let year = current.year(); + if let TimeTriggerInterval::Year(n) = interval { + let n = n as i32; + let increment = if modulate { n - year % n } else { n }; + let year_new = year + increment; + return Local.with_ymd_and_hms(year_new, 1, 1, 0, 0, 0).unwrap(); + } + + if let TimeTriggerInterval::Month(n) = interval { + let month0 = current.month0(); + let n = n as u32; + let increment = if modulate { n - month0 % n } else { n }; + let num_months = (year as u32) * 12 + month0; + let num_months_new = num_months + increment; + let year_new = (num_months_new / 12) as i32; + let month_new = (num_months_new) % 12 + 1; + return Local + .with_ymd_and_hms(year_new, month_new, 1, 0, 0, 0) + .unwrap(); + } + + let month = current.month(); + let day = current.day(); + if let TimeTriggerInterval::Week(n) = interval { + let week0 = current.iso_week().week0() as i64; + let weekday = current.weekday().num_days_from_monday() as i64; // Monday is the first day of the week + let time = Local.with_ymd_and_hms(year, month, day, 0, 0, 0).unwrap(); + let increment = if modulate { n - week0 % n } else { n }; + return time + Duration::weeks(increment) - Duration::days(weekday); + } + + if let TimeTriggerInterval::Day(n) = interval { + let ordinal0 = current.ordinal0() as i64; + let time = Local.with_ymd_and_hms(year, month, day, 0, 0, 0).unwrap(); + let increment = if modulate { n - ordinal0 % n } else { n }; + return time + Duration::days(increment); + } + + let hour = current.hour(); + if let TimeTriggerInterval::Hour(n) = interval { + let time = Local + .with_ymd_and_hms(year, month, day, hour, 0, 0) + .unwrap(); + let increment = if modulate { n - (hour as i64) % n } else { n }; + return time + Duration::hours(increment); + } + + let min = current.minute(); + if let TimeTriggerInterval::Minute(n) = interval { + let time = Local + .with_ymd_and_hms(year, month, day, hour, min, 0) + .unwrap(); + let increment = if modulate { n - (min as i64) % n } else { n }; + return time + Duration::minutes(increment); + } + + let sec = current.second(); + if let TimeTriggerInterval::Second(n) = interval { + let time = Local + .with_ymd_and_hms(year, month, day, hour, min, 
sec) + .unwrap(); + let increment = if modulate { n - (sec as i64) % n } else { n }; + return time + Duration::seconds(increment); + } + panic!("Should not reach here!"); + } +} + +impl Trigger for TimeTrigger { + fn trigger(&self, _file: &LogFile) -> anyhow::Result { + #[cfg(test)] + let current = { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time before Unix epoch"); + NaiveDateTime::from_timestamp_opt(now.as_secs() as i64, now.subsec_nanos()) + .unwrap() + .and_local_timezone(Local) + .unwrap() + }; + + #[cfg(not(test))] + let current: DateTime = Local::now(); + let mut next_roll_time = self.next_roll_time.write().unwrap(); + let is_trigger = current >= *next_roll_time; + if is_trigger { + let tmp = TimeTrigger::new(self.config); + let time_new = tmp.next_roll_time.read().unwrap(); + *next_roll_time = *time_new; + } + Ok(is_trigger) + } + + fn is_pre_process(&self) -> bool { + true + } +} + +/// A deserializer for the `TimeTrigger`. +/// +/// # Configuration +/// +/// ```yaml +/// kind: time +/// +/// # The time interval. The following units are supported (case insensitive): +/// # "second(s)", "minute(s)", "hour(s)", "day(s)", "week(s)", "month(s)", "year(s)". The unit defaults to +/// # second if not specified. +/// interval: 7 day +/// ``` +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] +pub(crate) struct TimeTriggerDeserializer; + +#[cfg(feature = "config_parsing")] +impl Deserialize for TimeTriggerDeserializer { + type Trait = dyn Trigger; + + type Config = TimeTriggerConfig; + + fn deserialize( + &self, + config: TimeTriggerConfig, + _: &Deserializers, + ) -> anyhow::Result> { + Ok(Box::new(TimeTrigger::new(config))) + } +} + +#[cfg(test)] +mod test { + use super::*; + use mock_instant::MockClock; + use std::time::Duration; + + fn trigger_with_time_and_modulate( + interval: TimeTriggerInterval, + modulate: bool, + millis: u64, + ) -> (bool, bool) { + let file = tempfile::tempdir().unwrap(); + let logfile = LogFile { + writer: &mut None, + path: file.path(), + len: 0, + }; + + let config = TimeTriggerConfig { + interval, + modulate, + max_random_delay: 0, + }; + + let trigger = TimeTrigger::new(config); + + MockClock::advance_system_time(Duration::from_millis(millis / 2)); + let result1 = trigger.trigger(&logfile).unwrap(); + + MockClock::advance_system_time(Duration::from_millis(millis / 2)); + let result2 = trigger.trigger(&logfile).unwrap(); + + (result1, result2) + } + + #[test] + fn trigger() { + let second_in_milli = 1000; + let minute_in_milli = second_in_milli * 60; + let hour_in_milli = minute_in_milli * 60; + let day_in_milli = hour_in_milli * 24; + let week_in_milli = day_in_milli * 7; + let month_in_milli = day_in_milli * 31; + let year_in_milli = day_in_milli * 365; + + let test_list = vec![ + (TimeTriggerInterval::Second(1), second_in_milli), + (TimeTriggerInterval::Minute(1), minute_in_milli), + (TimeTriggerInterval::Hour(1), hour_in_milli), + (TimeTriggerInterval::Day(1), day_in_milli), + (TimeTriggerInterval::Week(1), week_in_milli), + (TimeTriggerInterval::Month(1), month_in_milli), + (TimeTriggerInterval::Year(1), year_in_milli), + ]; + let modulate = false; + for (time_trigger_interval, time_in_milli) in test_list.iter() { + MockClock::set_system_time(Duration::from_millis(4 * day_in_milli)); // 1970/1/5 00:00:00 Monday + assert_eq!( + trigger_with_time_and_modulate(*time_trigger_interval, modulate, *time_in_milli), + (false, true) + ); + // trigger will be aligned with units. 
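+            // Restart half an interval later: the freshly constructed trigger
+            // still rolls at the next unit boundary (the current time is
+            // truncated to the unit before the interval is added), so the
+            // first half-interval advance crosses it and the second does not.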
+ MockClock::set_system_time( + Duration::from_millis(4 * day_in_milli) + Duration::from_millis(time_in_milli / 2), + ); + assert_eq!( + trigger_with_time_and_modulate(*time_trigger_interval, modulate, *time_in_milli), + (true, false) + ); + } + + let test_list = vec![ + (TimeTriggerInterval::Second(3), 3 * second_in_milli), + (TimeTriggerInterval::Minute(3), 3 * minute_in_milli), + (TimeTriggerInterval::Hour(3), 3 * hour_in_milli), + (TimeTriggerInterval::Day(3), 3 * day_in_milli), + (TimeTriggerInterval::Week(3), 3 * week_in_milli), + (TimeTriggerInterval::Month(3), 3 * month_in_milli), + (TimeTriggerInterval::Year(3), 3 * year_in_milli), + ]; + let modulate = true; + for (time_trigger_interval, time_in_milli) in test_list.iter() { + MockClock::set_system_time(Duration::from_millis( + 59 * day_in_milli + 2 * hour_in_milli + 2 * minute_in_milli + 2 * second_in_milli, + )); // 1970/3/1 02:02:02 Sunday + assert_eq!( + trigger_with_time_and_modulate(*time_trigger_interval, modulate, *time_in_milli), + (true, false) + ); + } + } + + #[test] + #[cfg(feature = "yaml_format")] + fn test_serde() { + let test_error = vec![ + "abc", // // str none none + "", // none + "5 das", // bad unit + "-1", // inegative integar + "2.0", //flaot + ]; + + for interval in test_error.iter() { + let error = ::serde_yaml::from_str::(&interval); + assert!(error.is_err()); + } + + let test_ok = vec![ + // u64 + ("1", TimeTriggerInterval::Second(1)), + // str second + ("1 second", TimeTriggerInterval::Second(1)), + ("1 seconds", TimeTriggerInterval::Second(1)), + // str minute + ("1 minute", TimeTriggerInterval::Minute(1)), + ("1 minutes", TimeTriggerInterval::Minute(1)), + // str hour + ("1 hour", TimeTriggerInterval::Hour(1)), + ("1 hours", TimeTriggerInterval::Hour(1)), + // str day + ("1 day", TimeTriggerInterval::Day(1)), + ("1 days", TimeTriggerInterval::Day(1)), + // str week + ("1 week", TimeTriggerInterval::Week(1)), + ("1 weeks", TimeTriggerInterval::Week(1)), + // str month + ("1 month", TimeTriggerInterval::Month(1)), + ("1 months", TimeTriggerInterval::Month(1)), + // str year + ("1 year", TimeTriggerInterval::Year(1)), + ("1 years", TimeTriggerInterval::Year(1)), + ]; + for (interval, expected) in test_ok.iter() { + let interval = format!("{}", interval); + let interval = ::serde_yaml::from_str::(&interval).unwrap(); + assert_eq!(interval, *expected); + } + } + + #[test] + fn test_time_trigger_limit_default() { + let interval = TimeTriggerInterval::default(); + assert_eq!(interval, TimeTriggerInterval::Second(1)); + } + + #[test] + fn pre_process() { + let config = TimeTriggerConfig { + interval: TimeTriggerInterval::Minute(2), + modulate: true, + max_random_delay: 0, + }; + let trigger = TimeTrigger::new(config); + assert!(trigger.is_pre_process()); + } +} diff --git a/src/append/rolling_file/policy/mod.rs b/src/append/rolling_file/policy/mod.rs index 8c1e6b2d..9d692f59 100644 --- a/src/append/rolling_file/policy/mod.rs +++ b/src/append/rolling_file/policy/mod.rs @@ -16,6 +16,8 @@ pub trait Policy: Sync + Send + 'static + fmt::Debug { /// This method is called after each log event. It is provided a reference /// to the current log file. 
    fn process(&self, log: &mut LogFile) -> anyhow::Result<()>;
+
+    /// Returns the config `Trigger::is_pre_process` value.
+    fn is_pre_process(&self) -> bool;
 }
 
 #[cfg(feature = "config_parsing")]
diff --git a/src/config/raw.rs b/src/config/raw.rs
index cd1d951f..a092d56b 100644
--- a/src/config/raw.rs
+++ b/src/config/raw.rs
@@ -215,6 +215,12 @@ impl Default for Deserializers {
             append::rolling_file::policy::compound::trigger::size::SizeTriggerDeserializer,
         );
 
+        #[cfg(feature = "time_trigger")]
+        d.insert(
+            "time",
+            append::rolling_file::policy::compound::trigger::time::TimeTriggerDeserializer,
+        );
+
         #[cfg(feature = "json_encoder")]
         d.insert("json", encode::json::JsonEncoderDeserializer);
 
@@ -259,6 +265,8 @@ impl Deserializers {
     /// * Triggers
     ///     * "size" -> `SizeTriggerDeserializer`
     ///         * Requires the `size_trigger` feature.
+    ///     * "time" -> `TimeTriggerDeserializer`
+    ///         * Requires the `time_trigger` feature.
     pub fn new() -> Deserializers {
         Deserializers::default()
     }
diff --git a/src/lib.rs b/src/lib.rs
index ecd7e354..5c7bf278 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -21,6 +21,7 @@
 //!   - [fixed_window](append/rolling_file/policy/compound/roll/fixed_window/struct.FixedWindowRollerDeserializer.html#configuration): requires the `fixed_window_roller` feature
 //! - Triggers
 //!   - [size](append/rolling_file/policy/compound/trigger/size/struct.SizeTriggerDeserializer.html#configuration): requires the `size_trigger` feature
+//!   - [time](append/rolling_file/policy/compound/trigger/time/struct.TimeTriggerDeserializer.html#configuration): requires the `time_trigger` feature
 //!
 //! ## Encoders
 //!
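
For reference, a minimal sketch of exercising the new `kind: time` trigger through the existing `log4rs::init_raw_config` API, mirroring `examples/compile_time_config.rs` above. The appender name, file paths, and interval values are illustrative only, and the `time_trigger`, `fixed_window_roller`, and `config_parsing`/`yaml_format` features are assumed to be enabled:

```rust
use log::info;

fn main() {
    // Illustrative YAML only: a rolling_file appender whose compound policy
    // uses the new time trigger and a fixed_window roller.
    let config_str = r#"
appenders:
  rolling:
    kind: rolling_file
    path: "log/rolling.log"
    policy:
      kind: compound
      trigger:
        kind: time
        interval: 1 hour
        modulate: true
      roller:
        kind: fixed_window
        pattern: "log/rolling.{}.log"
        base: 0
        count: 5
root:
  level: info
  appenders:
    - rolling
"#;

    let config = serde_yaml::from_str(config_str).unwrap();
    log4rs::init_raw_config(config).unwrap();

    info!("rolled at most once per hour, aligned to the hour");
}
```

With `modulate: true`, the first roll is aligned to the next hour boundary; because the time trigger is pre-processed, the roll happens on the first append after that boundary, before the record is written.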