diff --git a/.gitignore b/.gitignore
index a0db182e..30094a80 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@ Cargo.lock
 .idea/
 *.iml
 .vscode/
+log/
diff --git a/.markdownlint.yml b/.markdownlint.yml
index 5b824f75..e9ca274b 100644
--- a/.markdownlint.yml
+++ b/.markdownlint.yml
@@ -1 +1 @@
-line-length: false
+line-length: true
diff --git a/docs/Configuration.md b/docs/Configuration.md
index 4e47e7ab..668054f7 100644
--- a/docs/Configuration.md
+++ b/docs/Configuration.md
@@ -174,8 +174,8 @@ other components, the default (and only supported) policy is `kind: compound`.
 The _trigger_ field is used to dictate when the log file should be rolled. It
 supports two types: `size`, and `time`. They both require a `limit` field.

-For `size`, the `limit` field is a string which defines the maximum file size
-prior to a rolling of the file. The limit field requires one of the following
+For `size`, the `limit` field is a string which defines the maximum file size
+prior to a rolling of the file. The limit field requires one of the following
 units in bytes, case does not matter:

 - b
@@ -192,7 +192,7 @@ trigger:
   limit: 10 mb
 ```

-For `time`, the `limit` field is a string which defines the time to roll the
+For `time`, the `limit` field is a string which defines the time to roll the
 file. The limit field supports the following units(second will be used if the
 unit is not specified), case does not matter:

@@ -204,11 +204,11 @@ unit is not specified), case does not matter:
 - month[s]
 - year[s]

-> note: The log file will be rolled at the integer time. For example, if the
-`limit` is set to `2 day`, the log file will be rolled at 0:00 every other a
-day, regardless of the time `log4rs` was started or the log file was created.
-This means that the initial log file will be likely rolled before the limit
-is reached.
+> Note: The log file will be rolled at the integer time. For example, if the
+> `limit` is set to `2 day`, the log file will be rolled at 0:00 every other
+> day, regardless of the time `log4rs` was started or the log file was created.
+> This means that the initial log file will likely be rolled before the limit
+> is reached.

 i.e.

@@ -238,6 +238,11 @@ The _count_ field is the exclusive maximum index used to name rolling files.
 However, be warned that the roller renames every file when a log rolls over.
 Having a large count value can negatively impact performance.

+> Note: If you use `trigger: time`, the log file will be rolled before the
+> record is written, which ensures that records land in the correct file
+> instead of leaving a single line of logs in the previous log file. However,
+> this may cause a substantial slowdown if the `background` feature is not enabled.
+
 i.e.

 ```yml
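To make the "integer time" behaviour above concrete: a rollover instant is the next multiple of the limit measured from a fixed origin, not `limit` after the process started. The sketch below only illustrates that boundary arithmetic; `next_rollover` is a made-up helper working in UTC epoch seconds, whereas the real `TimeTrigger` reasons about local calendar units such as weeks, months and years.

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Illustrative only: round the current time down to the start of the current
// `limit`-sized window, then step to the next boundary. Rollovers land on
// fixed boundaries regardless of when logging started.
fn next_rollover(now: SystemTime, limit: Duration) -> SystemTime {
    let elapsed = now.duration_since(UNIX_EPOCH).expect("clock before epoch");
    let whole_windows = elapsed.as_secs() / limit.as_secs();
    UNIX_EPOCH + Duration::from_secs((whole_windows + 1) * limit.as_secs())
}

fn main() {
    let two_days = Duration::from_secs(2 * 24 * 60 * 60); // `limit: 2 day`
    println!("next rollover at {:?}", next_rollover(SystemTime::now(), two_days));
}
```

Because the boundary is fixed, a log file created partway through a window is rolled before a full `limit` of logging has accumulated, which is exactly the caveat the note above describes.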
diff --git a/examples/compile_time_config.rs b/examples/compile_time_config.rs
index bf484188..0f478552 100644
--- a/examples/compile_time_config.rs
+++ b/examples/compile_time_config.rs
@@ -7,7 +7,7 @@ fn main() {
     let config = serde_yaml::from_str(config_str).unwrap();
     log4rs::init_raw_config(config).unwrap();

-    info!("Goes to console");
-    error!("Goes to console");
+    info!("Goes to console, file and rolling file");
+    error!("Goes to console, file and rolling file");
     trace!("Doesn't go to console as it is filtered out");
 }
diff --git a/examples/sample_config.yml b/examples/sample_config.yml
index 65d96e96..7a57fe0a 100644
--- a/examples/sample_config.yml
+++ b/examples/sample_config.yml
@@ -8,12 +8,12 @@ appenders:
         level: info
   file:
     kind: file
-    path: "log/log.log"
+    path: "log/file.log"
     encoder:
       pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
   rollingfile:
     kind: rolling_file
-    path: "log/log2.log"
+    path: "log/rolling_file.log"
     encoder:
       pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}"
     policy:
@@ -22,7 +22,7 @@ appenders:
       trigger:
         kind: time
         limit: 1 minute
       roller:
         kind: fixed_window
-        pattern: "log/old-log-{}.log"
+        pattern: "log/old-rolling_file-{}.log"
         base: 0
         count: 2
 root:
diff --git a/src/append/rolling_file/mod.rs b/src/append/rolling_file/mod.rs
index 9b9355dd..6160142f 100644
--- a/src/append/rolling_file/mod.rs
+++ b/src/append/rolling_file/mod.rs
@@ -167,25 +167,39 @@ impl Append for RollingFileAppender {
         // TODO(eas): Perhaps this is better as a concurrent queue?
         let mut writer = self.writer.lock();

+        let is_pre_process = self.policy.is_pre_process();
         let log_writer = self.get_writer(&mut writer)?;
-        let len = log_writer.len;

-        let mut file = LogFile {
-            writer: &mut writer,
-            path: &self.path,
-            len,
-        };
+        if is_pre_process {
+            let len = log_writer.len;
+
+            let mut file = LogFile {
+                writer: &mut writer,
+                path: &self.path,
+                len,
+            };
+
+            // TODO(eas): Idea: make this optionally return a future, and if so, we initialize a queue for
+            // data that comes in while we are processing the file rotation.

-        // TODO(eas): Idea: make this optionally return a future, and if so, we initialize a queue for
-        // data that comes in while we are processing the file rotation.
+            self.policy.process(&mut file)?;

-        //first, rotate
-        self.policy.process(&mut file)?;
+            let log_writer_new = self.get_writer(&mut writer)?;
+            self.encoder.encode(log_writer_new, record)?;
+            log_writer_new.flush()?;
+        } else {
+            self.encoder.encode(log_writer, record)?;
+            log_writer.flush()?;
+            let len = log_writer.len;

-        //second, write
-        let writer_file = self.get_writer(&mut writer)?;
-        self.encoder.encode(writer_file, record)?;
-        writer_file.flush()?;
+            let mut file = LogFile {
+                writer: &mut writer,
+                path: &self.path,
+                len,
+            };
+
+            self.policy.process(&mut file)?;
+        }

         Ok(())
     }
@@ -410,6 +424,9 @@ appenders:
     fn process(&self, _: &mut LogFile) -> anyhow::Result<()> {
         Ok(())
     }
+    fn is_pre_process(&self) -> bool {
+        false
+    }
 }

 #[test]
diff --git a/src/append/rolling_file/policy/compound/mod.rs b/src/append/rolling_file/policy/compound/mod.rs
index 9fe7b233..484af19c 100644
--- a/src/append/rolling_file/policy/compound/mod.rs
+++ b/src/append/rolling_file/policy/compound/mod.rs
@@ -107,6 +107,10 @@ impl Policy for CompoundPolicy {
         }
         Ok(())
     }
+
+    fn is_pre_process(&self) -> bool {
+        self.trigger.is_pre_process()
+    }
 }

 /// A deserializer for the `CompoundPolicyDeserializer`.
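The new `is_pre_process` hook is what selects the branch in `append` above: a pre-process policy rolls the file before the record is encoded, while a post-process policy keeps the original write-then-roll order. As a rough sketch of a third-party policy that opts out of pre-processing, here is a hypothetical `NeverRoll` type modelled on the test-only `NullPolicy`; it assumes log4rs with its rolling-file support enabled and `anyhow` as a direct dependency of the consuming crate.

```rust
use log4rs::append::rolling_file::{policy::Policy, LogFile};

// A do-nothing policy: it never rolls the file, and answering `false` from
// `is_pre_process` keeps the pre-existing order of operations (encode and
// flush the record first, then let the policy inspect the file).
#[derive(Debug)]
struct NeverRoll;

impl Policy for NeverRoll {
    fn process(&self, _log: &mut LogFile) -> anyhow::Result<()> {
        Ok(())
    }

    fn is_pre_process(&self) -> bool {
        false
    }
}
```

Boxed up, such a policy should slot into `RollingFileAppender::builder().build(path, Box::new(NeverRoll))` wherever a `CompoundPolicy` would normally go.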
diff --git a/src/append/rolling_file/policy/compound/trigger/mod.rs b/src/append/rolling_file/policy/compound/trigger/mod.rs
index 5de7570f..3d61e0da 100644
--- a/src/append/rolling_file/policy/compound/trigger/mod.rs
+++ b/src/append/rolling_file/policy/compound/trigger/mod.rs
@@ -16,6 +16,11 @@ pub mod time;
 pub trait Trigger: fmt::Debug + Send + Sync + 'static {
     /// Determines if the active log file should be rolled over.
     fn trigger(&self, file: &LogFile) -> anyhow::Result<bool>;
+
+    /// Returns whether the log file should be rolled before the log event is written.
+    ///
+    /// Defaults to true for time triggers and false for size triggers.
+    fn is_pre_process(&self) -> bool;
 }

 #[cfg(feature = "config_parsing")]
diff --git a/src/append/rolling_file/policy/compound/trigger/size.rs b/src/append/rolling_file/policy/compound/trigger/size.rs
index 511ea981..d1e8b4a5 100644
--- a/src/append/rolling_file/policy/compound/trigger/size.rs
+++ b/src/append/rolling_file/policy/compound/trigger/size.rs
@@ -117,6 +117,10 @@ impl Trigger for SizeTrigger {
     fn trigger(&self, file: &LogFile) -> anyhow::Result<bool> {
         Ok(file.len_estimate() > self.limit)
     }
+
+    fn is_pre_process(&self) -> bool {
+        false
+    }
 }

 /// A deserializer for the `SizeTrigger`.
diff --git a/src/append/rolling_file/policy/compound/trigger/time.rs b/src/append/rolling_file/policy/compound/trigger/time.rs
index ac3a6b24..9e5ab737 100644
--- a/src/append/rolling_file/policy/compound/trigger/time.rs
+++ b/src/append/rolling_file/policy/compound/trigger/time.rs
@@ -210,6 +210,10 @@ impl Trigger for TimeTrigger {
         }
         Ok(is_triger)
     }
+
+    fn is_pre_process(&self) -> bool {
+        true
+    }
 }

 /// A deserializer for the `TimeTrigger`.
diff --git a/src/append/rolling_file/policy/mod.rs b/src/append/rolling_file/policy/mod.rs
index 8c1e6b2d..9d692f59 100644
--- a/src/append/rolling_file/policy/mod.rs
+++ b/src/append/rolling_file/policy/mod.rs
@@ -16,6 +16,8 @@ pub trait Policy: Sync + Send + 'static + fmt::Debug {
     /// This method is called after each log event. It is provided a reference
     /// to the current log file.
     fn process(&self, log: &mut LogFile) -> anyhow::Result<()>;
+    /// Returns the configured `Trigger::is_pre_process` value.
+    fn is_pre_process(&self) -> bool;
 }

 #[cfg(feature = "config_parsing")]
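Since `CompoundPolicy` simply forwards the flag from its trigger, a custom trigger decides the ordering for itself. Below is a hypothetical `OneMiBTrigger` that behaves like a fixed `SizeTrigger` and therefore answers `false`; it assumes the same log4rs features as the sketch above plus an `anyhow` dependency.

```rust
use log4rs::append::rolling_file::{policy::compound::trigger::Trigger, LogFile};

// Rolls once the appender's running estimate of the file size passes 1 MiB.
// Size-style triggers report `false` here, so the record that crosses the
// threshold is still written to the old file before it is rolled away.
#[derive(Debug)]
struct OneMiBTrigger;

impl Trigger for OneMiBTrigger {
    fn trigger(&self, file: &LogFile) -> anyhow::Result<bool> {
        Ok(file.len_estimate() > 1024 * 1024)
    }

    fn is_pre_process(&self) -> bool {
        false
    }
}
```

A calendar-style trigger would return `true` instead, so that the first record after a boundary lands in the freshly rolled file rather than being appended to the previous one — the behaviour the configuration note above warns can be slower without the `background` feature.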