A log implementation for extreme speed, using a crossbeam channel and batched writes.

- Fast log date formatting
- Appender architecture, one appender per thread
- ZIP / LZ4 compression of rolled log files
- `log::logger().flush()` waits until logs are flushed to disk
- Rolling by date (`ByDate`), by size (`BySize`), or by duration (`ByDuration`)
- Keep policies (`All`, `KeepTime`, `KeepNum`) delete old logs and prevent them from filling the disk
- `#![forbid(unsafe_code)]`, 100% safe Rust
Architecture:

```
            ----------------------------
log data -> | main channel (crossbeam) | ->
            ----------------------------
                  ------------------                         -------------
               -> | thread channel | -> background thread -> | appender1 |
                  ------------------                         -------------
                  ------------------                         -------------
               -> | thread channel | -> background thread -> | appender2 |
                  ------------------                         -------------
                  ------------------                         -------------
               -> | thread channel | -> background thread -> | appender3 |
                  ------------------                         -------------
                  ------------------                         -------------
               -> | thread channel | -> background thread -> | appender4 |
                  ------------------                         -------------
```
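Each appender is driven by its own channel and background thread, so adding an appender does not slow the calling thread. As a minimal sketch (assuming `Config` allows chaining several appenders, as the diagram implies), logging once with both a console and a file appender looks like this:

```rust
use fast_log::config::Config;

fn main() {
    // console() and file() each register an appender; per the diagram above,
    // every appender gets its own thread channel and background thread.
    fast_log::init(
        Config::new()
            .console()
            .file("target/test.log")
            .chan_len(Some(100000)),
    )
    .unwrap();
    log::info!("sent once, written by both appenders");
    log::logger().flush();
}
```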
## How fast is it?

No flush (`chan_len = 1000000`), `benches/log.rs`:

```
//MACOS(Apple M1 MAX, 32GB)
test bench_log ... bench:          85 ns/iter (+/- 1,800)
```

All logs flushed into a file (`chan_len = 1000000`):

```
//MACOS(Apple M1 MAX, 32GB)
test bench_log ... bench:         323 ns/iter (+/- 0)
```
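The numbers above come from the standard `cargo bench` harness. For a rough, comparable measurement on stable Rust (file name and iteration count here are illustrative, not from the repository), you can time the logging loop yourself:

```rust
use fast_log::config::Config;
use std::time::Instant;

fn main() {
    fast_log::init(Config::new().file("target/bench.log").chan_len(Some(1000000))).unwrap();
    let iters: u128 = 100_000;
    let start = Instant::now();
    for i in 0..iters {
        // each call only enqueues the record; the actual file write
        // happens later on the appender's background thread
        log::info!("Commencing yak shaving {}", i);
    }
    println!("{} ns/iter (no flush)", start.elapsed().as_nanos() / iters);
    log::logger().flush();
}
```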
log = "0.4"
fast_log = { version = "1.7" }
or enable zip/lz4/gzip Compression library
log = "0.4"
# "lz4","zip","gzip"
fast_log = { version = "1.7", features = ["lz4", "zip", "gzip"] }
`chan_len(Some(100000))` preallocates the channel's memory, which reduces allocation overhead. For example, logging to a file:

```rust
use fast_log::config::Config;

fn main() {
    fast_log::init(Config::new().file("target/test.log").chan_len(Some(100000))).unwrap();
    log::info!("Commencing yak shaving {}", 0);
    // wait for the background thread to flush the record to disk
    log::logger().flush();
}
```
Log to the console:

```rust
use fast_log::config::Config;

fn main() {
    fast_log::init(Config::new().console().chan_len(Some(100000))).unwrap();
    log::info!("Commencing yak shaving {}", 0);
}
```
Write raw text with `fast_log::print`:

```rust
use fast_log::config::Config;

fn main() {
    fast_log::init(Config::new().console().chan_len(Some(100000))).unwrap();
    fast_log::print("Commencing print\n".into());
}
```
Use the `log` crate macros directly:

```rust
use fast_log::config::Config;
use log::info;

fn main() {
    fast_log::init(Config::new().file("target/test.log").chan_len(Some(100000))).unwrap();
    log::info!("Commencing yak shaving {}", 0);
    info!("Commencing yak shaving");
    log::logger().flush();
}
```
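To drop records below a given severity, the level filter can be set on the config. A minimal sketch, assuming `Config` exposes a `level` builder method taking `log::LevelFilter` (check your fast_log version):

```rust
use fast_log::config::Config;
use log::LevelFilter;

fn main() {
    // assumption: Config::level sets the maximum level that is recorded
    fast_log::init(
        Config::new()
            .console()
            .level(LevelFilter::Warn)
            .chan_len(Some(100000)),
    )
    .unwrap();
    log::info!("filtered out");
    log::warn!("recorded");
    log::logger().flush();
}
```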
Split and roll log files by date:

```rust
use fast_log::config::Config;
use fast_log::plugin::file_split::{DateType, KeepType, Rolling, RollingType};
use fast_log::plugin::packer::LogPacker;
use std::thread::sleep;
use std::time::Duration;

fn main() {
    fast_log::init(Config::new().chan_len(Some(100000)).console().file_split(
        "target/logs/",
        // roll to a new file every day
        Rolling::new(RollingType::ByDate(DateType::Day)),
        // keep only the two most recent rolled files
        KeepType::KeepNum(2),
        LogPacker {},
    ))
    .unwrap();
    for _ in 0..60 {
        sleep(Duration::from_secs(1));
        log::info!("Commencing yak shaving");
    }
    log::logger().flush();
    println!("you can see log files in path: {}", "target/logs/");
}
```
Split and roll log files by size:

```rust
use fast_log::config::Config;
use fast_log::consts::LogSize;
use fast_log::plugin::file_split::{KeepType, Rolling, RollingType};
use fast_log::plugin::packer::LogPacker;

fn main() {
    fast_log::init(Config::new().chan_len(Some(100000)).console().file_split(
        "target/logs/",
        // roll to a new file once the current one reaches 500 KB
        Rolling::new(RollingType::BySize(LogSize::KB(500))),
        // keep only the two most recent rolled files
        KeepType::KeepNum(2),
        LogPacker {},
    ))
    .unwrap();
    for _ in 0..40000 {
        log::info!("Commencing yak shaving");
    }
    log::logger().flush();
    println!("you can see log files in path: {}", "target/logs/");
}
```
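Besides `KeepNum`, old files can be retained by age instead of by count. A sketch, assuming `KeepType::KeepTime` carries a `std::time::Duration` (verify against your fast_log version):

```rust
use fast_log::config::Config;
use fast_log::consts::LogSize;
use fast_log::plugin::file_split::{KeepType, Rolling, RollingType};
use fast_log::plugin::packer::LogPacker;
use std::time::Duration;

fn main() {
    fast_log::init(Config::new().chan_len(Some(100000)).file_split(
        "target/logs/",
        Rolling::new(RollingType::BySize(LogSize::KB(500))),
        // assumption: rolled files older than one hour are deleted
        KeepType::KeepTime(Duration::from_secs(3600)),
        LogPacker {},
    ))
    .unwrap();
    log::info!("Commencing yak shaving");
    log::logger().flush();
}
```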
Implement a custom appender:

```rust
use fast_log::appender::{FastLogRecord, LogAppender};
use fast_log::config::Config;
use fastdate::DateTime;
use log::Level;

struct CustomLog {}

impl LogAppender for CustomLog {
    fn do_logs(&mut self, records: &[FastLogRecord]) {
        for record in records {
            let now = DateTime::from(record.now);
            // warnings and errors also print the pre-formatted message
            let data = match record.level {
                Level::Warn | Level::Error => format!(
                    "{} {} {} - {} {}\n",
                    now, record.level, record.module_path, record.args, record.formated
                ),
                _ => format!(
                    "{} {} {} - {}\n",
                    now, record.level, record.module_path, record.args
                ),
            };
            print!("{}", data);
        }
    }
}

fn main() {
    fast_log::init(Config::new().custom(CustomLog {})).unwrap();
    log::info!("Commencing yak shaving");
    log::error!("Commencing error");
    // wait for the background thread to finish writing
    log::logger().flush();
}
```