Integrate tracing (#6339)

Tracing Integration
- [reference](5bbf1859e9/projects/project-ideas.md#L297)


- [x] replace slog & log with tracing throughout the codebase
- [x] implement custom crit log
- [x] make relevant changes in the formatter
- [x] replace sloggers
- [x] re-write SSE logging components

cc: @macladson @eserilev
This commit is contained in:
ThreeHrSleep
2025-03-13 04:01:05 +05:30
committed by GitHub
parent f23f984f85
commit d60c24ef1c
241 changed files with 9485 additions and 9328 deletions

View File

@@ -9,14 +9,12 @@ test_logger = [] # Print log output to stderr when running tests instead of drop
[dependencies]
chrono = { version = "0.4", default-features = false, features = ["clock", "std"] }
logroller = { workspace = true }
metrics = { workspace = true }
once_cell = "1.17.1"
parking_lot = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
slog = { workspace = true }
slog-term = { workspace = true }
sloggers = { workspace = true }
take_mut = "0.2.2"
tokio = { workspace = true, features = [ "time" ] }
tracing = "0.1"
tracing-appender = { workspace = true }

View File

@@ -1,307 +0,0 @@
//! An object that can be used to pass through a channel and be cloned. It can therefore be used
//! via the broadcast channel.
use parking_lot::Mutex;
use serde::ser::SerializeMap;
use serde::serde_if_integer128;
use serde::Serialize;
use slog::{BorrowedKV, Key, Level, OwnedKVList, Record, RecordStatic, Serializer, SingleKV, KV};
use std::cell::RefCell;
use std::fmt;
use std::fmt::Write;
use std::sync::Arc;
use take_mut::take;
thread_local! {
// Reusable per-thread scratch buffer for rendering `fmt::Arguments` in
// `emit_arguments` below, avoiding a fresh allocation per log field.
static TL_BUF: RefCell<String> = RefCell::new(String::with_capacity(128))
}
/// Serialized record.
///
/// An owned, `Clone`able snapshot of a `slog` record, so it can be passed
/// through a broadcast channel (see the module docs) after the original
/// borrowed `Record` has gone out of scope.
#[derive(Clone)]
pub struct AsyncRecord {
// Message text with format arguments already rendered.
msg: String,
// Severity level of the original record.
level: Level,
// Source location (module/file/line) of the log call.
location: Box<slog::RecordLocation>,
tag: String,
// Key-value pairs attached to the logger itself.
logger_values: OwnedKVList,
// Record-level key-value pairs, as the owned chain built by
// `ToSendSerializer`; `Arc<Mutex<..>>` keeps cloning cheap.
kv: Arc<Mutex<dyn KV + Send>>,
}
impl AsyncRecord {
/// Serializes a `Record` and an `OwnedKVList`.
pub fn from(record: &Record, logger_values: &OwnedKVList) -> Self {
// Walk the record's KV pairs into an owned, sendable chain.
let mut ser = ToSendSerializer::new();
record
.kv()
.serialize(record, &mut ser)
// Every `ToSendSerializer` emitter returns `Ok(())` (see below).
.expect("`ToSendSerializer` can't fail");
AsyncRecord {
// Render the message's format arguments into an owned String now,
// since they borrow from the call site.
msg: fmt::format(*record.msg()),
level: record.level(),
location: Box::new(*record.location()),
tag: String::from(record.tag()),
logger_values: logger_values.clone(),
kv: Arc::new(Mutex::new(ser.finish())),
}
}
/// Renders this record as a JSON string, stringifying any serde error.
pub fn to_json_string(&self) -> Result<String, String> {
serde_json::to_string(&self).map_err(|e| format!("{:?}", e))
}
}
/// Accumulates a record's key-value pairs into an owned `KV` chain that is
/// `Send`, so it can leave the logging callback.
pub struct ToSendSerializer {
// Chain of pairs built up by nesting tuples: `((.., kv), kv)`.
kv: Box<dyn KV + Send>,
}
impl ToSendSerializer {
fn new() -> Self {
// `()` is the empty KV list.
ToSendSerializer { kv: Box::new(()) }
}
/// Consumes the serializer, returning the accumulated chain.
fn finish(self) -> Box<dyn KV + Send> {
self.kv
}
}
impl Serializer for ToSendSerializer {
fn emit_bool(&mut self, key: Key, val: bool) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_unit(&mut self, key: Key) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, ()))));
Ok(())
}
fn emit_none(&mut self, key: Key) -> slog::Result {
let val: Option<()> = None;
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_char(&mut self, key: Key, val: char) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_u8(&mut self, key: Key, val: u8) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_i8(&mut self, key: Key, val: i8) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_u16(&mut self, key: Key, val: u16) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_i16(&mut self, key: Key, val: i16) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_u32(&mut self, key: Key, val: u32) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_i32(&mut self, key: Key, val: i32) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_f32(&mut self, key: Key, val: f32) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_u64(&mut self, key: Key, val: u64) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_i64(&mut self, key: Key, val: i64) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_f64(&mut self, key: Key, val: f64) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_u128(&mut self, key: Key, val: u128) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_i128(&mut self, key: Key, val: i128) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_usize(&mut self, key: Key, val: usize) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_isize(&mut self, key: Key, val: isize) -> slog::Result {
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_str(&mut self, key: Key, val: &str) -> slog::Result {
let val = val.to_owned();
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
fn emit_arguments(&mut self, key: Key, val: &fmt::Arguments) -> slog::Result {
let val = fmt::format(*val);
take(&mut self.kv, |kv| Box::new((kv, SingleKV(key, val))));
Ok(())
}
}
impl Serialize for AsyncRecord {
/// Serializes the record as a JSON-style map: `time`, `level`, `msg`,
/// then the logger's KV pairs followed by the record's own KV pairs.
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
// Get the current time
let dt = chrono::Local::now().format("%b %e %T").to_string();
// Rebuild a borrowed `RecordStatic` from our owned copies so the
// stored KVs can be re-serialized through slog's machinery.
let rs = RecordStatic {
location: &self.location,
level: self.level,
tag: &self.tag,
};
let mut map_serializer = SerdeSerializer::new(serializer)?;
// Serialize the time and log level first
map_serializer.serialize_entry("time", &dt)?;
map_serializer.serialize_entry("level", self.level.as_short_str())?;
let kv = self.kv.lock();
// Convoluted pattern to avoid binding `format_args!` to a temporary.
// See: https://stackoverflow.com/questions/56304313/cannot-use-format-args-due-to-temporary-value-is-freed-at-the-end-of-this-state
let mut f = |msg: std::fmt::Arguments| {
map_serializer.serialize_entry("msg", msg.to_string())?;
// Logger-level KVs first, then the record's own KVs; slog errors
// are mapped into the serde error type.
let record = Record::new(&rs, &msg, BorrowedKV(&(*kv)));
self.logger_values
.serialize(&record, &mut map_serializer)
.map_err(serde::ser::Error::custom)?;
record
.kv()
.serialize(&record, &mut map_serializer)
.map_err(serde::ser::Error::custom)
};
f(format_args!("{}", self.msg))?;
map_serializer.end()
}
}
/// Thin wrapper that exposes a serde map serializer to both serde callers
/// (via `serialize_entry`) and slog callers (via the `slog::Serializer`
/// impl below).
struct SerdeSerializer<S: serde::Serializer> {
/// Current state of map serializing: `serde::Serializer::MapState`
ser_map: S::SerializeMap,
}
impl<S: serde::Serializer> SerdeSerializer<S> {
// Starts a map of unknown length (`None`).
fn new(ser: S) -> Result<Self, S::Error> {
let ser_map = ser.serialize_map(None)?;
Ok(SerdeSerializer { ser_map })
}
// Adds one key/value entry to the in-progress map.
fn serialize_entry<K, V>(&mut self, key: K, value: V) -> Result<(), S::Error>
where
K: serde::Serialize,
V: serde::Serialize,
{
self.ser_map.serialize_entry(&key, &value)
}
/// Finish serialization, and return the serializer
fn end(self) -> Result<S::Ok, S::Error> {
self.ser_map.end()
}
}
// NOTE: This is borrowed from slog_json
// Bridges one slog `emit_*` call into a serde map entry, converting any
// serde error into the `std::io::Error` that `slog::Result` expects.
macro_rules! impl_m(
($s:expr, $key:expr, $val:expr) => ({
let k_s: &str = $key.as_ref();
$s.ser_map.serialize_entry(k_s, $val)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, format!("serde serialization error: {}", e)))?;
Ok(())
});
);
// Forwards every slog emitter to the underlying serde map via `impl_m!`.
impl<S> slog::Serializer for SerdeSerializer<S>
where
S: serde::Serializer,
{
fn emit_bool(&mut self, key: Key, val: bool) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_unit(&mut self, key: Key) -> slog::Result {
impl_m!(self, key, &())
}
fn emit_char(&mut self, key: Key, val: char) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_none(&mut self, key: Key) -> slog::Result {
// Serialized as JSON `null`.
let val: Option<()> = None;
impl_m!(self, key, &val)
}
fn emit_u8(&mut self, key: Key, val: u8) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_i8(&mut self, key: Key, val: i8) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_u16(&mut self, key: Key, val: u16) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_i16(&mut self, key: Key, val: i16) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_usize(&mut self, key: Key, val: usize) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_isize(&mut self, key: Key, val: isize) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_u32(&mut self, key: Key, val: u32) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_i32(&mut self, key: Key, val: i32) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_f32(&mut self, key: Key, val: f32) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_u64(&mut self, key: Key, val: u64) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_i64(&mut self, key: Key, val: i64) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_f64(&mut self, key: Key, val: f64) -> slog::Result {
impl_m!(self, key, &val)
}
// 128-bit emitters only exist when serde was built with i128 support.
serde_if_integer128! {
fn emit_u128(&mut self, key: Key, val: u128) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_i128(&mut self, key: Key, val: i128) -> slog::Result {
impl_m!(self, key, &val)
}
}
fn emit_str(&mut self, key: Key, val: &str) -> slog::Result {
impl_m!(self, key, &val)
}
fn emit_arguments(&mut self, key: Key, val: &fmt::Arguments) -> slog::Result {
// Render into the thread-local scratch buffer, emit, then clear the
// buffer for reuse (the closure keeps the borrow of `buf` scoped).
TL_BUF.with(|buf| {
let mut buf = buf.borrow_mut();
buf.write_fmt(*val).unwrap();
let res = { || impl_m!(self, key, &*buf) }();
buf.clear();
res
})
}
}

View File

@@ -1,20 +1,20 @@
use metrics::{inc_counter, try_create_int_counter, IntCounter, Result as MetricsResult};
use slog::Logger;
use slog_term::Decorator;
use std::io::{Result, Write};
use chrono::Local;
use logroller::{Compression, LogRollerBuilder, Rotation, RotationSize};
use metrics::{try_create_int_counter, IntCounter, Result as MetricsResult};
use std::io::Write;
use std::path::PathBuf;
use std::sync::LazyLock;
use std::time::{Duration, Instant};
use tracing_appender::non_blocking::NonBlocking;
use tracing_appender::rolling::{RollingFileAppender, Rotation};
use tracing_logging_layer::LoggingLayer;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
use tracing::Subscriber;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_subscriber::layer::Context;
use tracing_subscriber::{EnvFilter, Layer};
pub const MAX_MESSAGE_WIDTH: usize = 40;
pub mod async_record;
pub mod macros;
mod sse_logging_components;
mod tracing_logging_layer;
pub mod tracing_logging_layer;
mod tracing_metrics_layer;
pub use sse_logging_components::SSELoggingComponents;
@@ -32,169 +32,6 @@ pub static ERRORS_TOTAL: LazyLock<MetricsResult<IntCounter>> =
pub static CRITS_TOTAL: LazyLock<MetricsResult<IntCounter>> =
LazyLock::new(|| try_create_int_counter("crit_total", "Count of crits logged"));
/// A `slog_term::Decorator` wrapper that pads log messages to a fixed width
/// (so KV columns line up) and bumps the per-level metrics counters.
pub struct AlignedTermDecorator<D: Decorator> {
// The decorator actually doing the terminal output.
wrapped: D,
// Column at which KV pairs should start (see `MAX_MESSAGE_WIDTH`).
message_width: usize,
}
impl<D: Decorator> AlignedTermDecorator<D> {
pub fn new(decorator: D, message_width: usize) -> Self {
AlignedTermDecorator {
wrapped: decorator,
message_width,
}
}
}
impl<D: Decorator> Decorator for AlignedTermDecorator<D> {
fn with_record<F>(
&self,
record: &slog::Record,
_logger_values: &slog::OwnedKVList,
f: F,
) -> Result<()>
where
F: FnOnce(&mut dyn slog_term::RecordDecorator) -> std::io::Result<()>,
{
// Count every record by level before delegating; Debug/Trace are
// intentionally not counted.
match record.level() {
slog::Level::Info => inc_counter(&INFOS_TOTAL),
slog::Level::Warning => inc_counter(&WARNS_TOTAL),
slog::Level::Error => inc_counter(&ERRORS_TOTAL),
slog::Level::Critical => inc_counter(&CRITS_TOTAL),
_ => (),
}
// Hand the callback an aligning record decorator over the wrapped one.
self.wrapped.with_record(record, _logger_values, |deco| {
f(&mut AlignedRecordDecorator::new(deco, self.message_width))
})
}
}
/// Per-record decorator that tracks how many message bytes have been written
/// so the message can later be padded out to `message_width`.
struct AlignedRecordDecorator<'a> {
// Underlying decorator all output is forwarded to.
wrapped: &'a mut dyn slog_term::RecordDecorator,
// Bytes of the message written so far (only counted while
// `message_active`).
message_count: usize,
// True between `start_msg` and the next section; enables counting.
message_active: bool,
// When set, the next write (the separating comma) is swallowed.
ignore_comma: bool,
// Target column for padding; copied from `AlignedTermDecorator`.
message_width: usize,
}
impl<'a> AlignedRecordDecorator<'a> {
fn new(
decorator: &'a mut dyn slog_term::RecordDecorator,
message_width: usize,
) -> AlignedRecordDecorator<'a> {
AlignedRecordDecorator {
wrapped: decorator,
message_count: 0,
ignore_comma: false,
message_active: false,
message_width,
}
}
// Forwards a write, honoring comma suppression and message counting.
fn filtered_write(&mut self, buf: &[u8]) -> Result<usize> {
if self.ignore_comma {
//don't write comma
// Report the bytes as written so callers don't retry them.
self.ignore_comma = false;
Ok(buf.len())
} else if self.message_active {
self.wrapped.write(buf).inspect(|n| self.message_count += n)
} else {
self.wrapped.write(buf)
}
}
}
impl Write for AlignedRecordDecorator<'_> {
    /// Writes `buf`, first replacing disallowed ASCII control bytes with
    /// `'_'` so they cannot disturb the log layout (see `is_ascii_control`).
    fn write(&mut self, buf: &[u8]) -> Result<usize> {
        if buf.iter().any(u8::is_ascii_control) {
            let sanitized: Vec<u8> = buf
                .iter()
                .map(|&byte| if is_ascii_control(&byte) { b'_' } else { byte })
                .collect();
            self.filtered_write(&sanitized)
        } else {
            // Fast path: nothing to sanitize.
            self.filtered_write(buf)
        }
    }

    fn flush(&mut self) -> Result<()> {
        self.wrapped.flush()
    }
}
impl slog_term::RecordDecorator for AlignedRecordDecorator<'_> {
// Clears all alignment state between records.
fn reset(&mut self) -> Result<()> {
self.message_active = false;
self.message_count = 0;
self.ignore_comma = false;
self.wrapped.reset()
}
fn start_whitespace(&mut self) -> Result<()> {
self.wrapped.start_whitespace()
}
// Entering the message section: start counting written bytes.
fn start_msg(&mut self) -> Result<()> {
self.message_active = true;
self.ignore_comma = false;
self.wrapped.start_msg()
}
fn start_timestamp(&mut self) -> Result<()> {
self.wrapped.start_timestamp()
}
fn start_level(&mut self) -> Result<()> {
self.wrapped.start_level()
}
// If the message is shorter than the target width, suppress the comma
// that would otherwise precede the first KV pair (padding replaces it).
fn start_comma(&mut self) -> Result<()> {
if self.message_active && self.message_count + 1 < self.message_width {
self.ignore_comma = true;
}
self.wrapped.start_comma()
}
// Before the first KV key, pad the message out to `message_width` so
// keys line up in a column, then stop counting.
fn start_key(&mut self) -> Result<()> {
if self.message_active && self.message_count + 1 < self.message_width {
write!(
self,
"{}",
" ".repeat(self.message_width - self.message_count)
)?;
self.message_active = false;
self.message_count = 0;
self.ignore_comma = false;
}
self.wrapped.start_key()
}
fn start_value(&mut self) -> Result<()> {
self.wrapped.start_value()
}
fn start_separator(&mut self) -> Result<()> {
self.wrapped.start_separator()
}
}
/// Function to filter out ascii control codes.
///
/// This helps to keep log formatting consistent.
/// Whitespace and padding control codes (tab, newline, carriage return,
/// backspace-adjacent `\x09`, `\x0a`, `\x0d`) are excluded.
fn is_ascii_control(character: &u8) -> bool {
    let byte = *character;
    byte <= b'\x08'
        || (b'\x0b'..=b'\x0c').contains(&byte)
        || (b'\x0e'..=b'\x1f').contains(&byte)
        || byte == b'\x7f'
        || (b'\x81'..=b'\x9f').contains(&byte)
}
/// Provides de-bounce functionality for logging.
#[derive(Default)]
pub struct TimeLatch(Option<Instant>);
@@ -214,75 +51,127 @@ impl TimeLatch {
}
}
pub fn create_tracing_layer(base_tracing_log_path: PathBuf) {
let mut tracing_log_path = PathBuf::new();
/// Writers for the dedicated libp2p and discv5 log files, used as a
/// `tracing` layer (see the `Layer` impl below, which routes by event
/// target).
pub struct Libp2pDiscv5TracingLayer {
pub libp2p_non_blocking_writer: NonBlocking,
// NOTE(review): guards presumably must stay alive for the non-blocking
// writers to keep flushing — kept here so they drop with the layer.
pub _libp2p_guard: WorkerGuard,
pub discv5_non_blocking_writer: NonBlocking,
pub _discv5_guard: WorkerGuard,
}
// Ensure that `tracing_log_path` only contains directories.
for p in base_tracing_log_path.iter() {
tracing_log_path = tracing_log_path.join(p);
if let Ok(metadata) = tracing_log_path.metadata() {
if !metadata.is_dir() {
tracing_log_path.pop();
break;
}
impl<S> Layer<S> for Libp2pDiscv5TracingLayer
where
S: Subscriber,
{
// Routes gossipsub events to the libp2p file and discv5 events to the
// discv5 file; all other targets are ignored.
fn on_event(&self, event: &tracing::Event<'_>, _ctx: Context<S>) {
let meta = event.metadata();
let log_level = meta.level();
let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
// Use only the crate name portion of the target (before the first `::`).
let target = match meta.target().split_once("::") {
Some((crate_name, _)) => crate_name,
None => "unknown",
};
let mut writer = match target {
"gossipsub" => self.libp2p_non_blocking_writer.clone(),
"discv5" => self.discv5_non_blocking_writer.clone(),
_ => return,
};
// Flatten the event's fields into one string.
let mut visitor = LogMessageExtractor {
message: String::default(),
};
event.record(&mut visitor);
let message = format!("{} {} {}\n", timestamp, log_level, visitor.message);
if let Err(e) = writer.write_all(message.as_bytes()) {
eprintln!("Failed to write log: {}", e);
}
}
let filter_layer = match tracing_subscriber::EnvFilter::try_from_default_env()
.or_else(|_| tracing_subscriber::EnvFilter::try_new("warn"))
{
Ok(filter) => filter,
Err(e) => {
eprintln!("Failed to initialize dependency logging {e}");
return;
}
};
let Ok(libp2p_writer) = RollingFileAppender::builder()
.rotation(Rotation::DAILY)
.max_log_files(2)
.filename_prefix("libp2p")
.filename_suffix("log")
.build(tracing_log_path.clone())
else {
eprintln!("Failed to initialize libp2p rolling file appender");
return;
};
let Ok(discv5_writer) = RollingFileAppender::builder()
.rotation(Rotation::DAILY)
.max_log_files(2)
.filename_prefix("discv5")
.filename_suffix("log")
.build(tracing_log_path)
else {
eprintln!("Failed to initialize discv5 rolling file appender");
return;
};
let (libp2p_non_blocking_writer, _libp2p_guard) = NonBlocking::new(libp2p_writer);
let (discv5_non_blocking_writer, _discv5_guard) = NonBlocking::new(discv5_writer);
let custom_layer = LoggingLayer {
libp2p_non_blocking_writer,
_libp2p_guard,
discv5_non_blocking_writer,
_discv5_guard,
};
if let Err(e) = tracing_subscriber::fmt()
.with_env_filter(filter_layer)
.with_writer(std::io::sink)
.finish()
.with(MetricsLayer)
.with(custom_layer)
.try_init()
{
eprintln!("Failed to initialize dependency logging {e}");
}
}
/// Return a logger suitable for test usage.
/// Accumulates an event's recorded fields into one space-separated string.
struct LogMessageExtractor {
    message: String,
}

impl tracing_core::field::Visit for LogMessageExtractor {
    /// Appends ` {value:?}` for every field the event records.
    fn record_debug(&mut self, _: &tracing_core::Field, value: &dyn std::fmt::Debug) {
        use std::fmt::Write;
        // Append in place; the previous `format!("{} {:?}", self.message, ..)`
        // reallocated and copied the whole string for every field.
        // Writing to a `String` cannot fail, so the Result is ignored.
        let _ = write!(self.message, " {:?}", value);
    }
}
/// Builds the libp2p/discv5 file-logging layer.
///
/// When `base_tracing_log_path` is `Some`, size-rotated `libp2p.log` and
/// `discv5.log` files are created under the longest leading run of
/// directories in that path (trailing non-directory components are dropped).
/// When it is `None`, both writers discard their output.
///
/// Exits the process if either rolling file appender cannot be built.
pub fn create_libp2p_discv5_tracing_layer(
    base_tracing_log_path: Option<PathBuf>,
    max_log_size: u64,
    compression: bool,
    max_log_number: usize,
) -> Libp2pDiscv5TracingLayer {
    if let Some(base_tracing_log_path) = base_tracing_log_path {
        // Rebuild the path component-by-component into a FRESH PathBuf,
        // stopping at the first component that exists but is not a
        // directory. (Joining the components back onto the original path —
        // as the previous code did — duplicated them for relative paths.)
        let mut tracing_log_path = PathBuf::new();
        for p in base_tracing_log_path.iter() {
            tracing_log_path.push(p);
            if let Ok(metadata) = tracing_log_path.metadata() {
                if !metadata.is_dir() {
                    tracing_log_path.pop();
                    break;
                }
            }
        }
        // Convert once for both builders; fall back to keeping 10 files.
        let max_keep: u64 = max_log_number.try_into().unwrap_or_else(|e| {
            eprintln!("Failed to convert max_log_number to u64: {}", e);
            10
        });
        let mut libp2p_writer =
            LogRollerBuilder::new(tracing_log_path.clone(), PathBuf::from("libp2p.log"))
                .rotation(Rotation::SizeBased(RotationSize::MB(max_log_size)))
                .max_keep_files(max_keep);
        let mut discv5_writer =
            LogRollerBuilder::new(tracing_log_path.clone(), PathBuf::from("discv5.log"))
                .rotation(Rotation::SizeBased(RotationSize::MB(max_log_size)))
                .max_keep_files(max_keep);
        if compression {
            libp2p_writer = libp2p_writer.compression(Compression::Gzip);
            discv5_writer = discv5_writer.compression(Compression::Gzip);
        }
        let Ok(libp2p_writer) = libp2p_writer.build() else {
            eprintln!("Failed to initialize libp2p rolling file appender");
            std::process::exit(1);
        };
        let Ok(discv5_writer) = discv5_writer.build() else {
            eprintln!("Failed to initialize discv5 rolling file appender");
            std::process::exit(1);
        };
        let (libp2p_non_blocking_writer, _libp2p_guard) = NonBlocking::new(libp2p_writer);
        let (discv5_non_blocking_writer, _discv5_guard) = NonBlocking::new(discv5_writer);
        Libp2pDiscv5TracingLayer {
            libp2p_non_blocking_writer,
            _libp2p_guard,
            discv5_non_blocking_writer,
            _discv5_guard,
        }
    } else {
        // No log path configured: swallow libp2p/discv5 output.
        let (libp2p_non_blocking_writer, _libp2p_guard) = NonBlocking::new(std::io::sink());
        let (discv5_non_blocking_writer, _discv5_guard) = NonBlocking::new(std::io::sink());
        Libp2pDiscv5TracingLayer {
            libp2p_non_blocking_writer,
            _libp2p_guard,
            discv5_non_blocking_writer,
            _discv5_guard,
        }
    }
}
/// Return a tracing subscriber suitable for test usage.
///
/// By default no logs will be printed, but they can be enabled via
/// the `test_logger` feature. This feature can be enabled for any
@@ -290,17 +179,10 @@ pub fn create_tracing_layer(base_tracing_log_path: PathBuf) {
/// ```bash
/// cargo test -p beacon_chain --features logging/test_logger
/// ```
pub fn test_logger() -> Logger {
use sloggers::Build;
pub fn create_test_tracing_subscriber() {
if cfg!(feature = "test_logger") {
sloggers::terminal::TerminalLoggerBuilder::new()
.level(sloggers::types::Severity::Debug)
.build()
.expect("Should build TerminalLoggerBuilder")
} else {
sloggers::null::NullLoggerBuilder
.build()
.expect("Should build NullLoggerBuilder")
let _ = tracing_subscriber::fmt()
.with_env_filter(EnvFilter::try_new("debug").unwrap())
.try_init();
}
}

View File

@@ -0,0 +1,6 @@
/// Logs a critical error.
///
/// Expands to `tracing::error!` with an extra `error_type = "crit"` field;
/// this crate's formatting and SSE layers detect that field and re-label
/// the event's level as `CRIT`.
#[macro_export]
macro_rules! crit {
($($arg:tt)*) => {
tracing::error!(error_type = "crit", $($arg)*);
};
}

View File

@@ -1,46 +1,108 @@
//! This module provides an implementation of `slog::Drain` that optionally writes to a channel if
//! there are subscribers to a HTTP SSE stream.
use crate::async_record::AsyncRecord;
use slog::{Drain, OwnedKVList, Record};
use std::panic::AssertUnwindSafe;
use serde_json::json;
use serde_json::Value;
use std::sync::Arc;
use tokio::sync::broadcast::Sender;
use tracing::field::{Field, Visit};
use tracing::{Event, Subscriber};
use tracing_subscriber::layer::{Context, Layer};
/// Default log level for SSE Events.
// NOTE: Made this a constant. Debug level seems to be pretty intense. Can make this
// configurable later if needed.
const LOG_LEVEL: slog::Level = slog::Level::Info;
const LOG_LEVEL: tracing::Level = tracing::Level::INFO;
/// The components required in the HTTP API task to receive logged events.
#[derive(Clone)]
pub struct SSELoggingComponents {
/// The channel to receive events from.
pub sender: Arc<AssertUnwindSafe<Sender<AsyncRecord>>>,
pub sender: Arc<Sender<Arc<Value>>>,
}
impl SSELoggingComponents {
/// Create a new SSE drain.
pub fn new(channel_size: usize) -> Self {
let (sender, _receiver) = tokio::sync::broadcast::channel(channel_size);
let sender = Arc::new(AssertUnwindSafe(sender));
SSELoggingComponents { sender }
SSELoggingComponents {
sender: Arc::new(sender),
}
}
}
impl Drain for SSELoggingComponents {
type Ok = ();
type Err = &'static str;
impl<S: Subscriber> Layer<S> for SSELoggingComponents {
fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
if *event.metadata().level() > LOG_LEVEL {
return;
}
fn log(&self, record: &Record, logger_values: &OwnedKVList) -> Result<Self::Ok, Self::Err> {
if record.level().is_at_least(LOG_LEVEL) {
// Attempt to send the logs
match self.sender.send(AsyncRecord::from(record, logger_values)) {
Ok(_num_sent) => {} // Everything got sent
Err(_err) => {} // There are no subscribers, do nothing
let mut visitor = TracingEventVisitor::new();
event.record(&mut visitor);
let mut log_entry = visitor.finish(event.metadata());
if let Some(error_type) = log_entry
.get("fields")
.and_then(|fields| fields.get("error_type"))
.and_then(|val| val.as_str())
{
if error_type.eq_ignore_ascii_case("crit") {
log_entry["level"] = json!("CRIT");
if let Some(Value::Object(ref mut map)) = log_entry.get_mut("fields") {
map.remove("error_type");
}
}
}
Ok(())
let _ = self.sender.send(Arc::new(log_entry));
}
}
struct TracingEventVisitor {
fields: serde_json::Map<String, Value>,
}
impl TracingEventVisitor {
fn new() -> Self {
TracingEventVisitor {
fields: serde_json::Map::new(),
}
}
fn finish(self, metadata: &tracing::Metadata<'_>) -> Value {
let mut log_entry = serde_json::Map::new();
log_entry.insert(
"time".to_string(),
json!(chrono::Local::now()
.format("%b %d %H:%M:%S%.3f")
.to_string()),
);
log_entry.insert("level".to_string(), json!(metadata.level().to_string()));
log_entry.insert("target".to_string(), json!(metadata.target()));
log_entry.insert("fields".to_string(), Value::Object(self.fields));
Value::Object(log_entry)
}
}
impl Visit for TracingEventVisitor {
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
self.fields
.insert(field.name().to_string(), json!(format!("{:?}", value)));
}
fn record_str(&mut self, field: &Field, value: &str) {
self.fields.insert(field.name().to_string(), json!(value));
}
fn record_i64(&mut self, field: &Field, value: i64) {
self.fields.insert(field.name().to_string(), json!(value));
}
fn record_u64(&mut self, field: &Field, value: u64) {
self.fields.insert(field.name().to_string(), json!(value));
}
fn record_bool(&mut self, field: &Field, value: bool) {
self.fields.insert(field.name().to_string(), json!(value));
}
}

View File

@@ -1,56 +1,531 @@
use chrono::prelude::*;
use serde_json::{Map, Value};
use std::collections::HashMap;
use std::io::Write;
use std::sync::{Arc, Mutex};
use tracing::field::Field;
use tracing::span::Id;
use tracing::Subscriber;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_subscriber::layer::Context;
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::Layer;
pub struct LoggingLayer {
pub libp2p_non_blocking_writer: NonBlocking,
pub _libp2p_guard: WorkerGuard,
pub discv5_non_blocking_writer: NonBlocking,
pub _discv5_guard: WorkerGuard,
pub non_blocking_writer: NonBlocking,
pub guard: WorkerGuard,
pub disable_log_timestamp: bool,
pub log_color: bool,
pub logfile_color: bool,
pub log_format: Option<String>,
pub logfile_format: Option<String>,
pub extra_info: bool,
pub dep_logs: bool,
span_fields: Arc<Mutex<HashMap<Id, SpanData>>>,
}
impl LoggingLayer {
#[allow(clippy::too_many_arguments)]
/// Builds the main formatting layer.
///
/// `non_blocking_writer`/`guard` are the output sink and its flush guard;
/// the boolean/format options control timestamps, ANSI color, JSON vs.
/// text output, extra source-location info, and whether dependency logs
/// are included. Span fields are tracked in `span_fields`, populated by
/// `on_new_span`.
pub fn new(
non_blocking_writer: NonBlocking,
guard: WorkerGuard,
disable_log_timestamp: bool,
log_color: bool,
logfile_color: bool,
log_format: Option<String>,
logfile_format: Option<String>,
extra_info: bool,
dep_logs: bool,
) -> Self {
Self {
non_blocking_writer,
guard,
disable_log_timestamp,
log_color,
logfile_color,
log_format,
logfile_format,
extra_info,
dep_logs,
span_fields: Arc::new(Mutex::new(HashMap::new())),
}
}
}
impl<S> Layer<S> for LoggingLayer
where
S: Subscriber,
S: Subscriber + for<'a> LookupSpan<'a>,
{
fn on_event(&self, event: &tracing::Event<'_>, _ctx: Context<S>) {
// Captures a span's name and fields at creation time so later events can
// include them (the registry is keyed by span `Id`).
fn on_new_span(&self, attrs: &tracing::span::Attributes<'_>, id: &Id, _ctx: Context<S>) {
let metadata = attrs.metadata();
let span_name = metadata.name();
let mut visitor = SpanFieldsExtractor::default();
attrs.record(&mut visitor);
let span_data = SpanData {
name: span_name.to_string(),
fields: visitor.fields,
};
// Recover from a poisoned lock rather than panicking inside the
// logging path.
let mut span_fields = match self.span_fields.lock() {
Ok(guard) => guard,
Err(poisoned) => poisoned.into_inner(),
};
span_fields.insert(id.clone(), span_data);
}
fn on_event(&self, event: &tracing::Event<'_>, ctx: Context<S>) {
let meta = event.metadata();
let log_level = meta.level();
let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
let target = match meta.target().split_once("::") {
Some((crate_name, _)) => crate_name,
None => "unknown",
let timestamp = if !self.disable_log_timestamp {
Local::now().format("%b %d %H:%M:%S%.3f").to_string()
} else {
String::new()
};
let mut writer = match target {
"gossipsub" => self.libp2p_non_blocking_writer.clone(),
"discv5" => self.discv5_non_blocking_writer.clone(),
_ => return,
};
if !self.dep_logs {
if let Some(file) = meta.file() {
if file.contains("/.cargo/") {
return;
}
} else {
return;
}
}
let mut writer = self.non_blocking_writer.clone();
let mut visitor = LogMessageExtractor {
message: String::default(),
message: String::new(),
fields: Vec::new(),
is_crit: false,
};
event.record(&mut visitor);
// Remove ascii control codes from message.
// All following formatting and logs components are predetermined or known.
if visitor.message.as_bytes().iter().any(u8::is_ascii_control) {
let filtered = visitor
.message
.as_bytes()
.iter()
.map(|c| if is_ascii_control(c) { b'_' } else { *c })
.collect::<Vec<u8>>();
visitor.message = String::from_utf8(filtered).unwrap_or_default();
};
event.record(&mut visitor);
let message = format!("{} {} {}\n", timestamp, log_level, visitor.message);
let module = meta.module_path().unwrap_or("<unknown_module>");
let file = meta.file().unwrap_or("<unknown_file>");
let line = match meta.line() {
Some(line) => line.to_string(),
None => "<unknown_line>".to_string(),
};
if let Err(e) = writer.write_all(message.as_bytes()) {
eprintln!("Failed to write log: {}", e);
if module.contains("discv5") {
visitor
.fields
.push(("service".to_string(), "\"discv5\"".to_string()));
}
let gray = "\x1b[90m";
let reset = "\x1b[0m";
let location = if self.extra_info {
if self.logfile_color {
format!("{}{}::{}:{}{}", gray, module, file, line, reset)
} else {
format!("{}::{}:{}", module, file, line)
}
} else {
String::new()
};
let plain_level_str = if visitor.is_crit {
"CRIT"
} else {
match *log_level {
tracing::Level::ERROR => "ERROR",
tracing::Level::WARN => "WARN",
tracing::Level::INFO => "INFO",
tracing::Level::DEBUG => "DEBUG",
tracing::Level::TRACE => "TRACE",
}
};
let color_level_str = if visitor.is_crit {
"\x1b[35mCRIT\x1b[0m"
} else {
match *log_level {
tracing::Level::ERROR => "\x1b[31mERROR\x1b[0m",
tracing::Level::WARN => "\x1b[33mWARN\x1b[0m",
tracing::Level::INFO => "\x1b[32mINFO\x1b[0m",
tracing::Level::DEBUG => "\x1b[34mDEBUG\x1b[0m",
tracing::Level::TRACE => "\x1b[35mTRACE\x1b[0m",
}
};
if self.dep_logs {
if self.logfile_format.as_deref() == Some("JSON") {
build_json_log_file(
&visitor,
plain_level_str,
meta,
&ctx,
&self.span_fields,
event,
&mut writer,
);
} else {
build_log_text(
&visitor,
plain_level_str,
&timestamp,
&ctx,
&self.span_fields,
event,
&location,
color_level_str,
self.logfile_color,
&mut writer,
);
}
} else if self.log_format.as_deref() == Some("JSON") {
build_json_log_stdout(&visitor, plain_level_str, &timestamp, &mut writer);
} else {
build_log_text(
&visitor,
plain_level_str,
&timestamp,
&ctx,
&self.span_fields,
event,
&location,
color_level_str,
self.log_color,
&mut writer,
);
}
}
}
/// A span's name and its fields, pre-rendered at span creation.
struct SpanData {
name: String,
// (field name, rendered value) pairs; string values carry quotes.
fields: Vec<(String, String)>,
}
/// Visitor that renders a span's fields into `(name, value)` string pairs.
#[derive(Default)]
struct SpanFieldsExtractor {
    fields: Vec<(String, String)>,
}

impl SpanFieldsExtractor {
    /// Records one field with its already-rendered value.
    fn store(&mut self, field: &Field, rendered: String) {
        self.fields.push((field.name().to_string(), rendered));
    }
}

impl tracing_core::field::Visit for SpanFieldsExtractor {
    // String values are wrapped in literal quotes.
    fn record_str(&mut self, field: &Field, value: &str) {
        self.store(field, format!("\"{}\"", value));
    }

    fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
        self.store(field, format!("{:?}", value));
    }

    fn record_i64(&mut self, field: &Field, value: i64) {
        self.store(field, value.to_string());
    }

    fn record_u64(&mut self, field: &Field, value: u64) {
        self.store(field, value.to_string());
    }

    fn record_bool(&mut self, field: &Field, value: bool) {
        self.store(field, value.to_string());
    }
}
struct LogMessageExtractor {
message: String,
fields: Vec<(String, String)>,
is_crit: bool,
}
impl tracing_core::field::Visit for LogMessageExtractor {
fn record_debug(&mut self, _: &tracing_core::Field, value: &dyn std::fmt::Debug) {
self.message = format!("{} {:?}", self.message, value);
fn record_str(&mut self, field: &Field, value: &str) {
if field.name() == "message" {
if self.message.is_empty() {
self.message = value.to_string();
} else {
self.fields
.push(("msg_id".to_string(), format!("\"{}\"", value)));
}
} else if field.name() == "error_type" && value == "crit" {
self.is_crit = true;
} else {
self.fields
.push((field.name().to_string(), format!("\"{}\"", value)));
}
}
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
if field.name() == "message" {
if self.message.is_empty() {
self.message = format!("{:?}", value);
} else {
self.fields
.push(("msg_id".to_string(), format!("{:?}", value)));
}
} else if field.name() == "error_type" && format!("{:?}", value) == "\"crit\"" {
self.is_crit = true;
} else {
self.fields
.push((field.name().to_string(), format!("{:?}", value)));
}
}
fn record_i64(&mut self, field: &Field, value: i64) {
self.fields
.push((field.name().to_string(), value.to_string()));
}
fn record_u64(&mut self, field: &Field, value: u64) {
self.fields
.push((field.name().to_string(), value.to_string()));
}
fn record_bool(&mut self, field: &Field, value: bool) {
self.fields
.push((field.name().to_string(), value.to_string()));
}
}
/// Function to filter out ascii control codes.
///
/// This helps to keep log formatting consistent.
/// Whitespace and padding control codes are excluded.
fn is_ascii_control(character: &u8) -> bool {
    match *character {
        // Everything below tab, the vertical-tab/form-feed pair, the rest of
        // C0 (minus \n and \r), DEL, and most of C1 count as control codes.
        0x00..=0x08 | 0x0b..=0x0c | 0x0e..=0x1f | 0x7f | 0x81..=0x9f => true,
        _ => false,
    }
}
/// Serializes an event as a single-line JSON object and writes it to the
/// given writer (the stdout path of the JSON logger).
///
/// The object always carries `msg`, `level` and `ts`; every event field is
/// run through `parse_field` so numeric/boolean values keep their JSON type.
/// Write failures are reported on stderr rather than panicking inside the
/// logging pipeline.
fn build_json_log_stdout(
    visitor: &LogMessageExtractor,
    plain_level_str: &str,
    timestamp: &str,
    writer: &mut impl Write,
) {
    let mut log_map = Map::new();
    log_map.insert("msg".to_string(), Value::String(visitor.message.clone()));
    log_map.insert(
        "level".to_string(),
        Value::String(plain_level_str.to_string()),
    );
    log_map.insert("ts".to_string(), Value::String(timestamp.to_string()));
    // Iterate by reference: only the key needs an owned copy, avoiding a
    // clone of the entire field vector on every log line.
    for (key, val) in &visitor.fields {
        log_map.insert(key.clone(), parse_field(val));
    }
    let json_obj = Value::Object(log_map);
    let output = format!("{}\n", json_obj);
    if let Err(e) = writer.write_all(output.as_bytes()) {
        eprintln!("Failed to write log: {}", e);
    }
}
/// Serializes an event as a single-line JSON object destined for the log
/// file. In addition to `msg`/`level`/`ts`, it records the callsite as
/// `module:line`, every event field, and the fields of every span in the
/// event's scope.
///
/// Write failures are reported on stderr rather than panicking inside the
/// logging pipeline; a poisoned span-field mutex silently skips span fields.
fn build_json_log_file<'a, S>(
    visitor: &LogMessageExtractor,
    plain_level_str: &str,
    meta: &tracing::Metadata<'_>,
    ctx: &Context<'_, S>,
    span_fields: &Arc<Mutex<HashMap<Id, SpanData>>>,
    event: &tracing::Event<'_>,
    writer: &mut impl Write,
) where
    S: Subscriber + for<'lookup> LookupSpan<'lookup>,
{
    // File logs carry their own UTC timestamp with microsecond precision.
    let utc_timestamp = Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Micros, true);
    let mut log_map = Map::new();
    log_map.insert("msg".to_string(), Value::String(visitor.message.clone()));
    log_map.insert(
        "level".to_string(),
        Value::String(plain_level_str.to_string()),
    );
    log_map.insert("ts".to_string(), Value::String(utc_timestamp));
    // Callsite rendered as `module:line`, with placeholders when unavailable.
    let module_path = meta.module_path().unwrap_or("<unknown_module>");
    let line_number = meta
        .line()
        .map_or("<unknown_line>".to_string(), |l| l.to_string());
    let module_field = format!("{}:{}", module_path, line_number);
    log_map.insert("module".to_string(), Value::String(module_field));
    // Reuse `parse_field` (same strip-quotes + JSON-parse logic used by the
    // stdout path) instead of duplicating it here, and iterate by reference
    // so values are not cloned wholesale.
    for (key, val) in &visitor.fields {
        log_map.insert(key.clone(), parse_field(val));
    }
    // Merge in the fields of every span enclosing this event.
    if let Some(scope) = ctx.event_scope(event) {
        if let Ok(span_map) = span_fields.lock() {
            for span in scope {
                if let Some(span_data) = span_map.get(&span.id()) {
                    for (key, val) in &span_data.fields {
                        log_map.insert(key.clone(), parse_field(val));
                    }
                }
            }
        }
    }
    let json_obj = Value::Object(log_map);
    let output = format!("{}\n", json_obj);
    if let Err(e) = writer.write_all(output.as_bytes()) {
        eprintln!("Failed to write log: {}", e);
    }
}
/// Renders an event as a human-readable text line and writes it out:
/// `<timestamp> <LEVEL> [<location>] <message (padded)> <event fields> <span fields>`.
///
/// When `use_color` is set, `color_level_str` is used for the level and the
/// message/field names are wrapped in ANSI bold escapes; otherwise
/// `plain_level_str` is used and no escape codes are emitted.
#[allow(clippy::too_many_arguments)]
fn build_log_text<'a, S>(
    visitor: &LogMessageExtractor,
    plain_level_str: &str,
    timestamp: &str,
    ctx: &Context<'_, S>,
    span_fields: &Arc<Mutex<HashMap<Id, SpanData>>>,
    event: &tracing::Event<'_>,
    location: &str,
    color_level_str: &str,
    use_color: bool,
    writer: &mut impl Write,
) where
    S: Subscriber + for<'lookup> LookupSpan<'lookup>,
{
    // ANSI escapes for bold text; only emitted when `use_color` is true.
    let bold_start = "\x1b[1m";
    let bold_end = "\x1b[0m";
    // Collect (span name, fields) for every span in the event's scope.
    // NOTE(review): `unwrap()` panics if the span-field mutex is poisoned.
    let mut collected_span_fields = Vec::new();
    if let Some(scope) = ctx.event_scope(event) {
        for span in scope {
            let id = span.id();
            let span_fields_map = span_fields.lock().unwrap();
            if let Some(span_data) = span_fields_map.get(&id) {
                collected_span_fields.push((span_data.name.clone(), span_data.fields.clone()));
            }
        }
    }
    // Render span fields; `.rev()` reverses the scope iteration order.
    let mut formatted_spans = String::new();
    for (_, fields) in collected_span_fields.iter().rev() {
        for (i, (field_name, field_value)) in fields.iter().enumerate() {
            // NOTE(review): the `, ` separator is only inserted when the
            // event itself has fields; with an empty `visitor.fields`,
            // consecutive span fields are concatenated with no separator —
            // confirm this is intentional.
            if i > 0 && !visitor.fields.is_empty() {
                formatted_spans.push_str(", ");
            }
            if use_color {
                formatted_spans.push_str(&format!(
                    "{}{}{}: {}",
                    bold_start, field_name, bold_end, field_value
                ));
            } else {
                formatted_spans.push_str(&format!("{}: {}", field_name, field_value));
            }
        }
    }
    let level_str = if use_color {
        color_level_str
    } else {
        plain_level_str
    };
    // Pad the message to a fixed column so field lists line up across lines.
    // ANSI escapes contribute invisible characters, so the pad width is
    // widened by their byte length when color is on.
    let fixed_message_width = 44;
    let message_len = visitor.message.len();
    let message_content = if use_color {
        format!("{}{}{}", bold_start, visitor.message, bold_end)
    } else {
        visitor.message.clone()
    };
    let padded_message = if message_len < fixed_message_width {
        let extra_color_len = if use_color {
            bold_start.len() + bold_end.len()
        } else {
            0
        };
        format!(
            "{:<width$}",
            message_content,
            width = fixed_message_width + extra_color_len
        )
    } else {
        message_content.clone()
    };
    // Render the event's own fields as `name: value`, comma-separated; a
    // trailing comma is appended after the last one when span fields follow.
    let mut formatted_fields = String::new();
    for (i, (field_name, field_value)) in visitor.fields.iter().enumerate() {
        if i > 0 {
            formatted_fields.push_str(", ");
        }
        if use_color {
            formatted_fields.push_str(&format!(
                "{}{}{}: {}",
                bold_start, field_name, bold_end, field_value
            ));
        } else {
            formatted_fields.push_str(&format!("{}: {}", field_name, field_value));
        }
        if i == visitor.fields.len() - 1 && !collected_span_fields.is_empty() {
            formatted_fields.push(',');
        }
    }
    let full_message = if !formatted_fields.is_empty() {
        format!("{} {}", padded_message, formatted_fields)
    } else {
        padded_message.to_string()
    };
    // The callsite location is optional: an empty string means "omit".
    let message = if !location.is_empty() {
        format!(
            "{} {} {} {} {}\n",
            timestamp, level_str, location, full_message, formatted_spans
        )
    } else {
        format!(
            "{} {} {} {}\n",
            timestamp, level_str, full_message, formatted_spans
        )
    };
    // Never panic from inside the logging pipeline; report write failures on
    // stderr instead.
    if let Err(e) = writer.write_all(message.as_bytes()) {
        eprintln!("Failed to write log: {}", e);
    }
}
/// Strips one pair of surrounding double quotes (if present) and attempts to
/// parse the result as JSON, falling back to a plain JSON string.
fn parse_field(val: &str) -> Value {
    // Remove the outer quotes only when both are present. A lone `"` fails
    // the suffix strip, so the old `len() >= 2` guard is implied.
    let cleaned = val
        .strip_prefix('"')
        .and_then(|inner| inner.strip_suffix('"'))
        .unwrap_or(val);
    // `unwrap_or_else` defers the fallback String allocation to the failure
    // path instead of allocating it even when parsing succeeds.
    serde_json::from_str(cleaned).unwrap_or_else(|_| Value::String(cleaned.to_string()))
}

// ---- file boundary: the following integration-test file was removed in this commit ----
use std::env;
use std::process::Command;
use std::process::Output;
/// Runs `cmd_line` through the platform shell (`cmd /C` on Windows,
/// `sh -c` elsewhere) and returns the captured output.
fn run_cmd(cmd_line: &str) -> Result<Output, std::io::Error> {
    let (shell, flag) = if cfg!(target_os = "windows") {
        ("cmd", "/C")
    } else {
        ("sh", "-c")
    };
    Command::new(shell).args([flag, cmd_line]).output()
}
#[test]
fn test_test_logger_with_feature_test_logger() {
    // Fixture crate living at ../../testing/test-test_logger.
    let test_dir = env::current_dir()
        .unwrap()
        .join("..")
        .join("..")
        .join("testing")
        .join("test-test_logger");
    let cmd_line = format!(
        "cd {} && cargo test --features logging/test_logger",
        test_dir.to_str().unwrap()
    );
    // With the `test_logger` feature enabled, log output must reach stderr.
    let data = String::from_utf8(run_cmd(&cmd_line).unwrap().stderr).unwrap();
    println!("data={}", data);
    assert!(data.contains("INFO hi, "));
}
#[test]
fn test_test_logger_no_features() {
    // Same fixture crate, run without the `test_logger` feature.
    let test_dir = env::current_dir()
        .unwrap()
        .join("..")
        .join("..")
        .join("testing")
        .join("test-test_logger");
    let cmd_line = format!("cd {} && cargo test", test_dir.to_str().unwrap());
    // Without the feature, log output must NOT appear on stderr.
    let data = String::from_utf8(run_cmd(&cmd_line).unwrap().stderr).unwrap();
    println!("data={}", data);
    assert!(!data.contains("INFO hi, "));
}