From cd7b80e524b43cbcc33b66796e49d5b48dcce5e9 Mon Sep 17 00:00:00 2001
From: Guillaume Binet
Date: Thu, 27 Jun 2024 12:02:12 -0500
Subject: [PATCH] more unified

---
 copper_helpers/src/lib.rs    |  4 ++--
 copper_log_reader/src/cli.rs |  4 ++--
 copper_log_reader/src/lib.rs |  6 +++---
 copper_traits/src/lib.rs     | 10 ++++-----
 copper_unifiedlog/src/lib.rs | 40 ++++++++++++++++++------------------
 5 files changed, 32 insertions(+), 32 deletions(-)

diff --git a/copper_helpers/src/lib.rs b/copper_helpers/src/lib.rs
index 34513b789..4cffaa54a 100644
--- a/copper_helpers/src/lib.rs
+++ b/copper_helpers/src/lib.rs
@@ -1,7 +1,7 @@
 use copper_clock::RobotClock;
 use copper_log::default_log_index_dir;
 use copper_log_runtime::{ExtraTextLogger, LoggerRuntime};
-use copper_traits::{CuResult, DataLogType};
+use copper_traits::{CuResult, UnifiedLogType};
 use copper_unifiedlog::{stream_write, UnifiedLogger, UnifiedLoggerBuilder};
 use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};
 use std::path::Path;
@@ -29,7 +29,7 @@ pub fn basic_logger_runtime_setup(
         panic!("Failed to create logger")
     };
     let data_logger = Arc::new(Mutex::new(logger));
-    let stream = stream_write(data_logger.clone(), DataLogType::StructuredLogLine, 1024);
+    let stream = stream_write(data_logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
 
     let extra = if text_log {
         let slow_text_logger = TermLogger::new(
diff --git a/copper_log_reader/src/cli.rs b/copper_log_reader/src/cli.rs
index 4913dcff5..45358df7b 100644
--- a/copper_log_reader/src/cli.rs
+++ b/copper_log_reader/src/cli.rs
@@ -1,6 +1,6 @@
 use clap::{Parser, Subcommand};
 use copper_log_reader::full_log_dump;
-use copper_traits::DataLogType;
+use copper_traits::UnifiedLogType;
 use copper_unifiedlog::{UnifiedLogger, UnifiedLoggerBuilder, UnifiedLoggerIOReader};
 use std::io::Read;
 use std::path::PathBuf;
@@ -36,6 +36,6 @@ fn main() {
         panic!("Failed to create logger");
     };
 
-    let reader = UnifiedLoggerIOReader::new(dl, DataLogType::StructuredLogLine);
+    let reader = UnifiedLoggerIOReader::new(dl, UnifiedLogType::StructuredLogLine);
     full_log_dump(reader, &index).expect("Failed to dump log");
 }
diff --git a/copper_log_reader/src/lib.rs b/copper_log_reader/src/lib.rs
index f77b9feef..48489ed68 100644
--- a/copper_log_reader/src/lib.rs
+++ b/copper_log_reader/src/lib.rs
@@ -75,7 +75,7 @@ mod tests {
     use copper_log::value::Value;
     use copper_log_runtime::log;
     use copper_log_runtime::LoggerRuntime;
-    use copper_traits::{DataLogType, WriteStream};
+    use copper_traits::{UnifiedLogType, WriteStream};
     use std::io::{Cursor, Write};
     use std::sync::{Arc, Mutex};
     use tempfile::tempdir;
@@ -107,7 +107,7 @@ mod tests {
            panic!("Failed to create logger")
         };
         let data_logger = Arc::new(Mutex::new(logger));
-        let stream = stream_write(data_logger.clone(), DataLogType::StructuredLogLine, 1024);
+        let stream = stream_write(data_logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
         let rt = LoggerRuntime::init(RobotClock::default(), stream, None);
 
         let mut entry = CuLogEntry::new(4); // this is a "Just a String {}" log line
@@ -127,7 +127,7 @@ mod tests {
         else {
             panic!("Failed to create logger")
         };
-        let reader = DataLoggerIOReader::new(logger, DataLogType::StructuredLogLine);
+        let reader = DataLoggerIOReader::new(logger, UnifiedLogType::StructuredLogLine);
         full_log_dump(reader, Path::new("test/copper_log_index"));
     }
 }
diff --git a/copper_traits/src/lib.rs b/copper_traits/src/lib.rs
index bc306d36b..16f01b7ac 100644
--- a/copper_traits/src/lib.rs
+++ b/copper_traits/src/lib.rs
@@ -62,10 +62,10 @@ pub trait WriteStream: Sync + Send {
     fn log(&mut self, obj: &impl Encode) -> CuResult<()>;
 }
 
-/// Defines the types of what can be logged.
+/// Defines the types of what can be logged in the unified logger.
 #[derive(dEncode, dDecode, Copy, Clone, Debug, PartialEq)]
-pub enum DataLogType {
-    StructuredLogLine,
-    CopperList,
-    LastEntry, // This is a special entry that is used to signal the end of the log.
+pub enum UnifiedLogType {
+    StructuredLogLine, // This is for the structured logs (ie. debug! etc..)
+    CopperList,        // This is the actual data log storing activities between tasks.
+    LastEntry,         // This is a special entry that is used to signal the end of the log.
 }
diff --git a/copper_unifiedlog/src/lib.rs b/copper_unifiedlog/src/lib.rs
index 71b1b54a6..2bcefd479 100644
--- a/copper_unifiedlog/src/lib.rs
+++ b/copper_unifiedlog/src/lib.rs
@@ -18,7 +18,7 @@ use bincode::{decode_from_reader, decode_from_slice};
 use bincode_derive::Decode as dDecode;
 use bincode_derive::Encode as dEncode;
 
-use copper_traits::{CuError, CuResult, DataLogType, WriteStream};
+use copper_traits::{CuError, CuResult, UnifiedLogType, WriteStream};
 
 const MAIN_MAGIC: [u8; 4] = [0xB4, 0xA5, 0x50, 0xFF];
 
@@ -36,13 +36,13 @@ struct MainHeader {
 #[derive(dEncode, dDecode)]
 struct SectionHeader {
     magic: [u8; 2],
-    entry_type: DataLogType,
+    entry_type: UnifiedLogType,
     section_size: u32, // offset of section_magic + section_size -> should be the index of the next section_magic
 }
 
 /// A wrapper around a memory mapped file to write to.
 struct MmapStream {
-    entry_type: DataLogType,
+    entry_type: UnifiedLogType,
     parent_logger: Arc<Mutex<UnifiedLoggerWrite>>,
     current_slice: &'static mut [u8],
     current_position: usize,
@@ -51,7 +51,7 @@ struct MmapStream {
 
 impl MmapStream {
     fn new(
-        entry_type: DataLogType,
+        entry_type: UnifiedLogType,
         parent_logger: Arc<Mutex<UnifiedLoggerWrite>>,
         current_slice: &'static mut [u8],
         minimum_allocation_amount: usize,
@@ -112,7 +112,7 @@ impl Drop for MmapStream {
 /// Create a new stream to write to the unifiedlogger.
 pub fn stream_write(
     logger: Arc<Mutex<UnifiedLoggerWrite>>,
-    entry_type: DataLogType,
+    entry_type: UnifiedLogType,
     minimum_allocation_amount: usize,
 ) -> impl WriteStream {
     let aclone = logger.clone();
@@ -299,7 +299,7 @@ impl UnifiedLoggerWrite {
     }
 
     /// The returned slice is section_size or greater.
-    fn add_section(&mut self, entry_type: DataLogType, section_size: usize) -> &mut [u8] {
+    fn add_section(&mut self, entry_type: UnifiedLogType, section_size: usize) -> &mut [u8] {
         // align current_position to the next page
         self.current_global_position =
             (self.current_global_position + self.page_size - 1) & !(self.page_size - 1);
@@ -348,7 +348,7 @@ impl UnifiedLoggerWrite {
 
 impl Drop for UnifiedLoggerWrite {
     fn drop(&mut self) {
-        self.add_section(DataLogType::LastEntry, 0);
+        self.add_section(UnifiedLogType::LastEntry, 0);
         self.flush();
         self.file
             .set_len(self.current_global_position as u64)
@@ -359,7 +359,7 @@ impl UnifiedLoggerRead {
 
     pub fn read_next_section_type(
         &mut self,
-        datalogtype: DataLogType,
+        datalogtype: UnifiedLogType,
     ) -> CuResult<Option<Vec<u8>>> {
         // TODO: eventually implement a 0 copy of this too.
         loop {
@@ -373,7 +373,7 @@ impl UnifiedLoggerRead {
             let header = header_result.unwrap();
 
             // Reached the end of file
-            if header.entry_type == DataLogType::LastEntry {
+            if header.entry_type == UnifiedLogType::LastEntry {
                 return Ok(None);
             }
 
@@ -436,13 +436,13 @@
 /// This a a convience wrapper around the UnifiedLoggerRead to implement the Read trait.
 pub struct UnifiedLoggerIOReader {
     logger: UnifiedLoggerRead,
-    log_type: DataLogType,
+    log_type: UnifiedLogType,
     buffer: Vec<u8>,
     buffer_pos: usize,
 }
 
 impl UnifiedLoggerIOReader {
-    pub fn new(logger: UnifiedLoggerRead, log_type: DataLogType) -> Self {
+    pub fn new(logger: UnifiedLoggerRead, log_type: UnifiedLogType) -> Self {
         Self {
             logger,
             log_type,
@@ -520,8 +520,8 @@ mod tests {
         else {
             panic!("Failed to create logger")
         };
-        logger.add_section(DataLogType::StructuredLogLine, 1024);
-        logger.add_section(DataLogType::CopperList, 2048);
+        logger.add_section(UnifiedLogType::StructuredLogLine, 1024);
+        logger.add_section(UnifiedLogType::CopperList, 2048);
         let used = logger.used();
         assert!(used < 4 * 4096); // ie. 3 headers, 1 page max per
                                   // logger drops
@@ -545,7 +545,7 @@ mod tests {
         let tmp_dir = TempDir::new().expect("could not create a tmp dir");
         let (logger, _) = make_a_logger(&tmp_dir);
         {
-            let _stream = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+            let _stream = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
             assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
         }
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 0);
@@ -557,9 +557,9 @@
     fn test_two_sections_self_cleaning_in_order() {
         let tmp_dir = TempDir::new().expect("could not create a tmp dir");
         let (logger, _) = make_a_logger(&tmp_dir);
-        let s1 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+        let s1 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
-        let s2 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+        let s2 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 2);
         drop(s2);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
@@ -573,9 +573,9 @@
     fn test_two_sections_self_cleaning_out_of_order() {
         let tmp_dir = TempDir::new().expect("could not create a tmp dir");
         let (logger, _) = make_a_logger(&tmp_dir);
-        let s1 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+        let s1 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
-        let s2 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+        let s2 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 2);
         drop(s1);
         assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
@@ -592,7 +592,7 @@ mod tests {
         let p = f.as_path();
         println!("Path : {:?}", p);
         {
-            let mut stream = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
+            let mut stream = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
             stream.log(&1u32).unwrap();
             stream.log(&2u32).unwrap();
             stream.log(&3u32).unwrap();
@@ -606,7 +606,7 @@ mod tests {
             panic!("Failed to build logger");
         };
         let section = dl
-            .read_next_section_type(DataLogType::StructuredLogLine)
+            .read_next_section_type(UnifiedLogType::StructuredLogLine)
             .expect("Failed to read section");
         assert!(section.is_some());
         let section = section.unwrap();