Skip to content

Commit

Permalink
more unified
Browse files Browse the repository at this point in the history
  • Loading branch information
gbin committed Jun 27, 2024
1 parent d0a58fe commit cd7b80e
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 32 deletions.
4 changes: 2 additions & 2 deletions copper_helpers/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use copper_clock::RobotClock;
use copper_log::default_log_index_dir;
use copper_log_runtime::{ExtraTextLogger, LoggerRuntime};
use copper_traits::{CuResult, DataLogType};
use copper_traits::{CuResult, UnifiedLogType};
use copper_unifiedlog::{stream_write, UnifiedLogger, UnifiedLoggerBuilder};
use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};
use std::path::Path;
Expand Down Expand Up @@ -29,7 +29,7 @@ pub fn basic_logger_runtime_setup(
panic!("Failed to create logger")
};
let data_logger = Arc::new(Mutex::new(logger));
let stream = stream_write(data_logger.clone(), DataLogType::StructuredLogLine, 1024);
let stream = stream_write(data_logger.clone(), UnifiedLogType::StructuredLogLine, 1024);

let extra = if text_log {
let slow_text_logger = TermLogger::new(
Expand Down
4 changes: 2 additions & 2 deletions copper_log_reader/src/cli.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use clap::{Parser, Subcommand};
use copper_log_reader::full_log_dump;
use copper_traits::DataLogType;
use copper_traits::UnifiedLogType;
use copper_unifiedlog::{UnifiedLogger, UnifiedLoggerBuilder, UnifiedLoggerIOReader};
use std::io::Read;
use std::path::PathBuf;
Expand Down Expand Up @@ -36,6 +36,6 @@ fn main() {
panic!("Failed to create logger");
};

let reader = UnifiedLoggerIOReader::new(dl, DataLogType::StructuredLogLine);
let reader = UnifiedLoggerIOReader::new(dl, UnifiedLogType::StructuredLogLine);
full_log_dump(reader, &index).expect("Failed to dump log");
}
6 changes: 3 additions & 3 deletions copper_log_reader/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ mod tests {
use copper_log::value::Value;
use copper_log_runtime::log;
use copper_log_runtime::LoggerRuntime;
use copper_traits::{DataLogType, WriteStream};
use copper_traits::{UnifiedLogType, WriteStream};
use std::io::{Cursor, Write};
use std::sync::{Arc, Mutex};
use tempfile::tempdir;
Expand Down Expand Up @@ -107,7 +107,7 @@ mod tests {
panic!("Failed to create logger")
};
let data_logger = Arc::new(Mutex::new(logger));
let stream = stream_write(data_logger.clone(), DataLogType::StructuredLogLine, 1024);
let stream = stream_write(data_logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
let rt = LoggerRuntime::init(RobotClock::default(), stream, None);

let mut entry = CuLogEntry::new(4); // this is a "Just a String {}" log line
Expand All @@ -127,7 +127,7 @@ mod tests {
else {
panic!("Failed to create logger")
};
let reader = DataLoggerIOReader::new(logger, DataLogType::StructuredLogLine);
let reader = DataLoggerIOReader::new(logger, UnifiedLogType::StructuredLogLine);
full_log_dump(reader, Path::new("test/copper_log_index"));
}
}
10 changes: 5 additions & 5 deletions copper_traits/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,10 @@ pub trait WriteStream: Sync + Send {
fn log(&mut self, obj: &impl Encode) -> CuResult<()>;
}

/// Defines the types of what can be logged.
/// Defines the types of what can be logged in the unified logger.
#[derive(dEncode, dDecode, Copy, Clone, Debug, PartialEq)]
pub enum DataLogType {
StructuredLogLine,
CopperList,
LastEntry, // This is a special entry that is used to signal the end of the log.
pub enum UnifiedLogType {
StructuredLogLine, // This is for the structured logs (ie. debug! etc..)
CopperList, // This is the actual data log storing activities between tasks.
LastEntry, // This is a special entry that is used to signal the end of the log.
}
40 changes: 20 additions & 20 deletions copper_unifiedlog/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ use bincode::{decode_from_reader, decode_from_slice};
use bincode_derive::Decode as dDecode;
use bincode_derive::Encode as dEncode;

use copper_traits::{CuError, CuResult, DataLogType, WriteStream};
use copper_traits::{CuError, CuResult, UnifiedLogType, WriteStream};

const MAIN_MAGIC: [u8; 4] = [0xB4, 0xA5, 0x50, 0xFF];

Expand All @@ -36,13 +36,13 @@ struct MainHeader {
#[derive(dEncode, dDecode)]
struct SectionHeader {
magic: [u8; 2],
entry_type: DataLogType,
entry_type: UnifiedLogType,
section_size: u32, // offset of section_magic + section_size -> should be the index of the next section_magic
}

/// A wrapper around a memory mapped file to write to.
struct MmapStream {
entry_type: DataLogType,
entry_type: UnifiedLogType,
parent_logger: Arc<Mutex<UnifiedLoggerWrite>>,
current_slice: &'static mut [u8],
current_position: usize,
Expand All @@ -51,7 +51,7 @@ struct MmapStream {

impl MmapStream {
fn new(
entry_type: DataLogType,
entry_type: UnifiedLogType,
parent_logger: Arc<Mutex<UnifiedLoggerWrite>>,
current_slice: &'static mut [u8],
minimum_allocation_amount: usize,
Expand Down Expand Up @@ -112,7 +112,7 @@ impl Drop for MmapStream {
/// Create a new stream to write to the unifiedlogger.
pub fn stream_write(
logger: Arc<Mutex<UnifiedLoggerWrite>>,
entry_type: DataLogType,
entry_type: UnifiedLogType,
minimum_allocation_amount: usize,
) -> impl WriteStream {
let aclone = logger.clone();
Expand Down Expand Up @@ -299,7 +299,7 @@ impl UnifiedLoggerWrite {
}

/// The returned slice is section_size or greater.
fn add_section(&mut self, entry_type: DataLogType, section_size: usize) -> &mut [u8] {
fn add_section(&mut self, entry_type: UnifiedLogType, section_size: usize) -> &mut [u8] {
// align current_position to the next page
self.current_global_position =
(self.current_global_position + self.page_size - 1) & !(self.page_size - 1);
Expand Down Expand Up @@ -348,7 +348,7 @@ impl UnifiedLoggerWrite {

impl Drop for UnifiedLoggerWrite {
fn drop(&mut self) {
self.add_section(DataLogType::LastEntry, 0);
self.add_section(UnifiedLogType::LastEntry, 0);
self.flush();
self.file
.set_len(self.current_global_position as u64)
Expand All @@ -359,7 +359,7 @@ impl Drop for UnifiedLoggerWrite {
impl UnifiedLoggerRead {
pub fn read_next_section_type(
&mut self,
datalogtype: DataLogType,
datalogtype: UnifiedLogType,
) -> CuResult<Option<Vec<u8>>> {
// TODO: eventually implement a 0 copy of this too.
loop {
Expand All @@ -373,7 +373,7 @@ impl UnifiedLoggerRead {
let header = header_result.unwrap();

// Reached the end of file
if header.entry_type == DataLogType::LastEntry {
if header.entry_type == UnifiedLogType::LastEntry {
return Ok(None);
}

Expand Down Expand Up @@ -436,13 +436,13 @@ impl UnifiedLoggerRead {
/// This is a convenience wrapper around the UnifiedLoggerRead to implement the Read trait.
pub struct UnifiedLoggerIOReader {
logger: UnifiedLoggerRead,
log_type: DataLogType,
log_type: UnifiedLogType,
buffer: Vec<u8>,
buffer_pos: usize,
}

impl UnifiedLoggerIOReader {
pub fn new(logger: UnifiedLoggerRead, log_type: DataLogType) -> Self {
pub fn new(logger: UnifiedLoggerRead, log_type: UnifiedLogType) -> Self {
Self {
logger,
log_type,
Expand Down Expand Up @@ -520,8 +520,8 @@ mod tests {
else {
panic!("Failed to create logger")
};
logger.add_section(DataLogType::StructuredLogLine, 1024);
logger.add_section(DataLogType::CopperList, 2048);
logger.add_section(UnifiedLogType::StructuredLogLine, 1024);
logger.add_section(UnifiedLogType::CopperList, 2048);
let used = logger.used();
assert!(used < 4 * 4096); // ie. 3 headers, 1 page max per
// logger drops
Expand All @@ -545,7 +545,7 @@ mod tests {
let tmp_dir = TempDir::new().expect("could not create a tmp dir");
let (logger, _) = make_a_logger(&tmp_dir);
{
let _stream = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let _stream = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
}
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 0);
Expand All @@ -557,9 +557,9 @@ mod tests {
fn test_two_sections_self_cleaning_in_order() {
let tmp_dir = TempDir::new().expect("could not create a tmp dir");
let (logger, _) = make_a_logger(&tmp_dir);
let s1 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let s1 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
let s2 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let s2 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 2);
drop(s2);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
Expand All @@ -573,9 +573,9 @@ mod tests {
fn test_two_sections_self_cleaning_out_of_order() {
let tmp_dir = TempDir::new().expect("could not create a tmp dir");
let (logger, _) = make_a_logger(&tmp_dir);
let s1 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let s1 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
let s2 = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let s2 = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 2);
drop(s1);
assert_eq!(logger.lock().unwrap().sections_in_flight.len(), 1);
Expand All @@ -592,7 +592,7 @@ mod tests {
let p = f.as_path();
println!("Path : {:?}", p);
{
let mut stream = stream_write(logger.clone(), DataLogType::StructuredLogLine, 1024);
let mut stream = stream_write(logger.clone(), UnifiedLogType::StructuredLogLine, 1024);
stream.log(&1u32).unwrap();
stream.log(&2u32).unwrap();
stream.log(&3u32).unwrap();
Expand All @@ -606,7 +606,7 @@ mod tests {
panic!("Failed to build logger");
};
let section = dl
.read_next_section_type(DataLogType::StructuredLogLine)
.read_next_section_type(UnifiedLogType::StructuredLogLine)
.expect("Failed to read section");
assert!(section.is_some());
let section = section.unwrap();
Expand Down

0 comments on commit cd7b80e

Please sign in to comment.