Skip to content

Commit

Permalink
Metrics fixes.
Browse files Browse the repository at this point in the history
  • Loading branch information
cryptoquick committed Dec 11, 2023
1 parent ef29bc6 commit 9e9b32e
Show file tree
Hide file tree
Showing 4 changed files with 196 additions and 74 deletions.
20 changes: 11 additions & 9 deletions src/bin/bitmaskd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,7 @@ use axum::{
use bitcoin_30::secp256k1::{ecdh::SharedSecret, PublicKey, SecretKey};
use bitmask_core::{
bitcoin::{save_mnemonic, sign_and_publish_psbt_file},
carbonado::{
handle_file,
metrics::{self, metrics_csv, metrics_json},
server_retrieve, server_store, store,
},
carbonado::{handle_file, metrics, server_retrieve, server_store, store},
constants::{
get_marketplace_nostr_key, get_marketplace_seed, get_network, get_udas_utxo, switch_network,
},
Expand Down Expand Up @@ -433,7 +429,13 @@ async fn co_store(
Path((pk, name)): Path<(String, String)>,
body: Bytes,
) -> Result<impl IntoResponse, AppError> {
let cc = CacheControl::new().with_no_cache();

let incoming_header = carbonado::file::Header::try_from(&body)?;
if incoming_header.pubkey.to_string() != pk {
return Ok((StatusCode::UNAUTHORIZED, TypedHeader(cc), "Unauthorized"));
}

let body_len = incoming_header.encoded_len - incoming_header.padding_len;
info!("POST /carbonado/{pk}/{name}, {body_len} bytes");

Expand Down Expand Up @@ -472,9 +474,9 @@ async fn co_store(
},
}

let cc = CacheControl::new().with_no_cache();
metrics::update(&filepath).await?;

Ok((StatusCode::OK, TypedHeader(cc)))
Ok((StatusCode::OK, TypedHeader(cc), "Success"))
}

async fn co_force_store(
Expand Down Expand Up @@ -722,7 +724,7 @@ async fn send_coins(
}

async fn json_metrics() -> Result<impl IntoResponse, AppError> {
let metrics_json = metrics_json().await?;
let metrics_json = metrics::json().await?;

Ok((
StatusCode::OK,
Expand All @@ -732,7 +734,7 @@ async fn json_metrics() -> Result<impl IntoResponse, AppError> {
}

/// GET handler that serves the current metrics snapshot as CSV with the
/// appropriate `content-type` header.
async fn csv_metrics() -> Result<impl IntoResponse, AppError> {
    Ok((
        StatusCode::OK,
        [("content-type", "text/csv")],
        metrics::csv().await,
    ))
}
Expand Down
6 changes: 4 additions & 2 deletions src/carbonado.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@ mod server {

let (body, _encode_info) = carbonado::file::encode(&sk, Some(&pk), input, level, meta)?;
let filepath = handle_file(&pk_hex, name, body.len()).await?;
fs::write(filepath, body).await?;
fs::write(&filepath, body).await?;
metrics::update(&filepath).await?;
Ok(())
}

Expand All @@ -74,7 +75,8 @@ mod server {

let (body, _encode_info) = carbonado::file::encode(&sk, Some(&pk), input, level, meta)?;
let filepath = handle_file(&pk_hex, name, body.len()).await?;
fs::write(filepath.clone(), body.clone()).await?;
fs::write(&filepath, body.clone()).await?;
metrics::update(&filepath).await?;
Ok((filepath, body))
}

Expand Down
2 changes: 2 additions & 0 deletions src/carbonado/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,6 @@ pub enum CarbonadoError {
WrongNostrPrivateKey,
/// Debug: {0}
Debug(String),
/// Error: {0}
AnyhowError(#[from] anyhow::Error),
}
242 changes: 179 additions & 63 deletions src/carbonado/metrics.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
#![cfg(not(target_arch = "wasm32"))]
use std::{collections::BTreeMap, path::Path, sync::Arc, time::SystemTime};

use anyhow::Result;
use std::{
collections::{BTreeMap, BTreeSet},
path::{Path, PathBuf},
sync::Arc,
time::SystemTime,
};

use anyhow::{anyhow, Result};
use chrono::{DateTime, Duration, NaiveDate, Utc};
use log::debug;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
Expand Down Expand Up @@ -41,9 +47,11 @@ const NETWORK_RGB_STOCKS: &str = "rgb_stocks";
const NETWORK_RGB_TRANSFER_FILES: &str = "rgb_transfer_files";

static METRICS_DATA: Lazy<Arc<RwLock<MetricsData>>> = Lazy::new(Default::default);
static METRICS_SET: Lazy<Arc<RwLock<BTreeSet<PathBuf>>>> = Lazy::new(Default::default);

pub async fn init(dir: &Path) -> Result<()> {
let mut metrics = METRICS_DATA.write().await;
let mut dataset = METRICS_SET.write().await;

metrics
.wallets_by_network
Expand Down Expand Up @@ -76,69 +84,71 @@ pub async fn init(dir: &Path) -> Result<()> {
let day_created = metadata.created()?;
let day = round_datetime_to_day(day_created.into());

dataset.insert(entry.path().to_path_buf());

if metadata.is_file() {
metrics.bytes += metadata.len();

*metrics.bytes_by_day.entry(day.clone()).or_insert(0) += metadata.len();

if filename == MAINNET_WALLET {
*metrics
.wallets_by_network
.get_mut(NETWORK_BITCOIN)
.unwrap_or(&mut 0) += 1;
*metrics
.bitcoin_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
}

if filename == TESTNET_WALLET {
*metrics
.wallets_by_network
.get_mut(NETWORK_TESTNET)
.unwrap_or(&mut 0) += 1;
*metrics
.testnet_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
}

if filename == SIGNET_WALLET {
*metrics
.wallets_by_network
.get_mut(NETWORK_SIGNET)
.unwrap_or(&mut 0) += 1;
*metrics
.signet_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
}

if filename == REGTEST_WALLET {
*metrics
.wallets_by_network
.get_mut(NETWORK_REGTEST)
.unwrap_or(&mut 0) += 1;
*metrics
.regtest_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
}

if filename == MAINNET_WALLET
|| filename == TESTNET_WALLET
|| filename == SIGNET_WALLET
|| filename == REGTEST_WALLET
{
total_wallets += 1;
}

if filename == RGB_STOCK {
rgb_stocks += 1;
}

if filename == RGB_TRANSFER_FILE {
rgb_transfer_files += 1;
match filename.as_str() {
MAINNET_WALLET => {
*metrics
.wallets_by_network
.get_mut(NETWORK_BITCOIN)
.unwrap_or(&mut 0) += 1;
*metrics
.bitcoin_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
total_wallets += 1;
}

TESTNET_WALLET => {
*metrics
.wallets_by_network
.get_mut(NETWORK_TESTNET)
.unwrap_or(&mut 0) += 1;
*metrics
.testnet_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
total_wallets += 1;
}

SIGNET_WALLET => {
*metrics
.wallets_by_network
.get_mut(NETWORK_SIGNET)
.unwrap_or(&mut 0) += 1;
*metrics
.signet_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
total_wallets += 1;
}

REGTEST_WALLET => {
*metrics
.wallets_by_network
.get_mut(NETWORK_REGTEST)
.unwrap_or(&mut 0) += 1;
*metrics
.regtest_wallets_by_day
.entry(day.clone())
.or_insert(0) += 1;
total_wallets += 1;
}

RGB_STOCK => {
rgb_stocks += 1;
}

RGB_TRANSFER_FILE => {
rgb_transfer_files += 1;
}

_ => {}
}
}
}
Expand Down Expand Up @@ -256,7 +266,113 @@ pub async fn init(dir: &Path) -> Result<()> {
Ok(())
}

pub async fn metrics_csv() -> String {
/// Folds a newly stored file at `path` into the in-memory metrics.
///
/// Intended to be called after every successful carbonado store so metrics
/// stay current without a full directory rescan. Paths that were already
/// counted (tracked in `METRICS_SET`) are skipped, making the call
/// idempotent per path.
///
/// # Errors
///
/// Returns an error when `path` has no filename component or its
/// filesystem metadata (including creation time) cannot be read.
pub async fn update(path: &Path) -> Result<()> {
    debug!("Updating metrics with {path:?}");

    let mut metrics = METRICS_DATA.write().await;
    let mut dataset = METRICS_SET.write().await;

    // `BTreeSet::insert` returns `false` when the value was already
    // present — one lookup instead of a `get` followed by an `insert`.
    if !dataset.insert(path.to_path_buf()) {
        debug!("Path already present");
        return Ok(());
    }

    let filename = path
        .file_name()
        .ok_or_else(|| anyhow!("no filename for path"))?
        .to_string_lossy()
        .to_string();
    let metadata = path.metadata()?;
    let day_created = metadata.created()?;
    let day = round_datetime_to_day(day_created.into());

    if metadata.is_file() {
        metrics.bytes += metadata.len();

        *metrics.bytes_by_day.entry(day.clone()).or_insert(0) += metadata.len();

        // NOTE(review): the per-network counters are only bumped when the
        // key already exists; `init()` pre-seeds `wallets_by_network`, so a
        // missing key indicates a setup error and the increment is
        // deliberately skipped. (The previous `unwrap_or(&mut 0)` form
        // silently incremented a temporary in that case — same observable
        // behavior, but it hid the drop.)
        match filename.as_str() {
            MAINNET_WALLET => {
                if let Some(count) = metrics.wallets_by_network.get_mut(NETWORK_BITCOIN) {
                    *count += 1;
                }
                *metrics
                    .bitcoin_wallets_by_day
                    .entry(day.clone())
                    .or_insert(0) += 1;
                if let Some(total) = metrics.wallets_by_network.get_mut(NETWORK_TOTAL) {
                    *total += 1;
                }
            }
            TESTNET_WALLET => {
                if let Some(count) = metrics.wallets_by_network.get_mut(NETWORK_TESTNET) {
                    *count += 1;
                }
                *metrics
                    .testnet_wallets_by_day
                    .entry(day.clone())
                    .or_insert(0) += 1;
                if let Some(total) = metrics.wallets_by_network.get_mut(NETWORK_TOTAL) {
                    *total += 1;
                }
            }
            SIGNET_WALLET => {
                if let Some(count) = metrics.wallets_by_network.get_mut(NETWORK_SIGNET) {
                    *count += 1;
                }
                *metrics
                    .signet_wallets_by_day
                    .entry(day.clone())
                    .or_insert(0) += 1;
                if let Some(total) = metrics.wallets_by_network.get_mut(NETWORK_TOTAL) {
                    *total += 1;
                }
            }
            REGTEST_WALLET => {
                if let Some(count) = metrics.wallets_by_network.get_mut(NETWORK_REGTEST) {
                    *count += 1;
                }
                *metrics
                    .regtest_wallets_by_day
                    .entry(day.clone())
                    .or_insert(0) += 1;
                if let Some(total) = metrics.wallets_by_network.get_mut(NETWORK_TOTAL) {
                    *total += 1;
                }
            }

            RGB_STOCK => {
                if let Some(count) = metrics.wallets_by_network.get_mut(NETWORK_RGB_STOCKS) {
                    *count += 1;
                }
            }

            RGB_TRANSFER_FILE => {
                if let Some(count) = metrics
                    .wallets_by_network
                    .get_mut(NETWORK_RGB_TRANSFER_FILES)
                {
                    *count += 1;
                }
            }

            // Other filenames (e.g. arbitrary user stores) only contribute
            // to the byte counters above.
            _ => {}
        }
    }

    Ok(())
}

pub async fn csv() -> String {
let mut lines = vec![vec![
"Wallet".to_owned(),
"Wallet Count".to_owned(),
Expand Down Expand Up @@ -396,7 +512,7 @@ pub async fn metrics_csv() -> String {
lines.join("\n")
}

pub async fn metrics_json() -> Result<String> {
pub async fn json() -> Result<String> {
let metrics = METRICS_DATA.read().await;

Ok(serde_json::to_string_pretty(&*metrics)?)
Expand Down

0 comments on commit 9e9b32e

Please sign in to comment.