From ebc52ad5ba66c9b842e4ca203a58cc6cbc40f92e Mon Sep 17 00:00:00 2001 From: Szegoo Date: Thu, 8 Feb 2024 08:48:58 +0100 Subject: [PATCH 1/2] New grouping options --- routes/src/consumption.rs | 6 ++++++ routes/tests/consumption.rs | 18 ++++++++++++++++++ routes/tests/extend_subscription.rs | 1 - scripts/reset_env.sh | 15 +++++++++++++++ 4 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 scripts/reset_env.sh diff --git a/routes/src/consumption.rs b/routes/src/consumption.rs index 579868a..8542b35 100644 --- a/routes/src/consumption.rs +++ b/routes/src/consumption.rs @@ -28,6 +28,8 @@ use types::{DispatchClassConsumption, ParaId, Timestamp, WeightConsumption}; #[serde(crate = "rocket::serde")] pub enum Grouping { BlockNumber, + Minute, + Hour, Day, Month, Year, @@ -37,6 +39,8 @@ pub enum Grouping { impl<'r> FromFormField<'r> for Grouping { fn from_value(field: ValueField<'r>) -> form::Result<'r, Self> { match field.value { + "minute" => Ok(Grouping::Minute), + "hour" => Ok(Grouping::Hour), "day" => Ok(Grouping::Day), "month" => Ok(Grouping::Month), "year" => Ok(Grouping::Year), @@ -117,6 +121,8 @@ fn get_aggregation_key(datum: WeightConsumption, grouping: Grouping) -> String { match grouping { Grouping::BlockNumber => datum.block_number.to_string(), + Grouping::Minute => datetime.format("%Y-%m-%dT%H:%M").to_string(), + Grouping::Hour => datetime.format("%Y-%m-%dT%H:00").to_string(), Grouping::Day => datetime.format("%Y-%m-%d").to_string(), Grouping::Month => datetime.format("%Y-%m").to_string(), Grouping::Year => datetime.format("%Y").to_string(), diff --git a/routes/tests/consumption.rs b/routes/tests/consumption.rs index 40e7461..9485520 100644 --- a/routes/tests/consumption.rs +++ b/routes/tests/consumption.rs @@ -253,6 +253,24 @@ fn grouping_works() { ); assert_eq!(consumption_data, expected_consumption); + // Grouping by minute: + let response = client.get("/consumption/polkadot/2000?grouping=minute").dispatch(); + assert_eq!(response.status(), 
Status::Ok); + + let consumption_data = parse_ok_response(response); + let expected_consumption = + group_consumption(mock_consumption().get(&para).unwrap().clone(), Grouping::Minute); + assert_eq!(consumption_data, expected_consumption); + + // Grouping by hour: + let response = client.get("/consumption/polkadot/2000?grouping=hour").dispatch(); + assert_eq!(response.status(), Status::Ok); + + let consumption_data = parse_ok_response(response); + let expected_consumption = + group_consumption(mock_consumption().get(&para).unwrap().clone(), Grouping::Hour); + assert_eq!(consumption_data, expected_consumption); + // Grouping by day: let response = client.get("/consumption/polkadot/2000?grouping=day").dispatch(); assert_eq!(response.status(), Status::Ok); diff --git a/routes/tests/extend_subscription.rs b/routes/tests/extend_subscription.rs index 9aab725..a3b75ae 100644 --- a/routes/tests/extend_subscription.rs +++ b/routes/tests/extend_subscription.rs @@ -25,7 +25,6 @@ use routes::{ }; use shared::{ chaindata::get_para, - current_timestamp, payment::PaymentError, registry::{registered_para, update_registry}, }; diff --git a/scripts/reset_env.sh b/scripts/reset_env.sh new file mode 100644 index 0000000..92ecf15 --- /dev/null +++ b/scripts/reset_env.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +TRACKER="./target/release/tracker" +WATCHDOG="scripts/watchdog.sh" + +PIDS=$(pgrep -f "$TRACKER|$WATCHDOG") + +if [ -z "$PIDS" ]; then + echo "Process not found." 
+else + # Kill each process + for PID in $PIDS; do + kill -9 $PID + done +fi From 903ba5efa19ca21383784440f45a19345ab3434a Mon Sep 17 00:00:00 2001 From: Szegoo Date: Fri, 9 Feb 2024 11:19:57 +0100 Subject: [PATCH 2/2] Sorted consumption --- routes/src/consumption.rs | 22 ++++++++++++++++------ routes/tests/consumption.rs | 3 +-- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/routes/src/consumption.rs b/routes/src/consumption.rs index 8542b35..c784359 100644 --- a/routes/src/consumption.rs +++ b/routes/src/consumption.rs @@ -21,7 +21,7 @@ use rocket::{ get, }; use shared::{consumption::get_consumption, registry::registered_para}; -use std::collections::HashMap; +use std::collections::BTreeMap; use types::{DispatchClassConsumption, ParaId, Timestamp, WeightConsumption}; #[derive(Clone, Debug, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)] @@ -52,6 +52,7 @@ impl<'r> FromFormField<'r> for Grouping { #[derive(Default, Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)] #[serde(crate = "rocket::serde")] pub struct AggregatedData { + pub group: String, /// The aggregated ref_time consumption over all the dispatch classes. pub ref_time: DispatchClassConsumption, /// The aggregated proof size over all dispatch classes. 
@@ -88,7 +89,7 @@ pub fn consumption( let grouping = grouping.unwrap_or(Grouping::BlockNumber); - let grouped: HashMap<String, AggregatedData> = group_consumption(weight_consumptions, grouping); + let grouped = group_consumption(weight_consumptions, grouping); serde_json::to_string(&grouped).map_err(|_| Error::InvalidData) } @@ -96,10 +97,10 @@ pub fn consumption( pub fn group_consumption( weight_consumptions: Vec<WeightConsumption>, grouping: Grouping, -) -> HashMap<String, AggregatedData> { - weight_consumptions.iter().fold(HashMap::new(), |mut acc, datum| { +) -> Vec<AggregatedData> { + let grouped = weight_consumptions.iter().fold(BTreeMap::new(), |mut acc, datum| { let key = get_aggregation_key(datum.clone(), grouping); - let entry = acc.entry(key).or_default(); + let entry: &mut AggregatedData = acc.entry(key).or_default(); entry.ref_time.normal += datum.ref_time.normal; entry.ref_time.operational += datum.ref_time.operational; @@ -112,7 +113,16 @@ pub fn group_consumption( entry.count += 1; acc - }) + }); + + grouped + .into_iter() + .map(|(key, entry)| { + let mut entry = entry; + entry.group = key; + entry + }) + .collect() } fn get_aggregation_key(datum: WeightConsumption, grouping: Grouping) -> String { diff --git a/routes/tests/consumption.rs b/routes/tests/consumption.rs index 9485520..618777b 100644 --- a/routes/tests/consumption.rs +++ b/routes/tests/consumption.rs @@ -23,7 +23,6 @@ use routes::{ Error, }; use shared::{chaindata::get_para, registry::update_registry, reset_mock_environment}; -use std::collections::HashMap; use types::{RelayChain::*, WeightConsumption}; mod mock; @@ -300,7 +299,7 @@ fn grouping_works() { }); } -fn parse_ok_response<'a>(response: LocalResponse<'a>) -> HashMap<String, AggregatedData> { +fn parse_ok_response<'a>(response: LocalResponse<'a>) -> Vec<AggregatedData> { let body = response.into_string().unwrap(); serde_json::from_str(&body).expect("can't parse value") }