Add grouping #24

Merged: 4 commits, Feb 5, 2024
11 changes: 7 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions routes/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2021"

[dependencies]
log = "0.4"
chrono = "0.4.33"
rocket = { version = "0.5.0", features=["json"] }
rocket_cors = "0.6.0"
serde = "1.0.193"
84 changes: 80 additions & 4 deletions routes/src/consumption.rs
@@ -14,21 +14,59 @@
// along with RegionX. If not, see <https://www.gnu.org/licenses/>.

use crate::Error;
use rocket::get;
use chrono::NaiveDateTime;
use rocket::{
form,
form::{FromFormField, ValueField},
get,
};
use shared::{consumption::get_consumption, registry::registered_para};
use types::{ParaId, Timestamp, WeightConsumption};
use std::collections::HashMap;
use types::{DispatchClassConsumption, ParaId, Timestamp, WeightConsumption};

#[derive(Clone, Debug, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(crate = "rocket::serde")]
pub enum Grouping {
BlockNumber,
Day,
Month,
Year,
}

#[rocket::async_trait]
impl<'r> FromFormField<'r> for Grouping {
fn from_value(field: ValueField<'r>) -> form::Result<'r, Self> {
match field.value {
"day" => Ok(Grouping::Day),
"month" => Ok(Grouping::Month),
"year" => Ok(Grouping::Year),
_ => Err(form::Error::validation("invalid Grouping").into()),
}
}
}

#[derive(Default, Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(crate = "rocket::serde")]
pub struct AggregatedData {
/// The aggregated ref_time consumption, per dispatch class.
pub ref_time: DispatchClassConsumption,
/// The aggregated proof size, per dispatch class.
pub proof_size: DispatchClassConsumption,
/// The number of consumption records aggregated into this entry.
pub count: usize,
}

/// Query the consumption data of a parachain.
///
/// This will return an error in case there is no data associated with the specific parachain.
#[get("/consumption/<relay>/<para_id>?<start>&<end>&<page>&<page_size>")]
#[get("/consumption/<relay>/<para_id>?<start>&<end>&<page>&<page_size>&<grouping>")]
pub fn consumption(
relay: &str,
para_id: ParaId,
start: Option<Timestamp>,
end: Option<Timestamp>,
page: Option<u32>,
page_size: Option<u32>,
grouping: Option<Grouping>,
) -> Result<String, Error> {
let para = registered_para(relay.into(), para_id).ok_or(Error::NotRegistered)?;

@@ -44,5 +82,43 @@ pub fn consumption(
.take(page_size as usize)
.collect();

serde_json::to_string(&weight_consumptions).map_err(|_| Error::InvalidData)
let grouping = grouping.unwrap_or(Grouping::BlockNumber);

let grouped: HashMap<String, AggregatedData> = group_consumption(weight_consumptions, grouping);

serde_json::to_string(&grouped).map_err(|_| Error::InvalidData)
}

pub fn group_consumption(
weight_consumptions: Vec<WeightConsumption>,
grouping: Grouping,
) -> HashMap<String, AggregatedData> {
weight_consumptions.iter().fold(HashMap::new(), |mut acc, datum| {
let key = get_aggregation_key(datum.clone(), grouping);
let entry = acc.entry(key).or_default();

entry.ref_time.normal += datum.ref_time.normal;
entry.ref_time.operational += datum.ref_time.operational;
entry.ref_time.mandatory += datum.ref_time.mandatory;

entry.proof_size.normal += datum.proof_size.normal;
entry.proof_size.operational += datum.proof_size.operational;
entry.proof_size.mandatory += datum.proof_size.mandatory;

entry.count += 1;

acc
})
}

fn get_aggregation_key(datum: WeightConsumption, grouping: Grouping) -> String {
let datetime =
NaiveDateTime::from_timestamp_opt((datum.timestamp / 1000) as i64, 0).unwrap_or_default();

match grouping {
Grouping::BlockNumber => datum.block_number.to_string(),
Grouping::Day => datetime.format("%Y-%m-%d").to_string(),
Grouping::Month => datetime.format("%Y-%m").to_string(),
Grouping::Year => datetime.format("%Y").to_string(),
}
}
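For reference, a minimal, self-contained sketch of the fold-into-HashMap pattern that `group_consumption` uses: every record mapping to the same key is summed into a single accumulator entry. The simplified `Aggregate` type here is hypothetical, not the PR's `AggregatedData`, and the sample values are invented.

use std::collections::HashMap;

#[derive(Default)]
struct Aggregate {
    ref_time: f32,
    count: usize,
}

fn main() {
    let records = vec![("2024-02-01", 0.25_f32), ("2024-02-01", 0.10), ("2024-02-02", 0.40)];
    let grouped: HashMap<&str, Aggregate> =
        records.iter().fold(HashMap::new(), |mut acc, (key, ref_time)| {
            // `or_default` inserts a zeroed Aggregate the first time a key is seen;
            // this is why the PR adds `Default` derives to the aggregation types.
            let entry = acc.entry(*key).or_default();
            entry.ref_time += ref_time;
            entry.count += 1;
            acc
        });
    assert_eq!(grouped["2024-02-01"].count, 2);
    assert_eq!(grouped["2024-02-02"].count, 1);
}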
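A companion sketch of the keys `get_aggregation_key` produces for each grouping, assuming (as the division by 1000 implies) that `timestamp` is a millisecond UNIX timestamp; the concrete value below is invented for illustration.

use chrono::NaiveDateTime;

fn main() {
    // 2024-02-01 00:00:00 UTC, expressed in milliseconds.
    let timestamp_ms: u64 = 1_706_745_600_000;
    let dt = NaiveDateTime::from_timestamp_opt((timestamp_ms / 1000) as i64, 0)
        .unwrap_or_default();
    assert_eq!(dt.format("%Y-%m-%d").to_string(), "2024-02-01"); // Grouping::Day
    assert_eq!(dt.format("%Y-%m").to_string(), "2024-02"); // Grouping::Month
    assert_eq!(dt.format("%Y").to_string(), "2024"); // Grouping::Year
}

Grouping::BlockNumber skips the date math entirely and uses the block number itself as the key.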
129 changes: 100 additions & 29 deletions routes/tests/consumption.rs
@@ -18,8 +18,12 @@ use rocket::{
local::blocking::{Client, LocalResponse},
routes,
};
use routes::{consumption::consumption, Error};
use routes::{
consumption::{consumption, group_consumption, AggregatedData, Grouping},
Error,
};
use shared::{chaindata::get_para, registry::update_registry, reset_mock_environment};
use std::collections::HashMap;
use types::{RelayChain::*, WeightConsumption};

mod mock;
@@ -36,7 +40,11 @@ fn getting_all_consumption_data_works() {
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
assert_eq!(consumption_data, mock_consumption().get(&para).unwrap().clone());
let expected_consumption = group_consumption(
mock_consumption().get(&para).unwrap().clone(),
Grouping::BlockNumber,
);
assert_eq!(consumption_data, expected_consumption);
});
}

@@ -88,27 +96,31 @@ fn pagination_works() {
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_data =
group_consumption(vec![mock_data.first().unwrap().clone()], Grouping::BlockNumber);
// Should only contain the first consumption data.
assert_eq!(consumption_data, vec![mock_data.first().unwrap().clone()]);
assert_eq!(consumption_data, expected_data);

// CASE 2: Specifying the page without page size will still show all the data.
let response = client.get("/consumption/polkadot/2000?page=0").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_data = group_consumption(mock_data.clone(), Grouping::BlockNumber);
// Should contain all the consumption data since no page size was specified.
assert_eq!(consumption_data, mock_data);
assert_eq!(consumption_data, expected_data);

// CASE 3: Specifying the page and page size works.
let response = client.get("/consumption/polkadot/2000?page=1&page_size=2").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
// Should skip the first page and take the second one.
assert_eq!(
consumption_data,
mock_data.into_iter().skip(2).take(2).collect::<Vec<WeightConsumption>>()
let expected_data = group_consumption(
mock_data.into_iter().skip(2).take(2).collect::<Vec<WeightConsumption>>(),
Grouping::BlockNumber,
);
// Should skip the first page and take the second one.
assert_eq!(consumption_data, expected_data);

// CASE 4: An out-of-bound page and page size will return an empty vector.
let response = client.get("/consumption/polkadot/2000?page=69&page_size=42").dispatch();
@@ -134,11 +146,14 @@ fn timestamp_based_filtering_works() {
assert_eq!(response.status(), Status::Ok);

let response_data = parse_ok_response(response);
let expected_data = mock_data
.clone()
.into_iter()
.filter(|c| c.timestamp >= start_timestamp)
.collect::<Vec<WeightConsumption>>();
let expected_data = group_consumption(
mock_data
.clone()
.into_iter()
.filter(|c| c.timestamp >= start_timestamp)
.collect::<Vec<WeightConsumption>>(),
Grouping::BlockNumber,
);

// Should only contain the consumption where the timestamp is greater than or equal to 6.
assert_eq!(response_data, expected_data);
@@ -149,11 +164,14 @@
assert_eq!(response.status(), Status::Ok);

let response_data = parse_ok_response(response);
let expected_data = mock_data
.clone()
.into_iter()
.filter(|c| c.timestamp <= end_timestamp)
.collect::<Vec<WeightConsumption>>();
let expected_data = group_consumption(
mock_data
.clone()
.into_iter()
.filter(|c| c.timestamp <= end_timestamp)
.collect::<Vec<WeightConsumption>>(),
Grouping::BlockNumber,
);

// Should only contain the consumption where the timestamp is less than or equal to 12.
assert_eq!(response_data, expected_data);
@@ -165,10 +183,13 @@
assert_eq!(response.status(), Status::Ok);

let response_data = parse_ok_response(response);
let expected_data = mock_data
.into_iter()
.filter(|c| c.timestamp >= start_timestamp && c.timestamp <= end_timestamp)
.collect::<Vec<WeightConsumption>>();
let expected_data = group_consumption(
mock_data
.into_iter()
.filter(|c| c.timestamp >= start_timestamp && c.timestamp <= end_timestamp)
.collect::<Vec<WeightConsumption>>(),
Grouping::BlockNumber,
);

assert_eq!(response_data, expected_data);
// Should only contain one consumption data since the `start` and `end` are set to the same
@@ -199,19 +220,69 @@ fn pagination_and_timestamp_filtering_works() {
assert_eq!(response.status(), Status::Ok);

let response_data = parse_ok_response(response);
let expected_data = mock_data
.into_iter()
.filter(|c| c.timestamp >= start_timestamp)
.skip(page_size * page_number)
.take(page_size)
.collect::<Vec<WeightConsumption>>();
let expected_data = group_consumption(
mock_data
.into_iter()
.filter(|c| c.timestamp >= start_timestamp)
.skip(page_size * page_number)
.take(page_size)
.collect::<Vec<WeightConsumption>>(),
Grouping::BlockNumber,
);

// Check if the data is filtered by timestamp and then paginated
assert_eq!(response_data, expected_data);
});
}

fn parse_ok_response<'a>(response: LocalResponse<'a>) -> Vec<WeightConsumption> {
#[test]
fn grouping_works() {
MockEnvironment::new().execute_with(|| {
let rocket = rocket::build().mount("/", routes![consumption]);
let client = Client::tracked(rocket).expect("valid rocket instance");

// Default is grouping by block number:
let para = get_para(Polkadot, 2000).unwrap();
let response = client.get("/consumption/polkadot/2000").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_consumption = group_consumption(
mock_consumption().get(&para).unwrap().clone(),
Grouping::BlockNumber,
);
assert_eq!(consumption_data, expected_consumption);

// Grouping by day:
let response = client.get("/consumption/polkadot/2000?grouping=day").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_consumption =
group_consumption(mock_consumption().get(&para).unwrap().clone(), Grouping::Day);
assert_eq!(consumption_data, expected_consumption);

// Grouping by month:
let response = client.get("/consumption/polkadot/2000?grouping=month").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_consumption =
group_consumption(mock_consumption().get(&para).unwrap().clone(), Grouping::Month);
assert_eq!(consumption_data, expected_consumption);

// Grouping by year:
let response = client.get("/consumption/polkadot/2000?grouping=year").dispatch();
assert_eq!(response.status(), Status::Ok);

let consumption_data = parse_ok_response(response);
let expected_consumption =
group_consumption(mock_consumption().get(&para).unwrap().clone(), Grouping::Year);
assert_eq!(consumption_data, expected_consumption);
});
}

fn parse_ok_response<'a>(response: LocalResponse<'a>) -> HashMap<String, AggregatedData> {
let body = response.into_string().unwrap();
serde_json::from_str(&body).expect("can't parse value")
}
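For context, a hedged sketch of the response shape the grouped endpoint now returns (and that the new `parse_ok_response` deserializes): a JSON object keyed by aggregation key. The field names follow the structs in this PR; the numbers are invented.

fn main() {
    // A made-up single-entry response, as it might look for `?grouping=month`.
    let sample = r#"{
        "2024-02": {
            "ref_time": { "normal": 0.35, "operational": 0.02, "mandatory": 0.01 },
            "proof_size": { "normal": 0.2, "operational": 0.01, "mandatory": 0.01 },
            "count": 2
        }
    }"#;
    let parsed: serde_json::Value = serde_json::from_str(sample).expect("valid JSON");
    assert_eq!(parsed["2024-02"]["count"], 2);
}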
2 changes: 1 addition & 1 deletion types/src/lib.rs
@@ -94,7 +94,7 @@ pub struct WeightConsumption {
pub proof_size: DispatchClassConsumption,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[derive(Default, Debug, Serialize, PartialEq, Deserialize, Clone)]
pub struct DispatchClassConsumption {
/// The percentage of the weight used by user submitted extrinsics compared to the
/// maximum potential.