diff --git a/.github/workflows/dapps.yml b/.github/workflows/dapps.yml
index ccebf0076c..58d243e6b8 100644
--- a/.github/workflows/dapps.yml
+++ b/.github/workflows/dapps.yml
@@ -118,6 +118,15 @@ jobs:
proxy_ip: ${{ needs.prepare.outputs.proxy_ip }}
solana_ip: ${{ needs.prepare.outputs.solana_ip }}
external_call: false
+ repo: 'tests'
+ event_name: ${{ github.event_name }}
+ ref: ${{ github.ref }}
+ ref_name: ${{ github.ref_name }}
+ head_ref: ${{ github.head_ref }}
+ base_ref: ${{ github.base_ref }}
+ last_commit_message: ${{ github.event.head_commit.message }}
+ docker_image_tag: ''
+ history_depth_limit: '10'
notify:
runs-on: ubuntu-20.04
diff --git a/.github/workflows/dapps_reusable.yml b/.github/workflows/dapps_reusable.yml
index 24e8308d87..e60bebdc82 100644
--- a/.github/workflows/dapps_reusable.yml
+++ b/.github/workflows/dapps_reusable.yml
@@ -44,6 +44,42 @@ on:
type: boolean
required: false
default: true
+ repo:
+ type: string
+ description: "Repository type: tests | proxy | evm"
+ required: false
+ event_name:
+ type: string
+ description: "Event name"
+ required: false
+ ref:
+ type: string
+ description: "Reference (branch or tag)"
+ required: false
+ ref_name:
+ type: string
+ description: "Reference name"
+ required: false
+ head_ref:
+ type: string
+ description: "Head reference for pull requests"
+ required: false
+ base_ref:
+ type: string
+ description: "Base reference for pull requests"
+ required: false
+ last_commit_message:
+ type: string
+ description: "Message of the last commit"
+ required: false
+ docker_image_tag:
+ type: string
+ description: "Docker image tag"
+ required: false
+ history_depth_limit:
+ type: string
+ description: "Limit for commit history depth"
+ required: false
env:
NETWORK: ${{ inputs.network }}
@@ -612,7 +648,46 @@ jobs:
name: pancake-report
path: reports/
- name: "Swap report"
+ env:
+ TEST_RESULTS_DB_HOST: ${{ secrets.TEST_RESULTS_DB_HOST }}
+ TEST_RESULTS_DB_PORT: ${{ secrets.TEST_RESULTS_DB_PORT }}
+ TEST_RESULTS_DB_NAME: ${{ secrets.TEST_RESULTS_DB_NAME }}
+ TEST_RESULTS_DB_USER: ${{ secrets.TEST_RESULTS_DB_USER }}
+ TEST_RESULTS_DB_PASSWORD: ${{ secrets.TEST_RESULTS_DB_PASSWORD }}
run: |
python3 ./clickfile.py dapps report --directory=reports \
- --pr_url_for_report=${{ inputs.pr_url_for_report }} \
- --token=${{secrets.GHTOKEN}}
+            --repo="${{ inputs.repo }}" \
+            --event_name="${{ inputs.event_name }}" \
+            --ref="${{ inputs.ref }}" \
+            --ref_name="${{ inputs.ref_name }}" \
+            --head_ref="${{ inputs.head_ref }}" \
+            --base_ref="${{ inputs.base_ref }}" \
+            --last_commit_message="${{ inputs.last_commit_message }}" \
+            --docker_image_tag="${{ inputs.docker_image_tag }}" \
+            --history_depth_limit="${{ inputs.history_depth_limit }}"
+ - name: "Upload cost_reports.pdf"
+ if: ${{ hashFiles('cost_reports.pdf') != '' }}
+ uses: actions/upload-artifact@v4
+ with:
+ name: cost_reports
+ path: cost_reports.pdf
+ - name: Get the download url for cost_reports and save to cost_reports.md
+ if: ${{ hashFiles('cost_reports.pdf') != '' }}
+ id: get_artifact_url
+ env:
+ GITHUB_TOKEN: ${{ secrets.GHTOKEN }}
+ run: |
+ ARTIFACT_NAME="cost_reports"
+          ARTIFACT_ID=$(gh api -X GET /repos/${{ github.repository }}/actions/artifacts | jq -r "[.artifacts[] | select(.name==\"${ARTIFACT_NAME}\")] | max_by(.id) | .id")
+          ARTIFACT_URL=$(gh api -X GET /repos/${{ github.repository }}/actions/artifacts/${ARTIFACT_ID} | jq -r ".archive_download_url")
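+          # note: downloading from archive_download_url requires an authenticated request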
+          echo "🔗 [Cost report](${ARTIFACT_URL})" > cost_reports.md
+      - name: "Add cost reports to summary"
+        if: ${{ hashFiles('cost_reports.md') != '' }}
+        run: |
+          cat cost_reports.md >> $GITHUB_STEP_SUMMARY
+ - name: "Add PR comment"
+ if: ${{ inputs.pr_url_for_report != '' }}
+ run: |
+ python3 ./clickfile.py dapps add_pr_comment \
+ --pr_url_for_report="${{ inputs.pr_url_for_report }}" \
+ --token="${{ secrets.GHTOKEN }}" \
+ --md_file cost_reports.md
diff --git a/clickfile.py b/clickfile.py
index 00ce3ad57c..a9c77753f9 100755
--- a/clickfile.py
+++ b/clickfile.py
@@ -18,9 +18,10 @@
import pytest
+from deploy.test_results_db.test_results_handler import TestResultsHandler
from utils.error_log import error_log
from utils.slack_notification import SlackNotification
-from utils.types import TestGroup
+from utils.types import TestGroup, RepoType
try:
import click
@@ -108,6 +109,7 @@ def red(s):
def catch_traceback(func: tp.Callable) -> tp.Callable:
"""Catch traceback to file"""
+
def add_error_log_comment(func_name, exc: BaseException):
err_msg = ERR_MESSAGES.get(func_name) or f"{exc.__class__.__name__}({exc})"
error_log.add_comment(text=f"{func_name}: {err_msg}")
@@ -522,13 +524,14 @@ def update_contracts(branch):
@cli.command(help="Run any type of tests")
-@click.option("-n", "--network", default=EnvName.NIGHT_STAND.value, type=click.Choice(EnvName),
- help="In which stand run tests")
+@click.option(
+ "-n", "--network", default=EnvName.NIGHT_STAND.value, type=click.Choice(EnvName), help="In which stand run tests"
+)
@click.option("-j", "--jobs", default=8, help="Number of parallel jobs (for openzeppelin)")
@click.option("-p", "--numprocesses", help="Number of parallel jobs for basic tests")
@click.option("-a", "--amount", default=20000, help="Requested amount from faucet")
@click.option("-u", "--users", default=8, help="Accounts numbers used in OZ tests")
-@click.option("-c", "--case", default='', type=str, help="Specific test case name pattern to run")
+@click.option("-c", "--case", default="", type=str, help="Specific test case name pattern to run")
@click.option("--marker", help="Run tests by mark")
@click.option(
"--ui-item",
@@ -537,10 +540,7 @@ def update_contracts(branch):
help="Which UI test run",
)
@click.option(
- "--keep-error-log",
- is_flag=True,
- default=False,
- help=f"Don't clear {error_log.file_path.name} before run"
+ "--keep-error-log", is_flag=True, default=False, help=f"Don't clear {error_log.file_path.name} before run"
)
@click.argument(
"name",
@@ -607,10 +607,10 @@ def run(
if name == "tracer":
assert wait_for_tracer_service(network)
- if case != '':
+ if case != "":
command += " -vk {}".format(case)
if marker:
- command += f' -m {marker}'
+ command += f" -m {marker}"
command += f" -s --network={network} --make-report --test-group {name}"
if keep_error_log:
@@ -625,8 +625,8 @@ def run(
@cli.command(
help="OZ actions:\n"
- "report - summarize openzeppelin tests results\n"
- "analyze - analyze openzeppelin tests results"
+ "report - summarize openzeppelin tests results\n"
+ "analyze - analyze openzeppelin tests results"
)
@click.argument(
"name",
@@ -719,7 +719,7 @@ def analyze_openzeppelin_results():
"--run-time",
type=int,
help="Stop after the specified amount of time, e.g. (300s, 20m, 3h, 1h30m, etc.). "
- "Only used together without Locust Web UI. [default: always run]",
+    "Only used when running without the Locust Web UI. [default: always run]",
)
locust_tags = click.option(
@@ -903,8 +903,9 @@ def generate_allure_report():
@cli.command(help="Send notification to slack")
@click.option("-u", "--url", help="slack app endpoint url.")
@click.option("-b", "--build_url", help="github action test build url.")
-@click.option("-n", "--network", type=click.Choice(EnvName), default=EnvName.NIGHT_STAND.value,
- help="In which stand run tests")
+@click.option(
+ "-n", "--network", type=click.Choice(EnvName), default=EnvName.NIGHT_STAND.value, help="In which stand run tests"
+)
@click.option("--test-group", help="Name of the failed test group")
def send_notification(url, build_url, network, test_group: str):
slack_notification = SlackNotification()
@@ -954,12 +955,7 @@ def send_notification(url, build_url, network, test_group: str):
@click.option("-n", "--network", default="night-stand", type=str, help="In which stand run tests")
def get_operator_balances(network: str):
net = network_manager.get_network_object(network)
- operator = Operator(
- net["proxy_url"],
- net["solana_url"],
- net["spl_neon_mint"],
- evm_loader=net["evm_loader"]
- )
+ operator = Operator(net["proxy_url"], net["solana_url"], net["spl_neon_mint"], evm_loader=net["evm_loader"])
neon_balance = operator.get_token_balance()
sol_balance = operator.get_solana_balance()
print(
@@ -1058,16 +1054,203 @@ def dapps():
@dapps.command("report", help="Print dapps report (from .json files)")
@click.option("-d", "--directory", default="reports", help="Directory with reports")
+@click.option("--repo", type=click.Choice(tp.get_args(RepoType)), required=True)
+@click.option("--event_name", required=True)
+@click.option("--ref", required=True)
+@click.option("--ref_name", required=True)
+@click.option("--head_ref", required=True)
+@click.option("--base_ref", required=True)
+@click.option("--last_commit_message", required=True)
+@click.option("--docker_image_tag", required=True)
+@click.option("--history_depth_limit", default=10, type=int, help="How many runs to include in the statistical analysis")
+def make_dapps_report(
+ directory: str,
+ repo: RepoType,
+ event_name: str,
+ ref: str,
+ ref_name: str,
+ head_ref: str,
+ base_ref: str,
+ last_commit_message: str,
+ docker_image_tag: str,
+ history_depth_limit: int,
+):
+ gh_client = GithubClient(
+ token="",
+ repo=repo,
+ event_name=event_name,
+ ref=ref,
+ ref_name=ref_name,
+ head_ref=head_ref,
+ base_ref=base_ref,
+ last_commit_message=last_commit_message,
+ )
+ click.echo(yellow(f"GithubClient: {gh_client.__dict__}"))
+
+ do_save = False
+ do_compare = False
+ do_delete = False
+
+ save_branch = None
+ current_branch = None
+ previous_branch = None
+ tag = None
+
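+    # Routing summary for proxy/evm runs (see the branching below):
+    #   merge to develop      -> save results under the source branch
+    #   tag push              -> save under the base branch, compare against the previous tag
+    #   PR to develop         -> drop stale results for the source branch, save, compare against develop
+    #   PR to version branch  -> save, compare against the version branch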
+ if repo != "tests":
+ click.echo(yellow(f"GithubEvent: {gh_client.event}"))
+
+ # merge to develop
+ if gh_client.event == "merge_request":
+ if gh_client.is_base_branch(base_ref):
+ do_save = True
+ save_branch = head_ref
+ click.echo(green("This is a merge to develop"))
+
+ # creating a tag
+ elif gh_client.event == "push_tag":
+ do_save = do_compare = True
+ save_branch = current_branch = previous_branch = gh_client.base_branch
+ tag = ref_name
+ click.echo(green("This is a push with tag"))
+
+ # PR
+ elif gh_client.event == "pull_request":
+ is_base = gh_client.is_base_branch(base_ref)
+ is_version = gh_client.is_version_branch(base_ref)
+
+ # to develop
+ if is_base:
+ do_save = do_compare = do_delete = True
+ save_branch = head_ref
+ current_branch = head_ref
+ previous_branch = base_ref
+ click.echo(green("This is a pull request to develop"))
+
+ # to version branch
+ elif is_version:
+ do_save = do_compare = True
+ save_branch = head_ref
+ current_branch = head_ref
+ previous_branch = base_ref
+ click.echo(green("This is a pull request to a version branch"))
+
+ report_data = dapps_cli.prepare_report_data(directory)
+ test_results_handler = TestResultsHandler()
+
+ if do_delete:
+ test_results_handler.delete_report_and_data(repo=repo, branch=save_branch, tag=None)
+
+ if do_save:
+ proxy_url = network_manager.get_network_param(os.environ.get("NETWORK"), "proxy_url")
+ web3_client = NeonChainWeb3Client(proxy_url)
+ token_usd_gas_price = web3_client.get_token_usd_gas_price()
+ test_results_handler.save_to_db(
+ report_data=report_data,
+ repo=repo,
+ branch=save_branch,
+ github_tag=gh_client.tag_name,
+ docker_image_tag=docker_image_tag or None,
+ token_usd_gas_price=token_usd_gas_price,
+ )
+
+ if do_compare:
+ compare_and_save_dapp_results(
+ repo=repo,
+ current_branch=current_branch,
+ previous_branch=previous_branch,
+ tag=tag,
+ docker_image_tag=docker_image_tag,
+ history_depth_limit=history_depth_limit,
+ )
+ else:
+ proxy_url = network_manager.get_network_param(os.environ.get("NETWORK"), "proxy_url")
+ web3_client = NeonChainWeb3Client(proxy_url)
+ token_usd_gas_price = web3_client.get_token_usd_gas_price()
+
+ # Add 'fee_in_usd' column after 'fee_in_eth'
+ report_data.insert(
+ report_data.columns.get_loc("fee_in_eth") + 1, "fee_in_usd", report_data["fee_in_eth"] * token_usd_gas_price
+ )
+
+ # Add 'used_%_of_EG' column after 'gas_used'
+ report_data.insert(
+ report_data.columns.get_loc("gas_used") + 1,
+ "used_%_of_EG",
+ (report_data["gas_used"] / report_data["gas_estimated"]) * 100,
+ )
+ report_data["used_%_of_EG"] = report_data["used_%_of_EG"].round(2)
+
+ # Dump report_data DataFrame to markdown, grouped by the dApp
+ report_as_table_markdown = dapps_cli.format_report_as_table_markdown(df=report_data)
+ Path("cost_reports.md").write_text(report_as_table_markdown)
+
+
+@dapps.command("compare_results", help="Compare dApp results")
+@click.option("--repo", type=click.Choice(tp.get_args(RepoType)), required=True)
+@click.option("--current_branch", help="Head branch", required=True)
+@click.option("--previous_branch", help="The branch to compare against", required=True)
+@click.option("--tag", help="Github tag")
+@click.option("--docker_image_tag", required=True)
+@click.option("--history_depth_limit", type=int, default=10, help="How many runs to include in the statistical analysis")
+def compare_and_save_results(
+ repo: RepoType,
+ current_branch: str,
+ previous_branch: str,
+ tag: tp.Optional[str],
+ docker_image_tag: str,
+ history_depth_limit: int,
+):
+ compare_and_save_dapp_results(
+ repo=repo,
+ current_branch=current_branch,
+ previous_branch=previous_branch,
+ tag=tag,
+ docker_image_tag=docker_image_tag,
+ history_depth_limit=history_depth_limit,
+ )
+
+
+def compare_and_save_dapp_results(
+ repo: RepoType,
+ current_branch: str,
+ previous_branch: str,
+ tag: tp.Optional[str],
+ docker_image_tag: str,
+ history_depth_limit: int,
+):
+ test_results_handler = TestResultsHandler()
+
+ # fetch statistical data
+ historical_data = test_results_handler.get_historical_data(
+ depth=history_depth_limit,
+ repo=repo,
+ last_branch=current_branch,
+ previous_branch=previous_branch,
+ tag=tag,
+ )
+
+ # generate plots and save to pdf
+ tag_ = f", tag {tag}" if tag else ""
+ test_results_handler.generate_and_save_plots_pdf(
+ historical_data=historical_data,
+        title_end=f"{repo}, branch {current_branch}{tag_}, Docker image tag {docker_image_tag}",
+ )
+
+
+@dapps.command("add_pr_comment", help="Add PR comment with dApp cost reports")
@click.option("--pr_url_for_report", default="", help="Url to send the report as comment for PR")
@click.option("--token", default="", help="github token")
-def make_dapps_report(directory, pr_url_for_report, token):
- report_data = dapps_cli.prepare_report_data(directory)
- dapps_cli.print_report(report_data)
- if pr_url_for_report:
- gh_client = GithubClient(token)
- gh_client.delete_last_comment(pr_url_for_report)
- format_data = dapps_cli.format_report_for_github_comment(report_data)
- gh_client.add_comment_to_pr(pr_url_for_report, format_data)
+@click.option("--md_file", required=True, help="File with markdown for the comment")
+def add_pr_comment(pr_url_for_report: str, token: str, md_file: str):
+ gh_client = GithubClient(token=token)
+ gh_client.delete_last_comment(pr_url_for_report)
+
+ with open(md_file) as f:
+ markdown = f.read()
+
+ gh_client.add_comment_to_pr(pr_url_for_report, markdown)
if __name__ == "__main__":
diff --git a/deploy/cli/dapps.py b/deploy/cli/dapps.py
index 7559bff6a7..d7a6e67369 100644
--- a/deploy/cli/dapps.py
+++ b/deploy/cli/dapps.py
@@ -3,16 +3,16 @@
import json
import typing as tp
import pathlib
+from collections import Counter
import tabulate
+import pandas as pd
from deploy.cli.infrastructure import get_solana_accounts_in_tx
from deploy.cli.network_manager import NetworkManager
-
from utils.web3client import NeonChainWeb3Client
-REPORT_HEADERS = ["Action", "Fee", "Cost in $", "Accounts", "TRx", "Estimated Gas", "Used Gas", "Used % of EG"]
NETWORK_MANAGER = NetworkManager()
@@ -25,15 +25,15 @@ def set_github_env(envs: tp.Dict, upper=True) -> None:
env_file.write(f"\n{key.upper() if upper else key}={str(value)}")
-def prepare_report_data(directory):
+def prepare_report_data(directory: str) -> pd.DataFrame:
proxy_url = NETWORK_MANAGER.get_network_param(os.environ.get("NETWORK"), "proxy_url")
web3_client = NeonChainWeb3Client(proxy_url)
- out = {}
+
reports = {}
for path in glob.glob(str(pathlib.Path(directory) / "*-report.json")):
with open(path, "r") as f:
rep = json.load(f)
- if type(rep) is list:
+ if isinstance(rep, list):
for r in rep:
if "actions" in r:
reports[r["name"]] = r["actions"]
@@ -41,46 +41,54 @@ def prepare_report_data(directory):
if "actions" in rep:
reports[rep["name"]] = rep["actions"]
- for app in reports:
- out[app] = []
- for action in reports[app]:
+ data = []
+
+ for app, actions in reports.items():
+        counts = Counter(action["name"].lower().strip() for action in actions)
+        duplicate_actions = {name for name, count in counts.items() if count > 1}
+        seen: Counter = Counter()
+
+        for action in actions:
+            # Ensure action names are unique by appending a per-name counter where needed
+            base_action_name = action["name"].lower().strip()
+            if base_action_name in duplicate_actions:
+                seen[base_action_name] += 1
+                unique_action_name = f"{base_action_name} {seen[base_action_name]}"
+            else:
+                unique_action_name = base_action_name
+
accounts, trx = get_solana_accounts_in_tx(action["tx"])
tx = web3_client.get_transaction_by_hash(action["tx"])
estimated_gas = int(tx.gas) if tx and tx.gas else None
used_gas = int(action["usedGas"])
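+            # gasPrice is in wei, so dividing the fee by 10**18 converts it to whole native tokens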
- row = [action["name"]]
fee = used_gas * int(action["gasPrice"]) / 1000000000000000000
- used_gas_percentage = round(used_gas * 100 / estimated_gas, 2) if estimated_gas else None
- row.append(fee)
- row.append(fee * web3_client.get_token_usd_gas_price())
- row.append(accounts)
- row.append(trx)
- row.append(estimated_gas)
- row.append(used_gas)
- row.append(used_gas_percentage)
- out[app].append(row)
- return out
-
-
-def print_report(data):
- report_content = ""
- for app in data:
- report_content += f'Cost report for "{app.title()}" dApp\n'
- report_content += "----------------------------------------\n"
- report_content += tabulate.tabulate(data[app], REPORT_HEADERS, tablefmt="simple_grid") + "\n"
- print(report_content)
- return report_content
+ data.append(
+ {
+ "dapp_name": app.lower().strip(),
+ "action": unique_action_name,
+ "fee_in_eth": fee,
+ "acc_count": accounts,
+ "trx_count": trx,
+ "gas_estimated": estimated_gas,
+ "gas_used": used_gas,
+ }
+ )
+    return pd.DataFrame(data)
-def format_report_for_github_comment(data):
- headers = "| " + " | ".join(REPORT_HEADERS) + " |\n"
- headers += "| --- | --- | --- | --- | --- | --- | --- |--- |\n"
+
+def format_report_as_table_markdown(df: pd.DataFrame) -> str:
report_content = ""
+    df = df.copy()  # avoid mutating the caller's DataFrame
+    dapp_names = df["dapp_name"].unique()
+    df.columns = [col.upper() for col in df.columns]
+
+    for dapp_name in dapp_names:
+        dapp_df = df[df["DAPP_NAME"] == dapp_name].drop(columns="DAPP_NAME")
+        report_content += f'\n## Cost Report for "{dapp_name.title()}" dApp\n\n'
+        report_content += dapp_df.to_markdown(index=False) + "\n"
- for app in data:
- report_content += f'\nCost report for "{app.title()}" dApp\n\n'
- report_content += headers
- for action_data in data[app]:
- report_content += "| " + " | ".join([str(item) for item in action_data]) + " | " + "\n"
return report_content
diff --git a/deploy/cli/github_api_client.py b/deploy/cli/github_api_client.py
index 3da971ff29..9bac512633 100644
--- a/deploy/cli/github_api_client.py
+++ b/deploy/cli/github_api_client.py
@@ -1,18 +1,38 @@
+import os
import re
+import typing as tp
+from pathlib import Path
import click
import requests
+from utils.types import GithubEvent, RepoType
+
DAPPS_REPORT_COMMENT_TITLE = "Dapps report"
class GithubClient:
+ def __init__(
+ self,
+ token: str,
+ repo: tp.Union[RepoType, tp.Literal[""]] = "",
+ event_name: str = "",
+ ref: str = "",
+ ref_name: str = "",
+ head_ref: str = "",
+ base_ref: str = "",
+ last_commit_message: str = "",
+ ):
+ self.headers = {"Authorization": f"Bearer {token}", "Accept": "application/vnd.github+json"}
+ self.repo = repo
+ self.event_name = event_name
+ self.ref = ref
+ self.ref_name = ref_name
+ self.head_ref = head_ref
+ self.base_ref = base_ref
+ self.last_commit_message = last_commit_message
- def __init__(self, token):
- self.headers = {"Authorization": f"Bearer {token}",
- "Accept": "application/vnd.github+json"}
-
- def add_comment_to_pr(self, url, msg):
+ def add_comment_to_pr(self, url: str, msg: str):
data = {"body": f"{DAPPS_REPORT_COMMENT_TITLE}\n\n{msg}\n\n"}
click.echo(f"Sent data: {data}")
click.echo(f"Headers: {self.headers}")
@@ -21,19 +41,71 @@ def add_comment_to_pr(self, url, msg):
if response.status_code != 201:
raise RuntimeError(f"Attempt to leave a comment on a PR failed: {response.text}")
- def delete_last_comment(self, pr_url):
+ def delete_last_comment(self, pr_url: str):
response = requests.get(pr_url, headers=self.headers).json()
old_comment_id = None
for item in response:
if DAPPS_REPORT_COMMENT_TITLE in item["body"]:
- old_comment_id=item["id"]
+ old_comment_id = item["id"]
break
if old_comment_id:
- pattern = r'/(\d+)/comments'
- repo_url = re.sub(pattern, '', pr_url)
- comment_url = f'{repo_url}/comments/{old_comment_id}'
+ pattern = r"/(\d+)/comments"
+ repo_url = re.sub(pattern, "", pr_url)
+ comment_url = f"{repo_url}/comments/{old_comment_id}"
response = requests.delete(comment_url, headers=self.headers)
if response.status_code != 204:
print(f"Attempt to delete a comment on a PR failed: {response.text}")
+ @property
+ def event(self) -> GithubEvent:
+ event_name: GithubEvent = "unknown"
+
+        if self.event_name == "push":
+            if self.ref.startswith("refs/tags/"):
+                event_name = "push_tag"
+            elif self.ref.startswith("refs/heads/"):
+                event_name = "push_branch"
+        elif self.event_name == "pull_request":
+            event_name = "pull_request"
+        elif self.event_name == "workflow_dispatch":
+            event_name = "workflow_dispatch"
+
+        # a branch push whose head commit is a merge commit counts as the result of a merge request
+        if event_name in ("push_branch", "unknown") and "merge" in self.last_commit_message.lower():
+            event_name = "merge_request"
+
+        return event_name
+
+ @property
+ def base_branch(self) -> tp.Optional[str]:
+ if any(substring in self.event for substring in ("pull", "push")):
+            if self.base_ref.startswith("refs/heads/"):
+                return self.base_ref[len("refs/heads/"):]
+            return self.base_ref
+
+ @property
+ def tag_name(self) -> tp.Optional[str]:
+        if self.event == "push_tag":
+ return self.ref_name
+
+ @property
+ def source_and_target_branch_names(self) -> tp.Optional[tuple[str, str]]:
+ if self.event == "pull_request":
+ return self.head_ref, self.base_ref
+ return None
+
+ def is_feature_branch(self, branch: str) -> bool:
+ is_base = self.is_base_branch(branch)
+ is_version = self.is_version_branch(branch)
+ return not is_base and not is_version
+
+ @staticmethod
+ def is_base_branch(branch: str) -> bool:
+        return bool(re.fullmatch(r"main|master|develop", branch))
+
+    @staticmethod
+    def is_version_branch(branch: str) -> bool:
+        # presumably branches like "v1.14.x"; the exact pattern comes from clickfile.VERSION_BRANCH_TEMPLATE
+        from clickfile import VERSION_BRANCH_TEMPLATE
+
+        return bool(re.fullmatch(VERSION_BRANCH_TEMPLATE, branch))
diff --git a/deploy/requirements/click.txt b/deploy/requirements/click.txt
index b8c6bdccfd..448ceac989 100644
--- a/deploy/requirements/click.txt
+++ b/deploy/requirements/click.txt
@@ -18,3 +18,7 @@ deepdiff==7.0.1
pydantic==2.7.3
slack-sdk==3.30.0
filelock==3.15.4
+psycopg2-binary==2.9.9
+sqlalchemy==2.0.31
+pandas==2.2.2
+matplotlib==3.9.1
diff --git a/deploy/test_results_db/__init__.py b/deploy/test_results_db/__init__.py
new file mode 100644
index 0000000000..57af14b466
--- /dev/null
+++ b/deploy/test_results_db/__init__.py
@@ -0,0 +1,4 @@
+# this is necessary to make sure Base is aware of all the tables
+
+from .table_models.cost_report import CostReport
+from .table_models.dapp_data import DappData
diff --git a/deploy/test_results_db/db_handler.py b/deploy/test_results_db/db_handler.py
new file mode 100644
index 0000000000..e288c836fc
--- /dev/null
+++ b/deploy/test_results_db/db_handler.py
@@ -0,0 +1,212 @@
+import os
+import re
+import signal
+import typing as tp
+
+from sqlalchemy import create_engine, Engine, desc, distinct
+from sqlalchemy.orm import sessionmaker, close_all_sessions
+import pandas as pd
+from packaging import version
+
+from deploy.test_results_db.table_models.base import Base
+from deploy.test_results_db.table_models.cost_report import CostReport
+from deploy.test_results_db.table_models.dapp_data import DappData
+from utils.types import RepoType
+from utils.version import remove_heading_chars_till_first_digit
+
+
+class PostgresTestResultsHandler:
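+    """Stores per-dApp cost reports in Postgres and fetches their history for comparisons."""
+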
+ def __init__(self):
+ self.engine: Engine = self.__create_engine()
+ self.Session = sessionmaker(bind=self.engine)
+ self.session = self.Session()
+ self.__create_tables_if_needed()
+ signal.signal(signal.SIGTERM, self.__handle_exit)
+ signal.signal(signal.SIGINT, self.__handle_exit)
+
+ @staticmethod
+ def __create_engine() -> Engine:
+ db_host = os.environ["TEST_RESULTS_DB_HOST"]
+ db_port = os.environ["TEST_RESULTS_DB_PORT"]
+ db_name = os.environ["TEST_RESULTS_DB_NAME"]
+ db_user = os.environ["TEST_RESULTS_DB_USER"]
+ db_password = os.environ["TEST_RESULTS_DB_PASSWORD"]
+ db_url = f"postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
+ engine = create_engine(db_url)
+ return engine
+
+ def __create_tables_if_needed(self):
+ Base.metadata.create_all(self.engine)
+
+    def __handle_exit(self, signum, frame):
+        close_all_sessions()
+
+ def get_cost_report_ids_by_branch(self, branch: str) -> tp.List[int]:
+ cost_reports = self.session.query(CostReport).filter(CostReport.branch == branch).all()
+ return [cost_report.id for cost_report in cost_reports] if cost_reports else []
+
+    def delete_reports(self, repo: str, branch: str, tag: tp.Optional[str]) -> list[int]:
+        query = self.session.query(CostReport).filter(
+            CostReport.repo == repo,
+            CostReport.branch == branch,
+            CostReport.github_tag == tag,
+        )
+        # collect the ids before deleting: the query yields no rows once the delete has run
+        report_ids = [r.id for r in query.all()]
+        query.delete(synchronize_session=False)
+        self.session.commit()
+        return report_ids
+
+ def delete_data_by_report_ids(self, report_ids: tp.List[int]):
+ self.session.query(DappData).filter(DappData.cost_report_id.in_(report_ids)).delete(synchronize_session=False)
+ self.session.commit()
+
+ def save_cost_report(
+ self,
+ branch: str,
+ repo: RepoType,
+ token_usd_gas_price: float,
+ github_tag: tp.Optional[str],
+ docker_image_tag: tp.Optional[str],
+ ) -> int:
+ cost_report = CostReport(
+ repo=repo,
+ branch=branch,
+ github_tag=github_tag,
+ docker_image_tag=docker_image_tag,
+ token_usd_gas_price=token_usd_gas_price,
+ )
+ self.session.add(cost_report)
+ self.session.commit()
+ return cost_report.id
+
+ def save_cost_report_data(self, report_data: pd.DataFrame, cost_report_id: int):
+ dapp_groups = report_data.groupby("dapp_name")
+
+ for dapp_name, group in dapp_groups:
+ for _, row in group.iterrows():
+ cost_report_data = DappData(
+ cost_report_id=cost_report_id,
+ dapp_name=str(dapp_name),
+ action=row["action"],
+ fee_in_eth=row["fee_in_eth"],
+ acc_count=row["acc_count"],
+ trx_count=row["trx_count"],
+ gas_estimated=row["gas_estimated"],
+ gas_used=row["gas_used"],
+ )
+ self.session.add(cost_report_data)
+
+ self.session.commit()
+
+ def get_previous_tag(self, repo: str, branch: str, tag: str) -> tp.Optional[str]:
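+        """Return the newest stored tag that is older than `tag`, e.g. "v1.3" for tag="v1.4" (semver-style tags assumed)."""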
+ tags: list[str] = (
+ self.session.query(distinct(CostReport.github_tag))
+ .filter(
+ CostReport.repo == repo,
+ CostReport.branch == branch,
+ )
+ .all()
+ )
+
+ sorted_tags: list[str] = sorted(
+ [t[0] for t in tags if t[0] is not None],
+ key=lambda t: version.parse(remove_heading_chars_till_first_digit(t)),
+ reverse=True,
+ )
+
+ latest_tag: version.Version = version.parse(remove_heading_chars_till_first_digit(tag))
+
+ for tag_ in sorted_tags:
+ next_tag: version.Version = version.parse(remove_heading_chars_till_first_digit(tag_))
+ if next_tag < latest_tag:
+ return tag_
+
+ def get_historical_data(
+ self,
+ depth: int,
+ repo: RepoType,
+ last_branch: str,
+ previous_branch: str,
+ tag: tp.Optional[str],
+ ) -> pd.DataFrame:
+        # Define the previous tag
+        from clickfile import VERSION_BRANCH_TEMPLATE
+
+        tag_ = tag
+        if tag is not None and re.fullmatch(VERSION_BRANCH_TEMPLATE, previous_branch):
+            # a version branch like "v1.14.x" is mapped to an artificially high patch version
+            # so that every real tag on that branch sorts below it
+            tag_ = previous_branch.replace("x", "99999999")
+
+        previous_tag = None if tag is None else self.get_previous_tag(repo=repo, branch=previous_branch, tag=tag_)
+
+        # Fetch previous CostReport entries, newest first; skip the newest entry when the previous
+        # branch/tag combination would match last_report itself
+        offset = 0 if (previous_branch != last_branch or previous_tag) else 1
+        previous_reports: list[CostReport] = (
+            self.session.query(CostReport)
+            .filter(
+                CostReport.repo == repo,
+                CostReport.branch == previous_branch,
+                CostReport.github_tag == previous_tag,
+            )
+            .order_by(desc(CostReport.timestamp))
+            .offset(offset)
+            .limit(depth - 1)
+            .all()
+        )
+
+        # Fetch the most recent CostReport for the last branch/tag
+        last_report: tp.Optional[CostReport] = (
+            self.session.query(CostReport)
+            .filter(
+                CostReport.repo == repo,
+                CostReport.branch == last_branch,
+                CostReport.github_tag == tag,
+            )
+            .order_by(desc(CostReport.timestamp))
+            .first()
+        )
+        if last_report is None:
+            raise RuntimeError(f"No cost report found for repo={repo}, branch={last_branch}, tag={tag}")
+
+        cost_report_entries: list[CostReport] = [last_report] + previous_reports
+        cost_report_ids: list[int] = [r.id for r in cost_report_entries]
+
+ # Fetch DappData entries for these cost_report_ids
+ actions_tuples = (
+ self.session.query(distinct(DappData.action)).filter(DappData.cost_report_id == last_report.id).all()
+ )
+ actions = [action_tuple[0] for action_tuple in actions_tuples]
+ dapp_data_entries = (
+ self.session.query(DappData)
+ .filter(
+ DappData.cost_report_id.in_(cost_report_ids),
+ DappData.action.in_(actions),
+ )
+ .all()
+ )
+
+ # Convert the list of CostReport objects to a dictionary for quick lookup
+ cost_report_dict: dict[int, CostReport] = {r.id: r for r in cost_report_entries}
+
+ df_data = []
+ for data_entry in dapp_data_entries:
+ cost_report: tp.Optional[CostReport] = cost_report_dict.get(data_entry.cost_report_id)
+ if cost_report:
+ df_data.append(
+ {
+ "timestamp": cost_report.timestamp,
+ "branch": cost_report.branch,
+ "tag": cost_report.github_tag,
+ "token_usd_gas_price": cost_report.token_usd_gas_price,
+ "dapp_name": data_entry.dapp_name, # Directly use dapp_name from DappData
+ "action": data_entry.action,
+ "fee_in_eth": data_entry.fee_in_eth,
+ "acc_count": data_entry.acc_count,
+ "trx_count": data_entry.trx_count,
+ "gas_estimated": data_entry.gas_estimated,
+ "gas_used": data_entry.gas_used,
+ }
+ )
+
+ # Initialize the DataFrame and sort it
+ df = pd.DataFrame(data=df_data)
+ df = df.sort_values(by=["timestamp", "dapp_name", "action"])
+
+ return df
diff --git a/deploy/test_results_db/table_models/base.py b/deploy/test_results_db/table_models/base.py
new file mode 100644
index 0000000000..e632e965d1
--- /dev/null
+++ b/deploy/test_results_db/table_models/base.py
@@ -0,0 +1,4 @@
+from sqlalchemy.orm import declarative_base
+
+
+Base = declarative_base()
diff --git a/deploy/test_results_db/table_models/cost_report.py b/deploy/test_results_db/table_models/cost_report.py
new file mode 100644
index 0000000000..5190223ba0
--- /dev/null
+++ b/deploy/test_results_db/table_models/cost_report.py
@@ -0,0 +1,30 @@
+from datetime import datetime
+import typing as tp
+
+from sqlalchemy import Column, Integer, Numeric, String, DateTime
+from sqlalchemy.orm import relationship
+
+from deploy.test_results_db.table_models.base import Base
+from utils.types import RepoType
+
+
+class CostReport(Base):
+ __tablename__ = "cost_report"
+
+ id: int = Column(Integer, primary_key=True, autoincrement=True)
+    timestamp: datetime = Column(DateTime, default=datetime.utcnow, nullable=False)
+ repo: RepoType = Column(String(255), nullable=False)
+ branch: str = Column(String(255), nullable=True)
+ github_tag: tp.Optional[str] = Column(String(255), nullable=True)
+ docker_image_tag: tp.Optional[str] = Column(String(255), nullable=True)
+ token_usd_gas_price: float = Column(Numeric(20, 8), nullable=False)
+
+ dapp_data = relationship("DappData", back_populates="report", cascade="all, delete-orphan")
+
+    def __repr__(self):
+        formatted_timestamp = self.timestamp.strftime("%Y-%m-%d %H:%M:%S")
+        return (
+            f"<CostReport(id={self.id}, timestamp={formatted_timestamp}, repo={self.repo}, branch={self.branch}, "
+            f"github_tag={self.github_tag}, docker_image_tag={self.docker_image_tag}, "
+            f"token_usd_gas_price={self.token_usd_gas_price})>"
+        )
diff --git a/deploy/test_results_db/table_models/dapp_data.py b/deploy/test_results_db/table_models/dapp_data.py
new file mode 100644
index 0000000000..8a2d19359f
--- /dev/null
+++ b/deploy/test_results_db/table_models/dapp_data.py
@@ -0,0 +1,25 @@
+from sqlalchemy import Column, Integer, String, Numeric, ForeignKey
+from sqlalchemy.orm import relationship
+
+from deploy.test_results_db.table_models.base import Base
+
+
+class DappData(Base):
+    __tablename__ = "dapp_data"
+
+    id: int = Column(Integer, primary_key=True, autoincrement=True)
+    cost_report_id: int = Column(Integer, ForeignKey("cost_report.id"), nullable=False)
+    dapp_name: str = Column(String(255), nullable=False)
+    action: str = Column(String(255), nullable=False)
+    fee_in_eth: float = Column(Numeric(20, 8), nullable=False)
+    acc_count: int = Column(Integer, nullable=False)
+    trx_count: int = Column(Integer, nullable=False)
+    gas_estimated: int = Column(Integer, nullable=False)
+    gas_used: int = Column(Integer, nullable=False)
+
+    report = relationship("CostReport", back_populates="dapp_data")
+
+    def __repr__(self):
+        return (
+            f"<DappData(id={self.id}, cost_report_id={self.cost_report_id}, dapp_name={self.dapp_name}, "
+            f"action={self.action}, fee_in_eth={self.fee_in_eth}, acc_count={self.acc_count}, "
+            f"trx_count={self.trx_count}, gas_estimated={self.gas_estimated}, gas_used={self.gas_used})>"
+        )
diff --git a/deploy/test_results_db/test_results_handler.py b/deploy/test_results_db/test_results_handler.py
new file mode 100644
index 0000000000..b9e0202ab6
--- /dev/null
+++ b/deploy/test_results_db/test_results_handler.py
@@ -0,0 +1,188 @@
+import textwrap
+import typing as tp
+from decimal import Decimal, ROUND_HALF_UP
+
+import pandas as pd
+import matplotlib.pyplot as plt
+import matplotlib.ticker as ticker
+from matplotlib.backends.backend_pdf import PdfPages
+
+from utils.types import RepoType
+from deploy.test_results_db.db_handler import PostgresTestResultsHandler
+
+
+class TestResultsHandler:
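+    """Facade over PostgresTestResultsHandler that also renders historical comparison plots to PDF."""
+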
+ def __init__(self):
+ self.db_handler = PostgresTestResultsHandler()
+
+ def save_to_db(
+ self,
+ report_data: pd.DataFrame,
+ repo: RepoType,
+ branch: str,
+ github_tag: tp.Optional[str],
+ docker_image_tag: tp.Optional[str],
+ token_usd_gas_price: float,
+ ):
+ cost_report_id = self.db_handler.save_cost_report(
+ repo=repo,
+ branch=branch,
+ github_tag=github_tag,
+ docker_image_tag=docker_image_tag,
+ token_usd_gas_price=token_usd_gas_price,
+ )
+ self.db_handler.save_cost_report_data(report_data=report_data, cost_report_id=cost_report_id)
+
+ def delete_report_and_data(self, repo: str, branch: str, tag: tp.Optional[str]):
+ report_ids = self.db_handler.delete_reports(repo=repo, branch=branch, tag=tag)
+ self.db_handler.delete_data_by_report_ids(report_ids=report_ids)
+
+ def get_historical_data(
+ self,
+ depth: int,
+ repo: RepoType,
+ last_branch: str,
+ previous_branch: str,
+        tag: tp.Optional[str],
+ ) -> pd.DataFrame:
+ return self.db_handler.get_historical_data(
+ depth=depth,
+ repo=repo,
+ last_branch=last_branch,
+ previous_branch=previous_branch,
+ tag=tag,
+ )
+
+ @staticmethod
+    def generate_and_save_plots_pdf(
+        historical_data: pd.DataFrame, title_end: str, output_pdf: str = "cost_reports.pdf"
+    ) -> str:
+ historical_data["timestamp"] = pd.to_datetime(historical_data["timestamp"], errors="coerce")
+ historical_data["fee_in_eth"] = historical_data["fee_in_eth"].apply(Decimal)
+ historical_data["fee_in_usd"] = historical_data["fee_in_eth"] * historical_data["token_usd_gas_price"]
+ historical_data["acc_count"] = historical_data["acc_count"].apply(Decimal)
+ historical_data["trx_count"] = historical_data["trx_count"].apply(Decimal)
+ historical_data["gas_estimated"] = historical_data["gas_estimated"].apply(Decimal)
+ historical_data["gas_used"] = historical_data["gas_used"].apply(Decimal)
+ historical_data["token_usd_gas_price"] = historical_data["token_usd_gas_price"].apply(Decimal)
+ historical_data["used_%_of_EG"] = (
+ (historical_data["gas_used"] / historical_data["gas_estimated"]) * Decimal("100")
+ ).apply(lambda x: x.quantize(Decimal("0.00"), rounding=ROUND_HALF_UP))
+
+ # analyze only the dapps that are present in the latest report
+ latest_timestamp = historical_data["timestamp"].max()
+ latest_report_data = historical_data[historical_data["timestamp"] == latest_timestamp]
+ dapp_names = latest_report_data["dapp_name"].unique()
+ metrics = ["fee_in_eth", "fee_in_usd", "acc_count", "trx_count", "gas_estimated", "gas_used", "used_%_of_EG"]
+
+ with PdfPages(output_pdf) as pdf:
+ for dapp_name in dapp_names:
+ # Filter data for the current dapp_name
+ dapp_data = historical_data[historical_data["dapp_name"] == dapp_name]
+ actions = dapp_data["action"].unique()
+
+ num_rows = len(actions)
+ num_cols = len(metrics)
+                fig, axes = plt.subplots(
+                    nrows=num_rows, ncols=num_cols, figsize=(5 * num_cols, 3 * num_rows), sharex="col"
+                )
+ fig.suptitle(t=f'Cost report for "{dapp_name}" dApp on {title_end}', fontsize=16, fontweight="bold")
+
+ for action_idx, action in enumerate(actions):
+ # Calculate y-axis limits for each metric
+ buffer_fraction = Decimal("0.5")
+ action_data = dapp_data[dapp_data["action"] == action]
+ y_limits = {}
+
+ for metric in metrics:
+ metric_data = action_data[metric]
+ min_val = metric_data.min()
+ max_val = metric_data.max()
+ range_val = max_val - min_val
+
+ if range_val == 0:
+ min_val -= abs(min_val) * buffer_fraction
+ max_val += abs(max_val) * buffer_fraction
+ else:
+ min_val -= range_val * buffer_fraction
+ max_val += range_val * buffer_fraction
+
+ y_limits[metric] = (min_val, max_val)
+
+ for metric_idx, metric in enumerate(metrics):
+ ax = axes[action_idx, metric_idx] if num_rows > 1 else axes[metric_idx]
+ data_subset = dapp_data[dapp_data["action"] == action].copy()
+
+ if not data_subset.empty:
+ # Convert timestamps to evenly spaced numeric values
+ data_subset["time_numeric"] = range(len(data_subset))
+ data_subset = data_subset.sort_values(by=["timestamp"])
+ prev_value = None
+ prev_is_valid = True
+
+ # Plot blue lines before scatter
+ ax.plot(
+ data_subset["time_numeric"], data_subset[metric], color="blue", linestyle="-", linewidth=1
+ )
+
+ for i, (x, y) in enumerate(zip(data_subset["time_numeric"], data_subset[metric])):
+ if prev_value is not None and y != prev_value:
+ ax.scatter(x, y, color="red")
+ ax.annotate(
+ f"{y}",
+ (x, y),
+ textcoords="offset points",
+ xytext=(25, 5),
+ ha="center",
+ color="red",
+ rotation=45,
+ )
+
+ if prev_is_valid:
+ ax.annotate(
+ f"{prev_value}",
+ (prev_x, prev_y),
+ textcoords="offset points",
+ xytext=(25, 5),
+ ha="center",
+ color="blue",
+ rotation=45,
+ )
+ prev_is_valid = False
+ else:
+ ax.scatter(x, y, color="blue")
+ prev_is_valid = True
+
+ prev_value = y
+ prev_x, prev_y = x, y
+
+ # Formatting x-axis to show evenly spaced points
+ ax.set_xticks(range(len(data_subset)))
+
+ # set tick labels
+ if data_subset["tag"].notna().any():
+ x_tick_labels = data_subset["tag"]
+ else:
+ x_tick_labels = data_subset["branch"]
+ ax.set_xticklabels(x_tick_labels, rotation=45)
+ ax.tick_params(axis="y", labelsize=8)
+
+ # Set y-axis limits and labels
+ ax.set_ylim(float(y_limits[metric][0]), float(y_limits[metric][1]))
+ has_decimals = any(Decimal(str(value)) % 1 != 0 for value in data_subset[metric])
+ if has_decimals:
+ ax.yaxis.set_major_formatter(ticker.FormatStrFormatter("%.3f"))
+ else:
+ ax.yaxis.set_major_formatter(ticker.FormatStrFormatter("%.0f"))
+
+ if metric_idx == 0:
+ multiline_action = textwrap.fill(action, width=30)
+ ax.set_ylabel(multiline_action)
+
+ if action_idx == 0:
+ ax.set_title(metric)
+
+ plt.tight_layout()
+ plt.subplots_adjust(top=0.9)
+ pdf.savefig(fig)
+ plt.close(fig)
+
+ return output_pdf
diff --git a/utils/cloud.py b/utils/cloud.py
index be7ec50e92..77cc5d30b1 100644
--- a/utils/cloud.py
+++ b/utils/cloud.py
@@ -1,12 +1,15 @@
import os
+import mimetypes
+
import boto3
import pathlib
-import mimetypes
NEON_TESTS_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET", "neon-test-allure")
+REGION = os.environ.get("AWS_REGION", "eu-central-1")
+S3_ENDPOINT = f"s3-website.{REGION}.amazonaws.com"
+S3_UPLOAD_URL = f"http://{NEON_TESTS_BUCKET_NAME}.{S3_ENDPOINT}/" + "{key}"
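+# e.g. "http://neon-test-allure.s3-website.eu-central-1.amazonaws.com/<key>" with the default bucket and region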
-
-client = boto3.client("s3", region_name=os.environ.get("AWS_REGION", "eu-central-1"))
+client = boto3.client("s3", region_name=REGION)
def download(source, destination, bucket=NEON_TESTS_BUCKET_NAME):
@@ -39,3 +42,4 @@ def upload(source, destination, bucket=NEON_TESTS_BUCKET_NAME):
def list_bucket(directory, bucket=NEON_TESTS_BUCKET_NAME):
result = client.list_objects_v2(Bucket=bucket, Prefix=str(directory))
return result.get("Contents", [])
diff --git a/utils/types.py b/utils/types.py
index 2185c089ec..db7c6c299c 100644
--- a/utils/types.py
+++ b/utils/types.py
@@ -26,6 +26,7 @@ class Contract:
solana_address: PublicKey
balance_account_address: PublicKey
+
@dataclass
class TreasuryPool:
index: int
@@ -43,3 +44,14 @@ class TreasuryPool:
"evm",
"compiler_compatibility",
]
+
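+# "merge_request" is inferred from the last commit message; the other values map to GitHub's event_name/ref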
+GithubEvent = tp.Literal[
+ "push_branch",
+ "push_tag",
+ "pull_request",
+ "merge_request",
+ "workflow_dispatch",
+ "unknown",
+]
+
+RepoType = tp.Literal["proxy", "evm", "tests"]
diff --git a/utils/version.py b/utils/version.py
new file mode 100644
index 0000000000..efe0b6c0e4
--- /dev/null
+++ b/utils/version.py
@@ -0,0 +1,6 @@
+import re
+
+
+def remove_heading_chars_till_first_digit(input_string):
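+    """Strip leading non-digit characters, e.g. "v1.12.3" -> "1.12.3"."""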
+    return re.sub(r"^\D*", "", input_string)