feat(metric): Add a Software Package Metric calculation scripts #9

Merged (2 commits) on Dec 5, 2023
90 changes: 90 additions & 0 deletions coverage-metrics/bin/utils/package-metrics/compare.py
@@ -0,0 +1,90 @@
#!/usr/bin/env python3

from argparse import ArgumentParser
import json
import os
import sys

from prettytable import PrettyTable

COLOURS = {
"red": "\033[91m",
"green": "\033[92m",
"blue": "\033[94m",
"empty": "\x1b[0m",
}


# Validates the CLI arguments.
def normalise(args):
    if not args.base_path:
        raise ValueError("the --base parameter must not be empty")

    if not args.target_path:
        raise ValueError("the --target parameter must not be empty")


# Highlights the text with a specified colour.
def highlight(text, colour):
    return f'{colour}{text}{COLOURS["empty"]}'


# Highlights the delta label: blue for new packages, red for increases, green for decreases.
def highlight_delta(string, delta, is_new=False):
    if is_new:
        return highlight(string, COLOURS['blue'])

    if delta > 0:
        return highlight(string, COLOURS['red'])

    if delta < 0:
        return highlight(string, COLOURS['green'])

    return string


if "__main__" == __name__:
parser = ArgumentParser()
parser.add_argument("-b", "--base", dest="base_path", help="A path to the json file with a base metrics")
parser.add_argument("-t", "--target", dest="target_path", help="A path to the json file with a target metrics")

args = parser.parse_args()
normalise(args)

# Read the file contents
with open(args.base_path, "r") as base_file:
base = json.load(base_file)
with open(args.target_path, "r") as target_file:
target = json.load(target_file)

base = dict(sorted(base.items()))
target = dict(sorted(target.items()))

table = PrettyTable(("Package", "Efferent", "Afferent", "External"))

status = os.EX_OK
for pkg, pkg_metrics in target.items():
is_new = pkg not in base

delta_efferent = pkg_metrics["efferent"] - base.get(pkg, {}).get("efferent", 0)
delta_afferent = pkg_metrics["afferent"] - base.get(pkg, {}).get("afferent", 0)
delta_external = pkg_metrics["external"] - base.get(pkg, {}).get("external", 0)

efferent_label = " %+d" % delta_efferent if delta_efferent != 0 else ""
afferent_label = " %+d" % delta_afferent if delta_afferent != 0 else ""
external_label = " %+d" % delta_external if delta_external != 0 else ""

if not is_new and any(delta > 0 for delta in [delta_efferent, delta_afferent, delta_external]):
status = os.EX_DATAERR

table.add_row(
(pkg,
"%d%s" % (pkg_metrics["efferent"], highlight_delta(efferent_label, delta_efferent, is_new=is_new)),
"%d%s" % (pkg_metrics["afferent"], highlight_delta(afferent_label, delta_afferent, is_new=is_new)),
"%d%s" % (pkg_metrics["external"], highlight_delta(external_label, delta_external, is_new=is_new)),
))

table.align["Package"] = "l"
table.align["Efferent"] = "r"
table.align["Afferent"] = "r"
table.align["External"] = "r"
print(table)

exit(status)
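
compare.py expects both inputs to be JSON objects keyed by package name, each with numeric efferent, afferent and external fields, as produced by spm.py below. A minimal illustrative example (the package names and values here are made up):

    {
        "api/v1beta2": {"efferent": 3, "afferent": 7, "external": 1},
        "internal/manifest": {"efferent": 5, "afferent": 2, "external": 4}
    }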
3 changes: 3 additions & 0 deletions coverage-metrics/bin/utils/package-metrics/requirements.txt
@@ -0,0 +1,3 @@
gitdb==4.0.11
GitPython==3.1.40
prettytable==3.7.0
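
The pinned dependencies can be installed before running either script, for example:

    pip3 install -r coverage-metrics/bin/utils/package-metrics/requirements.txt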
129 changes: 129 additions & 0 deletions coverage-metrics/bin/utils/package-metrics/spm.py
@@ -0,0 +1,129 @@
#!/usr/bin/env python3

from argparse import ArgumentParser
from git import Repo
import json
import os
import re

DIRS_TO_SKIP = (".", "config", "tests") # The list of directories to skip metric calculation
GO_TEST_SUFFIX = "_test.go"
GO_SUFFIX = ".go"

# The regular expression to match an import section content inside a go file (https://regex101.com/r/JW2UD0/1).
go_imports_regexp = re.compile(r"import \((.*?)\)|import (\".*?\")", flags=re.MULTILINE | re.DOTALL)
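# A couple of hypothetical Go snippets the pattern is meant to capture: a grouped import block
# (first alternative) and a single-line import (second alternative).
#
#     import (
#         "fmt"
#         api "github.com/kyma-project/lifecycle-manager/api"
#     )
#
#     import "os"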


def trim_prefix(text, prefix):
    return text[len(prefix):] if text.startswith(prefix) else text


# Extracts the list of dependencies from the go file content.
def extract_deps(file_contents):
    imports_match = go_imports_regexp.search(file_contents)
    if not imports_match:
        return []

    # Normalise the imports section, weed out empty lines and comments.
    raw_imports = imports_match.group(1) if imports_match.group(1) else imports_match.group(2)
    imports = [i.strip() for i in raw_imports.split("\n")]
    imports = [i for i in imports if len(i) > 0 and not i.startswith("//")]

    dependencies = []
    for i in imports:
        # Extract the imported package name only (without any aliases and quote characters).
        dependencies.append(re.match(r'.*\"(.*)\".*', i)[1])

    return list(set(dependencies))


# Returns the dict of all go packages discovered under the given path.
def fetch_deps(path, skipped_dirs):
    packages = {}

    for root, dirs, files in os.walk(path):
        package_name = trim_prefix(root, path)

        # Skip all unwanted directories.
        if any(package_name.startswith(to_skip) for to_skip in skipped_dirs):
            continue

        # Fetch the list of go files in the directory, excluding test ones.
        go_files = [f for f in files if not f.endswith(GO_TEST_SUFFIX) and f.endswith(GO_SUFFIX)]
        if len(go_files) == 0:
            continue

        dependencies = []
        for f in go_files:
            with open(os.path.join(root, f), "r") as file:
                dependencies += extract_deps(file.read())

        packages[package_name] = list(set(dependencies))

    return packages


# Groups the dependencies into efferent, afferent and external categories.
def group_deps(imported_packages, module_name):
    packages = {}
    for package in imported_packages:
        package_name = module_name + package
        package_imports = imported_packages[package]
        packages[package] = {
            # The number of imported packages that belong to the same module (prefixed with module_name).
            "efferent": len([i for i in package_imports if i.startswith(module_name)]),
            # The number of packages within the module that import this package.
            "afferent": len([1 for p in imported_packages if package_name in imported_packages[p]]),
            # The number of distinct external packages (the ones that contain a "." in the domain name and a "/"
            # as a path separator, to distinguish them from the standard library packages).
            "external": len(set([i for i in package_imports if
                                 not i.startswith(module_name) and '/' in i and "." in i.split("/")[0]])),
        }

    return packages


# Validates and normalises the CLI arguments.
def normalise(args):
    if not args.go_module:
        raise ValueError("the --module parameter must not be empty")

    if not args.go_module.endswith("/"):
        args.go_module += "/"

    if not args.repo_path:
        raise ValueError("the --path parameter must not be empty")
    if not args.repo_path.endswith("/"):
        args.repo_path += "/"

    if not args.out:
        raise ValueError("the --out parameter must not be empty")

    if args.skip:
        skip = args.skip.split(",")
        skip = [s.strip() for s in skip]
        skip = [s for s in skip if len(s) > 0]
        args.skip = list(DIRS_TO_SKIP) + skip
    else:
        args.skip = list(DIRS_TO_SKIP)


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-p", "--path", dest="repo_path", help="A path to the Go project's source code")
    parser.add_argument("-o", "--out", dest="out", help="A path to the resulting JSON file")
    parser.add_argument("-s", "--skip", dest="skip",
                        help="A comma-separated list of directories to be skipped for the analysis")
    parser.add_argument("-m", "--module", dest="go_module",
                        help="Fully qualified go module name (e.g.: github.com/kyma-project/lifecycle-manager)")

    args = parser.parse_args()
    normalise(args)

    # Opening the repository raises an error if the path does not point to a valid git checkout.
    repo = Repo(args.repo_path)

    dependencies = fetch_deps(args.repo_path, args.skip)
    grouped_dependencies = group_deps(dependencies, args.go_module)

    with open(args.out, "w") as out_file:
        json.dump(grouped_dependencies, out_file, indent=4)
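
A possible end-to-end run, assuming two local checkouts of the module (the checkout paths and report file names below are only illustrative): generate a base report from the main checkout, a target report from the branch under review, then compare the two.

    python3 spm.py --path ./lifecycle-manager-main/ --module github.com/kyma-project/lifecycle-manager --out base.json
    python3 spm.py --path ./lifecycle-manager-branch/ --module github.com/kyma-project/lifecycle-manager --out target.json
    python3 compare.py --base base.json --target target.json

compare.py prints the per-package table and exits with os.EX_DATAERR when any package that already exists in the base report has increased its efferent, afferent or external count, so the pair of scripts can serve as a CI gate.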