Changed coverage gathering
AryazE committed Feb 26, 2024
1 parent 830f9a7 commit d2b2250
Showing 9 changed files with 75 additions and 75 deletions.
23 changes: 20 additions & 3 deletions src/dynapyt/run_analysis.py
@@ -4,8 +4,13 @@
from os.path import abspath
from tempfile import gettempdir
import sys
import uuid
import json
from pathlib import Path
from . import runtime as _rt
from .utils.runtimeUtils import merge_coverage

session_id = str(uuid.uuid4())


def run_analysis(
@@ -20,16 +25,16 @@ def run_analysis(

if coverage:
if coverage_dir is None:
coverage_path = Path(gettempdir()) / "dynapyt_coverage"
coverage_path = Path(gettempdir()) / f"dynapyt_coverage-{session_id}"
coverage_path.mkdir(exist_ok=True)
else:
coverage_path = Path(coverage_dir)
coverage_path = Path(coverage_dir) / f"dynapyt_coverage-{session_id}"
coverage_path.mkdir(exist_ok=True)
_rt.set_coverage(coverage_path)
else:
_rt.set_coverage(None)

analyses_file = Path(gettempdir()) / "dynapyt_analyses.txt"
analyses_file = Path(gettempdir()) / f"dynapyt_analyses-{session_id}.txt"
if analyses_file.exists():
analyses_file.unlink()
with open(str(analyses_file), "w") as f:
@@ -54,6 +59,18 @@ def run_analysis(
importlib.import_module(entry)
_rt.end_execution()

# read all files in coverage directory and merge them
analysis_coverage = {}
if coverage:
for cov_file in coverage_path.glob("coverage-*.json"):
with open(coverage_path / cov_file, "r") as f:
new_coverage = json.load(f)
analysis_coverage = merge_coverage(analysis_coverage, new_coverage)
with open(coverage_path / "coverage.json", "w") as f:
json.dump(analysis_coverage, f)

return session_id


if __name__ == "__main__":
parser = argparse.ArgumentParser()
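
For illustration, a minimal sketch of how a caller could use the session id now returned by run_analysis to locate the merged coverage file; the entry module, analysis class, and coverage directory below are hypothetical placeholders, not names from this repository:

from pathlib import Path
import json

from dynapyt.run_analysis import run_analysis

# Placeholder entry module and analysis class; coverage_dir is assumed to
# be an existing directory.
session_id = run_analysis(
    "my_package.program",
    ["my_package.analysis.MyAnalysis"],
    coverage=True,
    coverage_dir="/tmp/cov",
)

# run_analysis merges the per-process coverage-*.json files into a single
# coverage.json inside the session-specific directory it created.
merged_file = Path("/tmp/cov") / f"dynapyt_coverage-{session_id}" / "coverage.json"
with open(merged_file) as f:
    merged_coverage = json.load(f)
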
57 changes: 17 additions & 40 deletions src/dynapyt/runtime.py
@@ -1,23 +1,24 @@
from typing import List, Tuple, Any
from pathlib import Path
from sys import exc_info
import sys
import uuid
import atexit
import signal
import json
import tempfile
from filelock import FileLock
import libcst as cst
from .utils.hooks import snake, get_name
from .instrument.IIDs import IIDs
from .instrument.filters import START, END, SEPERATOR
from .utils.load_analysis import load_analyses
from .utils.runtimeUtils import load_analyses

analyses = None
covered = None
coverage_path = None
current_file = None
end_execution_called = False
engine_id = str(uuid.uuid4())
session_id = None


def end_execution():
@@ -27,31 +28,8 @@ def end_execution():
end_execution_called = True
call_if_exists("end_execution")
if covered is not None:
with FileLock(f"{str(coverage_path)}.lock"):
if coverage_path.exists():
existing_coverage = {}
with open(str(coverage_path), "r") as f:
content = f.read().splitlines()
for c in content:
tmp = json.loads(c)
existing_coverage.update(tmp)
coverage_path.unlink()
else:
existing_coverage = {}
for r_file, line_nums in covered.items():
if r_file not in existing_coverage:
existing_coverage[r_file] = {}
for ln, anas in line_nums.items():
if ln not in existing_coverage[r_file]:
existing_coverage[r_file][ln] = {}
for ana, count in anas.items():
if ana not in existing_coverage[r_file][ln]:
existing_coverage[r_file][ln][ana] = 0
existing_coverage[r_file][ln][ana] += count
with open(str(coverage_path), "w") as f:
for r_file, line_nums in existing_coverage.items():
tmp = {r_file: line_nums}
f.write(json.dumps(tmp) + "\n")
with open(str(coverage_path), "w") as f:
json.dump(covered, f)


def set_analysis(new_analyses: List[Any]):
@@ -64,18 +42,14 @@ def set_analysis(new_analyses: List[Any]):


def set_coverage(coverage_dir: Path):
global covered, coverage_path
global covered, coverage_path, session_id
if coverage_dir is not None:
covered = {}
session_id = str(coverage_dir).split("-")[-1]
coverage_dir.mkdir(exist_ok=True)
coverage_path = coverage_dir / "covered.jsonl"
coverage_path = coverage_dir / f"coverage-{engine_id}.json"
if coverage_path.exists():
coverage_path.unlink()
# with open(str(coverage_path), "r") as f:
# content = f.read().splitlines()
# for c in content:
# tmp = json.loads(c)
# covered.update(tmp)


def filtered(func, f, args):
@@ -107,7 +81,9 @@ def call_if_exists(f, *args):
global covered, analyses, current_file
return_value = None
if analyses is None:
analyses_file = Path(tempfile.gettempdir()) / "dynapyt_analyses.txt"
analyses_file = (
Path(tempfile.gettempdir()) / f"dynapyt_analyses-{session_id}.txt"
)
with open(str(analyses_file), "r") as af:
analysis_list = af.read().split("\n")
set_analysis(analysis_list)
@@ -124,11 +100,12 @@ def call_if_exists(f, *args):
line_no = current_file.iid_to_location[
iid
].start_line # This is not accurate for multiline statements like if, for, multiline calls, etc.
analysis_class_name = analysis.__class__.__name__
if line_no not in covered[r_file]:
covered[r_file][line_no] = {analysis.__class__.__name__: 0}
if analysis.__class__.__name__ not in covered[r_file][line_no]:
covered[r_file][line_no][analysis.__class__.__name__] = 0
covered[r_file][line_no][analysis.__class__.__name__] += 1
covered[r_file][line_no] = {analysis_class_name: 0}
if analysis_class_name not in covered[r_file][line_no]:
covered[r_file][line_no][analysis_class_name] = 0
covered[r_file][line_no][analysis_class_name] += 1
return return_value


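
For reference, the file written by end_execution above follows the nested structure that call_if_exists builds up in covered: instrumented file path -> line number -> analysis class name -> hit count (line-number keys become strings after the JSON round trip). A hypothetical coverage-<engine_id>.json, shown as the equivalent Python dict:

# Illustrative only; the path, line numbers, and analysis name are made up.
example_coverage = {
    "/tmp/example/program.py": {
        "3": {"TestAnalysis": 2},
        "7": {"TestAnalysis": 1},
    },
}
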
2 changes: 1 addition & 1 deletion src/dynapyt/utils/hooks.py
@@ -10,7 +10,7 @@
import keyword

from ..instrument.filters import START, END, SEPERATOR, get_details
from .load_analysis import load_analyses
from .runtimeUtils import load_analyses


def snake(x):
14 changes: 14 additions & 0 deletions src/dynapyt/utils/runtimeUtils.py
@@ -22,3 +22,17 @@ def load_analyses(analyses: List[Any]) -> List[BaseAnalysis]:
else:
continue
return res_analyses


def merge_coverage(base_coverage: dict, new_coverage: dict) -> dict:
for cov_file, coverage in new_coverage.items():
if cov_file not in base_coverage:
base_coverage[cov_file] = {}
for line, analysis_cov in coverage.items():
if line not in base_coverage[cov_file]:
base_coverage[cov_file][line] = {}
for analysis, count in analysis_cov.items():
if analysis not in base_coverage[cov_file][line]:
base_coverage[cov_file][line][analysis] = 0
base_coverage[cov_file][line][analysis] += count
return base_coverage
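
A hedged usage sketch of merge_coverage: per-analysis hit counts for the same file and line are summed, and files or lines missing from the base are added; note that the base dict is updated in place and also returned. The file paths and analysis name are made up:

from dynapyt.utils.runtimeUtils import merge_coverage

base = {"program.py": {"3": {"TestAnalysis": 1}}}
new = {
    "program.py": {"3": {"TestAnalysis": 2}, "5": {"TestAnalysis": 1}},
    "other.py": {"1": {"TestAnalysis": 1}},
}

merged = merge_coverage(base, new)
# merged == {
#     "program.py": {"3": {"TestAnalysis": 3}, "5": {"TestAnalysis": 1}},
#     "other.py": {"1": {"TestAnalysis": 1}},
# }
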
7 changes: 6 additions & 1 deletion tests/cleanup.py
@@ -27,7 +27,12 @@
if dirty_file.exists():
dirty_file.unlink()

dirty_files = here.glob("**/covered.jsonl*")
dirty_files = here.glob("**/coverage*.json")
for dirty_file in dirty_files:
if dirty_file.exists():
dirty_file.unlink()

dirty_dirs = here.glob("**/dynapyt_coverage-*")
for dirty_dir in dirty_dirs:
if dirty_dir.exists():
shutil.rmtree(dirty_dir)
43 changes: 17 additions & 26 deletions tests/run_single_test.py
@@ -2,7 +2,7 @@
from os import sep, remove
from os.path import join, exists
from pathlib import Path
from shutil import copyfile, move
from shutil import copyfile, move, rmtree
from inspect import getmembers, isclass
from typing import Tuple
import json
@@ -11,7 +11,6 @@
from dynapyt.instrument.instrument import instrument_file
from dynapyt.utils.hooks import get_hooks_from_analysis
from dynapyt.run_analysis import run_analysis
import dynapyt.runtime as rt
from dynapyt.analyses.BaseAnalysis import BaseAnalysis


@@ -35,23 +34,12 @@ def correct_output(expected: str, actual: str) -> bool:


def correct_coverage(expected: str, actual: str) -> bool:
actual_lines = sorted(actual.strip().split("\n"))
expected_lines = sorted(expected.strip().split("\n"))
if len(actual_lines) != len(expected_lines):
return False
for i in range(len(actual_lines)):
actual_line = json.loads(actual_lines[i].replace("\\\\", "/"))
expected_line = json.loads(expected_lines[i])
for f, cov in actual_line.items():
file_path = f.split("/tests/")[1]
if file_path not in expected_line:
return False
for l in cov:
if l not in expected_line[file_path]:
return False
if expected_line[file_path][l] != cov[l]:
return False
return True
actual_cov = {
k.replace("\\\\", "/").split("/tests/")[1]: v
for k, v in json.loads(actual).items()
}
expected_cov = json.loads(expected)
return actual_cov == expected_cov


def test_runner(directory_pair: Tuple[str, str], capsys):
@@ -67,7 +55,7 @@ def test_runner(directory_pair: Tuple[str, str], capsys):
[f"{module_prefix}.analysis.{ac[0]}" for ac in analysis_classes]
)

if (Path(abs_dir) / "exp_coverage.jsonl").exists():
if (Path(abs_dir) / "exp_coverage.json").exists():
cov = True
coverage_dir = str(abs_dir)
else:
@@ -103,14 +91,14 @@ def test_runner(directory_pair: Tuple[str, str], capsys):
captured = capsys.readouterr() # clear stdout
# print(f"Before analysis: {captured.out}") # for debugging purposes
if run_as_file:
run_analysis(
session_id = run_analysis(
program_file,
[f"{module_prefix}.analysis.TestAnalysis"],
coverage=cov,
coverage_dir=coverage_dir,
)
else:
run_analysis(
session_id = run_analysis(
f"{module_prefix}.program",
[
f"{module_prefix}.analysis.{ac[0]}"
@@ -134,11 +122,13 @@ def test_runner(directory_pair: Tuple[str, str], capsys):
f"Output of {rel_dir} does not match expected output.\n--> Expected:\n{expected}\n--> Actual:\n{captured.out}"
)

expected_coverage = join(abs_dir, "exp_coverage.jsonl")
expected_coverage = join(abs_dir, "exp_coverage.json")
if exists(expected_coverage):
with open(expected_coverage, "r") as file:
expected = file.read()
with open(join(coverage_dir, "covered.jsonl"), "r") as file:
with open(
join(coverage_dir, f"dynapyt_coverage-{session_id}", "coverage.json"), "r"
) as file:
actual = file.read()
if not correct_coverage(expected, actual):
pytest.fail(
@@ -152,8 +142,9 @@ def test_runner(directory_pair: Tuple[str, str], capsys):
move(orig_program_file, program_file)
remove(join(abs_dir, f"{program_file[:-3]}-dynapyt.json"))
if cov:
remove(join(abs_dir, "covered.jsonl"))
remove(join(abs_dir, "covered.jsonl.lock"))
cov_dirs = Path(coverage_dir).glob("dynapyt_coverage-*")
for cov_dir in cov_dirs:
rmtree(cov_dir)
if exists(join(abs_dir, "__init__.py")) and exists(
join(abs_dir, "__init__.py.orig")
):
2 changes: 0 additions & 2 deletions tests/test_coverage/multi_file/exp_coverage.jsonl

This file was deleted.

1 change: 0 additions & 1 deletion tests/test_coverage/runtime_event/exp_coverage.jsonl

This file was deleted.

1 change: 0 additions & 1 deletion tests/test_coverage/single_hook/exp_coverage.jsonl

This file was deleted.
