work with Upstream-testsuite logs; skip passed beaker logs
The testout.log format used by Upstream-testsuite is slightly different.
Improve performance by parsing Ansible logs only for failed
test runs in beaker.
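
As a quick illustration of the named-group regex this commit introduces (SYSTEM_ROLE_LOG_RE, used below in get_errors_from_ansible_log and get_beaker_job_info), here is a minimal sketch of how it splits a SYSTEM-ROLE log URL into role, test name, and Ansible version. The URL is a made-up example, not a real log link.

# Minimal sketch (not part of the commit): parse a hypothetical SYSTEM-ROLE
# log URL with the named-group regex added in this change.
import re

SYSTEM_ROLE_LOG_RE = re.compile(r"/SYSTEM-ROLE-(?P<role>[a-z0-9_]+)_(?P<test_name>tests_[a-z0-9_]+[.]yml)-.*-ANSIBLE-(?P<ansible_ver>[0-9.]+).*[.]log$")

url = "https://example.com/SYSTEM-ROLE-network_tests_default.yml-legacy-ANSIBLE-2.9.log"
match = SYSTEM_ROLE_LOG_RE.search(url)
if match:
    print(match.group("role"))         # network
    print(match.group("test_name"))    # tests_default.yml
    print(match.group("ansible_ver"))  # 2.9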
richm committed Jan 30, 2025
1 parent 175fd61 commit b012d47
Showing 1 changed file with 36 additions and 15 deletions.
check_logs.py — 51 changes: 36 additions & 15 deletions
@@ -234,6 +234,8 @@ def get_statuses(gh, org, repo, pr_num):
statuses.append(status)
return statuses

SYSTEM_ROLE_LOG_RE = re.compile(r"/SYSTEM-ROLE-(?P<role>[a-z0-9_]+)_(?P<test_name>tests_[a-z0-9_]+[.]yml)-.*-ANSIBLE-(?P<ansible_ver>[0-9.]+).*[.]log$")


def get_errors_from_ansible_log(args, log_url):
errors = []
@@ -258,11 +260,8 @@ def get_errors_from_ansible_log(args, log_url):
)
else:
# https://....//SYSTEM-ROLE-$ROLENAME_$TEST_NAME.yml-legacy-ANSIBLE-2.log
match = re.search(
r"/SYSTEM-ROLE-([a-z0-9_]+)_(tests_[a-z0-9_]+[.]yml)-.*-ANSIBLE-([0-9.]+)[.]log$",
log_url,
)
role = match.group(1)
match = SYSTEM_ROLE_LOG_RE.search(log_url)
role = match.group("role")
if args.role != ["ALL"] and role not in args.role:
logging.info(
"Skipping log - role [%s] not in args.role [%s]: [%s]",
@@ -272,7 +271,7 @@
)
return []
# test = match.group(2) # unused for now
ansible_version = match.group(3)
ansible_version = match.group("ansible_ver")

for line in get_file_data(args, log_url):
if (
@@ -466,9 +465,11 @@ def parse_beaker_job_log(args, start_dt_utc, taskout_url):
result_re = re.compile(r"^:: OVERALL RESULT: ([A-Z]+)")
test_re_str = (
r"^:: \[ (?P<hour>[0-9]{2}):(?P<min>[0-9]{2}):(?P<sec>[0-9]{2}) \] "
r":: \[ +(?P<status>[A-Z]+) +\] :: Test (?P<role>[a-z0-9_]+)/(?P<test_name>[^ ]+)"
r":: \[ +(?P<status>[A-Z]+) +\] :: Test ((?P<role>[a-z0-9_]+)/)?(?P<test_name>tests_[^ ]+)"
)
test_re = re.compile(test_re_str)
test_role_re_str = r"^::\s+Test role: (?P<role>[a-z0-9_]+)"
test_role_re = re.compile(test_role_re_str)
duration_re = re.compile(r"Duration: ([0-9a-zA-Z_]+)")
start_dt = None
start_data = None
@@ -479,6 +480,7 @@ def parse_beaker_job_log(args, start_dt_utc, taskout_url):
"status": "RUNNING",
"last_test": "N/A",
}
role = None # Upstream-testsuite does not report role name in test_re
for line in get_file_data(args, taskout_url):
match = duration_re.search(line)
if match:
@@ -487,9 +489,17 @@ def parse_beaker_job_log(args, start_dt_utc, taskout_url):
if match:
job_data["status"] = match.group(1)
break
match = test_role_re.match(line)
if match:
role = match.group(1)
match = test_re.search(line)
if match:
data = match.groupdict()
if data["role"] is None:
data["role"] = role
if data["role"] in job_data["roles"] and data["test_name"] in job_data["roles"][data["role"]]:
logging.debug("Already processed result for [%s/%s] - skipping [%s]", data["role"], data["test_name"], line)
continue
if not start_dt:
# figure out TZ offset and set in start_dt
start_dt = start_dt_utc.replace(
@@ -506,7 +516,7 @@ def parse_beaker_job_log(args, start_dt_utc, taskout_url):
start_data = data
elif start_data:
btr = BeakerTestRec(start_dt, start_data, data)
job_data["roles"].setdefault(data["role"], []).append(btr)
job_data["roles"].setdefault(data["role"], {})[data["test_name"]] = btr
if data["status"] == "PASS":
job_data["passed"].append(btr)
else:
@@ -567,8 +577,11 @@ def get_beaker_job_info(args, job):
if key in ("start_time", "finish_time") and task_data[key]:
dt = datetime.datetime.fromisoformat(task_data[key] + "+00:00")
task_data[key] = dt
if task_data["name"].endswith("basic-smoke-test"):
task_data["name"] = "basic-smoke-test"
if task_data["name"].endswith("basic-smoke-test") or task_data["name"].endswith("Upstream-testsuite"):
if task_data["name"].endswith("basic-smoke-test"):
task_data["name"] = "basic-smoke-test"
else:
task_data["name"] = "Upstream-testsuite"
log_urls = []
for log in task.find("logs"):
if hasattr(log, "get"):
@@ -580,13 +593,21 @@ def get_beaker_job_info(args, job):
args, task_data["start_time"], link
)
elif name.startswith("SYSTEM-ROLE-"):
if args.gather_errors:
logging.debug(" Processing test log [%s]", link)
task_data["errors"].extend(
get_errors_from_ansible_log(args, link)
)
log_urls.append(link)
task_data["logs"] = log_urls
if args.gather_errors:
for log in log_urls:
match = SYSTEM_ROLE_LOG_RE.search(log)
if match:
role = match.group("role")
test_name = match.group("test_name")
btr = task_data["job_data"].get("roles", {}).get(role, {}).get(test_name)
if btr and btr.status != "PASS":
logging.debug(" Processing test log [%s]", log)
task_data["errors"].extend(
get_errors_from_ansible_log(args, log)
)

role = None
task_data["avcs"] = {}
for result in task.find_all("result"):
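
For context on the performance change, here is a hypothetical sketch of the lookup the new code in get_beaker_job_info performs: job_data["roles"] now maps role → test name → beaker result record, so a SYSTEM-ROLE ansible log is only downloaded and parsed when its beaker run did not pass. BeakerTestRec is stubbed with SimpleNamespace here, and should_parse and the sample data are invented for illustration.

# Hypothetical illustration of the skip-passed-logs check; the real code uses
# BeakerTestRec objects built by parse_beaker_job_log.
from types import SimpleNamespace

job_data = {
    "roles": {
        "network": {
            "tests_default.yml": SimpleNamespace(status="PASS"),
            "tests_bridge.yml": SimpleNamespace(status="FAIL"),
        }
    }
}

def should_parse(role, test_name):
    # Only parse the ansible log when the beaker run for this test did not pass.
    btr = job_data["roles"].get(role, {}).get(test_name)
    return bool(btr) and btr.status != "PASS"

print(should_parse("network", "tests_default.yml"))  # False - passed, skip
print(should_parse("network", "tests_bridge.yml"))   # True - failed, gather errors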
