Commit
[health-check] Apply linting and formatting rules
m-czernek authored Feb 6, 2025
1 parent d5adc88 commit 1bdc8b6
Showing 18 changed files with 381 additions and 522 deletions.
4 changes: 4 additions & 0 deletions health-check/.gitignore
@@ -10,3 +10,7 @@ __pycache__
**/config/grafana/dashboards/supportconfig_with_logs.json

.vscode/

# Generated files
**/exporters/config.yml
**/exporters/metrics
33 changes: 27 additions & 6 deletions health-check/src/uyuni_health_check/config.py
@@ -1,3 +1,9 @@
"""
Module that contains functionality related to reading `config.toml`
and getting paths used for configuration, templating, or building
containers
"""

import functools
import os
from typing import Any, Dict, List
@@ -10,12 +16,16 @@
CONFIG_DIR = os.path.join(BASE_DIR, "config")
TEMPLATES_DIR = os.path.join(CONFIG_DIR, "templates")
CONTAINERS_DIR = os.path.join(BASE_DIR, "containers")
CONFIG_TOML_PATH = os.environ.get("HEALTH_CHECK_TOML", os.path.join(BASE_DIR, "config.toml"))
CONFIG_TOML_PATH = os.environ.get(
"HEALTH_CHECK_TOML", os.path.join(BASE_DIR, "config.toml")
)


@functools.lru_cache
def _init_jinja_env() -> jinja2.Environment:
return jinja2.Environment(loader=jinja2.FileSystemLoader(TEMPLATES_DIR))


@functools.lru_cache
def parse_config() -> Dict:
if not os.path.exists(CONFIG_TOML_PATH):
@@ -25,56 +35,67 @@ def parse_config() -> Dict:
conf = tomli.load(f)
return conf


def get_json_template_filepath(json_relative_path: str) -> str:
return os.path.join(TEMPLATES_DIR, json_relative_path)


def load_jinja_template(template: str) -> jinja2.Template:
return _init_jinja_env().get_template(template)


def load_dockerfile_dir(dockerfile_dir: str) -> str:
return os.path.join(CONTAINERS_DIR, dockerfile_dir)


def get_config_dir_path(component: str) -> str:
return os.path.join(CONFIG_DIR, component)

def load_prop(property: str) -> Any:

def load_prop(property_path: str) -> Any:
res = parse_config().copy()
for prop_part in property.split('.'):
for prop_part in property_path.split("."):
try:
res = res[prop_part]
except Exception as e:
raise ValueError(
f"Invalid config lookup ({property}); trying to get {prop_part} from {res}"
f"Invalid config lookup ({property_path}); trying to get {prop_part} from {res}"
) from e
return res


def write_config(component: str, config_file_path: str, content: str, is_json=False):
basedir = Path(get_config_dir_path(component))
if not basedir.exists():
basedir.mkdir(parents=True)
file_path = os.path.join(basedir, config_file_path)
with open(file_path, "w") as file:
with open(file_path, "w", encoding="UTF-8") as file:
if is_json:
json.dump(content, file, indent=4)
else:
file.write(content)


def get_config_file_path(component):
return os.path.join(get_config_dir_path(component), "config.yaml")


def get_sources_dir(component):
return os.path.join(BASE_DIR, component)


def get_grafana_config_dir():
return os.path.join(CONFIG_DIR, "grafana")


def get_prometheus_config_dir():
return os.path.join(CONFIG_DIR, "prometheus")


def get_all_container_image_names() -> List[str]:
res = []
conf = parse_config().copy()
for section in conf.values():
if "image" in section:
res.append(section.get("image"))
return res
return res
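
For reference, a minimal sketch of the dotted-path lookup that load_prop() performs, replayed over a plain dict so it runs standalone (the dict literal below is an illustrative stand-in for the parsed config.toml, not the full file):

# Minimal sketch of load_prop()'s dotted-path lookup, using a plain dict
# in place of the parsed config.toml (the dict contents are illustrative).
from typing import Any, Dict

def lookup(conf: Dict, property_path: str) -> Any:
    res: Any = conf
    for prop_part in property_path.split("."):
        try:
            res = res[prop_part]
        except Exception as e:
            raise ValueError(
                f"Invalid config lookup ({property_path}); "
                f"trying to get {prop_part} from {res}"
            ) from e
    return res

conf = {"exporter": {"container_name": "health_check_supportconfig_exporter"}}
print(lookup(conf, "exporter.container_name"))  # health_check_supportconfig_exporter
try:
    lookup(conf, "exporter.image")  # key missing in this toy dict
except ValueError as err:
    print(err)
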
4 changes: 0 additions & 4 deletions health-check/src/uyuni_health_check/config.toml
@@ -10,10 +10,6 @@ container_name = "health_check_loki"
jobs = ["cobbler", "postgresql", "rhn", "apache"]
image = "docker.io/grafana/loki"

[logcli]
logcli_container_name = "uyuni_health_check_logcli"
logcli_image_name = "logcli"

[exporter]
container_name = "health_check_supportconfig_exporter"
image = "localhost/supportconfig-exporter"

This file was deleted.

20 changes: 17 additions & 3 deletions health-check/src/uyuni_health_check/containers/manager.py
@@ -1,3 +1,5 @@
"""Module that contains podman-related functionality"""

from typing import List
from uyuni_health_check import config
from uyuni_health_check.utils import run_command, console
@@ -12,12 +14,18 @@ def podman(cmd: List[str], verbose=False, raise_exc=True) -> List:
return run_command(["podman"] + cmd, verbose, raise_exc)


def build_image(name: str, containerfile_path: str, build_args: List[str] | None = None, verbose: bool = False) -> None:
def build_image(
name: str,
containerfile_path: str,
build_args: List[str] | None = None,
verbose: bool = False,
) -> None:
"""
Build a container image
"""
podman_args = ["build", "-t", f"{name}"]
if build_args:
# pylint: disable-next=expression-not-assigned
[podman_args.append(f'--build-arg="{param}"') for param in build_args]
podman_args.append(containerfile_path)

@@ -28,15 +36,21 @@ def image_exists(image):
"""
Check if the image is present in podman images result
"""
stdout, _, _ = podman(["images", "--quiet", "-f", f"reference={image}"], verbose=False, raise_exc=False)
stdout, _, _ = podman(
["images", "--quiet", "-f", f"reference={image}"],
verbose=False,
raise_exc=False,
)
return stdout.strip() != ""


def network_exists(network):
"""
Check if the podman network is up and running
"""
_, _, returncode = podman(["network", "exists", f"{network}"], verbose=False, raise_exc=False)
_, _, returncode = podman(
["network", "exists", f"{network}"], verbose=False, raise_exc=False
)
return returncode == 0


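The helpers above all delegate to run_command, which is not part of this diff; a hedged sketch of an equivalent built directly on subprocess, assuming the (stdout, stderr, returncode) tuple shape implied by the unpacking in image_exists and network_exists:

# Hedged sketch: equivalents of image_exists()/network_exists() using
# subprocess directly. run_command itself is not in this diff; the middle
# tuple element is assumed to be stderr, inferred from the unpacking above.
import subprocess
from typing import List, Tuple

def run_podman(cmd: List[str]) -> Tuple[str, str, int]:
    proc = subprocess.run(
        ["podman"] + cmd, capture_output=True, text=True, check=False
    )
    return proc.stdout, proc.stderr, proc.returncode

def image_exists_sketch(image: str) -> bool:
    stdout, _, _ = run_podman(["images", "--quiet", "-f", f"reference={image}"])
    return stdout.strip() != ""

def network_exists_sketch(network: str) -> bool:
    _, _, returncode = run_podman(["network", "exists", network])
    return returncode == 0
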
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
"""Check when Promptail has finished processing logs"""

import os
import re
@@ -8,10 +9,10 @@
import logging


path_list= ""
path_list = ""
positions_file = "/tmp/positions.yaml"

logging.basicConfig(filename='/var/log/complete_checker.log', level=logging.INFO)
logging.basicConfig(filename="/var/log/complete_checker.log", level=logging.INFO)
logger = logging.getLogger(__name__)


@@ -23,66 +24,64 @@ def complete() -> bool:
time.sleep(1)
logger.info("the positions file is present!")


while True:
with open(positions_file) as f:
with open(positions_file, encoding="UTF-8") as f:
logger.info("before opening positions file")
data = f.read()
#fpath_pos_list = re.findall(r'([\w\/\.-]+\.log(?:\.gz)?)\s*:\s*"(\d+)"', data)
fpath_pos_list = re.findall(r'([\w\/\.-]+\.log)\s*:\s*"(\d+)"', data)
logger.info(f"matches in path and pos list: {fpath_pos_list}")
logger.info("matches in path and pos list: %s", fpath_pos_list)
if fpath_pos_list:
break
time.sleep(5)
with open(positions_file) as f:

with open(positions_file, encoding="UTF-8") as f:
for fpath_size in fpath_pos_list:
log_file_path = fpath_size[0]
log_file_pos = int(fpath_size[1])
file_size = os.path.getsize(log_file_path)
if log_file_pos != file_size:
logging.info(f"Final of file not reached yet for: {log_file_path}")
logging.info("Final of file not reached yet for: %s", log_file_path)
return False

logging.info("Promtail completed processing!")
return True

def push_flag_to_loki(loki_url="http://health_check_loki:3100", job_name="promtail-complete-job", flag="complete"):

def push_flag_to_loki(
loki_url="http://health_check_loki:3100",
job_name="promtail-complete-job",
flag="complete",
):

log_entry = {
"streams": [
{
"stream": {
"job": job_name,
"flag": flag
},
"values": [
[str(int(time.time() * 1e9)), "Promtail finished!d"]
]
"stream": {"job": job_name, "flag": flag},
"values": [[str(int(time.time() * 1e9)), "Promtail finished!d"]],
}
]
}

response = requests.post(
f"{loki_url}/loki/api/v1/push",
headers={"Content-Type": "application/json"},
data=json.dumps(log_entry)
data=json.dumps(log_entry),
)

if response.status_code == 204:
print("Flag log successfully pushed to Loki.")
else:
print("Failed to push log to Loki:", response.text)


if __name__ == "__main__":

logging.basicConfig(filename='/var/log/complete_checker.log', level=logging.INFO)
logging.basicConfig(filename="/var/log/complete_checker.log", level=logging.INFO)
logger = logging.getLogger(__name__)
logger.info('Started')
while(1):
logger.info("Started")
while True:
if complete():
break
time.sleep(10)

push_flag_to_loki()
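
As an illustration of what complete() pulls out of Promtail's positions file, here is the same regex applied to a made-up positions.yaml fragment (the paths and offsets are placeholders, not real supportconfig data):

# Illustration only: the regex from complete() applied to a fabricated
# positions.yaml fragment. Note it matches *.log paths but not rotated
# *.log.gz files (the commented-out variant above would also catch those).
import re

sample = '''
positions:
  /var/log/rhn/rhn_web_ui.log: "1024"
  /var/log/cobbler/cobbler.log: "2048"
'''
print(re.findall(r'([\w\/\.-]+\.log)\s*:\s*"(\d+)"', sample))
# [('/var/log/rhn/rhn_web_ui.log', '1024'), ('/var/log/cobbler/cobbler.log', '2048')]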

19 changes: 14 additions & 5 deletions health-check/src/uyuni_health_check/containers/promtail/run.py
@@ -1,19 +1,28 @@
#!/usr/bin/env python3.11
"""Manage the Promptail process"""

import subprocess
import time
import os
import signal


def is_process_running(process_name):
try:
subprocess.run(["pgrep", "-f", process_name], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
subprocess.run(
["pgrep", "-f", process_name],
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
return True
except subprocess.CalledProcessError:
return False


def launch_process(command):
return subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return subprocess.Popen(
command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)


promtail_command = "promtail --config.file=/etc/promtail/config.yml"
promtail_process = launch_process(promtail_command)
@@ -25,6 +34,6 @@ def launch_process(command):
if not is_process_running("promtail"):
print("Promtail process is not running. Relaunching...")
promtail_process = launch_process(promtail_command)

# Delay between checks to prevent constant CPU usage
time.sleep(10)
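
The middle of the watchdog loop is collapsed in the hunk above; a hedged sketch of the overall pattern, assuming the hidden lines only open the while True loop around the check that is shown:

# Hedged sketch of the watchdog pattern in run.py: start Promtail, then poll
# with pgrep and relaunch it if it disappears. The elided lines in the diff
# may differ in detail.
import subprocess
import time

PROMTAIL_COMMAND = "promtail --config.file=/etc/promtail/config.yml"

def is_process_running(process_name: str) -> bool:
    try:
        subprocess.run(
            ["pgrep", "-f", process_name],
            check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        )
        return True
    except subprocess.CalledProcessError:
        return False

process = subprocess.Popen(PROMTAIL_COMMAND, shell=True)
while True:
    if not is_process_running("promtail"):
        print("Promtail process is not running. Relaunching...")
        process = subprocess.Popen(PROMTAIL_COMMAND, shell=True)
    time.sleep(10)  # delay between checks to prevent constant CPU usage
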
16 changes: 7 additions & 9 deletions health-check/src/uyuni_health_check/exporters/exporter.py
@@ -1,3 +1,5 @@
"""Module that manages the supportconfig exporter container"""

from uyuni_health_check import config
from uyuni_health_check.utils import console
from uyuni_health_check.containers.manager import (
@@ -10,12 +12,12 @@

def prepare_exporter(supportconfig_path: str, verbose: bool):
"""
Build the prometheus exporter image and deploy it on the server
Build the exporter image and deploy it on the server
:param server: the Uyuni server to deploy the exporter on
"""
exporter_name = config.load_prop('exporter.container_name')
image = config.load_prop('exporter.image')
exporter_name = config.load_prop("exporter.container_name")
image = config.load_prop("exporter.image")
console.log("[bold]Deploying supportconfig exporter")

if container_is_running(f"{exporter_name}"):
@@ -26,11 +28,7 @@ def prepare_exporter(supportconfig_path: str, verbose: bool):

if not image_exists(image):
console.log(f"[bold]Building {image} image")
build_image(
image,
config.load_dockerfile_dir("exporter"),
verbose=verbose
)
build_image(image, config.load_dockerfile_dir("exporter"), verbose=verbose)
console.log(f"[green]The {image} image was built successfully")

console.log(f"[bold]Deploying {exporter_name} container")
@@ -39,7 +37,7 @@ def prepare_exporter(supportconfig_path: str, verbose: bool):
"--replace",
"--detach",
"--network",
config.load_prop('podman.network_name'),
config.load_prop("podman.network_name"),
"--publish",
"9000:9000",
"--volume",
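
prepare_exporter also calls container_is_running, whose definition sits in containers/manager.py outside the visible hunks; the following is only a guess at its shape, modeled on image_exists above (the podman ps filter used here is an assumption, not the repository's actual code):

# Assumption: a container_is_running() helper analogous to image_exists(),
# filtering `podman ps` output by container name. The real implementation in
# containers/manager.py may differ.
import subprocess

def container_is_running(name: str) -> bool:
    proc = subprocess.run(
        ["podman", "ps", "--quiet", "--filter", f"name={name}"],
        capture_output=True, text=True, check=False,
    )
    return proc.stdout.strip() != ""

if __name__ == "__main__":
    print(container_is_running("health_check_supportconfig_exporter"))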