diff --git a/auxiliary_tools/cdat_regression_testing/562-index-html/562_index_html.cfg b/auxiliary_tools/cdat_regression_testing/562-index-html/562_index_html.cfg
new file mode 100644
index 000000000..30c76caa5
--- /dev/null
+++ b/auxiliary_tools/cdat_regression_testing/562-index-html/562_index_html.cfg
@@ -0,0 +1,13 @@
+[#]
+sets = ["polar"]
+case_id = "GPCP_v3.2"
+variables = ["PRECT"]
+ref_name = "GPCP_v3.2"
+reference_name = "GPCP v3.2"
+seasons = ["ANN", "DJF", "MAM", "JJA", "SON"]
+regions = ["polar_S"]
+test_colormap = "WhiteBlueGreenYellowRed.rgb"
+reference_colormap = "WhiteBlueGreenYellowRed.rgb"
+diff_colormap = "BrBG"
+contour_levels = [0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5]
+diff_levels = [-2, -1.5, -1, -0.75, -0.5, -0.25, 0.25, 0.5, 0.75, 1, 1.5, 2]
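
The cfg keys above are read by e3sm_diags as attributes of the same names on CoreParameter. A minimal sketch of the equivalent in-Python setup, assuming direct attribute assignment is interchangeable with cfg parsing:

    # Sketch only: mirrors the cfg block above as CoreParameter attributes.
    from e3sm_diags.parameter.core_parameter import CoreParameter

    param = CoreParameter()
    param.sets = ["polar"]
    param.case_id = "GPCP_v3.2"
    param.variables = ["PRECT"]
    param.ref_name = "GPCP_v3.2"
    param.reference_name = "GPCP v3.2"
    param.seasons = ["ANN", "DJF", "MAM", "JJA", "SON"]
    param.regions = ["polar_S"]
    param.test_colormap = "WhiteBlueGreenYellowRed.rgb"
    param.reference_colormap = "WhiteBlueGreenYellowRed.rgb"
    param.diff_colormap = "BrBG"
    param.contour_levels = [0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5]
    param.diff_levels = [-2, -1.5, -1, -0.75, -0.5, -0.25, 0.25, 0.5, 0.75, 1, 1.5, 2]
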
diff --git a/auxiliary_tools/cdat_regression_testing/562-index-html/run_script.py b/auxiliary_tools/cdat_regression_testing/562-index-html/run_script.py
new file mode 100644
index 000000000..a1e0a22cf
--- /dev/null
+++ b/auxiliary_tools/cdat_regression_testing/562-index-html/run_script.py
@@ -0,0 +1,11 @@
+# python -m auxiliary_tools.cdat_regression_testing.562-index-html.run_script
+from auxiliary_tools.cdat_regression_testing.base_run_script import run_set
+
+SET_NAME = "polar"
+SET_DIR = "562-index-html"
+# CFG_PATH: str | None = None
+CFG_PATH: str | None = "auxiliary_tools/cdat_regression_testing/562-index-html/562_index_html.cfg"
+MULTIPROCESSING = False
+
+# %%
+run_set(SET_NAME, SET_DIR, CFG_PATH, MULTIPROCESSING)
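
run_set lives in the repo's base_run_script helper and is not shown in this diff. As a rough, hedged sketch of what driving the same "polar" set through the public Run API looks like (paths are placeholders; the cfg is normally passed on the command line via `e3sm_diags -d <cfg>`):

    # Sketch only: not the actual body of run_set.
    from e3sm_diags.parameter.core_parameter import CoreParameter
    from e3sm_diags.run import runner

    param = CoreParameter()
    # Placeholder paths; the real regression runs use machine-specific data.
    param.test_data_path = "/path/to/obs/climatology"
    param.reference_data_path = "/path/to/obs/climatology"
    param.results_dir = "/path/to/results/562-index-html"
    param.multiprocessing = False

    runner.sets_to_run = ["polar"]
    runner.run_diags([param])
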
diff --git a/e3sm_diags/e3sm_diags_driver.py b/e3sm_diags/e3sm_diags_driver.py
index b1726718c..e3e8f1649 100644
--- a/e3sm_diags/e3sm_diags_driver.py
+++ b/e3sm_diags/e3sm_diags_driver.py
@@ -141,26 +141,23 @@ def save_provenance(results_dir, parser):
if not os.path.exists(results_dir):
os.makedirs(results_dir, 0o755)
-    # Create a PHP file to list the contents of the prov dir.
-    php_path = os.path.join(results_dir, "index.php")
-    with open(php_path, "w") as f:
-        contents = """
-        <?php
-        $path = ".";
-        $dh = opendir($path);
-        $i = 1;
-        while (($file = readdir($dh)) !== false) {
-            if ($file != "." && $file != "..") {
-                echo "<a href='$path/$file'>$file</a><br>";
-                $i++;
-            }
-        }
-        closedir($dh);
-        ?>
-        """
-        f.write(contents)
+ # Create an HTML file to list the contents of the prov dir.
+ index_html_path = os.path.join(results_dir, "index.html")
+
+ with open(index_html_path, "w") as f:
+        f.write("<html><body><h1>Provenance Files</h1><ul>")
+
+        for file_name in os.listdir(results_dir):
+            file_path = os.path.join(results_dir, file_name)
+            if os.path.isfile(file_path):
+                f.write(
+                    f'<li><a href="{file_name}" target="_blank">{file_name}</a></li>'
+                )
+
+        f.write("</ul></body></html>")
+
+ logger.info("Created provenance index HTML file at: {}".format(index_html_path))
+
try:
_save_env_yml(results_dir)
except Exception:
@@ -388,11 +385,7 @@ def main(parameters=[]) -> List[CoreParameter]: # noqa B006
if parameters_results[0].no_viewer:
logger.info("Viewer not created because the no_viewer parameter is True.")
else:
- path = os.path.join(parameters_results[0].results_dir, "viewer")
- if not os.path.exists(path):
- os.makedirs(path)
-
- index_path = create_viewer(path, parameters_results)
+ index_path = create_viewer(parameters_results)
logger.info("Viewer HTML generated at {}".format(index_path))
# Validate actual and expected parameters are aligned
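
Unlike the PHP page it replaces, which listed the directory on every request, the new provenance index is a static snapshot written once by save_provenance, so it can be opened without a web server. A self-contained sketch of the same listing pattern (directory and file names are illustrative only):

    # Sketch of the static-listing pattern used above, applied to any directory.
    import os

    def write_file_listing(dir_path: str, title: str = "Provenance Files") -> str:
        """Write an index.html that links every regular file in dir_path."""
        index_path = os.path.join(dir_path, "index.html")
        with open(index_path, "w") as f:
            f.write(f"<html><body><h1>{title}</h1><ul>")
            for name in sorted(os.listdir(dir_path)):
                if os.path.isfile(os.path.join(dir_path, name)):
                    f.write(f'<li><a href="{name}" target="_blank">{name}</a></li>')
            f.write("</ul></body></html>")
        return index_path

    # e.g. write_file_listing("model_vs_obs/prov") produces roughly the page
    # that save_provenance now writes.
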
diff --git a/e3sm_diags/viewer/core_viewer.py b/e3sm_diags/viewer/core_viewer.py
index 1e29930b7..2bf3e9f65 100644
--- a/e3sm_diags/viewer/core_viewer.py
+++ b/e3sm_diags/viewer/core_viewer.py
@@ -100,7 +100,7 @@ def generate_page(self):
)
return url
- raise RuntimeError("Error geneating the page.")
+ raise RuntimeError("Error generating the page.")
def generate_viewer(self, prompt_user=True):
"""Generate the webpage and ask the user if they want to see it."""
diff --git a/e3sm_diags/viewer/main.py b/e3sm_diags/viewer/main.py
index 2410a877b..a813b788e 100644
--- a/e3sm_diags/viewer/main.py
+++ b/e3sm_diags/viewer/main.py
@@ -1,10 +1,12 @@
import collections
import os
+from typing import List
from bs4 import BeautifulSoup
import e3sm_diags
from e3sm_diags.logger import custom_logger
+from e3sm_diags.parameter.core_parameter import CoreParameter
from . import (
aerosol_budget_viewer,
@@ -72,6 +74,7 @@ def insert_data_in_row(row_obj, name, url):
td = soup.new_tag("td")
a = soup.new_tag("a")
a["href"] = url
+ a["target"] = "_blank" # Open link in a new tab
a.string = name
td.append(a)
row_obj.append(td)
@@ -113,11 +116,17 @@ def insert_data_in_row(row_obj, name, url):
return output
-def create_viewer(root_dir, parameters):
+def create_viewer(parameters: List[CoreParameter]) -> str:
"""
-    Based of the parameters, find the files with the
-    certain extension and create the viewer in root_dir.
+    Based on the parameters, find the files with the
+    expected extension and create the viewer under the results directory.
"""
+ root_dir = parameters[0].results_dir
+ viewer_dir = os.path.join(root_dir, "viewer")
+
+ if not os.path.exists(viewer_dir):
+ os.makedirs(viewer_dir)
+
# Group each parameter object based on the `sets` parameter.
set_to_parameters = collections.defaultdict(list)
for param in parameters:
@@ -127,11 +136,12 @@ def create_viewer(root_dir, parameters):
# A list of (title, url) tuples that each viewer generates.
# This is used to create the main index.
title_and_url_list = []
+
# Now call the viewers with the list of parameters as the arguments.
for set_name, parameters in set_to_parameters.items():
- logger.info(f"{set_name} {root_dir}")
+ logger.info(f"{set_name} {viewer_dir}")
viewer_function = SET_TO_VIEWER[set_name]
- result = viewer_function(root_dir, parameters)
+ result = viewer_function(viewer_dir, parameters)
logger.info(result)
title_and_url_list.append(result)
@@ -139,7 +149,38 @@ def create_viewer(root_dir, parameters):
prov_tuple = ("Provenance", "../prov")
title_and_url_list.append(prov_tuple)
- index_url = create_index(root_dir, title_and_url_list)
- utils.add_header(root_dir, index_url, parameters)
+ index_url = create_index(viewer_dir, title_and_url_list)
+ _create_root_index(root_dir, index_url)
+
+ utils.add_header(viewer_dir, index_url, parameters)
return index_url
+
+
+def _create_root_index(root_dir: str, viewer_index_url: str):
+ """Create a root level `index.html` file that redirects to the viewer index.
+
+ Parameters
+ ----------
+ root_dir : str
+ The root directory.
+    viewer_index_url : str
+        The URL to the viewer `index.html` file.
+ """
+ root_index_path = os.path.join(root_dir, "index.html")
+ relative_viewer_index_url = os.path.relpath(viewer_index_url, root_dir)
+ root_soup = BeautifulSoup(
+        f"""
+        <html>
+            <head>
+                <meta http-equiv="refresh" content="0; url={relative_viewer_index_url}" />
+            </head>
+            <body></body>
+        </html>
+        """,
+ "lxml",
+ )
+
+ # Write the root index file
+ with open(root_index_path, "wb") as f:
+ f.write(root_soup.prettify("utf-8"))
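
With these changes the output tree gets a browser entry point at every level. A hedged usage sketch (calling create_viewer directly is redundant when main() already built the viewer, but it shows the new call signature and the resulting layout):

    # Sketch only: regenerate the viewer pages for a completed run.
    from e3sm_diags.e3sm_diags_driver import main
    from e3sm_diags.viewer.main import create_viewer

    params = main()  # runs the diagnostics; returns List[CoreParameter]
    viewer_index = create_viewer(params)

    # Resulting layout (illustrative):
    #   <results_dir>/index.html         -> redirect written by _create_root_index
    #   <results_dir>/viewer/index.html  -> main viewer page (path returned above)
    #   <results_dir>/prov/index.html    -> static provenance listing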