Skip to content

Commit

Permalink
fixup tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jonathan-eq committed Feb 27, 2025
1 parent 9a5b094 commit e0d02ea
Show file tree
Hide file tree
Showing 2 changed files with 82 additions and 69 deletions.
116 changes: 58 additions & 58 deletions tests/ert/unit_tests/scheduler/test_lsf_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ async def test_submit_sets_stderr():


@pytest.mark.usefixtures("capturing_bsub")
async def test_submit_with_realization_memory_with_bsub_capture(): # JONAK - CAN THIS BE REMOVED?
async def test_submit_with_realization_memory_with_bsub_capture():
driver = LsfDriver()
await driver.submit(0, "sleep", realization_memory=1024**2)
assert "-R rusage[mem=1]" in Path("captured_bsub_args").read_text(encoding="utf-8")
Expand Down Expand Up @@ -1203,63 +1203,6 @@ async def test_submit_with_num_cpu_with_bsub_capture():
assert "-n 4" in Path("captured_bsub_args").read_text(encoding="utf-8")


@pytest.mark.integration_test
@pytest.mark.usefixtures("use_tmpdir")
@pytest.mark.parametrize(
    "driver_submit_option,expected_in_cmd",
    [
        ({"realization_memory": 50 * 1024 * 1024}, "-R rusage[mem=50]"),
        ({"num_cpu": 2}, "-n 2"),
    ],
)
async def test_submit_works_with_submit_options(
    driver_submit_option, expected_in_cmd, job_name, intercept_bsub_input_cmd
):
    # Verify that per-submit options (realization memory, num_cpu) end up as
    # the expected flags on the captured bsub command line.
    # NOTE(review): this hunk is deleted by this commit, and an identical copy
    # is re-added later in the same file — the test is moved, not removed.
    driver = LsfDriver(bsub_cmd=intercept_bsub_input_cmd)
    await driver.submit(
        0,
        "sh",
        "-c",
        "echo foo",
        name=job_name,
        **driver_submit_option,
    )
    # The intercepting bsub stub writes its argv to this file for inspection.
    complete_command_invocation = Path("captured_bsub_args").read_text(encoding="utf-8")
    assert expected_in_cmd in complete_command_invocation


@pytest.mark.integration_test
@pytest.mark.usefixtures("use_tmpdir")
@pytest.mark.parametrize(
    "driver_options,expected_in_cmd",
    [
        (
            {"queue_name": os.getenv("_ERT_TESTS_ALTERNATIVE_QUEUE", "foo_bar_queue")},
            f"-q {os.getenv('_ERT_TESTS_ALTERNATIVE_QUEUE', 'foo_bar_queue')}",
        ),
        ({"project_code": "project_foo"}, "-P project_foo"),
        ({"exclude_hosts": "foo,bar"}, "-R select[hname!='foo' && hname!='bar']"),
        (
            {"resource_requirement": "select[cs && x86_64Linux]"},
            "-R select[cs && x86_64Linux]",
        ),
    ],
)
async def test_submit_works_with_queue_options(
    driver_options, expected_in_cmd, job_name, intercept_bsub_input_cmd
):
    # Verify that driver-level queue options (queue name, project code,
    # excluded hosts, resource requirements) are rendered into the bsub
    # command line.
    # NOTE(review): this hunk is deleted by this commit, and an identical copy
    # is re-added later in the same file — the test is moved, not removed.
    driver = LsfDriver(bsub_cmd=intercept_bsub_input_cmd, **driver_options)
    await driver.submit(
        0,
        "sh",
        "-c",
        "echo foo",
        name=job_name,
    )
    # The intercepting bsub stub writes its argv to this file for inspection.
    complete_command_invocation = Path("captured_bsub_args").read_text(encoding="utf-8")
    assert expected_in_cmd in complete_command_invocation


@pytest.mark.integration_test
async def test_polling_bhist_fallback(not_found_bjobs, caplog, job_name):
caplog.set_level(logging.DEBUG)
Expand Down Expand Up @@ -1426,3 +1369,60 @@ def test_queue_options_are_propagated_from_config_to_bsub(intercept_bsub_input_c
f"""select[{" && ".join(f"hname!='{host_name}'" for host_name in expected_excluded_hosts.split(","))}]"""
in complete_command_invocation
)


@pytest.mark.integration_test
@pytest.mark.usefixtures("use_tmpdir")
@pytest.mark.parametrize(
    "driver_submit_option,expected_in_cmd",
    [
        ({"realization_memory": 50 * 1024 * 1024}, "-R rusage[mem=50]"),
        ({"num_cpu": 2}, "-n 2"),
    ],
)
async def test_submit_works_with_submit_options(
    driver_submit_option, expected_in_cmd, job_name, intercept_bsub_input_cmd
):
    """Per-submit options (realization memory, num_cpu) must show up as the
    expected flags on the captured bsub command line."""
    driver = LsfDriver(bsub_cmd=intercept_bsub_input_cmd)
    # Merge the parametrized option into the submit keyword arguments.
    submit_kwargs = {"name": job_name, **driver_submit_option}
    await driver.submit(0, "sh", "-c", "echo foo", **submit_kwargs)
    # The intercepting bsub stub records its argv in this file.
    bsub_invocation = Path("captured_bsub_args").read_text(encoding="utf-8")
    assert expected_in_cmd in bsub_invocation


@pytest.mark.integration_test
@pytest.mark.usefixtures("use_tmpdir")
@pytest.mark.parametrize(
    "driver_options,expected_in_cmd",
    [
        (
            {"queue_name": os.getenv("_ERT_TESTS_ALTERNATIVE_QUEUE", "foo_bar_queue")},
            f"-q {os.getenv('_ERT_TESTS_ALTERNATIVE_QUEUE', 'foo_bar_queue')}",
        ),
        ({"project_code": "project_foo"}, "-P project_foo"),
        ({"exclude_hosts": "foo,bar"}, "-R select[hname!='foo' && hname!='bar']"),
        (
            {"resource_requirement": "select[cs && x86_64Linux]"},
            "-R select[cs && x86_64Linux]",
        ),
    ],
)
async def test_submit_works_with_queue_options(
    driver_options, expected_in_cmd, job_name, intercept_bsub_input_cmd
):
    """Driver-level queue options (queue name, project code, excluded hosts,
    resource requirements) must be propagated onto the bsub command line."""
    driver = LsfDriver(bsub_cmd=intercept_bsub_input_cmd, **driver_options)
    await driver.submit(0, "sh", "-c", "echo foo", name=job_name)
    # The intercepting bsub stub records its argv in this file.
    bsub_invocation = Path("captured_bsub_args").read_text(encoding="utf-8")
    assert expected_in_cmd in bsub_invocation
35 changes: 24 additions & 11 deletions tests/ert/unit_tests/scheduler/test_slurm_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,7 +463,7 @@ async def test_slurm_uses_sacct(

@pytest.mark.integration_test
@pytest.mark.usefixtures("copy_poly_case")
def test_queue_options_are_propagated_from_config_to_sbatch():
def test_queue_options_are_propagated_from_config_to_sbatch(pytestconfig):
"""
This end to end test is here to verify that queue_options are correctly
propagated all the way from ert config to the cluster.
Expand Down Expand Up @@ -492,12 +492,16 @@ def test_queue_options_are_propagated_from_config_to_sbatch():
QUEUE_SYSTEM SLURM
QUEUE_OPTION SLURM SBATCH {capture_sbatch_cmd.absolute()}
QUEUE_OPTION SLURM PARTITION {expected_partition}
QUEUE_OPTION SLURM INCLUDE_HOST {expected_include_hosts}
QUEUE_OPTION SLURM EXCLUDE_HOST {expected_exclude_hosts}
QUEUE_OPTION SLURM PROJECT_CODE {expected_project_code}
QUEUE_OPTION SLURM MAX_RUNTIME {expected_max_runtime}
NUM_REALIZATIONS 1
"""
+ (
f"""QUEUE_OPTION SLURM INCLUDE_HOST {expected_include_hosts}
QUEUE_OPTION SLURM EXCLUDE_HOST {expected_exclude_hosts}"""
if not pytestconfig.getoption("slurm")
else ""
)
)
)
run_cli(ENSEMBLE_EXPERIMENT_MODE, "--disable-monitoring", "poly.ert")
Expand All @@ -510,9 +514,9 @@ def test_queue_options_are_propagated_from_config_to_sbatch():
f"--mem={_parse_realization_memory_str(expected_realization_memory) // 1024**2}M"
in complete_command_invocation
)

assert f"--nodelist={expected_include_hosts}" in complete_command_invocation
assert f"--exclude={expected_exclude_hosts}" in complete_command_invocation
if not pytestconfig.getoption("slurm"):
assert f"--nodelist={expected_include_hosts}" in complete_command_invocation
assert f"--exclude={expected_exclude_hosts}" in complete_command_invocation
assert (
f"--time={_seconds_to_slurm_time_format(expected_max_runtime)}"
in complete_command_invocation
Expand Down Expand Up @@ -552,20 +556,29 @@ async def test_submit_works_with_submit_options(
@pytest.mark.integration_test
@pytest.mark.usefixtures("use_tmpdir")
@pytest.mark.parametrize(
"driver_options,expected_in_cmd",
"driver_options,expected_in_cmd,only_mocked_sbatch",
[
({"exclude_hosts": "foo_bar"}, "-exclude=foo_bar"),
({"exclude_hosts": "foo_bar"}, "--exclude=foo_bar", True),
({"include_hosts": "foo_bar"}, "--nodelist=foo_bar", True),
(
{"queue_name": os.getenv("_ERT_TESTS_DEFAULT_QUEUE_NAME", "foo_bar_queue")},
f"--partition={os.getenv('_ERT_TESTS_DEFAULT_QUEUE_NAME', 'foo_bar_queue')}",
False,
),
({"max_runtime": 20}, "--time=0:00:20"),
({"project_code": "project_foo"}, "--account=project_foo"),
({"max_runtime": 20}, "--time=0:00:20", False),
({"project_code": "project_foo"}, "--account=project_foo", True),
],
)
async def test_submit_works_with_queue_options(
driver_options, expected_in_cmd, job_name, intercept_sbatch_input_cmd
driver_options,
expected_in_cmd,
only_mocked_sbatch,
job_name,
intercept_sbatch_input_cmd,
pytestconfig,
):
if only_mocked_sbatch and pytestconfig.getoption("--slurm"):
pytest.skip("This test is only for mocked sbatch")
driver = SlurmDriver(sbatch_cmd=intercept_sbatch_input_cmd, **driver_options)
await driver.submit(
0,
Expand Down

0 comments on commit e0d02ea

Please sign in to comment.