diff --git a/tests/ert/unit_tests/forward_model_runner/test_event_reporter.py b/tests/ert/unit_tests/forward_model_runner/test_event_reporter.py
index abd197b4522..b1d15dcf817 100644
--- a/tests/ert/unit_tests/forward_model_runner/test_event_reporter.py
+++ b/tests/ert/unit_tests/forward_model_runner/test_event_reporter.py
@@ -214,6 +214,7 @@ def mock_send(msg):
     assert len(lines) == 0, "expected 0 Job running messages"
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=5)
 @pytest.mark.skipif(
     sys.platform.startswith("darwin"), reason="Performance can be flaky"
diff --git a/tests/ert/unit_tests/forward_model_runner/test_job.py b/tests/ert/unit_tests/forward_model_runner/test_job.py
index f10b8a921c6..0e5859b4fac 100644
--- a/tests/ert/unit_tests/forward_model_runner/test_job.py
+++ b/tests/ert/unit_tests/forward_model_runner/test_job.py
@@ -142,6 +142,7 @@ def max_memory_per_subprocess_layer(layers: int) -> int:
     assert max_seens[1] + memory_per_numbers_list < max_seens[2]
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=3)
 @pytest.mark.usefixtures("use_tmpdir")
 def test_memory_profile_in_running_events():
diff --git a/tests/ert/unit_tests/scheduler/test_lsf_driver.py b/tests/ert/unit_tests/scheduler/test_lsf_driver.py
index 558e45c94dd..e730791eec3 100644
--- a/tests/ert/unit_tests/scheduler/test_lsf_driver.py
+++ b/tests/ert/unit_tests/scheduler/test_lsf_driver.py
@@ -1119,6 +1119,7 @@ async def test_submit_with_resource_requirement_with_bsub_capture():
     assert "hname" not in Path("captured_bsub_args").read_text(encoding="utf-8")
 
 
+@pytest.mark.integration_test
 @pytest.mark.usefixtures("use_tmpdir")
 async def test_submit_with_num_cpu(pytestconfig, job_name):
     if not pytestconfig.getoption("lsf"):
diff --git a/tests/ert/unit_tests/scheduler/test_scheduler.py b/tests/ert/unit_tests/scheduler/test_scheduler.py
index 051da08fa03..ab651f5a1cc 100644
--- a/tests/ert/unit_tests/scheduler/test_scheduler.py
+++ b/tests/ert/unit_tests/scheduler/test_scheduler.py
@@ -479,6 +479,7 @@ async def wait(iens):
     assert killed_iens == [6, 7, 8, 9]
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=5)
 @pytest.mark.parametrize(
     "submit_sleep, iens_stride, realization_runtime",
@@ -525,6 +526,7 @@ async def wait():
     assert max(deltas) <= submit_sleep + 0.1
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=5)
 @pytest.mark.parametrize(
     "submit_sleep, realization_max_runtime, max_running",
diff --git a/tests/ert/unit_tests/scheduler/test_slurm_driver.py b/tests/ert/unit_tests/scheduler/test_slurm_driver.py
index 49a41632899..f239e634745 100644
--- a/tests/ert/unit_tests/scheduler/test_slurm_driver.py
+++ b/tests/ert/unit_tests/scheduler/test_slurm_driver.py
@@ -350,6 +350,7 @@ async def test_submit_with_num_cpu(pytestconfig, job_name):
     assert Path("test").read_text(encoding="utf-8") == "test\n"
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=3)
 async def test_kill_before_submit_is_finished(
     tmp_path, monkeypatch, caplog, pytestconfig
diff --git a/tests/ert/unit_tests/simulator/test_batch_sim.py b/tests/ert/unit_tests/simulator/test_batch_sim.py
index b8f7cd33516..591467d1969 100644
--- a/tests/ert/unit_tests/simulator/test_batch_sim.py
+++ b/tests/ert/unit_tests/simulator/test_batch_sim.py
@@ -354,6 +354,7 @@ def test_batch_simulation_suffixes(batch_sim_example, storage):
     assert act == pytest.approx(exp)
 
 
+@pytest.mark.integration_test
 @pytest.mark.flaky(reruns=3)  # https://github.com/equinor/ert/issues/7309
 @pytest.mark.timeout(10)
 def test_stop_sim(copy_case, storage):