Mark browser tests for conditional execution
AugustoMagalhaes authored Jul 31, 2024
1 parent 7cf2563 commit 1e839aa
Showing 8 changed files with 47 additions and 1 deletion.
26 changes: 25 additions & 1 deletion tests/conftest.py
@@ -4,10 +4,34 @@
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import TimeoutException


from tests.data.snake_oil_data import ensembles_response


def pytest_addoption(parser):
    parser.addoption(
        "--skip-browser-tests",
        action="store_true",
        default=False,
        help="This option allows skipping tests that depend on chromedriver",
    )


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "browser_test: mark test as chromedriver dependent"
    )


def pytest_collection_modifyitems(config, items):
    skip_browser_tests = pytest.mark.skip(
        reason="chromedriver missing in PATH or intentionally skipped"
    )
    browser_tests = [item for item in items if "browser_test" in item.keywords]
    if config.getoption("--skip-browser-tests"):
        for item in browser_tests:
            item.add_marker(skip_browser_tests)


def pytest_setup_options():
    options = Options()
    options.add_argument("--headless")
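The three hooks above work together as an opt-in skip mechanism: pytest_configure registers the browser_test marker, tests declare their chromedriver dependency with @pytest.mark.browser_test, and pytest_collection_modifyitems converts every marked test into a skip when the suite is invoked with --skip-browser-tests. A minimal sketch of a test using the new marker (the test name and body are illustrative, not part of this commit):

import pytest


@pytest.mark.browser_test
def test_example_browser_dependent():
    # Collected and run by a plain `pytest` invocation; reported as skipped
    # with reason "chromedriver missing in PATH or intentionally skipped"
    # when invoked as `pytest --skip-browser-tests`.
    assert True

Without the flag, marked tests run exactly as before, so the default behavior of the suite is unchanged.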
3 changes: 3 additions & 0 deletions tests/views/test_ensemble_selector.py
@@ -1,11 +1,13 @@
import dash
import pytest
from webviz_ert.assets import get_color
from webviz_ert.plugins import ParameterComparison
from tests.conftest import select_first, get_options
from tests.data.snake_oil_data import all_ensemble_names
from tests.conftest import setup_plugin, select_ensemble


@pytest.mark.browser_test
def test_ensemble_refresh(
    mock_data,
    dash_duo,
@@ -84,6 +86,7 @@ def test_ensemble_refresh(
    )


@pytest.mark.browser_test
def test_ensemble_color(mock_data, dash_duo):
    plugin = setup_plugin(dash_duo, __name__, ParameterComparison, (630, 1200))

3 changes: 3 additions & 0 deletions tests/views/test_general_stuff.py
@@ -23,6 +23,7 @@
        pytest.param(ResponseCorrelation, False),
    ],
)
@pytest.mark.browser_test
def test_displaying_beta_warning(plugin_class, input: bool, dash_duo):
    plugin = setup_plugin(dash_duo, __name__, plugin_class, beta=input)
    beta_warning_element = dash_duo.find_element("#" + plugin.uuid("beta-warning"))
@@ -46,6 +47,7 @@ def test_displaying_beta_warning(plugin_class, input: bool, dash_duo):
        pytest.param(ResponseCorrelation, [], id="ResponseCorrelation"),
    ],
)
@pytest.mark.browser_test
def test_selectors_visibility_toggle_button(plugin_class, skip, mock_data, dash_duo):
    # we test whether the selector visibility toggle button changes class on
    # all selectors, as expected
@@ -79,6 +81,7 @@ def test_selectors_visibility_toggle_button(plugin_class, skip, mock_data, dash_duo):
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_response_selector_sorting(mock_data, dash_duo):
    plugin = setup_plugin(dash_duo, __name__, ResponseComparison)
    wanted_ensemble_name = "nr_42"
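As the hunks above show, the marker composes with parametrization: @pytest.mark.browser_test stacked under @pytest.mark.parametrize applies to every generated test case. A minimal sketch with hypothetical parameters (not from this commit):

import pytest


@pytest.mark.parametrize("value", [1, 2, 3])
@pytest.mark.browser_test
def test_marker_covers_all_cases(value):
    # Each of the three generated cases carries the browser_test marker,
    # so all of them are skipped under `pytest --skip-browser-tests`.
    assert value > 0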
2 changes: 2 additions & 0 deletions tests/views/test_observation_view.py
@@ -8,6 +8,7 @@
)


@pytest.mark.browser_test
def test_observation_analyzer_view_ensemble_no_observations(
    mock_data,
    dash_duo,
@@ -29,6 +30,7 @@ def test_observation_analyzer_view_ensemble_no_observations(
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_observation_analyzer_view_ensemble_with_observations(
    mock_data,
    dash_duo,
3 changes: 3 additions & 0 deletions tests/views/test_parameter_selector.py
@@ -5,6 +5,7 @@
from tests.conftest import setup_plugin, select_ensemble, select_by_name


@pytest.mark.browser_test
def test_parameter_selector(
    mock_data,
    dash_duo,
@@ -57,6 +58,7 @@ def test_parameter_selector(
    # assert dash_duo.get_logs() == []


@pytest.mark.browser_test
def test_search_input_return_functionality(
    mock_data,
    dash_duo,
@@ -128,6 +130,7 @@ def test_search_input_return_functionality(
    # assert dash_duo.get_logs() == []


@pytest.mark.browser_test
def test_parameter_selector_sorting(
    mock_data,
    dash_duo,
5 changes: 5 additions & 0 deletions tests/views/test_plot_view.py
@@ -1,3 +1,4 @@
import pytest
from webviz_ert.plugins._response_comparison import ResponseComparison
from tests.conftest import (
    setup_plugin,
@@ -8,6 +9,7 @@
)


@pytest.mark.browser_test
def test_plot_view(
    mock_data,
    dash_duo,
@@ -26,6 +28,7 @@ def test_plot_view(
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_clearing_parameters_view(
    mock_data,
    dash_duo,
@@ -60,6 +63,7 @@ def test_clearing_parameters_view(
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_clearing_ensembles_view(
    mock_data,
    dash_duo,
@@ -108,6 +112,7 @@ def test_clearing_ensembles_view(
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_axis_labels(mock_data, dash_duo):
"""test_axis_labels loads two different plots in the plot view and checks
that axes are labelled correctly"""
4 changes: 4 additions & 0 deletions tests/views/test_response_correlation.py
@@ -1,3 +1,4 @@
import pytest
from webviz_ert.plugins._response_correlation import ResponseCorrelation
from tests.conftest import (
    setup_plugin,
@@ -8,6 +9,7 @@
)


@pytest.mark.browser_test
def test_axes_labels(mock_data, dash_duo):
"""test_axis_labels loads two different plots and checks that axes are
labelled correctly"""
@@ -53,6 +55,7 @@ def test_axes_labels(mock_data, dash_duo):
    # assert dash_duo.get_logs() == [], "browser console should contain no error"


@pytest.mark.browser_test
def test_show_respo_with_obs(mock_data, dash_duo):
"""Test response observation filter works as expected"""
plugin = setup_plugin(dash_duo, __name__, ResponseCorrelation)
@@ -70,6 +73,7 @@ def test_show_respo_with_obs(mock_data, dash_duo):
    )


@pytest.mark.browser_test
def test_info_text_appears_as_expected(
    mock_data,
    dash_duo,
2 changes: 2 additions & 0 deletions tests/views/test_state_saving.py
@@ -1,3 +1,4 @@
import pytest
from webviz_ert.plugins import (
    ResponseComparison,
    WebvizErtPluginABC,
@@ -12,6 +13,7 @@
)


@pytest.mark.browser_test
def test_state_saved(mock_data, dash_duo, tmpdir):
    root_path = tmpdir.strpath
    plugin = setup_plugin(
