diff --git a/packages/playground/tests/frontend_selenium/selenium.md b/packages/playground/tests/frontend_selenium/selenium.md
index 92ab10f270..b2617ec4fd 100644
--- a/packages/playground/tests/frontend_selenium/selenium.md
+++ b/packages/playground/tests/frontend_selenium/selenium.md
@@ -7,17 +7,52 @@
 | [requests](https://pypi.org/project/requests/) | `2.31.0` |
 | [selenium](https://pypi.org/project/selenium/) | `4.10.0` |
 | [PyVirtualDisplay](https://pypi.org/project/PyVirtualDisplay/) | `3.0` |
-| [webdriver-manager](https://pypi.org/project/webdriver-manager/) | `3.9.1` |
+| [webdriver-manager](https://pypi.org/project/webdriver-manager/) | `4.0.2` |
 
 ## Running selenium
 
-### First
+There are two ways to run the automated Selenium tests:
+
+- Run the tests against the local dashboard environment, which is installed from the latest commit on the development branch.
+
+- Run the tests against any live environment (Devnet, QAnet, Testnet, or Mainnet).
+
+### First (Run tests against the local dashboard)
+
+#### Prepare the dashboard locally
+
+- In the root directory `tfgrid-sdk-ts/`, run:
+
+  ```bash
+  yarn install
+  yarn lerna run build
+  make run project=playground
+  ```
+
+- Leave the localhost server running and open a new terminal.
+
+### Second (Run tests against any live environment)
+
+#### Update the config files to point to the target environment
+
+- Update the environment URL and any other relevant variables directly in the files `config.ini` and `base.py`.
+- For example, to run against Mainnet:
+
+  - `config.ini`
+
+    ```ini
+    [Base]
+    net = main
+    ```
+
+  - `base.py`
+
+    ```python
+    base_url = "https://dashboard.grid.tf/"
+    ```
+
+### Set up environment configurations
 
-- In the root directory, run `yarn install`, then `yarn lerna run build`, and finally `make run project=playground`.
-- Change directory to frontend selenium by running `cd packages/playground/tests/frontend_selenium/` in the command line.
-- Install the recommended version of the pip package listed above for a stable run, or you can just install Python 3 and use the command:
-  - `pip install -r requirements.txt --break-system-packages` (Use this if you don't use any of the listed packages).
-  - Or use Virtual Environments: First, create an environment using `python -m venv myenv`, then activate it using `source myenv/bin/activate`, and finally, install packages using `pip install -r requirements.txt`.
 - Add your configuration either in [config.ini](../frontend_selenium/Config.ini) or by exporting `TFCHAIN_MNEMONICS`, `TFCHAIN_NODE_MNEMONICS`, `STELLAR_ADDRESS`, and `EMAIL`.
 - Description of config under `Base` section:
   - `port`: the port that the localhost is running on.
@@ -28,16 +63,19 @@
   - `address`: a stellar address with a TFT trustline that will be used in TFT bridge-related tests.
   - `email`: a valid email that will be used for all the automated tests.
 - If the port in serve changes from `5173` for any reason, you should update the `port` under the `Base` section in [config.ini](../frontend_selenium/Config.ini) to reflect the new value.
-- You'll also need to install `Xvfb`, Run `sudo apt install xvfb`.
 
-### Second
+### Prepare test requirements
 
-- You need to leave the localhost running and open a new terminal.
+- Change directory to the frontend Selenium tests by running `cd packages/playground/tests/frontend_selenium/` in the command line.
+- Install the recommended versions of the pip packages listed above for a stable run, or just install Python 3 and use one of the following:
+  - `pip install -r requirements.txt --break-system-packages` (use this if you don't use any of the listed packages).
+  - Or use a virtual environment: first create one using `python -m venv myenv`, then activate it using `source myenv/bin/activate`, and finally install the packages using `pip install -r requirements.txt`.
+- You'll also need to install `Xvfb`: run `sudo apt install xvfb`.
 - You can run selenium tests with pytest through the command line using `python3 -m pytest -v`.
 
 ### More options to run tests
 
-- If you want to run the tests visually to see how they are running, you need to comment out the lines `16` and `33` in the [conftest.py](../frontend_selenium/tests/conftest.py).
+- If you want to run the tests visually and watch them as they execute, comment out lines `16` and `34` in [conftest.py](../frontend_selenium/tests/conftest.py).
 - You can also run single test file through the command line using `python3 -m pytest -v tests/file/test_file.py`.
 - You can also run specific test cases through the command line using `python3 -m pytest -v tests/file/test_file.py::test_func`.
 - You can also run collection of test cases through the command line using `python3 -m pytest -v -k 'test_func or test_func'`.
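The configuration step above says settings can come either from `Config.ini` or from exported environment variables (`TFCHAIN_MNEMONICS`, `TFCHAIN_NODE_MNEMONICS`, `STELLAR_ADDRESS`, `EMAIL`). The repository's actual loader is not shown in this patch; the sketch below is a rough illustration only, and the function name, section, and key are assumptions:

```python
import configparser
import os

def load_setting(env_var: str, section: str, key: str, path: str = "Config.ini") -> str:
    """Illustrative only: prefer an exported environment variable,
    otherwise fall back to the value stored in Config.ini."""
    value = os.environ.get(env_var)
    if value:
        return value
    config = configparser.ConfigParser()
    config.read(path)
    return config[section][key]

# Hypothetical usage (section/key names are placeholders, not taken from the repo):
# seed = load_setting("TFCHAIN_MNEMONICS", "Base", "seed")
```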
diff --git a/packages/playground/tests/frontend_selenium/tests/TFChain/test_homepage.py b/packages/playground/tests/frontend_selenium/tests/TFChain/test_homepage.py
index 4433076b39..217620918e 100644
--- a/packages/playground/tests/frontend_selenium/tests/TFChain/test_homepage.py
+++ b/packages/playground/tests/frontend_selenium/tests/TFChain/test_homepage.py
@@ -24,7 +24,11 @@ def test_validate_homepage_links(browser):
     """
     dashboard_page = before_test_setup(browser)
     assert dashboard_page.navigate_to_find_more() == ('https://threefold.io/')
-    assert dashboard_page.navigate_to_explore_capacity() == ( 'https://stats.' + Base.net + '.grid.tf/')
+    if Base.net == 'main':
+        stats_url = 'https://stats.grid.tf/'
+    else:
+        stats_url = 'https://stats.' + Base.net + '.grid.tf/'
+    assert dashboard_page.navigate_to_explore_capacity() == ( stats_url )
     assert dashboard_page.navigate_to_learn_about_grid() == ('https://www.manual.grid.tf/')
 
 
diff --git a/packages/playground/tests/frontend_selenium/tests/conftest.py b/packages/playground/tests/frontend_selenium/tests/conftest.py
index 50e29c43d8..3aae3f0e30 100644
--- a/packages/playground/tests/frontend_selenium/tests/conftest.py
+++ b/packages/playground/tests/frontend_selenium/tests/conftest.py
@@ -1,7 +1,7 @@
 import pytest
 from selenium import webdriver
 from webdriver_manager.chrome import ChromeDriverManager
-from selenium.webdriver.chrome.service import Service
+from selenium.webdriver.chrome.service import Service as ChromeService
 from pyvirtualdisplay import Display
 
 """
@@ -19,6 +19,7 @@ def browser():
     options = webdriver.ChromeOptions()
     #options.add_extension('extension.crx') # For Adding Extension
     driver = webdriver.Chrome(options=options)
+    # driver = webdriver.Chrome(options=options, service=ChromeService(ChromeDriverManager().install()))
     driver.set_window_size(1920, 1080)
 
     # Make its calls wait up to 60 seconds for elements to appear
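The `conftest.py` hunk keeps the default driver setup (`webdriver.Chrome(options=options)`, resolved by Selenium Manager) and leaves the webdriver-manager path commented out. For reference, a minimal standalone sketch of that commented alternative, assuming webdriver-manager `4.x` and a local Chrome install, looks like this:

```python
from selenium import webdriver
from selenium.webdriver.chrome.service import Service as ChromeService
from webdriver_manager.chrome import ChromeDriverManager

options = webdriver.ChromeOptions()
# webdriver-manager downloads a matching chromedriver and returns its path;
# ChromeService points Selenium at that binary instead of Selenium Manager's.
service = ChromeService(ChromeDriverManager().install())
driver = webdriver.Chrome(options=options, service=service)
driver.set_window_size(1920, 1080)
driver.quit()
```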
diff --git a/packages/playground/tests/frontend_selenium/utils/base.py b/packages/playground/tests/frontend_selenium/utils/base.py
index b9f632e1de..bace6a6f94 100644
--- a/packages/playground/tests/frontend_selenium/utils/base.py
+++ b/packages/playground/tests/frontend_selenium/utils/base.py
@@ -6,4 +6,7 @@ class Base:
     port = config['Base']['port']
     net = config['Base']['net']
     base_url = 'http://localhost:' + str(port) + '/'
-    gridproxy_url = 'https://gridproxy.' + str(net) + '.grid.tf/'
\ No newline at end of file
+    if str(net) == 'main':
+        gridproxy_url = 'https://gridproxy.grid.tf/'
+    else:
+        gridproxy_url = 'https://gridproxy.' + str(net) + '.grid.tf/'
\ No newline at end of file
diff --git a/packages/playground/tests/frontend_selenium/utils/grid_proxy.py b/packages/playground/tests/frontend_selenium/utils/grid_proxy.py
index b7f3a9b1af..4f301144c8 100644
--- a/packages/playground/tests/frontend_selenium/utils/grid_proxy.py
+++ b/packages/playground/tests/frontend_selenium/utils/grid_proxy.py
@@ -63,7 +63,11 @@ def get_twin_node(self, twin_id):
         return details
 
     def get_stats_capicity(self):
-        r = requests.post('https://stats.' + Base.net + '.grid.tf/api/stats-summary', timeout=10)
+        if Base.net == 'main':
+            stats_url = 'https://stats.grid.tf/api/stats-summary'
+        else:
+            stats_url = 'https://stats.' + Base.net + '.grid.tf/api/stats-summary'
+        r = requests.post(stats_url, timeout=10)
         stats_json = r.json()
         return list(stats_json.values())
 
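The same `main` special case now appears in `test_homepage.py`, `base.py`, and `grid_proxy.py` (Mainnet URLs drop the network segment, e.g. `https://stats.grid.tf/` instead of `https://stats.main.grid.tf/`). A possible follow-up, not part of this patch, would be to centralize that mapping in a single helper; the function name below is hypothetical:

```python
def grid_service_url(service: str, net: str) -> str:
    """Build a ThreeFold Grid service URL; mainnet omits the network segment."""
    if net == 'main':
        return 'https://' + service + '.grid.tf/'
    return 'https://' + service + '.' + net + '.grid.tf/'

# grid_service_url('stats', 'main')     -> 'https://stats.grid.tf/'
# grid_service_url('gridproxy', 'dev')  -> 'https://gridproxy.dev.grid.tf/'
```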