diff --git a/etc/docker/dev/docker-compose.yml b/etc/docker/dev/docker-compose.yml
index 897e793f9b8..6ed4981b87c 100644
--- a/etc/docker/dev/docker-compose.yml
+++ b/etc/docker/dev/docker-compose.yml
@@ -3,6 +3,8 @@ services:
   rucioclient:
     image: docker.io/rucio/rucio-dev:latest-alma9
     command: ["sleep", "infinity"]
+    profiles:
+      - client
     volumes:
       - ../../certs/rucio_ca.pem:/etc/grid-security/certificates/5fca1cb1.0:z
       - ../../certs/hostcert_rucio.pem:/etc/grid-security/hostcert.pem:z
diff --git a/tools/test/run_tests.py b/tools/test/run_tests.py
index 68ec43f3e77..45b712c566c 100755
--- a/tools/test/run_tests.py
+++ b/tools/test/run_tests.py
@@ -15,6 +15,8 @@
 # limitations under the License.
 
 import json
+import io
+import itertools
 import multiprocessing
 import os
 import pathlib
@@ -24,11 +26,29 @@
 import time
 import traceback
 import uuid
-from collections.abc import Callable
 from datetime import datetime
-from typing import Optional
+from typing import Optional, Union, NoReturn
 
-from suites import run, Container, rdbms_container, services, CumulativeContextManager, service_hostnames, env_args
+
+def run(*args, check=True, return_stdout=False, env=None) -> Union[NoReturn, io.TextIOBase]:
+    kwargs = {'check': check, 'stdout': sys.stderr, 'stderr': subprocess.STDOUT}
+    if env is not None:
+        kwargs['env'] = env
+    if return_stdout:
+        kwargs['stderr'] = sys.stderr
+        kwargs['stdout'] = subprocess.PIPE
+    args = [str(a) for a in args]
+    print("** Running", " ".join(map(lambda a: repr(a) if ' ' in a else a, args)), kwargs, file=sys.stderr, flush=True)
+    proc = subprocess.run(args, **kwargs)
+    if return_stdout:
+        return proc.stdout
+
+
+def env_args(caseenv):
+    environment_args = list(itertools.chain(*map(lambda x: ('--env', f'{x[0]}={x[1]}'), caseenv.items())))
+    environment_args.append('--env')
+    environment_args.append('GITHUB_ACTIONS')
+    return environment_args
 
 
 def matches(small: dict, group: dict):
@@ -177,8 +197,6 @@ def run_case(caseenv, image, use_podman, use_namespace, use_httpd, copy_rucio_lo
         success = run_with_httpd(
             caseenv=caseenv,
             image=image,
-            use_podman=use_podman,
-            pod=pod,
             namespace_args=namespace_args,
             namespace_env=namespace_env,
             copy_rucio_logs=copy_rucio_logs,
@@ -254,74 +272,45 @@ def run_test_directly(
 def run_with_httpd(
         caseenv: dict[str, str],
         image: str,
-        use_podman: bool,
-        pod: str,
         namespace_args: list[str],
         namespace_env: dict[str, str],
         copy_rucio_logs: bool,
         logs_dir: pathlib.Path,
         tests: list[str],
 ) -> bool:
-    pod_net_arg = ['--pod', pod] if use_podman else []
     # Running rucio container from given image
-    with Container(image, runtime_args=namespace_args, run_args=pod_net_arg, environment=caseenv) as rucio_container:
+    from tempfile import NamedTemporaryFile
+    with NamedTemporaryFile() as compose_override:
         try:
-            network_arg = ('--network', 'container:' + rucio_container.cid)
-            container_run_args = pod_net_arg if use_podman else network_arg
-            additional_containers = []
-
-            def create_cnt(cnt_class: Callable) -> Container:
-                return cnt_class(
-                    runtime_args=namespace_args,
-                    run_args=container_run_args,
-                )
-
-            db_container = None
-            rdbms = caseenv.get('RDBMS', '')
-            if rdbms:
-                service_key = caseenv.get('SERVICES', 'default')
-                db_container_class = rdbms_container.get(rdbms, None)
-                if db_container_class:
-                    db_container = create_cnt(db_container_class)
-                    additional_containers.append(db_container)
-                additional_containers += list(map(create_cnt, services[service_key]))
-
-            with CumulativeContextManager(*additional_containers):
-                db_env = dict()
-                if db_container:
-                    db_env['CON_DB'] = db_container.cid
-
-                # Running before_script.sh
-                run(
-                    './tools/test/before_script.sh',
-                    env={
-                        **os.environ,
-                        **caseenv,
-                        **namespace_env,
-                        **db_env,
-                        "CONTAINER_RUNTIME_ARGS": ' '.join(namespace_args),
-                        "CON_RUCIO": rucio_container.cid,
-                    },
-                )
-
-                # register service hostnames
-                run('docker', *namespace_args, 'exec', rucio_container.cid, '/bin/sh', '-c', f'echo "127.0.0.1 {" ".join(service_hostnames)}" | tee -a /etc/hosts')
+            run('docker-compose', '--file', 'etc/docker/dev/docker-compose.yml', 'up', '-d')
+            # Running before_script.sh
+            run(
+                './tools/test/before_script.sh',
+                env={
+                    **os.environ,
+                    **caseenv,
+                    **namespace_env,
+                    "CONTAINER_RUNTIME_ARGS": ' '.join(namespace_args),
+                    "CON_RUCIO": 'dev-rucio-1',
+                    "CON_DB": 'dev-ruciodb-1',
+                },
+            )
 
-                # Running install_script.sh
-                run('docker', *namespace_args, 'exec', rucio_container.cid, './tools/test/install_script.sh')
+            # Running install_script.sh
+            run('docker', *namespace_args, 'exec', 'dev-rucio-1', './tools/test/install_script.sh')
 
-                # Running test.sh
-                if tests:
-                    tests_env = ('--env', 'TESTS=' + ' '.join(tests))
-                    tests_arg = ('-p', )
-                else:
-                    tests_env = ()
-                    tests_arg = ()
+            # Running test.sh
+            if tests:
+                tests_env = ('--env', 'TESTS=' + ' '.join(tests))
+                tests_arg = ('-p', )
+            else:
+                tests_env = ()
+                tests_arg = ()
 
-                run('docker', *namespace_args, 'exec', *tests_env, rucio_container.cid, './tools/test/test.sh', *tests_arg)
+            run('docker', *namespace_args, 'exec', *tests_env, 'dev-rucio-1', './tools/test/test.sh', *tests_arg)
 
-                # if everything went through without an exception, mark this case as a success
-                return True
+            # if everything went through without an exception, mark this case as a success
+            return True
         except subprocess.CalledProcessError as error:
             print(
                 f"** Process '{error.cmd}' exited with code {error.returncode}",
@@ -330,12 +319,12 @@ def create_cnt(cnt_class: Callable) -> Container:
                 flush=True,
             )
         finally:
-            run('docker', *namespace_args, 'logs', rucio_container.cid, check=False)
+            run('docker', *namespace_args, 'logs', 'dev-rucio-1', check=False)
             if copy_rucio_logs:
                 try:
                     if logs_dir.exists():
                         shutil.rmtree(logs_dir)
-                    run('docker', *namespace_args, 'cp', f'{rucio_container.cid}:/var/log', str(logs_dir))
+                    run('docker', *namespace_args, 'cp', f'dev-rucio-1:/var/log', str(logs_dir))
                 except Exception:
                     print(
                         "** Error on retrieving logs for",
diff --git a/tools/test/suites.py b/tools/test/suites.py
deleted file mode 100644
index 5d802239c94..00000000000
--- a/tools/test/suites.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright European Organization for Nuclear Research (CERN) since 2012
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import io
-import itertools
-import subprocess
-import sys
-from typing import Any, NoReturn, Optional, Union
-
-DEFAULT_TIMEOUT = 10
-DEFAULT_DB_TIMEOUT = 27
-
-
-def run(*args, check=True, return_stdout=False, env=None) -> Union[NoReturn, io.TextIOBase]:
-    kwargs = {'check': check, 'stdout': sys.stderr, 'stderr': subprocess.STDOUT}
-    if env is not None:
-        kwargs['env'] = env
-    if return_stdout:
-        kwargs['stderr'] = sys.stderr
-        kwargs['stdout'] = subprocess.PIPE
-    args = [str(a) for a in args]
-    print("** Running", " ".join(map(lambda a: repr(a) if ' ' in a else a, args)), kwargs, file=sys.stderr, flush=True)
-    proc = subprocess.run(args, **kwargs)
-    if return_stdout:
-        return proc.stdout
-
-
-def env_args(caseenv):
-    environment_args = list(itertools.chain(*map(lambda x: ('--env', f'{x[0]}={x[1]}'), caseenv.items())))
-    environment_args.append('--env')
-    environment_args.append('GITHUB_ACTIONS')
-    return environment_args
-
-
-class Container:
-    def __init__(
-            self,
-            image: "str",
-            *args,
-            runtime_args: Optional[list[str]] = None,
-            run_args: Optional[list[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_TIMEOUT,
-    ):
-        if runtime_args is None:
-            runtime_args = []
-        self.runtime_args = runtime_args
-        if run_args is None:
-            run_args = []
-        if environment is None:
-            environment = {}
-        self.stop_timeout = stop_timeout
-        self.args = ['docker', *runtime_args, 'run', '--detach', *run_args, *(env_args(environment)), image, *args]
-        self.cid = None
-
-    def __enter__(self):
-        stdout = run(*self.args, return_stdout=True)
-        self.cid = stdout.decode().strip()
-        if not self.cid:
-            raise RuntimeError("Could not determine container id after starting the container")
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        run('docker', *self.runtime_args, 'stop', f'--time={self.stop_timeout}', self.cid, check=False)
-        run('docker', *self.runtime_args, 'rm', '--force', '--volumes', self.cid, check=False)
-
-    def wait(self):
-        run('docker', *self.runtime_args, 'wait', self.cid, check=False)
-
-
-class CumulativeContextManager:
-    def __init__(self, *context_managers):
-        self.context_managers = context_managers
-
-    def __enter__(self):
-        for mgr in self.context_managers:
-            mgr.__enter__()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        for mgr in self.context_managers:
-            mgr.__exit__(exc_type, exc_val, exc_tb)
-
-
-class OracleDB(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if run_args is None:
-            run_args = tuple()
-        run_args = ("--no-healthcheck",) + run_args
-        if environment is None:
-            environment = dict()
-        environment['processes'] = "1000"
-        environment["sessions"] = "1105"
-        environment["transactions"] = "1215"
-        environment["ORACLE_ALLOW_REMOTE"] = "true"
-        environment["ORACLE_PASSWORD"] = "oracle"
-        environment["ORACLE_DISABLE_ASYNCH_IO"] = "true"
-        super(OracleDB, self).__init__(
-            "docker.io/gvenzl/oracle-xe:18.4.0",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class MySQL5(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if environment is None:
-            environment = dict()
-        environment["MYSQL_ROOT_PASSWORD"] = "secret"
-        environment["MYSQL_ROOT_HOST"] = "%"
-        super(MySQL5, self).__init__(
-            "docker.io/mysql/mysql-server:5.7",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class MySQL8(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if environment is None:
-            environment = dict()
-        environment["MYSQL_ROOT_PASSWORD"] = "secret"
-        environment["MYSQL_ROOT_HOST"] = "%"
-        super(MySQL8, self).__init__(
-            "docker.io/mysql/mysql-server:8.0",
-            "--default-authentication-plugin=mysql_native_password",
-            "--character-set-server=latin1",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class Postgres14(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if environment is None:
-            environment = dict()
-        environment["POSTGRES_PASSWORD"] = "secret"
-        super(Postgres14, self).__init__(
-            "docker.io/postgres:14",
-            "-c", "max_connections=300",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class ActiveMQ(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        super(ActiveMQ, self).__init__(
-            "docker.io/webcenter/activemq:latest",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class InfluxDB(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if environment is None:
-            environment = dict()
-        environment["DOCKER_INFLUXDB_INIT_MODE"] = "setup"
-        environment["DOCKER_INFLUXDB_INIT_USERNAME"] = "myusername"
-        environment["DOCKER_INFLUXDB_INIT_PASSWORD"] = "passwordpasswordpassword"
-        environment["DOCKER_INFLUXDB_INIT_ORG"] = "rucio"
-        environment["DOCKER_INFLUXDB_INIT_BUCKET"] = "rucio"
-        environment["DOCKER_INFLUXDB_INIT_ADMIN_TOKEN"] = "mytoken"
-        super(InfluxDB, self).__init__(
-            "docker.io/influxdb:latest",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-class Elasticsearch(Container):
-    def __init__(
-            self,
-            runtime_args: Optional[tuple[str]] = None,
-            run_args: Optional[tuple[str]] = None,
-            environment: Optional[dict[str, str]] = None,
-            stop_timeout: int = DEFAULT_DB_TIMEOUT,
-    ):
-        if environment is None:
-            environment = dict()
-        environment["discovery.type"] = "single-node"
-        super(Elasticsearch, self).__init__(
-            "docker.elastic.co/elasticsearch/elasticsearch:6.4.2",
-            runtime_args=runtime_args,
-            run_args=run_args,
-            environment=environment,
-            stop_timeout=stop_timeout,
-        )
-
-
-rdbms_container: dict[str, Any] = {
-    "oracle": OracleDB,
-    "mysql5": MySQL5,
-    "mysql8": MySQL8,
-    "postgres14": Postgres14,
-    "sqlite": None,
-}
-services = {
-    'default': [ActiveMQ],
-    'influxdb_elastic': [ActiveMQ, InfluxDB, Elasticsearch],
-}
-service_hostnames = ['activemq', 'influxdb', 'elasticsearch'] + list(rdbms_container.keys())