#425: fixed type errors found by mypy (#426)
fixes #425
tomuben authored Nov 25, 2024
1 parent 9ed4dad commit 6ecad55
Showing 68 changed files with 394 additions and 320 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/checks.yml
@@ -88,7 +88,7 @@ jobs:
python-version: ${{ matrix.python-version }}

- name: Run type-check
run: poetry run nox -s lint:typing || echo ignoring...
run: poetry run nox -s lint:typing

Security:
name: Security Checks (Python-${{ matrix.python-version }})
@@ -109,7 +109,7 @@ jobs:
python-version: ${{ matrix.python-version }}

- name: Run security linter
run: poetry run nox -s lint:security || echo ignoring...
run: poetry run nox -s lint:security

- name: Upload Artifacts
uses: actions/[email protected]
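Dropping the "|| echo ignoring..." suffix means a non-zero exit from the nox sessions now fails the workflow instead of being swallowed. For context, a minimal noxfile sketch of such sessions, assuming mypy and bandit as the underlying tools; the project's real sessions come from exasol/python-toolbox and the bodies below are purely illustrative:

    import nox

    @nox.session(name="lint:typing")
    def typing(session: nox.Session) -> None:
        # Run mypy over the package; a non-zero exit now fails the CI job.
        session.install("mypy")
        session.run("mypy", "exasol_integration_test_docker_environment")

    @nox.session(name="lint:security")
    def security(session: nox.Session) -> None:
        # Run the bandit security linter over the package sources.
        session.install("bandit")
        session.run("bandit", "-r", "exasol_integration_test_docker_environment")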
3 changes: 2 additions & 1 deletion doc/changes/unreleased.md
@@ -9,4 +9,5 @@ Code name:
* #119: Refactored `pkg_resources` usage to `importlib.resources`
* #420: Added file `py.typed` to enable mypy to find project specific types
* #418: Use exasol/python-toolbox
* #411: Removed usage of exasol-bucketfs
* #411: Removed usage of exasol-bucketfs
* #425: Fixed type checks found by MyPy
@@ -48,7 +48,7 @@ def _handle_failure(self, task_error: TaskRuntimeError):
def _print_task_failures(task_error: TaskRuntimeError):
print_err()
print_err("Task failure message: %s" % task_error.msg)
print_err(task_error.__cause__.args[0])
print_err(task_error.__cause__.args[0]) # type: ignore
print_err()

def _handle_success(self):
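The "# type: ignore" is needed because BaseException.__cause__ is typed as Optional[BaseException], so mypy cannot prove that ".args[0]" is safe. A minimal sketch of the alternative, explicit narrowing; the helper name print_cause and the plain print are illustrative, not the project's print_err:

    from typing import Optional

    class TaskRuntimeError(RuntimeError):
        pass

    def print_cause(task_error: TaskRuntimeError) -> None:
        cause: Optional[BaseException] = task_error.__cause__  # Optional per typeshed
        if cause is not None and cause.args:
            print(cause.args[0])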
9 changes: 6 additions & 3 deletions exasol_integration_test_docker_environment/doctor.py
@@ -3,7 +3,8 @@
package and also provide help to find potential fixes.
"""
import sys
from typing import Iterable
from collections.abc import Callable
from typing import Iterable, List, Tuple
from exasol import error
from enum import Enum

@@ -78,9 +79,11 @@ def health_checkup() -> Iterable[error.ExaError]:
return an iterator of error codes specifying which problems have been identified.
"""
examinations = [
check_function = Callable[[],bool]
diagnosis_function = Callable[[],Iterable[error.ExaError]]
examinations : List[Tuple[check_function, diagnosis_function]] = [
(is_docker_daemon_available, diagnose_docker_daemon_not_available),
(is_supported_platform, lambda: Error.TargetPlatformNotSupported),
(is_supported_platform, lambda: [Error.TargetPlatformNotSupported]),
]
for is_fine, diagnosis in examinations:
if not is_fine():
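The doctor.py change introduces callable aliases so the examination list can be annotated and mypy can check both the health checks and their diagnosis functions. A standalone sketch of the pattern, using str in place of error.ExaError and illustrative function names:

    from typing import Callable, Iterable, List, Tuple

    CheckFn = Callable[[], bool]
    DiagnoseFn = Callable[[], Iterable[str]]

    def is_docker_available() -> bool:
        return True  # placeholder check

    def diagnose_docker() -> Iterable[str]:
        return ["docker daemon not reachable"]  # placeholder diagnosis

    examinations: List[Tuple[CheckFn, DiagnoseFn]] = [
        (is_docker_available, diagnose_docker),
    ]

    def health_checkup() -> Iterable[str]:
        for is_fine, diagnose in examinations:
            if not is_fine():
                yield from diagnose()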
11 changes: 7 additions & 4 deletions exasol_integration_test_docker_environment/lib/api/api_errors.py
@@ -12,13 +12,16 @@ class HealthProblem(RuntimeError):
class TaskFailures(Exception):
"""Represents a potential cause of a TaskRuntimeError"""

def __init__(self, inner: List[str] = None):
def __init__(self, inner: Optional[List[str]] = None):
super().__init__(self._construct_exception_message(inner))
self.inner = inner

def _construct_exception_message(self, failures: Iterable[str]) -> str:
formatted_task_failures = "\n".join(failures)
return f"Following task failures were caught during the execution:\n{formatted_task_failures}"
def _construct_exception_message(self, failures: Optional[Iterable[str]]) -> str:
if failures is not None:
formatted_task_failures = "\n".join(failures)
return f"Following task failures were caught during the execution:\n{formatted_task_failures}"
else:
return f"No task failures were caught during the execution:"


class TaskRuntimeError(RuntimeError):
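The api_errors.py change fixes an implicit-Optional default: a parameter that defaults to None must be annotated Optional[...], and every use of the value then needs a None branch. A condensed sketch of the same shape, with an illustrative fallback message:

    from typing import Iterable, List, Optional

    class TaskFailures(Exception):
        def __init__(self, inner: Optional[List[str]] = None):
            super().__init__(self._format(inner))
            self.inner = inner

        @staticmethod
        def _format(failures: Optional[Iterable[str]]) -> str:
            if failures is None:
                return "No task failures were caught during the execution"
            return "Following task failures were caught during the execution:\n" + "\n".join(failures)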
29 changes: 14 additions & 15 deletions exasol_integration_test_docker_environment/lib/api/common.py
@@ -28,7 +28,7 @@

class JobCounterSingleton(object):
"""
We use here a Singleton to avoid a unprotected global variable.
We use here a Singleton to avoid an unprotected global variable.
However, this counter needs to be global counter to guarantee unique job ids.
This is needed in case of task that finish in less than a second, to avoid duplicated job ids
"""
@@ -42,18 +42,19 @@ def __new__(cls):
return cls._instance

def get_next_value(self) -> int:
self._counter += 1
return self._counter
# self._counter is a class variable and because of this we need to suppress type checks
self._counter += 1 # type: ignore
return self._counter # type: ignore


def set_build_config(force_rebuild: bool,
force_rebuild_from: Tuple[str, ...],
force_pull: bool,
log_build_context_content: bool,
output_directory: str,
temporary_base_directory: str,
cache_directory: str,
build_name: str, ):
temporary_base_directory: Optional[str],
cache_directory: Optional[str],
build_name: Optional[str], ):
luigi.configuration.get_config().set('build_config', 'force_rebuild', str(force_rebuild))
luigi.configuration.get_config().set('build_config', 'force_rebuild_from', json.dumps(force_rebuild_from))
luigi.configuration.get_config().set('build_config', 'force_pull', str(force_pull))
@@ -72,9 +73,8 @@ def set_output_directory(output_directory):
luigi.configuration.get_config().set('build_config', 'output_directory', output_directory)


def set_docker_repository_config(docker_password: str, docker_repository_name: str, docker_username: str,
tag_prefix: str,
kind: str):
def set_docker_repository_config(docker_password: Optional[str], docker_repository_name: Optional[str],
docker_username: Optional[str], tag_prefix: str, kind: str):
config_class = f'{kind}_docker_repository_config'
luigi.configuration.get_config().set(config_class, 'tag_prefix', tag_prefix)
if docker_repository_name is not None:
@@ -105,6 +105,7 @@ def import_build_steps(flavor_path: Tuple[str, ...]):
path_to_build_steps = Path(path).joinpath("flavor_base/build_steps.py")
module_name_for_build_steps = extract_modulename_for_build_steps(path)
spec = importlib.util.spec_from_file_location(module_name_for_build_steps, path_to_build_steps)
assert spec and spec.loader
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

@@ -119,7 +120,7 @@ def generate_root_task(task_class, *args, **kwargs) -> DependencyLoggerBaseTask:

def run_task(task_creator: Callable[[], DependencyLoggerBaseTask], workers: int = 2,
task_dependencies_dot_file: Optional[str] = None,
log_level: str = None, use_job_specific_log_file: bool = False) \
log_level: Optional[str] = None, use_job_specific_log_file: bool = False) \
-> Any:
setup_worker()
task = task_creator()
@@ -141,7 +142,6 @@ def run_task(task_creator: Callable[[], DependencyLoggerBaseTask], workers: int
f"The detailed log of the integration-test-docker-environment can be found at: {log_file_path}")
task.cleanup(success)


def _run_task_with_logging_config(
task: DependencyLoggerBaseTask,
log_file_path: Path,
@@ -153,7 +153,6 @@ def _run_task_with_logging_config(
local_scheduler=True, **run_kwargs)
return no_scheduling_errors


def _handle_task_result(
no_scheduling_errors: bool,
success: bool,
@@ -199,7 +198,7 @@ def _configure_logging(
yield run_kwargs


def _configure_logging_parameter(log_level: str, luigi_config: Path, use_job_specific_log_file: bool) \
def _configure_logging_parameter(log_level: Optional[str], luigi_config: Path, use_job_specific_log_file: bool) \
-> Tuple[bool, Dict[str, str]]:
if use_job_specific_log_file:
no_configure_logging = False
@@ -226,13 +225,13 @@ def generate_graph_from_task_dependencies(task: DependencyLoggerBaseTask, task_d


def collect_dependencies(task: DependencyLoggerBaseTask) -> Set[TaskDependency]:
dependencies = set()
dependencies : Set[TaskDependency] = set()
for root, directories, files in os.walk(task._get_dependencies_path_for_job()):
for file in files:
file_path = Path(root).joinpath(file)
with open(file_path) as f:
for line in f.readlines():
task_dependency = TaskDependency.from_json(line) # type: TaskDependency
task_dependency : TaskDependency = TaskDependency.from_json(line) # type: ignore
if task_dependency.state == DependencyState.requested.name:
dependencies.add(task_dependency)
return dependencies
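Among the common.py fixes, the added "assert spec and spec.loader" is the usual way to satisfy mypy here: importlib.util.spec_from_file_location() returns Optional[ModuleSpec], and spec.loader is itself Optional, so both must be ruled out before exec_module() is called. A self-contained sketch of that narrowing; module name and path are placeholders:

    import importlib.util

    def load_module_from_path(module_name: str, path: str):
        spec = importlib.util.spec_from_file_location(module_name, path)
        # Both the spec and its loader are Optional; the assert narrows them.
        assert spec and spec.loader, f"cannot load {module_name} from {path}"
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module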
@@ -1,5 +1,5 @@
import functools
from typing import Tuple, Optional, Callable
from typing import Tuple, Optional, Callable, Any
import humanfriendly

from exasol_integration_test_docker_environment.lib.api.common import (
@@ -26,8 +26,10 @@


def _cleanup(environment_info: EnvironmentInfo) -> None:
remove_docker_container([environment_info.database_info.container_info.container_name])
remove_docker_volumes([environment_info.database_info.container_info.volume_name])
if environment_info.database_info.container_info is not None:
remove_docker_container([environment_info.database_info.container_info.container_name])
if environment_info.database_info.container_info.volume_name is not None:
remove_docker_volumes([environment_info.database_info.container_info.volume_name])
remove_docker_networks([environment_info.network_info.network_name])


@@ -69,7 +71,7 @@ def spawn_test_environment(
raises: TaskRuntimeError if spawning the test environment fails
"""
def str_or_none(x: any) -> str:
def str_or_none(x: Any) -> Optional[str]:
return str(x) if x is not None else None

parsed_db_mem_size = humanfriendly.parse_size(db_mem_size)
@@ -78,6 +80,8 @@ def str_or_none(x: any) -> str:
parsed_db_disk_size = humanfriendly.parse_size(db_disk_size)
if parsed_db_disk_size < humanfriendly.parse_size("100 MiB"):
raise ArgumentConstraintError("db_disk_size", "needs to be at least 100 MiB")
db_os_access_value = DbOsAccess[db_os_access] if db_os_access else DbOsAccess.DOCKER_EXEC

set_build_config(False,
tuple(),
False,
@@ -101,7 +105,7 @@ def str_or_none(x: any) -> str:
docker_runtime=docker_runtime,
docker_db_image_version=docker_db_image_version,
docker_db_image_name=docker_db_image_name,
db_os_access=DbOsAccess[db_os_access],
db_os_access=db_os_access_value,
db_user="sys",
db_password="exasol",
bucketfs_write_password="write",
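Two fixes in this hunk are worth isolating: str_or_none now declares that it can return None, and the DbOsAccess enum lookup gets an explicit default instead of indexing the enum with a possibly empty value. A sketch with a stand-in enum; the real DbOsAccess comes from docker_db_test_environment_parameter:

    from enum import Enum, auto
    from typing import Any, Optional

    class DbOsAccess(Enum):  # stand-in for the project's enum
        DOCKER_EXEC = auto()
        SSH = auto()

    def str_or_none(x: Any) -> Optional[str]:
        return str(x) if x is not None else None

    def resolve_db_os_access(db_os_access: Optional[str]) -> DbOsAccess:
        # Name lookup only happens when a value was actually provided.
        return DbOsAccess[db_os_access] if db_os_access else DbOsAccess.DOCKER_EXEC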
@@ -1,5 +1,5 @@
import functools
from typing import Tuple, Optional, Callable
from typing import Tuple, Optional, Callable, Any
import humanfriendly

from exasol_integration_test_docker_environment.lib.api.common import set_build_config, set_docker_repository_config, \
@@ -22,14 +22,15 @@
.docker_db_test_environment_parameter import DbOsAccess

def _cleanup(environment_info: EnvironmentInfo) -> None:
if environment_info.test_container_info is None:
remove_docker_container([environment_info.database_info.container_info.container_name])
else:
remove_docker_container([environment_info.test_container_info.container_name,
environment_info.database_info.container_info.container_name])
remove_docker_volumes([environment_info.database_info.container_info.volume_name])
remove_docker_networks([environment_info.network_info.network_name])
if test_container_info := environment_info.test_container_info:
remove_docker_container([test_container_info.container_name])

if db_container_info := environment_info.database_info.container_info:
remove_docker_container([db_container_info.container_name])
if name := db_container_info.volume_name:
remove_docker_volumes([name])

remove_docker_networks([environment_info.network_info.network_name])

@no_cli_function
def spawn_test_environment_with_test_container(
@@ -71,7 +72,7 @@ def spawn_test_environment_with_test_container(
raises: TaskRuntimeError if spawning the test environment fails
"""
def str_or_none(x: any) -> str:
def str_or_none(x: Any) -> Optional[str]:
return str(x) if x is not None else None
parsed_db_mem_size = humanfriendly.parse_size(db_mem_size)
if parsed_db_mem_size < humanfriendly.parse_size("1 GiB"):
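The rewritten _cleanup() uses walrus assignments to narrow the Optional container infos: binding the value inside the if both names it and convinces mypy it is not None within the block. A standalone sketch with dataclass stand-ins for the project's info objects:

    from dataclasses import dataclass
    from typing import List, Optional

    @dataclass
    class ContainerInfo:  # stand-in for the project's container info
        container_name: str
        volume_name: Optional[str] = None

    def containers_and_volumes_to_remove(
            test_container: Optional[ContainerInfo],
            db_container: Optional[ContainerInfo]) -> List[str]:
        names: List[str] = []
        if info := test_container:
            names.append(info.container_name)
        if info := db_container:
            names.append(info.container_name)
            if volume := info.volume_name:
                names.append(volume)
        return names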
@@ -3,7 +3,7 @@
import logging
import shutil
from pathlib import Path
from typing import Dict, List, Generator, Any, Union, Set, Optional
from typing import Dict, List, Generator, Any, Union, Set

import luigi
import six
@@ -85,8 +85,8 @@ def get_output(self) -> Any:


class BaseTask(Task):
caller_output_path = luigi.ListParameter([], significant=False, visibility=ParameterVisibility.HIDDEN)
job_id = luigi.Parameter()
caller_output_path : List[str] = luigi.ListParameter([], significant=False, visibility=ParameterVisibility.HIDDEN) # type: ignore
job_id : str = luigi.Parameter() # type: ignore

def __init__(self, *args, **kwargs):
self._registered_tasks = []
@@ -184,7 +184,7 @@ def get_log_path(self) -> Path:
return path

def get_cache_path(self) -> Path:
path = Path(build_config().output_directory, "cache")
path = Path(str(build_config().output_directory), "cache")
path.mkdir(parents=True, exist_ok=True)
return path

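The annotations plus "# type: ignore" on the luigi parameters are needed because luigi parameters are class-level descriptor-like objects: mypy sees a Parameter instance, while at runtime the attribute holds the parsed value. An illustrative task showing the same pattern; it is not the project's BaseTask:

    from typing import List

    import luigi

    class ExampleTask(luigi.Task):
        job_id: str = luigi.Parameter()  # type: ignore
        caller_output_path: List[str] = luigi.ListParameter([])  # type: ignore

        def run(self) -> None:
            # With the annotations above, mypy treats job_id as str here.
            print(self.job_id.upper(), list(self.caller_output_path))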
@@ -3,7 +3,7 @@
import docker
import time
from docker import DockerClient
from typing import Protocol, runtime_checkable
from typing import Protocol, runtime_checkable, Optional
from docker.models.containers import Container, ExecResult
from exasol_integration_test_docker_environment \
.lib.base.ssh_access import SshKey
@@ -37,19 +37,18 @@ class DbOsExecutor(Protocol):
concrete implementations in sub-classes ``DockerExecutor`` and
``SshExecutor``.
"""
@abstractmethod
def exec(self, cmd: str) -> ExecResult:
...

def prepare(self):
pass
...


class DockerExecutor(DbOsExecutor):
def __init__(self, docker_client: DockerClient, container_name: str):
self._client = docker_client
self._container_name = container_name
self._container = None
self._container : Optional[Container] = None

def __enter__(self):
self._container = self._client.containers.get(self._container_name)
@@ -62,8 +61,12 @@ def __del__(self):
self.close()

def exec(self, cmd: str) -> ExecResult:
assert self._container
return self._container.exec_run(cmd)

def prepare(self):
pass

def close(self):
self._container = None
if self._client is not None:
@@ -75,7 +78,7 @@ class SshExecutor(DbOsExecutor):
def __init__(self, connect_string: str, key_file: str):
self._connect_string = connect_string
self._key_file = key_file
self._connection = None
self._connection : Optional[fabric.Connection] = None

def __enter__(self):
key = SshKey.read_from(self._key_file)
@@ -92,6 +95,7 @@ def __del__(self):
self.close()

def exec(self, cmd: str) -> ExecResult:
assert self._connection
result = self._connection.run(cmd, warn=True, hide=True)
output = result.stdout.encode("utf-8")
return ExecResult(result.exited, output)
@@ -143,9 +147,11 @@ def executor(self) -> DbOsExecutor:
return DockerExecutor(client, self._container_name)



class SshExecFactory(DbOsExecFactory):
@classmethod
def from_database_info(cls, info: DatabaseInfo):
assert info.ssh_info
return SshExecFactory(
f"{info.ssh_info.user}@{info.host}:{info.ports.ssh}",
info.ssh_info.key_file,
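This file combines a Protocol describing the executor interface with concrete executors that hold an Optional handle set only in __enter__; the added asserts narrow that Optional before use. A minimal sketch of the shape, with a fake connection standing in for the docker and fabric handles:

    from typing import Optional, Protocol, runtime_checkable

    class FakeConnection:  # placeholder for a docker container / ssh connection
        def run(self, cmd: str) -> str:
            return f"ran: {cmd}"

    @runtime_checkable
    class Executor(Protocol):
        def exec(self, cmd: str) -> str: ...
        def prepare(self) -> None: ...

    class FakeExecutor:
        def __init__(self) -> None:
            self._connection: Optional[FakeConnection] = None

        def __enter__(self) -> "FakeExecutor":
            self._connection = FakeConnection()
            return self

        def __exit__(self, *exc) -> None:
            self._connection = None

        def exec(self, cmd: str) -> str:
            assert self._connection  # narrows Optional[FakeConnection]
            return self._connection.run(cmd)

        def prepare(self) -> None: ...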
@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Dict, Any
from typing import Dict, Any, List

import luigi

@@ -8,7 +8,7 @@


class FlavorsBaseTask(DependencyLoggerBaseTask):
flavor_paths = luigi.ListParameter()
flavor_paths : List[str] = luigi.ListParameter() # type: ignore

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)