Skip to content

Commit

Permalink
Ensure compliance with linting rule D300 by using triple quotes for docstrings (#1049)
Browse files Browse the repository at this point in the history

Enable D300 to make docstrings more consistent
  • Loading branch information
pankajastro authored Jun 18, 2024
1 parent c4cf7c6 commit d255556
Show file tree
Hide file tree
Showing 54 changed files with 112 additions and 92 deletions.
6 changes: 3 additions & 3 deletions cosmos/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ def __post_init__(self) -> None:
self.validate_profile()

def validate_profile(self) -> None:
"Validates that we have enough information to render a profile."
"""Validates that we have enough information to render a profile."""
if not self.profiles_yml_filepath and not self.profile_mapping:
raise CosmosValueError("Either profiles_yml_filepath or profile_mapping must be set to render a profile")
if self.profiles_yml_filepath and self.profile_mapping:
Expand All @@ -253,15 +253,15 @@ def validate_profile(self) -> None:
)

def validate_profiles_yml(self) -> None:
"Validates a user-supplied profiles.yml is present"
"""Validates a user-supplied profiles.yml is present"""
if self.profiles_yml_filepath and not Path(self.profiles_yml_filepath).exists():
raise CosmosValueError(f"The file {self.profiles_yml_filepath} does not exist.")

@contextlib.contextmanager
def ensure_profile(
self, desired_profile_path: Path | None = None, use_mock_values: bool = False
) -> Iterator[tuple[Path, dict[str, str]]]:
"Context manager to ensure that there is a profile. If not, create one."
"""Context manager to ensure that there is a profile. If not, create one."""
if self.profiles_yml_filepath:
logger.info("Using user-supplied profiles.yml at %s", self.profiles_yml_filepath)
yield Path(self.profiles_yml_filepath), {}
Expand Down
2 changes: 1 addition & 1 deletion cosmos/dbt/selector.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ def select_nodes_ids_by_intersection(self) -> set[str]:
return selected_nodes

def _should_include_node(self, node_id: str, node: DbtNode) -> bool:
"Checks if a single node should be included. Only runs once per node with caching."
"""Checks if a single node should be included. Only runs once per node with caching."""
logger.debug("Inspecting if the node <%s> should be included.", node_id)
if node_id in self.visited_nodes:
return node_id in self.selected_nodes
Expand Down
2 changes: 1 addition & 1 deletion cosmos/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Contains exceptions that Cosmos uses"
"""Contains exceptions that Cosmos uses"""


class CosmosValueError(ValueError):
Expand Down
8 changes: 4 additions & 4 deletions cosmos/operators/local.py
Original file line number Diff line number Diff line change
Expand Up @@ -631,7 +631,7 @@ def __init__(
folder_dir: str | None = None,
**kwargs: Any,
) -> None:
"Initializes the operator."
"""Initializes the operator."""
self.connection_id = connection_id
self.bucket_name = bucket_name
self.folder_dir = folder_dir
Expand Down Expand Up @@ -674,7 +674,7 @@ def __init__(
super().__init__(*args, **kwargs)

def upload_to_cloud_storage(self, project_dir: str) -> None:
"Uploads the generated documentation to S3."
"""Uploads the generated documentation to S3."""
logger.info(
'Attempting to upload generated docs to S3 using S3Hook("%s")',
self.connection_id,
Expand Down Expand Up @@ -740,7 +740,7 @@ def __init__(
super().__init__(*args, **kwargs)

def upload_to_cloud_storage(self, project_dir: str) -> None:
"Uploads the generated documentation to Azure Blob Storage."
"""Uploads the generated documentation to Azure Blob Storage."""
logger.info(
'Attempting to upload generated docs to Azure Blob Storage using WasbHook(conn_id="%s")',
self.connection_id,
Expand Down Expand Up @@ -784,7 +784,7 @@ class DbtDocsGCSLocalOperator(DbtDocsCloudLocalOperator):
ui_color = "#4772d5"

def upload_to_cloud_storage(self, project_dir: str) -> None:
"Uploads the generated documentation to Google Cloud Storage"
"""Uploads the generated documentation to Google Cloud Storage"""
logger.info(
'Attempting to upload generated docs to Storage using GCSHook(conn_id="%s")',
self.connection_id,
Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Contains a function to get the profile mapping based on the connection ID."
"""Contains a function to get the profile mapping based on the connection ID."""

from __future__ import annotations

Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/athena/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Athena Airflow connection -> dbt profile mappings"
"""Athena Airflow connection -> dbt profile mappings"""

from .access_key import AthenaAccessKeyProfileMapping

Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/athena/access_key.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow AWS connections to a dbt Athena profile using an access key id and secret access key."
"""Maps Airflow AWS connections to a dbt Athena profile using an access key id and secret access key."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -57,7 +58,7 @@ class AthenaAccessKeyProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile. The password is stored in an environment variable."
"""Gets profile. The password is stored in an environment variable."""

self.temporary_credentials = self._get_temporary_credentials() # type: ignore

Expand All @@ -75,7 +76,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def env_vars(self) -> dict[str, str]:
"Overwrites the env_vars for athena, Returns a dictionary of environment variables that should be set based on the self.temporary_credentials."
"""Overwrites the env_vars for athena, Returns a dictionary of environment variables that should be set based on the self.temporary_credentials."""

if self.temporary_credentials is None:
raise CosmosValueError(f"Could not find the athena credentials.")
Expand Down
6 changes: 3 additions & 3 deletions cosmos/profiles/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def _validate_disable_event_tracking(self) -> None:

@property
def conn(self) -> Connection:
"Returns the Airflow connection."
"""Returns the Airflow connection."""
if not self._conn:
conn = BaseHook.get_connection(self.conn_id)
if not conn:
Expand Down Expand Up @@ -197,7 +197,7 @@ def mock_profile(self) -> dict[str, Any]:

@property
def env_vars(self) -> dict[str, str]:
"Returns a dictionary of environment variables that should be set based on self.secret_fields."
"""Returns a dictionary of environment variables that should be set based on self.secret_fields."""
env_vars = {}

for field in self.secret_fields:
Expand Down Expand Up @@ -287,7 +287,7 @@ def get_dbt_value(self, name: str) -> Any:

@property
def mapped_params(self) -> dict[str, Any]:
"Turns the self.airflow_param_mapping into a dictionary of dbt fields and their values."
"""Turns the self.airflow_param_mapping into a dictionary of dbt fields and their values."""
mapped_params = {
DBT_PROFILE_TYPE_FIELD: self.dbt_profile_type,
}
Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/bigquery/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"BigQuery Airflow connection -> dbt profile mappings"
"""BigQuery Airflow connection -> dbt profile mappings"""

from .oauth import GoogleCloudOauthProfileMapping
from .service_account_file import GoogleCloudServiceAccountFileProfileMapping
Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/bigquery/oauth.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow GCP connections to dbt BigQuery profiles that uses oauth via gcloud, if they don't use key file or JSON."
"""Maps Airflow GCP connections to dbt BigQuery profiles that uses oauth via gcloud, if they don't use key file or JSON."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -31,7 +32,7 @@ class GoogleCloudOauthProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Generates profile. Defaults `threads` to 1."
"""Generates profile. Defaults `threads` to 1."""
return {
**self.mapped_params,
"method": "oauth",
Expand All @@ -41,7 +42,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def mock_profile(self) -> dict[str, Any | None]:
"Generates mock profile. Defaults `threads` to 1."
"""Generates mock profile. Defaults `threads` to 1."""
parent_mock_profile = super().mock_profile

return {
Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/bigquery/service_account_file.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account file."
"""Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account file."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -32,7 +33,7 @@ class GoogleCloudServiceAccountFileProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Generates profile. Defaults `threads` to 1."
"""Generates profile. Defaults `threads` to 1."""
return {
**self.mapped_params,
"threads": 1,
Expand All @@ -41,7 +42,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def mock_profile(self) -> dict[str, Any | None]:
"Generates mock profile. Defaults `threads` to 1."
"""Generates mock profile. Defaults `threads` to 1."""
parent_mock_profile = super().mock_profile

return {
Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/bigquery/service_account_keyfile_dict.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account keyfile dict/json."
"""Maps Airflow GCP connections to dbt BigQuery profiles if they use a service account keyfile dict/json."""

from __future__ import annotations

import json
Expand Down Expand Up @@ -57,7 +58,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def mock_profile(self) -> dict[str, Any | None]:
"Generates mock profile. Defaults `threads` to 1."
"""Generates mock profile. Defaults `threads` to 1."""
parent_mock_profile = super().mock_profile

return {**parent_mock_profile, "threads": 1, "keyfile_json": None}
Expand Down Expand Up @@ -86,5 +87,5 @@ def transform_keyfile_json(self, keyfile_json: str | dict[str, str]) -> dict[str

@property
def env_vars(self) -> dict[str, str]:
"Returns a dictionary of environment variables that should be set based on self.secret_fields."
"""Returns a dictionary of environment variables that should be set based on self.secret_fields."""
return self._env_vars
2 changes: 1 addition & 1 deletion cosmos/profiles/databricks/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Databricks Airflow connection -> dbt profile mappings"
"""Databricks Airflow connection -> dbt profile mappings"""

from .token import DatabricksTokenProfileMapping

Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/databricks/token.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Databricks connections with a token to dbt profiles."
"""Maps Airflow Databricks connections with a token to dbt profiles."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -37,7 +38,7 @@ class DatabricksTokenProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Generates profile. The token is stored in an environment variable."
"""Generates profile. The token is stored in an environment variable."""
return {
**self.mapped_params,
**self.profile_args,
Expand All @@ -46,5 +47,5 @@ def profile(self) -> dict[str, Any | None]:
}

def transform_host(self, host: str) -> str:
"Removes the https:// prefix."
"""Removes the https:// prefix."""
return host.replace("https://", "")
2 changes: 1 addition & 1 deletion cosmos/profiles/exasol/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Exasol Airflow connection -> dbt profile mappings"
"""Exasol Airflow connection -> dbt profile mappings"""

from .user_pass import ExasolUserPasswordProfileMapping

Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/exasol/user_pass.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Exasol connections with a username and password to dbt profiles."
"""Maps Airflow Exasol connections with a username and password to dbt profiles."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -45,7 +46,7 @@ class ExasolUserPasswordProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile. The password is stored in an environment variable."
"""Gets profile. The password is stored in an environment variable."""
profile_vars = {
**self.mapped_params,
**self.profile_args,
Expand All @@ -57,7 +58,7 @@ def profile(self) -> dict[str, Any | None]:
return self.filter_null(profile_vars)

def transform_dsn(self, host: str) -> str:
"Adds the port if it's not already there."
"""Adds the port if it's not already there."""
if ":" not in host:
port = self.conn.port or self.default_port
return f"{host}:{port}"
Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/postgres/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Postgres Airflow connection -> dbt profile mappings"
"""Postgres Airflow connection -> dbt profile mappings"""

from .user_pass import PostgresUserPasswordProfileMapping

Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/postgres/user_pass.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Postgres connections using user + password authentication to dbt profiles."
"""Maps Airflow Postgres connections using user + password authentication to dbt profiles."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -37,7 +38,7 @@ class PostgresUserPasswordProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile. The password is stored in an environment variable."
"""Gets profile. The password is stored in an environment variable."""
profile = {
"port": 5432,
**self.mapped_params,
Expand All @@ -53,7 +54,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def mock_profile(self) -> dict[str, Any | None]:
"Gets mock profile. Defaults port to 5432."
"""Gets mock profile. Defaults port to 5432."""
profile_dict = {
"port": 5432,
**super().mock_profile,
Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/redshift/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Redshift Airflow connection -> dbt profile mappings"
"""Redshift Airflow connection -> dbt profile mappings"""

from .user_pass import RedshiftUserPasswordProfileMapping

Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/redshift/user_pass.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Redshift connections to dbt Redshift profiles if they use a username and password."
"""Maps Airflow Redshift connections to dbt Redshift profiles if they use a username and password."""

from __future__ import annotations

from typing import Any
Expand Down Expand Up @@ -39,7 +40,7 @@ class RedshiftUserPasswordProfileMapping(BaseProfileMapping):

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile."
"""Gets profile."""
profile = {
"port": 5439,
**self.mapped_params,
Expand All @@ -52,7 +53,7 @@ def profile(self) -> dict[str, Any | None]:

@property
def mock_profile(self) -> dict[str, Any | None]:
"Gets mock profile. Defaults port to 5439."
"""Gets mock profile. Defaults port to 5439."""
parent_mock = super().mock_profile

return {
Expand Down
2 changes: 1 addition & 1 deletion cosmos/profiles/snowflake/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"Snowflake Airflow connection -> dbt profile mapping."
"""Snowflake Airflow connection -> dbt profile mapping."""

from .user_encrypted_privatekey_env_variable import SnowflakeEncryptedPrivateKeyPemProfileMapping
from .user_encrypted_privatekey_file import SnowflakeEncryptedPrivateKeyFilePemProfileMapping
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Snowflake connections to dbt profiles if they use a user/private key."
"""Maps Airflow Snowflake connections to dbt profiles if they use a user/private key."""

from __future__ import annotations

import json
Expand Down Expand Up @@ -73,7 +74,7 @@ def conn(self) -> Connection:

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile."
"""Gets profile."""
profile_vars = {
**self.mapped_params,
**self.profile_args,
Expand All @@ -85,7 +86,7 @@ def profile(self) -> dict[str, Any | None]:
return self.filter_null(profile_vars)

def transform_account(self, account: str) -> str:
"Transform the account to the format <account>.<region> if it's not already."
"""Transform the account to the format <account>.<region> if it's not already."""
region = self.conn.extra_dejson.get("region")
if region and region not in account:
account = f"{account}.{region}"
Expand Down
7 changes: 4 additions & 3 deletions cosmos/profiles/snowflake/user_encrypted_privatekey_file.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"Maps Airflow Snowflake connections to dbt profiles if they use a user/private key path."
"""Maps Airflow Snowflake connections to dbt profiles if they use a user/private key path."""

from __future__ import annotations

import json
Expand Down Expand Up @@ -72,7 +73,7 @@ def conn(self) -> Connection:

@property
def profile(self) -> dict[str, Any | None]:
"Gets profile."
"""Gets profile."""
profile_vars = {
**self.mapped_params,
**self.profile_args,
Expand All @@ -84,7 +85,7 @@ def profile(self) -> dict[str, Any | None]:
return self.filter_null(profile_vars)

def transform_account(self, account: str) -> str:
"Transform the account to the format <account>.<region> if it's not already."
"""Transform the account to the format <account>.<region> if it's not already."""
region = self.conn.extra_dejson.get("region")
if region and region not in account:
account = f"{account}.{region}"
Expand Down
Loading

0 comments on commit d255556

Please sign in to comment.