[DO NOT MERGE] Test RC provider packages for https://github.com/apache/airflow/issues/36117 #1380

Closed · wants to merge 9 commits
@@ -8,13 +8,14 @@
     GCSCreateBucketOperator,
     GCSDeleteBucketOperator,
 )
+from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor
 from airflow.providers.google.cloud.transfers.local_to_gcs import (
     LocalFilesystemToGCSOperator,
 )

 from astronomer.providers.google.cloud.sensors.gcs import (
     GCSObjectExistenceSensorAsync,
-    GCSObjectsWithPrefixExistenceSensorAsync,
+    # GCSObjectsWithPrefixExistenceSensorAsync,
     GCSObjectUpdateSensorAsync,
     GCSUploadSessionCompleteSensorAsync,
 )
@@ -79,7 +80,7 @@
     # [END howto_sensor_gcs_object_exists_async]

     # [START howto_sensor_gcs_object_with_prefix_existence_async]
-    gcs_object_with_prefix_exists = GCSObjectsWithPrefixExistenceSensorAsync(
+    gcs_object_with_prefix_exists = GCSObjectsWithPrefixExistenceSensor(
         bucket=BUCKET_1,
         prefix=PATH_TO_UPLOAD_FILE_PREFIX,
         task_id="gcs_object_with_prefix_exists_task",
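For context, the hunk above swaps the Astronomer async sensor for the upstream Google provider sensor. A minimal sketch of the replacement path, assuming GCSObjectsWithPrefixExistenceSensor in apache-airflow-providers-google==10.13.0rc1 exposes a deferrable flag; the bucket and prefix values are hypothetical stand-ins for BUCKET_1 and PATH_TO_UPLOAD_FILE_PREFIX:

from airflow.providers.google.cloud.sensors.gcs import (
    GCSObjectsWithPrefixExistenceSensor,
)

gcs_object_with_prefix_exists = GCSObjectsWithPrefixExistenceSensor(
    bucket="example-bucket",      # hypothetical bucket name
    prefix="example-prefix/",     # hypothetical object prefix
    task_id="gcs_object_with_prefix_exists_task",
    deferrable=True,  # defer to the triggerer instead of holding a worker slot
)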
12 changes: 8 additions & 4 deletions astronomer/providers/google/cloud/hooks/dataproc.py
@@ -3,16 +3,20 @@

 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
-from google.api_core import gapic_v1
+from google.api_core import gapic_v1, retry_async as retries
 from google.api_core.client_options import ClientOptions
-from google.api_core.retry import Retry
 from google.cloud.dataproc_v1 import (
     ClusterControllerAsyncClient,
     Job,
     JobControllerAsyncClient,
 )
 from google.cloud.dataproc_v1.types import clusters

+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: no cover
+    OptionalRetry = Union[retries.AsyncRetry, object]  # type: ignore[misc]
+
 JobType = Union[Job, Any]


@@ -68,7 +72,7 @@ async def get_cluster(
         region: str,
         cluster_name: str,
         project_id: str,
-        retry: Union[Retry, gapic_v1.method._MethodDefault] = gapic_v1.method.DEFAULT,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> clusters.Cluster:
         """
@@ -98,7 +102,7 @@ async def get_job(
         timeout: float = 5,
         region: Optional[str] = None,
         location: Optional[str] = None,
-        retry: Union[Retry, gapic_v1.method._MethodDefault] = gapic_v1.method.DEFAULT,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> JobType:
         """
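The OptionalRetry alias introduced above lets callers pass an async-aware retry object, since the async Dataproc clients no longer accept the synchronous Retry. A minimal sketch of supplying an explicit AsyncRetry to the hook's get_cluster; the hook class name DataprocHookAsync, the connection id, and all GCP values are assumptions for illustration:

from google.api_core import exceptions
from google.api_core.retry import if_exception_type
from google.api_core.retry_async import AsyncRetry

from astronomer.providers.google.cloud.hooks.dataproc import DataprocHookAsync


async def get_cluster_with_retry():
    hook = DataprocHookAsync(gcp_conn_id="google_cloud_default")
    # Retry transient unavailability with exponential backoff.
    retry = AsyncRetry(
        predicate=if_exception_type(exceptions.ServiceUnavailable),
        initial=1.0,
        maximum=30.0,
        multiplier=2.0,
        timeout=120.0,
    )
    return await hook.get_cluster(
        region="us-central1",
        cluster_name="example-cluster",
        project_id="example-project",
        retry=retry,  # typed as OptionalRetry after this change
    )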
4 changes: 4 additions & 0 deletions astronomer/providers/snowflake/hooks/snowflake_sql_api.py
@@ -140,6 +140,8 @@ def execute_query(
         try:
             response.raise_for_status()
         except requests.exceptions.HTTPError as e:  # pragma: no cover
+            if e.response is None:
+                raise e
             raise AirflowException(
                 f"Response: {e.response.content!r}, " f"Status Code: {e.response.status_code}"
             )  # pragma: no cover
@@ -203,6 +205,8 @@ def check_query_output(self, query_ids: list[str]) -> None:
                 response.raise_for_status()
                 self.log.info(response.json())
             except requests.exceptions.HTTPError as e:
+                if e.response is None:  # pragma: no cover
+                    raise e
                 raise AirflowException(
                     f"Response: {e.response.content!r}, Status Code: {e.response.status_code}"
                 )
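The new e.response is None guard matters because requests allows an HTTPError to carry no response object, in which case e.response.content in the AirflowException message would itself raise AttributeError. A quick illustration of that edge case:

import requests

# HTTPError raised by response.raise_for_status() always carries the response,
# but one constructed or re-raised elsewhere may not:
err = requests.exceptions.HTTPError("connection torn down mid-request")
assert err.response is None  # e.response.content would fail here, hence the guard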
20 changes: 10 additions & 10 deletions setup.cfg
@@ -45,7 +45,7 @@ zip_safe = false

 [options.extras_require]
 amazon =
-    apache-airflow-providers-amazon>=3.0.0
+    apache-airflow-providers-amazon==8.13.0rc1
     aiobotocore>=2.1.1
 apache.hive =
     apache-airflow-providers-apache-hive>=6.1.5
@@ -54,21 +54,21 @@ apache.livy =
     apache-airflow-providers-apache-livy
     paramiko
 cncf.kubernetes =
-    apache-airflow-providers-cncf-kubernetes>=4
+    apache-airflow-providers-cncf-kubernetes @https://files.pythonhosted.org/packages/bb/f3/843e44661979a0d6581eb8eeddb80eae4a406a0798129b4618197b8b02bd/apache_airflow_providers_cncf_kubernetes-7.11.0rc1.tar.gz
     kubernetes_asyncio
 databricks =
     apache-airflow-providers-databricks>=2.2.0
     databricks-sql-connector>=2.0.4;python_version>='3.10'
 dbt.cloud =
     apache-airflow-providers-dbt-cloud>=2.1.0
 google =
-    apache-airflow-providers-google>=8.1.0
+    apache-airflow-providers-google==10.13.0rc1
     gcloud-aio-storage
     gcloud-aio-bigquery
 http =
-    apache-airflow-providers-http
+    apache-airflow-providers-http==4.8.0rc1
 microsoft.azure =
-    apache-airflow-providers-microsoft-azure
+    apache-airflow-providers-microsoft-azure==8.4.0rc1
 sftp =
     apache-airflow-providers-sftp
     asyncssh>=2.12.0
@@ -119,16 +119,16 @@ mypy =
 # All extras from above except 'mypy', 'docs' and 'tests'
 all =
     aiobotocore>=2.1.1
-    apache-airflow-providers-amazon>=3.0.0
+    apache-airflow-providers-amazon==8.13.0rc1
     apache-airflow-providers-apache-hive>=6.1.5
     apache-airflow-providers-apache-livy
-    apache-airflow-providers-cncf-kubernetes>=4
+    apache-airflow-providers-cncf-kubernetes @https://files.pythonhosted.org/packages/bb/f3/843e44661979a0d6581eb8eeddb80eae4a406a0798129b4618197b8b02bd/apache_airflow_providers_cncf_kubernetes-7.11.0rc1.tar.gz
     apache-airflow-providers-databricks>=2.2.0
-    apache-airflow-providers-google>=8.1.0
-    apache-airflow-providers-http
+    apache-airflow-providers-google==10.13.0rc1
+    apache-airflow-providers-http==4.8.0rc1
     apache-airflow-providers-snowflake
     apache-airflow-providers-sftp
-    apache-airflow-providers-microsoft-azure
+    apache-airflow-providers-microsoft-azure==8.4.0rc1
     asyncssh>=2.12.0
     databricks-sql-connector>=2.0.4;python_version>='3.10'
     apache-airflow-providers-dbt-cloud>=2.1.0
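Since the point of this PR is to exercise the RC builds, it can be useful to confirm which provider versions actually resolved after installing the extras. A minimal sketch using only the standard library; the package list mirrors the pins above, and the expected output is the rc versions (e.g. 8.13.0rc1 for amazon):

from importlib.metadata import version

for dist in (
    "apache-airflow-providers-amazon",
    "apache-airflow-providers-cncf-kubernetes",
    "apache-airflow-providers-google",
    "apache-airflow-providers-http",
    "apache-airflow-providers-microsoft-azure",
):
    # Prints the installed version so reviewers can verify the RC pins took effect.
    print(dist, version(dist))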