diff --git a/astronomer/providers/google/cloud/example_dags/example_gcs.py b/astronomer/providers/google/cloud/example_dags/example_gcs.py
index 212592630..0068f3ffd 100644
--- a/astronomer/providers/google/cloud/example_dags/example_gcs.py
+++ b/astronomer/providers/google/cloud/example_dags/example_gcs.py
@@ -8,13 +8,14 @@
     GCSCreateBucketOperator,
     GCSDeleteBucketOperator,
 )
+from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor
 from airflow.providers.google.cloud.transfers.local_to_gcs import (
     LocalFilesystemToGCSOperator,
 )
 
 from astronomer.providers.google.cloud.sensors.gcs import (
     GCSObjectExistenceSensorAsync,
-    GCSObjectsWithPrefixExistenceSensorAsync,
+    # GCSObjectsWithPrefixExistenceSensorAsync,
     GCSObjectUpdateSensorAsync,
     GCSUploadSessionCompleteSensorAsync,
 )
@@ -79,7 +80,7 @@
     # [END howto_sensor_gcs_object_exists_async]
 
     # [START howto_sensor_gcs_object_with_prefix_existence_async]
-    gcs_object_with_prefix_exists = GCSObjectsWithPrefixExistenceSensorAsync(
+    gcs_object_with_prefix_exists = GCSObjectsWithPrefixExistenceSensor(
         bucket=BUCKET_1,
         prefix=PATH_TO_UPLOAD_FILE_PREFIX,
         task_id="gcs_object_with_prefix_exists_task",
diff --git a/astronomer/providers/google/cloud/hooks/dataproc.py b/astronomer/providers/google/cloud/hooks/dataproc.py
index 258813566..923e298f5 100644
--- a/astronomer/providers/google/cloud/hooks/dataproc.py
+++ b/astronomer/providers/google/cloud/hooks/dataproc.py
@@ -3,9 +3,8 @@
 
 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
-from google.api_core import gapic_v1
+from google.api_core import gapic_v1, retry_async as retries
 from google.api_core.client_options import ClientOptions
-from google.api_core.retry import Retry
 from google.cloud.dataproc_v1 import (
     ClusterControllerAsyncClient,
     Job,
@@ -13,6 +12,11 @@
 )
 from google.cloud.dataproc_v1.types import clusters
 
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: no cover
+    OptionalRetry = Union[retries.AsyncRetry, object]  # type: ignore[misc]
+
 JobType = Union[Job, Any]
 
 
@@ -68,7 +72,7 @@ async def get_cluster(
         region: str,
         cluster_name: str,
         project_id: str,
-        retry: Union[Retry, gapic_v1.method._MethodDefault] = gapic_v1.method.DEFAULT,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> clusters.Cluster:
         """
@@ -98,7 +102,7 @@ async def get_job(
         timeout: float = 5,
         region: Optional[str] = None,
         location: Optional[str] = None,
-        retry: Union[Retry, gapic_v1.method._MethodDefault] = gapic_v1.method.DEFAULT,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> JobType:
         """
diff --git a/astronomer/providers/snowflake/hooks/snowflake_sql_api.py b/astronomer/providers/snowflake/hooks/snowflake_sql_api.py
index 9ab17a112..081515001 100644
--- a/astronomer/providers/snowflake/hooks/snowflake_sql_api.py
+++ b/astronomer/providers/snowflake/hooks/snowflake_sql_api.py
@@ -140,6 +140,8 @@ def execute_query(
         try:
             response.raise_for_status()
         except requests.exceptions.HTTPError as e:  # pragma: no cover
+            if e.response is None:
+                raise e
             raise AirflowException(
                 f"Response: {e.response.content!r}, " f"Status Code: {e.response.status_code}"
             )  # pragma: no cover
@@ -203,6 +205,8 @@ def check_query_output(self, query_ids: list[str]) -> None:
                 response.raise_for_status()
                 self.log.info(response.json())
             except requests.exceptions.HTTPError as e:
+                if e.response is None:  # pragma: no cover
+                    raise e
                 raise AirflowException(
                     f"Response: {e.response.content!r}, Status Code: {e.response.status_code}"
                 )
diff --git a/setup.cfg b/setup.cfg
index f9daca796..84efa5065 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -45,7 +45,7 @@ zip_safe = false
 
 [options.extras_require]
 amazon =
-    apache-airflow-providers-amazon>=3.0.0
+    apache-airflow-providers-amazon==8.13.0rc1
     aiobotocore>=2.1.1
 apache.hive =
     apache-airflow-providers-apache-hive>=6.1.5
@@ -54,7 +54,7 @@ apache.livy =
     apache-airflow-providers-apache-livy
     paramiko
 cncf.kubernetes =
-    apache-airflow-providers-cncf-kubernetes>=4
+    apache-airflow-providers-cncf-kubernetes @https://files.pythonhosted.org/packages/bb/f3/843e44661979a0d6581eb8eeddb80eae4a406a0798129b4618197b8b02bd/apache_airflow_providers_cncf_kubernetes-7.11.0rc1.tar.gz
     kubernetes_asyncio
 databricks =
     apache-airflow-providers-databricks>=2.2.0
@@ -62,13 +62,13 @@ databricks =
 dbt.cloud =
     apache-airflow-providers-dbt-cloud>=2.1.0
 google =
-    apache-airflow-providers-google>=8.1.0
+    apache-airflow-providers-google==10.13.0rc1
     gcloud-aio-storage
     gcloud-aio-bigquery
 http =
-    apache-airflow-providers-http
+    apache-airflow-providers-http==4.8.0rc1
 microsoft.azure =
-    apache-airflow-providers-microsoft-azure
+    apache-airflow-providers-microsoft-azure==8.4.0rc1
 sftp =
     apache-airflow-providers-sftp
     asyncssh>=2.12.0
@@ -119,16 +119,16 @@ mypy =
 # All extras from above except 'mypy', 'docs' and 'tests'
 all =
     aiobotocore>=2.1.1
-    apache-airflow-providers-amazon>=3.0.0
+    apache-airflow-providers-amazon==8.13.0rc1
     apache-airflow-providers-apache-hive>=6.1.5
     apache-airflow-providers-apache-livy
-    apache-airflow-providers-cncf-kubernetes>=4
+    apache-airflow-providers-cncf-kubernetes @https://files.pythonhosted.org/packages/bb/f3/843e44661979a0d6581eb8eeddb80eae4a406a0798129b4618197b8b02bd/apache_airflow_providers_cncf_kubernetes-7.11.0rc1.tar.gz
     apache-airflow-providers-databricks>=2.2.0
-    apache-airflow-providers-google>=8.1.0
-    apache-airflow-providers-http
+    apache-airflow-providers-google==10.13.0rc1
+    apache-airflow-providers-http==4.8.0rc1
     apache-airflow-providers-snowflake
     apache-airflow-providers-sftp
-    apache-airflow-providers-microsoft-azure
+    apache-airflow-providers-microsoft-azure==8.4.0rc1
     asyncssh>=2.12.0
     databricks-sql-connector>=2.0.4;python_version>='3.10'
     apache-airflow-providers-dbt-cloud>=2.1.0
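A note on the OptionalRetry alias added in hooks/dataproc.py above: the module-level try/except guards the access to gapic_v1.method._MethodDefault, a private type that older google-api-core releases do not expose, so on those versions the annotation degrades to object instead of failing at import time. A minimal standalone sketch of the same pattern follows; the fetch coroutine is hypothetical and only illustrates how the alias is consumed as a parameter default.

    from typing import Union

    from google.api_core import gapic_v1, retry_async as retries

    try:
        # Recent google-api-core releases expose the private _MethodDefault
        # type, so gapic_v1.method.DEFAULT type-checks as a valid default.
        OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault]
    except AttributeError:  # pragma: no cover
        # Older releases lack _MethodDefault; fall back to object so the
        # module still imports and the annotation stays permissive.
        OptionalRetry = Union[retries.AsyncRetry, object]  # type: ignore[misc]


    async def fetch(retry: OptionalRetry = gapic_v1.method.DEFAULT) -> None:
        """Hypothetical coroutine showing the alias used as a parameter default."""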