From 14f7e571674a66cf0c5eede57b9cd1b052bb9ba2 Mon Sep 17 00:00:00 2001
From: Pankaj
Date: Thu, 21 Mar 2024 18:42:51 +0530
Subject: [PATCH] Fix import error: TaskInstance not defined

---
 .../providers/apache/hive/example_dags/example_hive.py | 7 ++-----
 .../providers/apache/livy/example_dags/example_livy.py | 7 ++-----
 astronomer/providers/sftp/example_dags/example_sftp.py | 7 ++-----
 3 files changed, 6 insertions(+), 15 deletions(-)

diff --git a/astronomer/providers/apache/hive/example_dags/example_hive.py b/astronomer/providers/apache/hive/example_dags/example_hive.py
index 1723c6870..7cd2deecf 100644
--- a/astronomer/providers/apache/hive/example_dags/example_hive.py
+++ b/astronomer/providers/apache/hive/example_dags/example_hive.py
@@ -4,11 +4,11 @@
 import os
 import time
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, settings
 from airflow.exceptions import AirflowException
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator
 from airflow.providers.amazon.aws.operators.emr import (
     EmrCreateJobFlowOperator,
@@ -26,9 +26,6 @@
     NamedHivePartitionSensorAsync,
 )
 
-if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstance
-
 HIVE_CLUSTER = os.getenv("HIVE_CLUSTER", "example_hive_sensor_cluster")
 AWS_S3_CREDS = {
     "aws_access_key_id": os.getenv("AWS_ACCESS_KEY_ID", "aws_access_key"),
diff --git a/astronomer/providers/apache/livy/example_dags/example_livy.py b/astronomer/providers/apache/livy/example_dags/example_livy.py
index b55d3b10c..24da0e4eb 100644
--- a/astronomer/providers/apache/livy/example_dags/example_livy.py
+++ b/astronomer/providers/apache/livy/example_dags/example_livy.py
@@ -8,11 +8,11 @@
 import os
 import time
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, settings
 from airflow.exceptions import AirflowException
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator
 from airflow.providers.amazon.aws.operators.emr import (
     EmrCreateJobFlowOperator,
@@ -25,9 +25,6 @@
 
 from astronomer.providers.apache.livy.operators.livy import LivyOperatorAsync
 
-if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstance
-
 LIVY_CLUSTER = os.getenv("LIVY_CLUSTER", "example_livy_operator_cluster")
 BOTO_DUPLICATE_PERMISSION_ERROR = "InvalidPermission.Duplicate"
 LIVY_JAVA_FILE = os.getenv("LIVY_JAVA_FILE", "/spark-examples.jar")
diff --git a/astronomer/providers/sftp/example_dags/example_sftp.py b/astronomer/providers/sftp/example_dags/example_sftp.py
index 832bfb481..9af9eedcb 100644
--- a/astronomer/providers/sftp/example_dags/example_sftp.py
+++ b/astronomer/providers/sftp/example_dags/example_sftp.py
@@ -3,10 +3,10 @@
 import os
 import time
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any, List
+from typing import Any, List
 
 from airflow import DAG, AirflowException, settings
-from airflow.models import Connection, Variable
+from airflow.models import Connection, TaskInstance, Variable
 from airflow.operators.python import PythonOperator, get_current_context
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
@@ -15,9 +15,6 @@
 
 from astronomer.providers.sftp.sensors.sftp import SFTPSensorAsync
 
-if TYPE_CHECKING:
-    from airflow.models import TaskInstance
-
 SFTP_CONN_ID = os.getenv("ASTRO_SFTP_CONN_ID", "sftp_default")
 EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
 AWS_S3_CREDS = {
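
Note (editor's addition, not part of the patch): a minimal standalone sketch of the failure mode this patch addresses. An import guarded by `if TYPE_CHECKING:` runs only under static type checkers such as mypy, never at runtime, so any non-string runtime reference to the name raises NameError. The `describe` function below is hypothetical, for illustration only; it is not taken from the example DAGs.

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Executed only by static type checkers, never at runtime.
        from airflow.models import TaskInstance


    def describe(ti: "TaskInstance") -> str:
        # Fine: the annotation is a string, evaluated lazily if at all.
        return ti.task_id


    # Any non-string runtime reference fails, which is the error the
    # patch fixes by importing TaskInstance unconditionally:
    try:
        print(TaskInstance)
    except NameError as err:
        print(err)  # name 'TaskInstance' is not defined

The patch takes the unconditional-import route, presumably because the example DAGs reference TaskInstance outside string annotations, where the guarded import never materializes.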