From 6753ce70a654fd781342e58c9bb9d8f4c5f9b9cd Mon Sep 17 00:00:00 2001
From: TJaniF
Date: Wed, 11 Oct 2023 11:53:45 +0200
Subject: [PATCH] change back to schedule_interval

---
 README.rst                                | 2 +-
 dev/dags/basic_cosmos_task_group.py       | 2 +-
 dev/dags/cosmos_manifest_example.py       | 2 +-
 dev/dags/cosmos_profile_mapping.py        | 2 +-
 dev/dags/cosmos_seed_dag.py               | 2 +-
 dev/dags/dbt_docs.py                      | 2 +-
 dev/dags/example_cosmos_python_models.py  | 2 +-
 dev/dags/example_model_version.py         | 2 +-
 dev/dags/example_virtualenv.py            | 2 +-
 dev/dags/user_defined_profile.py          | 2 +-
 docs/configuration/scheduling.rst         | 6 +++---
 docs/getting_started/astro.rst            | 2 +-
 docs/getting_started/gcc.rst              | 2 +-
 docs/getting_started/mwaa.rst             | 2 +-
 docs/getting_started/open-source.rst      | 2 +-
 docs/index.rst                            | 2 +-
 16 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/README.rst b/README.rst
index d75c4eec9..e4f69af63 100644
--- a/README.rst
+++ b/README.rst
@@ -61,7 +61,7 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an
     with DAG(
         dag_id="extract_dag",
         start_date=datetime(2022, 11, 27),
-        schedule="@daily",
+        schedule_interval="@daily",
     ):
 
         e1 = EmptyOperator(task_id="pre_dbt")
diff --git a/dev/dags/basic_cosmos_task_group.py b/dev/dags/basic_cosmos_task_group.py
index fe7528ce9..2f875b8c3 100644
--- a/dev/dags/basic_cosmos_task_group.py
+++ b/dev/dags/basic_cosmos_task_group.py
@@ -25,7 +25,7 @@
 
 
 @dag(
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
 )
diff --git a/dev/dags/cosmos_manifest_example.py b/dev/dags/cosmos_manifest_example.py
index ffcc2a03d..14ce42606 100644
--- a/dev/dags/cosmos_manifest_example.py
+++ b/dev/dags/cosmos_manifest_example.py
@@ -32,7 +32,7 @@
     render_config=RenderConfig(load_method=LoadMode.DBT_MANIFEST, select=["path:models/customers.sql"]),
     operator_args={"install_deps": True},
     # normal dag parameters
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
     dag_id="cosmos_manifest_example",
diff --git a/dev/dags/cosmos_profile_mapping.py b/dev/dags/cosmos_profile_mapping.py
index 1b3ea80aa..33619a39d 100644
--- a/dev/dags/cosmos_profile_mapping.py
+++ b/dev/dags/cosmos_profile_mapping.py
@@ -18,7 +18,7 @@
 
 
 @dag(
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
 )
diff --git a/dev/dags/cosmos_seed_dag.py b/dev/dags/cosmos_seed_dag.py
index 0f3353bc1..cef84dd66 100644
--- a/dev/dags/cosmos_seed_dag.py
+++ b/dev/dags/cosmos_seed_dag.py
@@ -36,7 +36,7 @@
 with DAG(
     dag_id="extract_dag",
     start_date=datetime(2022, 11, 27),
-    schedule="@daily",
+    schedule_interval="@daily",
     doc_md=__doc__,
     catchup=False,
     max_active_runs=1,
diff --git a/dev/dags/dbt_docs.py b/dev/dags/dbt_docs.py
index 7bf6c71d5..1fcd1c341 100644
--- a/dev/dags/dbt_docs.py
+++ b/dev/dags/dbt_docs.py
@@ -63,7 +63,7 @@ def which_upload():
 with DAG(
     dag_id="docs_dag",
     start_date=datetime(2023, 1, 1),
-    schedule="@daily",
+    schedule_interval="@daily",
     doc_md=__doc__,
     catchup=False,
     default_args={"retries": 2},
diff --git a/dev/dags/example_cosmos_python_models.py b/dev/dags/example_cosmos_python_models.py
index 9a0b1847f..7d9a61465 100644
--- a/dev/dags/example_cosmos_python_models.py
+++ b/dev/dags/example_cosmos_python_models.py
@@ -44,7 +44,7 @@
         "append_env": True,
     },
     # normal dag parameters
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
     dag_id="example_cosmos_python_models",
diff --git a/dev/dags/example_model_version.py b/dev/dags/example_model_version.py
index 0fef1c75a..78f38647d 100644
--- a/dev/dags/example_model_version.py
+++ b/dev/dags/example_model_version.py
@@ -30,7 +30,7 @@
     profile_config=profile_config,
     operator_args={"install_deps": True},
     # normal dag parameters
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
     dag_id="example_model_version",
diff --git a/dev/dags/example_virtualenv.py b/dev/dags/example_virtualenv.py
index 096baf8e3..7b1368f8c 100644
--- a/dev/dags/example_virtualenv.py
+++ b/dev/dags/example_virtualenv.py
@@ -37,7 +37,7 @@
         "install_deps": True,
     },
     # normal dag parameters
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
     dag_id="example_virtualenv",
diff --git a/dev/dags/user_defined_profile.py b/dev/dags/user_defined_profile.py
index 569a2f1dd..ab30cdb2f 100644
--- a/dev/dags/user_defined_profile.py
+++ b/dev/dags/user_defined_profile.py
@@ -16,7 +16,7 @@
 
 
 @dag(
-    schedule="@daily",
+    schedule_interval="@daily",
     start_date=datetime(2023, 1, 1),
     catchup=False,
 )
diff --git a/docs/configuration/scheduling.rst b/docs/configuration/scheduling.rst
index bd7b7c890..de21f8495 100644
--- a/docs/configuration/scheduling.rst
+++ b/docs/configuration/scheduling.rst
@@ -17,7 +17,7 @@ To schedule a dbt project on a time-based schedule, you can use Airflow's schedu
     jaffle_shop = DbtDag(
         # ...
         start_date=datetime(2023, 1, 1),
-        schedule="@daily",
+        schedule_interval="@daily",
     )
 
 
@@ -45,12 +45,12 @@ Then, you can use Airflow's data-aware scheduling capabilities to schedule ``my_
     project_one = DbtDag(
         # ...
         start_date=datetime(2023, 1, 1),
-        schedule="@daily",
+        schedule_interval="@daily",
     )
 
     project_two = DbtDag(
         # ...
-        schedule=[get_dbt_dataset("my_conn", "project_one", "my_model")],
+        schedule_interval=[get_dbt_dataset("my_conn", "project_one", "my_model")],
         dbt_project_name="project_two",
     )
 
diff --git a/docs/getting_started/astro.rst b/docs/getting_started/astro.rst
index 418f2a0c8..6188ae676 100644
--- a/docs/getting_started/astro.rst
+++ b/docs/getting_started/astro.rst
@@ -102,7 +102,7 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c
             dbt_executable_path=f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt",
         ),
         # normal dag parameters
-        schedule="@daily",
+        schedule_interval="@daily",
         start_date=datetime(2023, 1, 1),
         catchup=False,
         dag_id="my_cosmos_dag",
diff --git a/docs/getting_started/gcc.rst b/docs/getting_started/gcc.rst
index 8830b530a..cc0b8c318 100644
--- a/docs/getting_started/gcc.rst
+++ b/docs/getting_started/gcc.rst
@@ -71,7 +71,7 @@ Make sure to rename the ``<your-adapter>`` value below to your adapter's Python
             "py_requirements": ["<your-adapter>"],
         },
         # normal dag parameters
-        schedule="@daily",
+        schedule_interval="@daily",
         start_date=datetime(2023, 1, 1),
         catchup=False,
         dag_id="my_cosmos_dag",
diff --git a/docs/getting_started/mwaa.rst b/docs/getting_started/mwaa.rst
index 75f9a9688..cb742495a 100644
--- a/docs/getting_started/mwaa.rst
+++ b/docs/getting_started/mwaa.rst
@@ -105,7 +105,7 @@ In your ``my_cosmos_dag.py`` file, import the ``DbtDag`` class from Cosmos and c
         ),
         profile_config=profile_config,
         # normal dag parameters
-        schedule="@daily",
+        schedule_interval="@daily",
         start_date=datetime(2023, 1, 1),
         catchup=False,
         dag_id="my_cosmos_dag",
diff --git a/docs/getting_started/open-source.rst b/docs/getting_started/open-source.rst
index 7a84b35a5..a560929e1 100644
--- a/docs/getting_started/open-source.rst
+++ b/docs/getting_started/open-source.rst
@@ -57,7 +57,7 @@ For example, if you wanted to put your dbt project in the ``/usr/local/airflow/d
             dbt_executable_path=f"{os.environ['AIRFLOW_HOME']}/dbt_venv/bin/dbt",
         ),
         # normal dag parameters
-        schedule="@daily",
+        schedule_interval="@daily",
         start_date=datetime(2023, 1, 1),
         catchup=False,
         dag_id="my_cosmos_dag",
diff --git a/docs/index.rst b/docs/index.rst
index f627c3fb6..3c61b645d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -63,7 +63,7 @@ You can render an Airflow Task Group using the ``DbtTaskGroup`` class. Here's an
     with DAG(
         dag_id="extract_dag",
         start_date=datetime(2022, 11, 27),
-        schedule="@daily",
+        schedule_interval="@daily",
     ):
 
         e1 = EmptyOperator(task_id="pre_dbt")
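
For reference, the README/index snippet touched by this patch reads roughly as follows once the change is applied. This is a minimal, self-contained sketch only: the imports, the ``post_dbt`` task, and the task ordering are assumptions added for illustration, and the ``DbtTaskGroup`` wiring from the full README example is elided.

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator

    with DAG(
        dag_id="extract_dag",
        start_date=datetime(2022, 11, 27),
        schedule_interval="@daily",  # DAG-level parameter restored by this patch
    ):
        e1 = EmptyOperator(task_id="pre_dbt")
        e2 = EmptyOperator(task_id="post_dbt")  # assumed companion task, not part of the hunks above

        # In the full README example, a DbtTaskGroup sits between these two tasks.
        e1 >> e2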