Skip to content

Commit

Permalink
Add v2.9.2 support
Browse files Browse the repository at this point in the history
  • Loading branch information
Rafid Al-Humaimidi committed Jul 2, 2024
1 parent 197b7f4 commit 438d0d5
Show file tree
Hide file tree
Showing 12 changed files with 564 additions and 541 deletions.
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
-2.8.1
+2.9.2
6 changes: 3 additions & 3 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ LABEL maintainer="amazon"

# Airflow
## Version specific ARGs
-ARG AIRFLOW_VERSION=2.8.1
-ARG WATCHTOWER_VERSION=3.0.1
-ARG PROVIDER_AMAZON_VERSION=8.16.0
+ARG AIRFLOW_VERSION=2.9.2
+ARG WATCHTOWER_VERSION=3.2.0
+ARG PROVIDER_AMAZON_VERSION=8.24.0

## General ARGs
ARG AIRFLOW_USER_HOME=/usr/local/airflow
Expand Down
16 changes: 8 additions & 8 deletions docker/config/airflow.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ executor = SequentialExecutor
# This defines the maximum number of task instances that can run concurrently in Airflow
# regardless of scheduler count and worker count. Generally, this value is reflective of
# the number of task instances with the running state in the metadata database.
-parallelism = 32
+parallelism = 150

# The maximum number of task instances allowed to run concurrently in each DAG. To calculate
# the number of tasks that is running concurrently for a DAG, add up the number of running
Expand All @@ -35,7 +35,7 @@ parallelism = 32
#
# An example scenario when this would be useful is when you want to stop a new dag with an early
# start date from stealing all the executor slots in a cluster.
-max_active_tasks_per_dag = 16
+max_active_tasks_per_dag = 150

# Are DAGs paused by default at creation
dags_are_paused_at_creation = True
Expand Down Expand Up @@ -157,7 +157,7 @@ sensitive_var_conn_names =
# Task Slot counts for ``default_pool``. This setting would not have any effect in an existing
# deployment where the ``default_pool`` is already created. For existing deployments, users can
# change the number of slots using Webserver, API or the CLI
-default_pool_task_slot_count = 10000
+default_pool_task_slot_count = 200

[database]
# Collation for ``dag_id``, ``task_id``, ``key`` columns in case they have different encoding.
Expand Down Expand Up @@ -342,7 +342,7 @@ backend =
# See documentation for the secrets backend you are using. JSON is expected.
# Example for AWS Systems Manager ParameterStore:
# ``{{"connections_prefix": "/airflow/connections", "profile_name": "default"}}``
-backend_kwargs =
+backend_kwargs = '{"connections_lookup_pattern":"^(?!aws_default$).*$"}'

[cli]
# In what way should the cli access the API. The LocalClient will use the
Expand Down Expand Up @@ -457,7 +457,7 @@ reload_on_plugin_change = False
secret_key = $SECRET_KEY

# Number of workers to run the Gunicorn web server
-workers = 4
+workers = 9

# The worker class gunicorn should use. Choices include
# sync (default), eventlet, gevent
Expand Down Expand Up @@ -815,7 +815,7 @@ catchup_by_default = True
# complexity of query predicate, and/or excessive locking.
# Additionally, you may hit the maximum allowable query length for your db.
# Set this to 0 for no limit (not advised)
-max_tis_per_query = 512
+max_tis_per_query = 16

# Should the scheduler issue ``SELECT ... FOR UPDATE`` in relevant queries.
# If this is set to False then you should not run more than a single
Expand All @@ -832,11 +832,11 @@ max_dagruns_per_loop_to_schedule = 20
# Should the Task supervisor process perform a "mini scheduler" to attempt to schedule more tasks of the
# same DAG. Leaving this on will mean tasks in the same DAG execute quicker, but might starve out other
# dags in some circumstances
-schedule_after_task_execution = True
+schedule_after_task_execution = False

# The scheduler can run multiple processes in parallel to parse dags.
# This defines how many processes will run.
-parsing_processes = 2
+parsing_processes = 7

# One of ``modified_time``, ``random_seeded_by_host`` and ``alphabetical``.
# The scheduler will list and sort the dag files to decide the parsing order.
Expand Down
Loading

0 comments on commit 438d0d5

Please sign in to comment.