From fc4f5bad2c01c11c1d06d140fd48cafbf3ea1ecb Mon Sep 17 00:00:00 2001
From: "nathania.calista01@gmail.com" <nathania.calista01@gmail.com>
Date: Mon, 6 Jan 2025 11:39:12 +0700
Subject: [PATCH] feat: dockerize airflow

---
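Notes (kept below the "---" marker, so `git am` drops them from the commit):

Assuming Docker Compose v2 and the repository root as the working
directory, the stack can be built and started with:

    docker compose up --build

The webserver should then be reachable at http://localhost:8080. Because
./airflow is bind-mounted to /opt/airflow, DAG files placed in
./airflow/dags on the host are picked up by the container's dags_folder,
and `airflow standalone` writes the generated admin password to
airflow/standalone_admin_password.txt on the host.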
 Dockerfile                                    |   8 +
 airflow/airflow-webserver.pid                 |   1 +
 airflow.cfg => airflow/airflow.cfg            | 699 +++++++++++++++++-
 airflow.db => airflow/airflow.db              | Bin 569344 -> 577536 bytes
 .../__pycache__/welcome_dag.cpython-312.pyc   | Bin 0 -> 1611 bytes
 airflow/dags/welcome_dag.py                   |  87 +++
 airflow/standalone_admin_password.txt         |   1 +
 airflow/webserver_config.py                   | 132 ++++
 dags/example.py                               |  18 -
 docker-compose.yml                            |  12 +
 10 files changed, 931 insertions(+), 27 deletions(-)
 create mode 100644 Dockerfile
 create mode 100644 airflow/airflow-webserver.pid
 rename airflow.cfg => airflow/airflow.cfg (78%)
 rename airflow.db => airflow/airflow.db (93%)
 create mode 100644 airflow/dags/__pycache__/welcome_dag.cpython-312.pyc
 create mode 100644 airflow/dags/welcome_dag.py
 create mode 100644 airflow/standalone_admin_password.txt
 create mode 100644 airflow/webserver_config.py
 delete mode 100644 dags/example.py
 create mode 100644 docker-compose.yml

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..19cc988
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,8 @@
+FROM apache/airflow:latest
+USER root
+
+RUN apt-get update && \
+  apt-get -y install git && \
+  apt-get clean
+
+USER airflow
\ No newline at end of file
diff --git a/airflow/airflow-webserver.pid b/airflow/airflow-webserver.pid
new file mode 100644
index 0000000..f5c8955
--- /dev/null
+++ b/airflow/airflow-webserver.pid
@@ -0,0 +1 @@
+32
diff --git a/airflow.cfg b/airflow/airflow.cfg
similarity index 78%
rename from airflow.cfg
rename to airflow/airflow.cfg
index 6efbf37..2771560 100644
--- a/airflow.cfg
+++ b/airflow/airflow.cfg
@@ -4,7 +4,7 @@
 #
 # Variable: AIRFLOW__CORE__DAGS_FOLDER
 #
-dags_folder = /Users/apple/Desktop/itb/semester 7/if4054-xops/dags
+dags_folder = /opt/airflow/dags
 
 # Hostname by providing a path to a callable, which will resolve the hostname.
 # The format is "package.function".
@@ -127,7 +127,7 @@ load_examples = True
 #
 # Variable: AIRFLOW__CORE__PLUGINS_FOLDER
 #
-plugins_folder = /Users/apple/Desktop/itb/semester 7/if4054-xops/plugins
+plugins_folder = /opt/airflow/plugins
 
 # Should tasks be executed via forking of the parent process
 # 
@@ -453,7 +453,7 @@ database_access_isolation = False
 #
 # Variable: AIRFLOW__CORE__INTERNAL_API_SECRET_KEY
 #
-internal_api_secret_key = UqNjFyZBYpdJUMvELCKXcw==
+internal_api_secret_key = qbkITk4FtDBCkdn5dS8mNQ==
 
 # The ability to allow testing connections across Airflow UI, API and CLI.
 # Supported options: ``Disabled``, ``Enabled``, ``Hidden``. Default: Disabled
@@ -494,7 +494,7 @@ alembic_ini_file_path = alembic.ini
 #
 # Variable: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
 #
-sql_alchemy_conn = sqlite:////Users/apple/Desktop/itb/semester 7/if4054-xops/airflow.db
+sql_alchemy_conn = sqlite:////opt/airflow/airflow.db
 
 # Extra engine specific keyword args passed to SQLAlchemy's create_engine, as a JSON-encoded value
 #
@@ -635,7 +635,7 @@ check_migrations = True
 #
 # Variable: AIRFLOW__LOGGING__BASE_LOG_FOLDER
 #
-base_log_folder = /Users/apple/Desktop/itb/semester 7/if4054-xops/logs
+base_log_folder = /opt/airflow/logs
 
 # Airflow can store logs remotely in AWS S3, Google Cloud Storage or Elastic Search.
 # Set this to ``True`` if you want to enable remote logging.
@@ -817,7 +817,7 @@ log_processor_filename_template = {{ filename }}.log
 #
 # Variable: AIRFLOW__LOGGING__DAG_PROCESSOR_MANAGER_LOG_LOCATION
 #
-dag_processor_manager_log_location = /Users/apple/Desktop/itb/semester 7/if4054-xops/logs/dag_processor_manager/dag_processor_manager.log
+dag_processor_manager_log_location = /opt/airflow/logs/dag_processor_manager/dag_processor_manager.log
 
 # Whether DAG processor manager will write logs to stdout
 #
@@ -1387,7 +1387,7 @@ access_denied_message = Access is Denied
 #
 # Variable: AIRFLOW__WEBSERVER__CONFIG_FILE
 #
-config_file = /Users/apple/Desktop/itb/semester 7/if4054-xops/webserver_config.py
+config_file = /opt/airflow/webserver_config.py
 
 # The base url of your website: Airflow cannot guess what domain or CNAME you are using.
 # This is used to create links in the Log Url column in the Browse - Task Instances menu,
@@ -1497,7 +1497,7 @@ reload_on_plugin_change = False
 #
 # Variable: AIRFLOW__WEBSERVER__SECRET_KEY
 #
-secret_key = UqNjFyZBYpdJUMvELCKXcw==
+secret_key = qbkITk4FtDBCkdn5dS8mNQ==
 
 # Number of workers to run the Gunicorn web server
 #
@@ -2162,7 +2162,7 @@ orphaned_tasks_check_interval = 300.0
 #
 # Variable: AIRFLOW__SCHEDULER__CHILD_PROCESS_LOG_DIRECTORY
 #
-child_process_log_directory = /Users/apple/Desktop/itb/semester 7/if4054-xops/logs/scheduler
+child_process_log_directory = /opt/airflow/logs/scheduler
 
 # Local task jobs periodically heartbeat to the DB. If the job has
 # not heartbeat in this many seconds, the scheduler will mark the
@@ -2437,6 +2437,489 @@ default_timeout = 604800
 #
 enabled = True
 
+[aws]
+# This section contains settings for Amazon Web Services (AWS) integration.
+
+# session_factory = 
+cloudwatch_task_handler_json_serializer = airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize_legacy
+
+[aws_batch_executor]
+# This section only applies if you are using the AwsBatchExecutor in
+# Airflow's ``[core]`` configuration.
+# For more information on any of these execution parameters, see the link below:
+# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.submit_job
+# For boto3 credential management, see
+# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
+
+conn_id = aws_default
+# region_name = 
+max_submit_job_attempts = 3
+check_health_on_startup = True
+# job_name = 
+# job_queue = 
+# job_definition = 
+# submit_job_kwargs = 
+
+[aws_ecs_executor]
+# This section only applies if you are using the AwsEcsExecutor in
+# Airflow's ``[core]`` configuration.
+# For more information on any of these execution parameters, see the link below:
+# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/run_task.html
+# For boto3 credential management, see
+# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
+
+conn_id = aws_default
+# region_name = 
+assign_public_ip = False
+# cluster = 
+# capacity_provider_strategy = 
+# container_name = 
+# launch_type = 
+platform_version = LATEST
+# security_groups = 
+# subnets = 
+# task_definition = 
+max_run_task_attempts = 3
+# run_task_kwargs = 
+check_health_on_startup = True
+
+[aws_auth_manager]
+# This section only applies if you are using the AwsAuthManager. In other words, if you set
+# ``[core] auth_manager = airflow.providers.amazon.aws.auth_manager.aws_auth_manager.AwsAuthManager`` in
+# Airflow's configuration.
+
+enable = False
+conn_id = aws_default
+# region_name = 
+# saml_metadata_url = 
+# avp_policy_store_id = 
+
+[celery_kubernetes_executor]
+# This section only applies if you are using the ``CeleryKubernetesExecutor`` in
+# ``[core]`` section above
+
+# Define when to send a task to ``KubernetesExecutor`` when using ``CeleryKubernetesExecutor``.
+# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``),
+# the task is executed via ``KubernetesExecutor``,
+# otherwise via ``CeleryExecutor``
+#
+# Variable: AIRFLOW__CELERY_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE
+#
+kubernetes_queue = kubernetes
+
+[celery]
+# This section only applies if you are using the CeleryExecutor in
+# ``[core]`` section above
+
+# The app name that will be used by celery
+#
+# Variable: AIRFLOW__CELERY__CELERY_APP_NAME
+#
+celery_app_name = airflow.providers.celery.executors.celery_executor
+
+# The concurrency that will be used when starting workers with the
+# ``airflow celery worker`` command. This defines the number of task instances that
+# a worker will take, so size up your workers based on the resources on
+# your worker box and the nature of your tasks
+#
+# Variable: AIRFLOW__CELERY__WORKER_CONCURRENCY
+#
+worker_concurrency = 16
+
+# The maximum and minimum number of pool processes that will be used to dynamically resize
+# the pool based on load. Enable autoscaling by providing max_concurrency,min_concurrency
+# with the ``airflow celery worker`` command (always keep minimum processes,
+# but grow to maximum if necessary).
+# Pick these numbers based on resources on worker box and the nature of the task.
+# If autoscale option is available, worker_concurrency will be ignored.
+# https://docs.celeryq.dev/en/latest/reference/celery.bin.worker.html#cmdoption-celery-worker-autoscale
+#
+# Example: worker_autoscale = 16,12
+#
+# Variable: AIRFLOW__CELERY__WORKER_AUTOSCALE
+#
+# worker_autoscale = 
+
+# Used to increase the number of tasks that a worker prefetches, which can improve performance.
+# The number of processes multiplied by worker_prefetch_multiplier is the number of tasks
+# that are prefetched by a worker. A value greater than 1 can result in tasks being unnecessarily
+# blocked: with multiple workers, one worker may prefetch tasks that sit behind long-running
+# tasks while another worker has idle processes that are unable to process the already
+# claimed, blocked tasks.
+# https://docs.celeryq.dev/en/stable/userguide/optimizing.html#prefetch-limits
+#
+# Variable: AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER
+#
+worker_prefetch_multiplier = 1
+
+# Specify if remote control of the workers is enabled.
+# In some cases when the broker does not support remote control, Celery creates lots of
+# ``.*reply-celery-pidbox`` queues. You can prevent this by setting this to false.
+# However, with this disabled Flower won't work.
+# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html#broker-overview
+#
+# Variable: AIRFLOW__CELERY__WORKER_ENABLE_REMOTE_CONTROL
+#
+worker_enable_remote_control = true
+
+# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally
+# a sqlalchemy database. Refer to the Celery documentation for more information.
+#
+# Variable: AIRFLOW__CELERY__BROKER_URL
+#
+broker_url = redis://redis:6379/0
+
+# The Celery result_backend. When a job finishes, it needs to update the
+# metadata of the job. Therefore it will post a message on a message bus,
+# or insert it into a database (depending on the backend).
+# This status is used by the scheduler to update the state of the task
+# The use of a database is highly recommended
+# When not specified, sql_alchemy_conn with a db+ scheme prefix will be used
+# https://docs.celeryq.dev/en/latest/userguide/configuration.html#task-result-backend-settings
+#
+# Example: result_backend = db+postgresql://postgres:airflow@postgres/airflow
+#
+# Variable: AIRFLOW__CELERY__RESULT_BACKEND
+#
+# result_backend = 
+
+# Optional configuration dictionary to pass to the Celery result backend SQLAlchemy engine.
+#
+# Example: result_backend_sqlalchemy_engine_options = {"pool_recycle": 1800}
+#
+# Variable: AIRFLOW__CELERY__RESULT_BACKEND_SQLALCHEMY_ENGINE_OPTIONS
+#
+result_backend_sqlalchemy_engine_options = 
+
+# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
+# it ``airflow celery flower``. This defines the IP that Celery Flower runs on
+#
+# Variable: AIRFLOW__CELERY__FLOWER_HOST
+#
+flower_host = 0.0.0.0
+
+# The root URL for Flower
+#
+# Example: flower_url_prefix = /flower
+#
+# Variable: AIRFLOW__CELERY__FLOWER_URL_PREFIX
+#
+flower_url_prefix = 
+
+# This defines the port that Celery Flower runs on
+#
+# Variable: AIRFLOW__CELERY__FLOWER_PORT
+#
+flower_port = 5555
+
+# Securing Flower with Basic Authentication
+# Accepts user:password pairs separated by a comma
+#
+# Example: flower_basic_auth = user1:password1,user2:password2
+#
+# Variable: AIRFLOW__CELERY__FLOWER_BASIC_AUTH
+#
+flower_basic_auth = 
+
+# How many processes CeleryExecutor uses to sync task state.
+# 0 means to use max(1, number of cores - 1) processes.
+#
+# Variable: AIRFLOW__CELERY__SYNC_PARALLELISM
+#
+sync_parallelism = 0
+
+# Import path for celery configuration options
+#
+# Variable: AIRFLOW__CELERY__CELERY_CONFIG_OPTIONS
+#
+celery_config_options = airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG
+
+#
+# Variable: AIRFLOW__CELERY__SSL_ACTIVE
+#
+ssl_active = False
+
+# Path to the client key.
+#
+# Variable: AIRFLOW__CELERY__SSL_KEY
+#
+ssl_key = 
+
+# Path to the client certificate.
+#
+# Variable: AIRFLOW__CELERY__SSL_CERT
+#
+ssl_cert = 
+
+# Path to the CA certificate.
+#
+# Variable: AIRFLOW__CELERY__SSL_CACERT
+#
+ssl_cacert = 
+
+# Celery Pool implementation.
+# Choices include: ``prefork`` (default), ``eventlet``, ``gevent`` or ``solo``.
+# See:
+# https://docs.celeryq.dev/en/latest/userguide/workers.html#concurrency
+# https://docs.celeryq.dev/en/latest/userguide/concurrency/eventlet.html
+#
+# Variable: AIRFLOW__CELERY__POOL
+#
+pool = prefork
+
+# The number of seconds to wait before timing out ``send_task_to_executor`` or
+# ``fetch_celery_task_state`` operations.
+#
+# Variable: AIRFLOW__CELERY__OPERATION_TIMEOUT
+#
+operation_timeout = 1.0
+
+task_acks_late = True
+# Celery task will report its status as 'started' when the task is executed by a worker.
+# This is used in Airflow to keep track of the running tasks and if a Scheduler is restarted
+# or run in HA mode, it can adopt the orphan tasks launched by previous SchedulerJob.
+#
+# Variable: AIRFLOW__CELERY__TASK_TRACK_STARTED
+#
+task_track_started = True
+
+# The maximum number of retries for publishing task messages to the broker when failing
+# due to an ``AirflowTaskTimeout`` error before giving up and marking the task as failed.
+#
+# Variable: AIRFLOW__CELERY__TASK_PUBLISH_MAX_RETRIES
+#
+task_publish_max_retries = 3
+
+# Worker initialisation check to validate Metadata Database connection
+#
+# Variable: AIRFLOW__CELERY__WORKER_PRECHECK
+#
+worker_precheck = False
+
+[celery_broker_transport_options]
+# This section is for specifying options which can be passed to the
+# underlying celery broker transport. See:
+# https://docs.celeryq.dev/en/latest/userguide/configuration.html#std:setting-broker_transport_options
+
+# The visibility timeout defines the number of seconds to wait for the worker
+# to acknowledge the task before the message is redelivered to another worker.
+# Make sure to increase the visibility timeout to match the time of the longest
+# ETA you're planning to use.
+# visibility_timeout is only supported for Redis and SQS celery brokers.
+# See:
+# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#visibility-timeout
+#
+# Example: visibility_timeout = 21600
+#
+# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__VISIBILITY_TIMEOUT
+#
+# visibility_timeout = 
+
+# The sentinel_kwargs parameter allows passing additional options to the Sentinel client.
+# In a typical scenario where Redis Sentinel is used as the broker and Redis servers are
+# password-protected, the password needs to be passed through this parameter. Although its
+# type is string, the value must be a string that conforms to the dictionary format.
+# See:
+# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#configuration
+#
+# Example: sentinel_kwargs = {"password": "password_for_redis_server"}
+#
+# Variable: AIRFLOW__CELERY_BROKER_TRANSPORT_OPTIONS__SENTINEL_KWARGS
+#
+# sentinel_kwargs = 
+
+[local_kubernetes_executor]
+# This section only applies if you are using the ``LocalKubernetesExecutor`` in
+# ``[core]`` section above
+
+# Define when to send a task to ``KubernetesExecutor`` when using ``LocalKubernetesExecutor``.
+# When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``),
+# the task is executed via ``KubernetesExecutor``,
+# otherwise via ``LocalExecutor``
+#
+# Variable: AIRFLOW__LOCAL_KUBERNETES_EXECUTOR__KUBERNETES_QUEUE
+#
+kubernetes_queue = kubernetes
+
+[kubernetes_executor]
+# Kwargs to override the default urllib3 Retry used in the kubernetes API client
+#
+# Example: api_client_retry_configuration = { "total": 3, "backoff_factor": 0.5 }
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__API_CLIENT_RETRY_CONFIGURATION
+#
+api_client_retry_configuration = 
+
+# Flag to control the information added to kubernetes executor logs for better traceability
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__LOGS_TASK_METADATA
+#
+logs_task_metadata = False
+
+# Path to the YAML pod file that forms the basis for KubernetesExecutor workers.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__POD_TEMPLATE_FILE
+#
+pod_template_file = 
+
+# The repository of the Kubernetes Image for the Worker to Run
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_REPOSITORY
+#
+worker_container_repository = 
+
+# The tag of the Kubernetes Image for the Worker to Run
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_CONTAINER_TAG
+#
+worker_container_tag = 
+
+# The Kubernetes namespace where airflow workers should be created. Defaults to ``default``
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__NAMESPACE
+#
+namespace = default
+
+# If True, all worker pods will be deleted upon termination
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS
+#
+delete_worker_pods = True
+
+# If False (and delete_worker_pods is True),
+# failed worker pods will not be deleted so users can investigate them.
+# This only prevents removal of worker pods where the worker itself failed,
+# not when the task it ran failed.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_WORKER_PODS_ON_FAILURE
+#
+delete_worker_pods_on_failure = False
+
+worker_pod_pending_fatal_container_state_reasons = CreateContainerConfigError,ErrImagePull,CreateContainerError,ImageInspectError,InvalidImageName
+# Number of Kubernetes Worker Pod creation calls per scheduler loop.
+# Note that the current default of "1" will only launch a single pod
+# per-heartbeat. It is HIGHLY recommended that users increase this
+# number to match the tolerance of their kubernetes cluster for
+# better performance.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_CREATION_BATCH_SIZE
+#
+worker_pods_creation_batch_size = 1
+
+# Allows users to launch pods in multiple namespaces.
+# This requires creating a cluster-role for the scheduler,
+# or using the multi_namespace_mode_namespace_list configuration.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE
+#
+multi_namespace_mode = False
+
+# If multi_namespace_mode is True while the scheduler does not have a cluster-role,
+# give the list of namespaces where the scheduler will schedule jobs.
+# The scheduler needs to have the necessary permissions in these namespaces.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__MULTI_NAMESPACE_MODE_NAMESPACE_LIST
+#
+multi_namespace_mode_namespace_list = 
+
+# Use the service account kubernetes gives to pods to connect to the kubernetes cluster.
+# It's intended for clients that expect to be running inside a pod running on kubernetes.
+# It will raise an exception if called from a process not running in a kubernetes environment.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__IN_CLUSTER
+#
+in_cluster = True
+
+# When running with in_cluster=False, change the default cluster_context or config_file
+# options passed to the Kubernetes client. Leave these blank to use the default behaviour, as ``kubectl`` does.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CLUSTER_CONTEXT
+#
+# cluster_context = 
+
+# Path to the kubernetes configfile to be used when ``in_cluster`` is set to False
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__CONFIG_FILE
+#
+# config_file = 
+
+# Keyword parameters to pass while calling kubernetes client core_v1_api methods
+# from the Kubernetes Executor, provided as a single-line, JSON-formatted dictionary string.
+# The list of supported params is similar for all core_v1_apis, hence a single config
+# variable for all apis. See:
+# https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__KUBE_CLIENT_REQUEST_ARGS
+#
+kube_client_request_args = 
+
+# Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client
+# ``core_v1_api`` method when using the Kubernetes Executor.
+# This should be an object and can contain any of the options listed in the ``v1DeleteOptions``
+# class defined here:
+# https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19
+#
+# Example: delete_option_kwargs = {"grace_period_seconds": 10}
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__DELETE_OPTION_KWARGS
+#
+delete_option_kwargs = 
+
+# Enables the TCP keepalive mechanism. This prevents Kubernetes API requests from hanging
+# indefinitely when an idle connection is timed out by services such as cloud load balancers or firewalls.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__ENABLE_TCP_KEEPALIVE
+#
+enable_tcp_keepalive = True
+
+# When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has
+# been idle for `tcp_keep_idle` seconds.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_IDLE
+#
+tcp_keep_idle = 120
+
+# When the `enable_tcp_keepalive` option is enabled, if the Kubernetes API does not respond
+# to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_INTVL
+#
+tcp_keep_intvl = 30
+
+# When the `enable_tcp_keepalive` option is enabled, if the Kubernetes API does not respond
+# to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt` times before
+# the connection is considered broken.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TCP_KEEP_CNT
+#
+tcp_keep_cnt = 6
+
+# Set this to false to skip verifying the SSL certificate of the Kubernetes python client.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__VERIFY_SSL
+#
+verify_ssl = True
+
+# How often in seconds to check for task instances stuck in "queued" status without a pod
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__WORKER_PODS_QUEUED_CHECK_INTERVAL
+#
+worker_pods_queued_check_interval = 60
+
+# Path to a CA certificate to be used by the Kubernetes client to verify the server's SSL certificate.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__SSL_CA_CERT
+#
+ssl_ca_cert = 
+
+# The maximum number of retries for queuing the task to the kubernetes scheduler when
+# failing due to Kube API exceeded-quota errors before giving up and marking the task as failed.
+# Use -1 for unlimited retries.
+#
+# Variable: AIRFLOW__KUBERNETES_EXECUTOR__TASK_PUBLISH_MAX_RETRIES
+#
+task_publish_max_retries = 0
+
 [common.io]
 # Common IO configuration section
 
@@ -2469,6 +2952,87 @@ xcom_objectstorage_threshold = -1
 #
 xcom_objectstorage_compression = 
 
+[elasticsearch]
+# Elasticsearch host
+#
+# Variable: AIRFLOW__ELASTICSEARCH__HOST
+#
+host = 
+
+# Format of the log_id, which is used to query for a given task's logs
+#
+# Variable: AIRFLOW__ELASTICSEARCH__LOG_ID_TEMPLATE
+#
+log_id_template = {dag_id}-{task_id}-{run_id}-{map_index}-{try_number}
+
+# Used to mark the end of a log stream for a task
+#
+# Variable: AIRFLOW__ELASTICSEARCH__END_OF_LOG_MARK
+#
+end_of_log_mark = end_of_log
+
+# Qualified URL for an elasticsearch frontend (like Kibana) with a template argument for log_id
+# Code will construct log_id using the log_id template from the argument above.
+# NOTE: scheme will default to https if one is not provided
+#
+# Example: frontend = http://localhost:5601/app/kibana#/discover?_a=(columns:!(message),query:(language:kuery,query:'log_id: "{log_id}"'),sort:!(log.offset,asc))
+#
+# Variable: AIRFLOW__ELASTICSEARCH__FRONTEND
+#
+frontend = 
+
+# Write the task logs to the stdout of the worker, rather than the default files
+#
+# Variable: AIRFLOW__ELASTICSEARCH__WRITE_STDOUT
+#
+write_stdout = False
+
+# Instead of the default log formatter, write the log lines as JSON
+#
+# Variable: AIRFLOW__ELASTICSEARCH__JSON_FORMAT
+#
+json_format = False
+
+# Log fields to also attach to the json output, if enabled
+#
+# Variable: AIRFLOW__ELASTICSEARCH__JSON_FIELDS
+#
+json_fields = asctime, filename, lineno, levelname, message
+
+# The field where host name is stored (normally either `host` or `host.name`)
+#
+# Variable: AIRFLOW__ELASTICSEARCH__HOST_FIELD
+#
+host_field = host
+
+# The field where offset is stored (normally either `offset` or `log.offset`)
+#
+# Variable: AIRFLOW__ELASTICSEARCH__OFFSET_FIELD
+#
+offset_field = offset
+
+# Comma separated list of index patterns to use when searching for logs (default: `_all`).
+# The index_patterns_callable takes precedence over this.
+#
+# Example: index_patterns = something-*
+#
+# Variable: AIRFLOW__ELASTICSEARCH__INDEX_PATTERNS
+#
+index_patterns = _all
+
+index_patterns_callable = 
+
+[elasticsearch_configs]
+#
+# Variable: AIRFLOW__ELASTICSEARCH_CONFIGS__HTTP_COMPRESS
+#
+http_compress = False
+
+#
+# Variable: AIRFLOW__ELASTICSEARCH_CONFIGS__VERIFY_CERTS
+#
+verify_certs = True
+
 [fab]
 # This section contains configs specific to FAB provider.
 
@@ -2496,6 +3060,123 @@ update_fab_perms = True
 
 # ssl_context = 
 
+[azure_remote_logging]
+# Configuration that needs to be set to enable remote logging in Azure Blob Storage
+
+remote_wasb_log_container = airflow-logs
+
+[openlineage]
+# This section applies settings for OpenLineage integration.
+# More about the configuration and its precedence can be found at
+# https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/guides/user.html#transport-setup
+
+# Disable sending events without uninstalling the OpenLineage Provider by setting this to true.
+#
+# Variable: AIRFLOW__OPENLINEAGE__DISABLED
+#
+disabled = False
+
+# Exclude some Operators from emitting OpenLineage events by passing a string of semicolon separated
+# full import paths of Operators to disable.
+#
+# Example: disabled_for_operators = airflow.providers.standard.operators.bash.BashOperator; airflow.providers.standard.operators.python.PythonOperator
+#
+# Variable: AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS
+#
+disabled_for_operators = 
+
+# If this setting is enabled, the OpenLineage integration won't collect and emit metadata
+# unless you explicitly enable it per `DAG` or `Task` using the `enable_lineage` method.
+#
+# Variable: AIRFLOW__OPENLINEAGE__SELECTIVE_ENABLE
+#
+selective_enable = False
+
+# Set namespace that the lineage data belongs to, so that if you use multiple OpenLineage producers,
+# events coming from them will be logically separated.
+#
+# Example: namespace = my_airflow_instance_1
+#
+# Variable: AIRFLOW__OPENLINEAGE__NAMESPACE
+#
+# namespace = 
+
+# Register custom OpenLineage Extractors by passing a string of semicolon separated full import paths.
+#
+# Example: extractors = full.path.to.ExtractorClass;full.path.to.AnotherExtractorClass
+#
+# Variable: AIRFLOW__OPENLINEAGE__EXTRACTORS
+#
+# extractors = 
+
+# Register custom run facet functions by passing a string of semicolon separated full import paths.
+#
+# Example: custom_run_facets = full.path.to.custom_facet_function;full.path.to.another_custom_facet_function
+#
+# Variable: AIRFLOW__OPENLINEAGE__CUSTOM_RUN_FACETS
+#
+custom_run_facets = 
+
+# Specify the path to the YAML configuration file.
+# This ensures backwards compatibility with passing config through the `openlineage.yml` file.
+#
+# Example: config_path = full/path/to/openlineage.yml
+#
+# Variable: AIRFLOW__OPENLINEAGE__CONFIG_PATH
+#
+config_path = 
+
+# Pass the OpenLineage Client transport configuration as a JSON string. It should contain the type
+# of the transport and additional options (different for each transport type). For more details see:
+# https://openlineage.io/docs/client/python/#built-in-transport-types
+# 
+# Currently supported types are:
+# 
+#   * HTTP
+#   * Kafka
+#   * Console
+#   * File
+#
+# Example: transport = {"type": "http", "url": "http://localhost:5000", "endpoint": "api/v1/lineage"}
+#
+# Variable: AIRFLOW__OPENLINEAGE__TRANSPORT
+#
+transport = 
+
+# Disable the inclusion of source code in OpenLineage events by setting this to `true`.
+# By default, several Operators (e.g. Python, Bash) will include their source code in the events
+# unless disabled.
+#
+# Variable: AIRFLOW__OPENLINEAGE__DISABLE_SOURCE_CODE
+#
+disable_source_code = False
+
+# Number of processes to utilize for processing DAG state changes
+# in an asynchronous manner within the scheduler process.
+#
+# Variable: AIRFLOW__OPENLINEAGE__DAG_STATE_CHANGE_PROCESS_POOL_SIZE
+#
+dag_state_change_process_pool_size = 1
+
+# Maximum amount of time (in seconds) that OpenLineage can spend executing metadata extraction.
+#
+# Variable: AIRFLOW__OPENLINEAGE__EXECUTION_TIMEOUT
+#
+execution_timeout = 10
+
+# If true, the OpenLineage event will include full task info - potentially containing large fields.
+#
+# Variable: AIRFLOW__OPENLINEAGE__INCLUDE_FULL_TASK_INFO
+#
+include_full_task_info = False
+
+# If true, OpenLineage events will include information useful for debugging - potentially
+# containing large fields e.g. all installed packages and their versions.
+#
+# Variable: AIRFLOW__OPENLINEAGE__DEBUG_MODE
+#
+debug_mode = False
+
 [smtp_provider]
 # Options for SMTP provider.
 
diff --git a/airflow.db b/airflow/airflow.db
similarity index 93%
rename from airflow.db
rename to airflow/airflow.db
index 3c32d4229139f85be87e2ceee33a497f96fa02d0..8eeb1c7af263e7ddde55342aa0bc49fc43968beb 100644
GIT binary patch
delta 21564
zcmd^n33wdEvGDZF?Cd$yl5J@%%d*#!Wl30C?R^N0S$xPGj<IZWIm&vES$VOvD|T1d
z#wddf1_CCDvCUxk6OQC1A%Q;zLpC8HB;kMgfbd>INC0ysAqNRBT;b(-3B-T(%&auK
zS{*oq_rC85JUi3XT~%FGUEN(%Jy-s``N}(5KD}TL^HYkVZiIiOzl#radN?R`CeVG!
zQM@<@fMgmj8!v)OWF1_(!*E&H0ha~#yK07SK1y4e8}Z+c)jHn9r&9atEC87={FTD*
z;y>d*;MehQ@h|bq_&ELv{vmz>KZL)GZ^hT+%kd@nVth94!KdH_*dqKD5PVONFw=r|
zii3&lkQhzI2If~h+bf>yE1v5rp669O&#ic#Q}Ha-Rut4!JmZRIq2igZc($<&g{<Ua
zAr~{bn8?LQE*!ZS$b}^rhCTR~+ikx2JVoKZ;5YGa@Jk@IAK>rdZ{VZ&i}<tndf{fl
zEo|Ul<NqJOfgj;1oEH9$7Yh%7^d7TGA2GJL`;l2M!QoswQ;6nbnPhe-dg(~EAP&Vc
zBeB6~)Z_B_yIk%rmw&U{-{bf7xV%ok#|OpBU9KLN>%{z4KnL2K9-V!Hs5Gv(e}HQJ
ztf<zlHMT6N?5d>nH5q#!eVo?>c&hf1pqX2Z%G5-psU$gPS}L_?8C%*a`=U_MM#5}0
zCY`8p0II6p(NN(Jiq;8XGO750s&|bRKiAj-yp-9G&157+FeXYjmeP&6bYm*r7)v)?
z>BeyIrLVTZ0BNA`EBI0TdAtv&a4&AhX5lsA2f{tVjY3A)Ah?8D{tx`q`~&=9{xbdo
zKEyY`0QrUO;J!z&q#1k2=?l06p1}M?Oax7g1;+%~9S*wP_Nl=xuP<Cb6_{WQh3h7R
zB{I^2PM6mo42I`Th7)kW84i1WLGRot!6C2P=bbYZI2a5DyrJ5vV9Di&Veav`y){+g
z0QLnuUY{RN35MprurL+a;|T==ZoWF$>y-TNkk1peO@{jdJzlpn;Pr-FZtIj_kKY^g
zTc!qk{T{cuD%cZphC_ZS5b~I+z<oVFfP4IbpxZb#*yjm&xv9V*zt8Or8>)i?PPaGU
z4TL1NDjdWf2!_2b7c&*u@ACM(A-ai)n9I%82NnZL3x|UNk2?EaPvM90ZkRc)#RGU5
z436ImKNRj44#Rx%m3q7drj{dkm2f>?ApBhTg7BX3C=SB-=o2Ih(|}U3^q`oWU&FMb
z#9%sFh~;-gb0ZnMm1#uqn@MK|>MMbD1ehO5B*c7vUL|m@uH~FcV6CoYjZ_K6x~4)U
zkSD-GE<G?H=4`c0o5Jo+F`gH5mx(!RsYI(G<}9U>d}6zp92pdIW}aEBw1R@f4pXh9
zAsMe=EHxQN%auyT(HhE-uqF674yIHx-h!@S+RQ|@<5*$Fr>Gk0m~q>Ci|ftSH)&9i
z2OL_Ge~-VJH`}hTz6lEYsZ|=Yn5|SxV{0T5p;{YtD<w_%=VlYJ!Oftuv$OGXD<OU*
z<n)F;9+$rw#251TU4d#4Zzu?M>GQ_JE-9Eyg~J{&kz8spyK~H9`UkiHFBf!q+@Z-d
z!9Du@p%AcITB2xfSt7165D16E3OkkD82<roU<7&nA)j{=P2w+uKEH>^M$%SHrCEYB
zf+Z6SOb|{vHzH@HC{Pqd6s3)uO(nydq#fe>JWdyJ0F~MSh}Q`oH4qB>q$+d_lj&?$
z2ZflcO2_RC`vW0&6*>S3fOxBQC&>bt21)CW>LaG<4tUeB)9VFyNfwkksX<3QE~m%k
zhAE_q)F_det~*ktRfF#XLT<m?7pTx2A>=0NfeE1sp(IWEGyCp<;lbP>g##600|q<+
zr_bZ_5#yO;1(b-U>ra*LfVqWyfUm;%RXSduE9iCms}LfN-z<z!!oLZ33M2S#yixcs
z{8K!NTfuQ`6{O{CYXr^c9h8x7o<-f#Ff`4gE*T^e>5x&WJhj#Mb*9cKb*DCDQnzHT
zh8ggJDM;f+$IgIQZ#T?Lh;_=RPXP&=r((F>-=kTIqCiDk_*nTS`Ds|u9s_M1flYDw
zHrQP8Y^r$HZ6*)Ce!IQiEZjxoV_3w`<Nf$_Y!}`V9vAKc)Yl2pGPW^NnySl(rZz*U
zHZXm)arIAMt>hk!N;Sm^N|}KQs(PhQ)T>r)T;Hv1LquDrlq1u77nZ7qZJWzaoOhX)
zr$(pOFVzZ?Gi#o_f~;CkQSH}Uh;^B0Ay46~kO!F$2+Y`SS~h0UYiAP5Fb{)2hhgjr
zj|qsQ#>_*JJ$oF5v=agz(R9)=I@+y1D@(75f;@r9pUtB7GsInDVx*AHW*|sX5J$WA
z6mq+xnUSHmm>ZqMnnKPKa)-$(16kvgH4bcDXq7(Eh&XP2<bap5ncN-QpZH~WcDT^3
z&F$U5O}<;V!f+1nE=LVR&>eIJ-9BH?pUP#299m09dT2PCD>zn1dMy?`z?mHubFo4;
zmj_&h?GmtGDk-%&56ZKd4O+d5=FsE8JV>b^=1cXHvEBJ-Y#^)EB(OqyNSpwx&lZcu
zQ%<~eM9dfR7Wk8rV#-mrV{Px~u~;1Nr<At0U7$vy9EGeSqON$Bv~`fC6D<kF%4VCh
zz?LJOckCH$bEdMnp;)24#F?`|fbAX74T-eusKBR>)QscJiTUAdCNDZxI7%IG4v2;J
zw(W(&aK5LzJ2sqllHg7(J}5fV*=`jJ;g$483HvB*RW!~^^4Uy#hqSdVk<ApuOrfp6
ztlJw&n8=Y$ISSiF2XNctXx=m0Jl+)royK=;0PupM+f@W@I(upBRLekCv5~<-G?p94
zuh`R;FT`>M#cZXv9)exbU%R_wRI6WFI7cCH1uOa3;EFbv!{c?FLjEfuO~eX`?IXi0
z*2V_&qQ#;Ju}qc9z6E$IHz<uwbzn)!6>Vj0O2~<Ef^EV|dXNZoMOmd%uVMo$V1$&B
zCOV!4-4a<zk;3#u3Cd|G>aAt^&X$PkLV;j^(-YTYugXM=W0P1=twbCaGf6R%NW<8S
zi-nz{m{9;k)G*r0M5xN>boc4n=$nw+>7^n)X);+N{GSy5PjFkW;rUjEGnYITZ^}}3
z>N^xfljqB}Pou`U&lvU_7O``fpV7CVqtthhWP0pitz!!d4{yOu7c(|wmmmE!dK1;l
z#}1%ZQC;!2o6s7B=E=V}h`eZSvF;FrQ0c)r^5cimGBj6y>o8hFud9`%!)P7dUn4sX
zqb5`<pMDsvWcIGb#n8=Y9YVZ3CZj(a1kS!=jXY*HER>Jjf<nk9|M(WP7Fp$nBWM+}
z$o)sqmB=i=d<3mVree#jD29+x-gg_?i@2ic|Dg2<8RYXngU&~6@!roM4k1Q<;IruK
z2o;BJM~7=nzoX_*KFaiU*>V&u7H%<r$-LFPl??o`70q(+DEc;Pkv~Gn&$$oHMGkq{
zedsx~L_TpJ3Zq4G_<r<~VKHZ4-zLA^$<CL5#2AEHcPt(Sk2{pk=V3}|-q|Gg9Yed&
zLiq>B&_=YN*z^E8#-K*|sUm7a4f1~#(Ia&CeEIel>4kF5GM1H}cnDz@wDbrF_?sZ$
z4tekedI9Q`uYH03F}Jdvvv2BB>Amq|x`nx_x2^c%L+CLiKm9G#YV`p55!keMbhWCS
zzx6HPeCZe2hY`ae;XO)t4;257Anm14QwwD>nIk}pTHoDBQFX0NbqJYECfE>1bk1k;
zOa<}2pfBw4&NRnOf@`zdKu$_0n<qR`bEE)X3F73Lk-@>5nsI_YwG;qOm-06Yh=0m9
zdI(m~siWa-3cn3Pc@;MZOp969aTWY7tSkZiCawiRF$`r^i(i4TDHK147ga=&l#WcW
z`~b~5n4s;uve7~BU~R{3--TA^*gETj4CMuH@8}!J^yrAr2{6=hDA$)2U+<*t%+jFk
zdlaq{MB!dx6yGL%LpaQTOlT5#zJvdq?R&x<yvg=_ajA>G)z)ceyr>HD;9O=oDc2(p
z6yG?Fp5MYQpzs1<tp-bD2~<Pb>+E9}nY$xqFb8;Dl^jZEwt!*9$jxwkM>6H<$#0Lr
zJ`VP5M3_rg59AZQm*p<qxn?lq^R4b%>Psd=t}sOL0!d%o?M{e(Pco4bU4Gcv#RG|W
z(icu8LjefwLIgIMh$RyKa6BOfOs<g2lXRtm@|{__=}cJW1$*3Hrz_|Q`71Z3umJQq
zy+N0o#A3?!#uE_#7l!FG?sC*9uh(wSUa!&A^~Dp9(1x0x8nlWM)(LZMU$CXEf46+Y
za-C(h`6K2gQ>Wo3Loa(Ti=e{qsUGX&WTZ^|87>5A84$DY)NpZ47yC`xGS(3?+G9a9
z)+--a&MuaFo~HMqp!}Vu>01l|&fXs+<JKwP@DsWL`HTPZ6I!O3v5ZgNe4Jj2Lh|n8
z^b^Rdx`J)b({q*t$MPPdy{QRZ6;@Uwm&I~vGKrRc1M<P=>1XIO-SXBK2}*h7MY;pI
z2nHwX+k0lW{1mtq<SoAOBHfDUv{OF)S^5;@k<-u8D-B(oeM5)pN&fX&dJ*g1dr7DK
z>a%n^ausWyqa#S3`!elAPI=wSbPMV#4!#Vt0Ed{h6tfn3ei|bk2_j3&dhX~BDzaX`
zx}7xa^*~szmL{A%&Cw`C7Zpd@<8XI6nqd{t+~e5OOxB&3XEP!^G;i6wsu@6HazKP-
zOe#w%j6y+5g4pU{HbLslM3Ep+X3FS5E;}-;&`@t-lK@Y|??9E|oR~`Q(gO;)5fK1|
zY<94aCheLDs*!XwksXw>Ia07Ho6C>@o|rpxa75I=sR5{<!dqZ)kkZ8V^k6b4W)$LO
zb2yZ2Z7!|eN!4;_|9A<R+$Snau=<~<t-vO#>L>k)CS@(ks`L|)jtu7uIWaagE`dC0
zN{}YAJ2RCqsRSHvrfssSwJHM<TKc6dAX020Mg+R2d16^x25<+x{;)3;BC`P?h)U7I
z?qS75NZ*OcG>;O@(TVbOqR>o=v?VT}OOZt`l#uGmpwdW`P-!!ia~z&`me$VBRk>_t
zb*vCOOI=J$gPX^ZC}d$=D%iwbU`Xox5B8y(<}|u`%T_s7cdO92SvM09-A0FVS@I$v
zn<KCA^U+jxFexf#*PWFrHkhQovP1+tTlMmUceulYWIdfqi-XC06pRp%z`#?uEkX0q
zBoP<zK(;glS$0_2Xpo}e-CDtpon-H!6iK@k*{1W+yjV~MRY|z8;3rYdksOeOU&1=<
z^`TLzLUb@Y07EE~N)M1*c<B($#6W*QX<PxY*qzwqDt#;OMzchQO$GNluGL$O-wGSn
zTcbAG#>Y>Y^;OthiCR?LRZ!>?yu;ue2<Ih>R-U0;MMO63QSEhwP6Db6(L6*dC8oqU
z=_3fUAjWcJxRqC~;!vb$MifDvU~aLD=0r;Z);dVoDt9J8W2cxN*baFN@YY6QdwggU
z22?Lu8V%(;z6tnH7T)<tW%?@lFEt*a8c-Cv14=9G*7Ze|iYAbfeQEx8!r&CA1CQh8
zwx?of135{Fe-In|5RAQwtxKvW`EZQAimgkau9C#m+gTfXx$B3<vo?BdJ|H$u{Qrk;
z^g5fDW;A+bOhfNJD2p#Qc&RJ`_hDFkxxq`?bTjUhK3Ml&#p25iUi#;{_bL`&Zt&8C
zf0)L-6VS<6_sqsEk$Fq!***Z5uCwkLjXNjl648ij{^wZv)J87-ll;17<%&)ril5Yl
z!$8`~%rG3w#fJLTb>?VAl-}<quYbJ(AN;}laGy62gjeGUe<JP)1;S!D6!XSo?u0i6
zX+x9t4YP^}jEB!``taFZ+Z=bfV4n%sn!8*sxZdwVHE#186h>W?^<vAH%zrZOGVkNQ
z#9qXm!`z9wn6>y^$f;Ux+spUyEj%Q6!RG43!CJ@Pn5*ko6_>WK^)z?UYR*2?tM1c&
z)y%G7#{6f>D_YqH@m1L~jP|q6Lf2fREM!UeXi@QsW$eEqZfk_IXU|eYXJ3GKEpqoV
z)`7>?uQb|sY(ZlKX;=X6lJ9G0yJe}Jy@|QnvO*s1U^k;P<=d9A&GKhESX^XIVXsEq
z!0DWQ&z5miwF*q$)WI$@ud?1pnaGA%4tD~kGZu3u`P<9cQFN+&S|@uH^~kSxvRg<n
z>nZ5ezcFLW*2#X}@I}-szr-8PVy@b>wz$&4{uH4#a$PffpCUmJ9Qpr#7h3}Xs|P40
zAM?u)usRFPqgqI4s-d4UH`K$;dK59~*30Dq0P+UT>kNBC;ee|eL`~0}4Dms}L@4YE
zS3o2{7jg!|UcW!AzOkQeCZSsIO2g<OcvY{3D7;e!siq2fT?o0QrA!IJ2iUS_ChMlh
z>6JD0xx7Ku&Yz+1Gq9C^68k#XmWV#|HMwOep;c}97=<5$mfyyg1lg8;eaqR3h3BrO
zYN_)n)9t2VQ^fdt<4wj|ZogqYw}pL&*}{IAf1AGt-OuN#^ZC<kPf-#jfekHX_Fi<+
zgI_ajMd*U!9NKUPV)piJD*j)=(1+;W&GMB^++z7E!O%`$*e9Df!^P;lVuCY3<aZ+p
zhdSizjRxLoYg5g3jNTwWVKOX7TjT?+Y@KX18zQvy5&0dXL832QFCQ@)mZD86>}jK6
z(UMK)a`x6nG@=Y2b<Y-sxuqbId)VRh$p=jaAKi5hA?8G9lb@wFw+7vCt>LWVy=KEc
z28IMW7sR&?(ZLHzIC-($D;VZ;tIp@_7r5224$<Tew59l0#$cl@s?cAf@N3ZO=h#ul
zwlwI4u3F?QoEIcUsYrf_QT@R;x);#+cYyL0jO+&@bHRDRxN|WH+aG1)9fmvFZs4!t
zuiy*(AfMzv%5Ub+<yZ5k@jkwjZ{?f#I-chZw)bp*1PpDqAeUe%h`0y2IQeCQT#WqE
zL2euQWrJK)`3-O%Q_2H|ewH$$fZTbn;k<f7A9?8HHY#;Gx$~4d#hdOm<mg2S+n*@>
zSNtFNMaaebByJO)6YduJgckny{3yRnb{sXhSMKNgd66IDxA5!vHT(*G5zpZ#@n>*A
zcvbki5aa*G`)z;658`Ve#rsE)*KxLR3jZtqN&afSi?8Pe`I@7K9@_RN=t{dSXoR32
z^dzDDGQqg=O9x}hFB{yZ{sy9IdEjFlYep#0FLym`IIm85=<HMLbZ%7Z6mNRikn20f
zLQ>i)ZWPpTfHJ>}51UxyH;ql)K6Ej6CiM<=fZqiY{TExW^&N}X{4?{<D9ScSJgoi?
znr}Q<>sU{3OUS>O$F<6ztLJLvg>~F=6qnc4aSr)Uq+oX)=VLx0ZYw72+^<<K9GiGk
zQQu(_>~3|B2w|OACLwM&|5)?ozgftwkw3MNYnO2oIP|SeTnjVS_%ZpMh1_a+c@vZu
z7joTnZ$BArYuUtA=X_LtqKWgNc=1FNcQa!7$Ej0*zh$)lV)?)vE{M{_$LDYlqx!z>
zoPBW{f&eniK81Ajl58BpZ}Mm6af{>!>NyAf(E<6od0ew>t>fzCM@ZN1t>fC{?RC(d
zU)OOP<e!kcHTB%7^oFGT3^b8_^<2GdvvUn(Og#zOdkg}dTV;7MmqwP)p=}PH{~^U#
z_fWRGg`ZpZK!qWzw0AkP#NE=5A`xSQ*#slH)!b<ApdtuS^~(DYigK&lX8lG<H`UD6
z$(@+&N8#Ox<RaOrRY3tagt%6IW%qIO+i#z(j_N82N@jhiLAx*Cs!v89bb27v?(%S#
zUMZE$MY_2GvbZ6icyiD8wxA_#s=OLY@<QeEDu4Ssy{JlQ<E1G@J0mffXUKVhOx?<$
zKbhH;ylY7BTX}zM$L3d#Cbg8!DzY+8meofK%uLdPRRAO*LVohr@`-y~SZd;TP%{&)
zzhHv3U|~Fx_BPQ=mP{|PC90gs9(8(gX<76$N(&N+;C+jqtXwK{E0rAifG@Q29?G-%
zH?K5LEiTh!aTVy9q(#!~JYm0=OKH-A{66(KOlirYFaSxY6MZI=6@!;jIER-$SVPap
z(t3`ZxS9Th%&Z3<vxXywS|(#7@~(<yN40Z1bT6lzC(&t0?SfWN6;m}tYIpD9@b~er
z@jYN9S7R1`pIUGrQXg+HfD0-c)0%#m`G(d8Gt#R>^$CnRA1ojZ%fMPgnpWu*8T{|K
zh<^+84gkG>r{*7s%)6kTZE4i69<)YK+@P1&1SvZ-&_ha8R3a}ha5n8#`~nd9E?$ZM
zK-o&Ol`bEhwlIxmt=&XsJn4r8w^596oWc*_m~b4BJT7d;-vDE1>o4Q-q~xwq1TsAb
zYaIcpkJwmQFIBX9KR;WYYVtXU7+RTlCXBoi73?jl9}p{-=pVFXMi9oY@Iv)C8=PsQ
zOK+bUd_Jtd@u%?lP-7jILa+wc>!*_JR8*OhZAh1``oplmoh?ZV*aw4OW#BO5pYf}}
z{&8GyU=T7hEQ8GCK$C$Y&nCL~-kV%~gL$K24AzOr+$S$I8+&0rpEMg6A<ISsH|8a4
z#J=Kwi;+j@Jo#3uvDdPZ<;HAEx%?ki<KLk8Rh#imX!a~`JjBfH=by6u!g|omTVJqT
zVXm|O%CfKc@mk{{qj@9sGOX0hbf4VVpwLM)sC1|YOt2cH`-(R#Q0W|LROnDQs^#))
zjmCEgorNl$CSw;EyO5^vZsCtY8oCH6!((PfIZrrRH_4NnjOm?#SESn8knz_x)5Xvw
z^_>j_(Mhc_L@Ou$sxUL9cdX{qX3vCF9?4gjV3BfEuL=|I*SF5h$1*K$G!Zc*FOd~t
z{EwT5q-vu6z~&*IeV#JYJ1NWS=HD5=X54BRW?!LqvUl@~(bJZG+c4VCp*hr()RPBm
z9iL~}=h0WdfWL^CtGkDbmqm;&8eOVpC_K8#IM-?kX*+3aRzAMUcq7Ub2UZ(@i@1x1
zCZbk%tTV2_SGxv{_TFA}ExcfY6b2azn~HnZ8jUo!eg|idojShjoh$$BT%$p5Jj>Wf
zQx5_g_W|)sif^55bkitTY+i4Ch2{{xoWhp_Yr~MP0*-@Hm-D8{-ds1{X6vJ>uiB?t
zwAYfukFaVdhJ;8SI>|e~8?QfCS<dL^*~!e!nxp%-u@2ZgzPorlvgm3ty+sM=5Qsr|
z#`dV~R@)})ar5b>zc5MD=jpwS4;`W-L_mlcJ4KQ|xx};_T_-=Z#B?P`arTYNY5C4h
zlU=@`*|fy*iLvfcqrI__-s@GglULtfYd_RA@}145L)^zXd!$iqR;PiY?oyp=uV1mJ
zIM8A`-N0QzQe8CkWTz?rsNJ-<W*>-02^OxtoP!|81pE}VyBO&(9ih=K`Gw`C-Ds!0
zrPH+5a2aPmwNAB)uXmbwG*bM2r>T~f`@2mmWZZ39CV!{fbOtlV<mKZ|Q$W6z6#2VN
zZA4ZU#`YeC&k=66k$ksZHkzJuu+}l+xvHMws4e?kkyyY5M?-?au6QgS>hgufU{^St
zaCiB`$yCZMdR@tQ>`L}<FOo;ICWjo#m}=SsfmpzoNTm|4gqRA(5+Pq8<WHpH$$;oh
z#bR!kCzVKr6LEPsV_GUDlCF5%2MJQKxJL|nVgYw504Hl=qQ~oYdlT*$WP1mK{-E0n
zi|1q<iU3U@1Shh*kO@W3^u+w`KrH1C$AAVovII#~zJOm8<Ed0U?DxAwU&tME#l7A@
z%%2Ja{$MH&i+ninl8mP$*P;EjB=ST8R3nf%kRF5-;6wWv$izLg-!>eBbeiqyJY<CI
zKGc6`|6-E<4B5|+P!k*6p3N6}LXd_9j|1I<P^<e;Y(xuRdb(UbZCURRJA-iU%<mfW
z&>+^a?GcwR6%?VYE4;uHbanZX!Bkf)>~eL%am_$H=Ji9`U|8OjHNn}@SxWQJeokRZ
zJhYz{GlweKfaEZ;8}!14IymNmGXR7QKZwQ~cO|<Z?C%Ec#ywrJfY;v@Oa?suSlkDj
zO5`SN1h&QeAy3jDaD!@7@i?3d3yZE8oC*@dZjaX^g6QE$TEagQ7l9rS2Gn$Wz3Pl|
z4TZmopTpM>^TSd*vjxq@<QP(MLWon#kZxnPm}k*Vw{lRSXt>h!w{9c3U&=*f;~6Gj
z-F2q82^qg-yxe%UaRK*R?ji0PjyR0B4aIqDO}%tm(92UD4HWrC1tf}6`AY}WkVB$v
zM%1V7!q9N>tLsb~jS|6KN1aOHsPJu}iT}9mCEID%+br)|wwfO@H<-R+%yKUp?tlZH
z&oaGqEgFNWrN6za>I5nR-$!VLQ-0J{32JgAAfy;yD6I45<=)go^6d3zY_&X+J<Y_J
z$FocH0fK`kK53V>!`Cs!k7+m+L+g&GGo_-oPDh)PQ$ubb!b^U1Jdnq7J_sA6be5i7
za*@hiT?%3KHa`+irgP*T5;ZqP*KRm>^_sql47#@d=Fu^J9uHDmf*K?JkiA=RkeLKY
zN+;uW46DqNRo)3@%OU0SlZ9Q1G|aLOE=|;Xy>p2gmuPC-R7H&tqEHUwk8{)$)4p>+
zSo&{NA0huf)K=1(M7-tuRok2y)JEbs<dX#BLek6V1Q;PXHm+(-BA*;7#aK>SZM8G2
zP5lbOB#u1)d(>7_L)6wb^=MNgg6_#`)1;;jIAy$5hUZRJb74$0*QO1)i>eGbh@dJT
zpeWZ|2tH$@mHao3IgNw&JM5&B^OiNjjQWFv>9Fc57uAHMN+S=Bzg9*!Pg;Nc%=%M5
z0it8-RMm(7FVo+kji`^T4=DSREmbTXbfA0;MJFjO$xu16pd4UukP{5jp5`o^=}(gc
z%jQWZGrI?JkP$vQw#W*qfdvYD0jQB0+gL#mj=YY4UZw2g6Em+mns544{oE?6j)R#`
zF6bxI`Xb9LC@WuXQO0iR^8_<fp7naCgi={$EHclA9rC3Z!cOVHVQva`th!_Uv*l+T
zr^3%dlgiIxMKP6@K+Ym7pQeE?(-3}?kC9Bl3t`2meQ08~yd>kqT`5>q=@_S#j_yM^
zGfRhWKAF{NxpWqBv*Cw)d57>b{>_vrBvh%b>Fw4qGimj!K7_P3HsL^#dKmbFSf`iS
z?Lu~1I|0p<z9RIY*a<U(a9|IsezWL9vJ-k{e$<a7%|=<3b~3A7RhhOHp_%znzsf}T
z`Jn8!7W^&Qw=(<<6y7d0^Ebf$ca)dL=D~LZVMB@<rrKJSzIdjaZ6@ynPpEH*-uHTe
z<eHV16zaQiZIJ9;Wd||Muu=9Y-&&iNF-51^x2UR&KMWJ&$+)8bsT<|GdR3cKZj?cr
WZt?}XX@s^3cFbj4uMc47_5T583hOff

delta 4794
zcmc&&dvFxTnV;^N>6u;a%t#2a=!F&u5Q2To?9S}W3JD~EkZi0V39yYp7W;&?R*%z)
zHyBnRbAAN}f!pBY0{EGuhyfV}9Fw@pDcj}47vl19b{vv(3Ty}S@TH1#Vms&S(E=t;
zQt{m%cU`NV?ytMQ@7LdJzR~7=6`TLP{IQ}j%zlcZ9wvV`+WGIf{fJP#Jue-JZIsH=
zWFc}N>Dulg-RvgPc|)WtnM%5%@dtAIcDNV^<CsQ?yM<RA*BsME@?1rZXr`GFsd}=f
zlA@f{U=`(;{hCKsJhJAJwdz2y+OK+bO_qb9Xe!ycIvz>K`&Y&L!+m|J`1<k{z3G9@
zo^W@(yxLXX5{@?X_I7#dll`92b#JOS8cyLOhk2*Eh@4CNM#;W8@pRWfZ=WwY5b>qs
z-SPB5yx*n!k_lDTRL`2;zO>J~D&DgSzw$CaA<|6$Erg0U5LEmj)k2t#u87B0rv9%G
z?*B1CcqhCSVdFP-s8#!e-k=gtRpb9hheey*u1Zf|CFu>>kSjaNHx9-?7PH5)`_?6Y
zj#*K>em$tF0Sog=%X*#B{&;vG{y#8%x!YBqSeZ)UlZSa})BnZ=e34|2FP!X8q<U9d
z_(y?QgDe(n<ndTU%L%PnIM$u)$&wSv{`9~NdVOru&4pC>R{BkHe{X7(Mg7+-;@#n7
z>L%N|_?mDx@q2Hyw>vAMFPu)V?(L6RvuW3xrCvLUC`P(WNnc8zNxvfl(nbng?@7a#
zzwdM~(m6`{y>wOjP&!A#@GWWh+zXY2Iz&mokzSRamWBv*r8Hc4O4{^|AY>-c_2AEA
z^hN~Ge_{C+_k*Q$z3m&Pwd$Le&Z0g}NpDD(q+dwKEzo0KPf2-sbS3x`*ByTIXT=k4
zCIgsHdu-oylLK{i2jtwNGL!opBLf)$iJ>HhT<mj^&a4G^RS&o*S+`TGRl^$!g^Yl4
zv<|p(_+H|XuTau>=|1du3>4?T&TeEUFk|Rn!zaLDva+3jV>r+C0G;U`k6(QZTm?=%
z@;&eo7-v4Y12h6qfIk=p0Wj7q*a>U^cI4xCcY#unkN<NQm<5~1;P37N%`laRUAw>(
zkcXRgfkt|8ZmwxO4w?ZFaR!6WSP4118gUz9O7QL{fB^*j_7h+(aNxq-U^d9X9lOD1
zV8_4O4dwuwS+NI%0pRh_lVA{VCjSqh1ptKa+zajmta)fJKmaiK#eW1R0Bv@EAMDEI
zPf)p(O4*;s_QRk|TyCFdpXX>F2uD(x+6t^620vyeA!mKD1z|iMa|GmriFnEp@GdCF
zUmXD<P=-TC!8z82oGq2vO>YXUn5Tpzoqh5C?qoWh?CmLEJps3mfHj~L|MLiF1I1?P
zi(rHXCHUtis02m$4<`5tLs8&UVwp<-`U-K`Z(#{OdjjOsTbibtzkCV20ocl~k$n0x
z;3Yp1Vntn*XUd<f<b%o2F{c1M!qk0PcHR8XFwjM7j?0*H!7d_Yy~DihhECe0Ij&Gr
zffyGLi5sOS#gpPL;a;&+6oe|_DRG~`n^R@D$1&`3$>x<BIKF}@q@+T^K$13PVj!1t
zI-QxtHebDsuP1M?(Wo8mMpHmvq$`&2SEpBm{Tg|R1!dAEb<OSRlJ(hh<;MA4deRt}
zq=aOD!XSB0OK4h5CJ8zgFcei&RKrkZBca41K_d_j8gj_cm4L3x0Y7163r1zlFB{PS
z-royL8~oJ);>em;R~01`x+TDr(G?P4K|QEz>R(+kp6!EmJ2vOgL(&)IhF2v>J#2pU
zGsxz;^WYf@o)MeHe8;~y_Bs~lB<*Q?gRPA_&ppT0Fb9~s=u>nafK)5hOpfk@XjT<+
zF7RYs;`mFT!k(!Lan7&~GEI-$nKC@>JRAZBK75|Ula4(CGYN0bVMM(C0xaZ%oKx4q
zgDtq_16X7pzW`T3nA9G>1gDWB(=WkUtcslTtFqj;U4j!Beek{j{=p?U6$H(<FTr|1
zXL|hD_c5FVG`#R*_$E*+pYk$lCN1MO;$0wsdwvVgk^uJMgC{`|@Z;A`g11W=yvSKt
z3ObTKvG|&FcV{fTJe>-6TCS|%Wfx%;@MOL7*^6)tBM&yX@yizpJD>U9MK~GY{3}o;
zTbi%H3g9+VSIGUYJEH4=OFrOo9gt<>l-ZtSCV7TP)lfo%;}b_HXPa%EEsw89^V#3A
z)$~O88XQB_2pOSWs1Pcw{C#WNvY^RaD=_~Gz+60EVveB2jmX*El-0_zAYyTnW(<1k
zxH+Z_m~oKKXtVLtG;<6tX}~RrSwe5=tv8o4%vGYTmO6Y8F>j&evygNBB8!IypFvD6
ze#6F0W*WBIXX5o7vk=sqXE^2<IpC|s2knd-Zww(Gd+f|cP-77evonx0IU6ei<FYKO
z&_gq-&AA`IA3<Or0l8oKMsH43AX{nKollL>!ux{X_D{C;wmbOO`F`#{IE<EYD-aS7
ziV^xV`Y4pdDbxtHVK~pVg&tgd&#}eq5&-Ttj{$ZcfV1zyKNQ%;oI5$EqJV9)N%q2d
z^SH6*7?HgK;N1)GHHN(hw3)3eivZhxBUhB*)rb|~-K}^h$4&x^@Jx;^#BXzK5!^H%
z=X2~PxZpeZ3dc^sCpcEbMLb(cXJ8B7j@ap-J-huK#MUCu9XELj{VRJq{lsTF$)FY2
z^XwFda(y~~8+*HXfM<tjIy2=qJmg@W1I?D}-v;%?Ryr-iI|X*Ujo7$p5ppikEGv}Y
z(+rV2*uK!bn`YaArNp|gthAQZpiD0n+?d<P?kjvm*eYxg285Il6P5}Kg*${fLXDsb
zZW56x2X-G~TgRbxh9WNkH)^w<(9O0olnt0W4zc~PWU1hxq|c>Kq+d#>r0+_V;=AGx
z#CEYlxFkG^%MY`PHY9WiaiLpSB(w;PLalUK+A9UckHiyVSolKFq+`+pQki&0+%MiP
zP7}@xr-hAz2k$t{Ruc@}4(vI_wia*<upqRLKB1dj(gp~GQ*3{`I~Q)J;C5-3@Vaon
z<3mR?w~U*N-a(s@z#d}fGUu5G;A>PPRZDdABw^VJp|%A7R|x6gJ~LR2P64<yiEpbx
z<#=2T%EKFKP!(LZ0uRe*GVZED1^6u)Rf6UCnv7idw6$f5f>iuW4XVWMS~5LXgXZHs
zwP+k}s6|289>dpWpo#c|Ox7xEkPFU_;&rv;aDhT72W!y;6pDQP_9J0eN<SD6n;i<e
zz|fiEW!R;oIrz;Wnu;IMkq-|AQ6rA&WctG(iJZ@ar~)?Ki%;ojE)&}_uLE-iQbE+L
zG0@`xCYNG73ndTKk+g9=XN(TW_~y^`BA0KowX>NHVpCfyF0pY<#NA^yZUV5kvnUg=
zY~5-;V&?<^+VGwnuF2lcpp0Xbjz7uaz98g}9NblM?B4`#C!JrAbKX8=>kxkK_#kK4
zCgi+t-(cRK$L-{7?bNS`;cT!K7Z;Aoi56z%P%rYtu&~v9xF{=Uck!qk>d`D6Uo7Un
zwB(dz<&<(BNHXIe%ck?^h!3VI`(^$Dn$Hh1G5Rb>)6WQHIoE6_b57g(sXvhLAE7AT
zs#Kc*z18=3=DG$>hM+&2i$}BF={X#!>T>$<g*n`#pw~<{avzhua<_T1iR^`odvHe+
zw^+2P6mR9xmK1V^XJpeQsao>!ukYYkTy{HGOx!Uq7aq3+b(x>u&M6S2&GHuRHx}S8
zXWdNVM@dat0oPI@c!-jgif0|KI`%jg<h*B_$^V{?@lV4+S_Kb5gP=|TCNnKVVLXwa
z4j#m>PUJTufShg9Ar9v-PQ0v~pIG$}|0!jKKSgDH_jAtTVmKJEim`Y)K3Yg!e^A@-
z{&IdN+GM4j?D>N0OP$J%J%9gD?FMss1wWHT8<Dd^zab}xM0{o{UzYm-5i_cXIqQ-0
zF6GwsNmYCaSZCH(@w*{di{GEluLW!HA~!#mU5%VG3bHyo;pPRf%6!eu=RruX#2bBl
z5RdioQ}BB}z7n7H@-y*>HT|QP=W&CNH;(2qx%4D7mgFm5Em*>9+&kQMu7xW_m(h>V
zqi8W2OH}n!)~tV)Z-Ta1K%}Y)sZ1Vz?0LS#w<_H4OZ28<);IjXa$k3+zAhLHgmh(%
zo(iq#H<T6Ydh`|DR*|*5$G6aY;d#D|19lU%0w$LeKBT01!iR+aF9c;U0ui-nYjb@}
zi$)U(BkYkAM#Q7)p{OUU7$J`qifSQ6QAjZr-^}dF$KDQJnW`zeU-QR}Kr|AG`V~2$
zD8X1XpczI&j)Y>ta3r7;x`wxP@RK$r;*qGXha<5>ET+T_IW7m{av~8AMUA+g2t+km
z*NuQ0j7I{p9M>XBP%(l*Effia6g^HVI9)YDp)diatE6)C>#=B1RkWbySGBMlB?tn3
z^1&iU6LCe615s5C2g9+17F9{LsL6QRy?n`cR(iAbp5G&@*Z-NKs$L~%5a}5|AX><*
zs5j!8s*v*1qv>(OqbmM{C*%*rJR!v(jMa!DYhj{=D206;ybJH`<nt=Ottwodd`YqO
viA8<^udI-&R=2+Kp!5PIy(_&y4DyuJHkDokzKtzlCB4Y@ZQwrJ$IJf>N#rTA

diff --git a/airflow/dags/__pycache__/welcome_dag.cpython-312.pyc b/airflow/dags/__pycache__/welcome_dag.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f135560f187375c54b9a6628250c0c5dacc4cdd
GIT binary patch
literal 1611
zcmZux&2Jk;6rcUF*XwnRV=FBXl66F?E`qp}^3?{UN_-xgf<Qv@#ag_R*k<i@XU4Q~
zlgbAUtvHkmDo9AE;!@=PAxK;tSy7~skX9-=^wuOjapKLaxA}ls&D%G>`FQXB=Dqnd
zm$MMG`@{a)7dk?Z#2{(uiE{7<DEE+$d?i3_rK%`GR|B=JRW+$=f!;Q%hSc>S)lOAY
zQa6HhHSMRqLsipHgUt9QNJ~Xn@iV^o8-8}E#CDpBZ~M82Q$z4Wv}zXSH9wC{oPGG*
zP@AB(L<=}`49!V&8e7Lu3)?u?&me>?Ar-W4xW&ifAXnkq2bVvF-_&R8tPysub}{i-
zNB}W?Z=Ke>wJ_EhX3aKUsmJzXDV!1g-~|x(5SxU6fTAQEFos-prC<CC2lcRx9Tqy5
zn`AWzub1Iqqf43{_B%Qd2ma$!!Y-?L@pQ%a)@bE=ysZX<rS3Y<$@R4fN^IDbgHE#`
z2Iy}+GqUElb%%)SJmzW&0O{%tP>#FW68MS6j09+johRCGGHwBzCtEU?5Rm*6BC=nG
zK&;cG&dp8NASUeRsnw9QJw}8nZZNTc>%PY@5e8&1L`2A~ID9!(0H6q+E`pG;-+pb!
zn%lDGe!21Mt)Fj==PNt&7q;dvjOX7PTW@dc@BEh)5v641HYi6~X{i95Xor`&i8fH{
z8AFH5lGuIqc>p&RmedaNwg2O4<5`*<V@Y4>&ovn9(u<Xf*KICc>xInwI>1ZKutL0!
zAGUeA9(EY+u>Q>FFo~UT)nN_n0C5+cg`1lTuE9-$uk|oxlxu64alJ*uj-;DRfpe}Z
z7SL|kp_m)O0;VEn$BCJqj+u_l)#N-8L5WV^1ObfaPu<<PvoV@o9_Qa1d=lBEyI1a9
z`TpuppKjS_1|P~~T-dSPEz5ndIJVAh>#xfj;MpTd%`M72!XUpHek|&zZ=G|_zu~+h
zzK>ju+df|PdI76>WR3Cytv9gW3vdm}29s~S0I2X-z0vE+Vt@^4=Fx9zO`qqxvZQNu
zF9-yET!SJaB5h<O?i5c?N{xsd;N{6^DV#h46e><U3bh0v;!WbkWL=Ln1G*$Ur$l6f
z%wJLxnkdXeQ)EHxn^2krFWdwlMg7a@YXUf#@aA$m6yd&4&cYFbANnJRCz_%tf1y*m
zXn7Z%-9;}wLiv4DRm%Ies=T{zr<C)1C6u-zy%0^$-8UZ;e#?#uuSPQqkzIINu+r*4
z+nYr*FGu!^QK=l6*{3;kMjfH)z0;^zj_lJ>X)Z>K>L3rGSn5QjGe?#_nMF$Rhj0rm
Mj@(OQbV*+BzX(-!F#rGn

literal 0
HcmV?d00001

diff --git a/airflow/dags/welcome_dag.py b/airflow/dags/welcome_dag.py
new file mode 100644
index 0000000..7628de6
--- /dev/null
+++ b/airflow/dags/welcome_dag.py
@@ -0,0 +1,87 @@
+from airflow import DAG
+
+from airflow.operators.python import PythonOperator
+
+from airflow.utils.dates import days_ago
+
+from datetime import datetime
+
+import requests
+
+
+
+def print_welcome():
+
+    print('Welcome to Airflow!')
+
+
+
+def print_date():
+
+    print('Today is {}'.format(datetime.today().date()))
+
+
+
+def print_random_quote():
+
+    response = requests.get('https://api.quotable.io/random', timeout=10)
+
+    quote = response.json()['content']
+
+    print('Quote of the day: "{}"'.format(quote))
+
+
+
+dag = DAG(
+
+    'welcome_dag',
+
+    default_args={'start_date': days_ago(1)},
+
+    schedule_interval='0 23 * * *',
+
+    catchup=False
+
+)
+
+
+
+print_welcome_task = PythonOperator(
+
+    task_id='print_welcome',
+
+    python_callable=print_welcome,
+
+    dag=dag
+
+)
+
+
+
+print_date_task = PythonOperator(
+
+    task_id='print_date',
+
+    python_callable=print_date,
+
+    dag=dag
+
+)
+
+
+
+print_random_quote_task = PythonOperator(
+
+    task_id='print_random_quote',
+
+    python_callable=print_random_quote,
+
+    dag=dag
+
+)
+
+
+
+# Set the dependencies between the tasks
+
+print_welcome_task >> print_date_task >> print_random_quote_task
\ No newline at end of file
diff --git a/airflow/standalone_admin_password.txt b/airflow/standalone_admin_password.txt
new file mode 100644
index 0000000..11c68bd
--- /dev/null
+++ b/airflow/standalone_admin_password.txt
@@ -0,0 +1 @@
+SbyPAHK96d2ZCYR3
\ No newline at end of file
diff --git a/airflow/webserver_config.py b/airflow/webserver_config.py
new file mode 100644
index 0000000..3048bb2
--- /dev/null
+++ b/airflow/webserver_config.py
@@ -0,0 +1,132 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Default configuration for the Airflow webserver."""
+
+from __future__ import annotations
+
+import os
+
+from flask_appbuilder.const import AUTH_DB
+
+# from airflow.www.fab_security.manager import AUTH_LDAP
+# from airflow.www.fab_security.manager import AUTH_OAUTH
+# from airflow.www.fab_security.manager import AUTH_OID
+# from airflow.www.fab_security.manager import AUTH_REMOTE_USER
+
+
+basedir = os.path.abspath(os.path.dirname(__file__))
+
+# Flask-WTF flag for CSRF
+WTF_CSRF_ENABLED = True
+WTF_CSRF_TIME_LIMIT = None
+
+# ----------------------------------------------------
+# AUTHENTICATION CONFIG
+# ----------------------------------------------------
+# For details on how to set up each of the following authentication, see
+# http://flask-appbuilder.readthedocs.io/en/latest/security.html#authentication-methods
+# for details.
+
+# The authentication type
+# AUTH_OID : Is for OpenID
+# AUTH_DB : Is for database
+# AUTH_LDAP : Is for LDAP
+# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
+# AUTH_OAUTH : Is for OAuth
+AUTH_TYPE = AUTH_DB
+
+# Uncomment to setup Full admin role name
+# AUTH_ROLE_ADMIN = 'Admin'
+
+# Uncomment and set to desired role to enable access without authentication
+# AUTH_ROLE_PUBLIC = 'Viewer'
+
+# Will allow user self registration
+# AUTH_USER_REGISTRATION = True
+
+# Recaptcha is automatically enabled when user self registration is active and the keys are necessary
+# RECAPTCHA_PRIVATE_KEY = PRIVATE_KEY
+# RECAPTCHA_PUBLIC_KEY = PUBLIC_KEY
+
+# Config for Flask-Mail necessary for user self registration
+# MAIL_SERVER = 'smtp.gmail.com'
+# MAIL_USE_TLS = True
+# MAIL_USERNAME = 'yourappemail@gmail.com'
+# MAIL_PASSWORD = 'passwordformail'
+# MAIL_DEFAULT_SENDER = 'sender@gmail.com'
+
+# The default user self registration role
+# AUTH_USER_REGISTRATION_ROLE = "Public"
+
+# When using OAuth Auth, uncomment to setup provider(s) info
+# Google OAuth example:
+# OAUTH_PROVIDERS = [{
+#   'name':'google',
+#     'token_key':'access_token',
+#     'icon':'fa-google',
+#         'remote_app': {
+#             'api_base_url':'https://www.googleapis.com/oauth2/v2/',
+#             'client_kwargs':{
+#                 'scope': 'email profile'
+#             },
+#             'access_token_url':'https://accounts.google.com/o/oauth2/token',
+#             'authorize_url':'https://accounts.google.com/o/oauth2/auth',
+#             'request_token_url': None,
+#             'client_id': GOOGLE_KEY,
+#             'client_secret': GOOGLE_SECRET_KEY,
+#         }
+# }]
+
+# When using LDAP Auth, setup the ldap server
+# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
+
+# When using OpenID Auth, uncomment to setup OpenID providers.
+# example for OpenID authentication
+# OPENID_PROVIDERS = [
+#    { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
+#    { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
+#    { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
+#    { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
+
+# ----------------------------------------------------
+# Theme CONFIG
+# ----------------------------------------------------
+# Flask App Builder ships with a number of predefined themes
+# that you can use for Apache Airflow.
+# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes
+# Please make sure to remove "navbar_color" configuration from airflow.cfg
+# in order to fully utilize the theme. (or use that property in conjunction with theme)
+# APP_THEME = "bootstrap-theme.css"  # default bootstrap
+# APP_THEME = "amelia.css"
+# APP_THEME = "cerulean.css"
+# APP_THEME = "cosmo.css"
+# APP_THEME = "cyborg.css"
+# APP_THEME = "darkly.css"
+# APP_THEME = "flatly.css"
+# APP_THEME = "journal.css"
+# APP_THEME = "lumen.css"
+# APP_THEME = "paper.css"
+# APP_THEME = "readable.css"
+# APP_THEME = "sandstone.css"
+# APP_THEME = "simplex.css"
+# APP_THEME = "slate.css"
+# APP_THEME = "solar.css"
+# APP_THEME = "spacelab.css"
+# APP_THEME = "superhero.css"
+# APP_THEME = "united.css"
+# APP_THEME = "yeti.css"
diff --git a/dags/example.py b/dags/example.py
deleted file mode 100644
index 2059703..0000000
--- a/dags/example.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from airflow.decorators import dag, task
-from pendulum import datetime
-
-@dag(
-    start_date=datetime(2024, 1, 1),
-    schedule="@daily",
-    catchup=False,
-)
-def minimal_test_dag():
-    
-    @task()
-    def say_hello():
-        print("Hello")
-        return "Hello"
-    
-    say_hello()
-
-minimal_test_dag()
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..2b790a3
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,12 @@
+version: '3'
+services:
+  sleek-airflow:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    volumes:
+      - ./airflow:/opt/airflow
+    ports:
+      - "8080:8080"
+    command: airflow standalone
+
-- 
GitLab