diff --git a/airflow/airflow.db b/airflow/airflow.db
index f9568a1d587b8d949abc3e561275d03af28b8bb5..67be4f24f56cd61a813f0314f1c844c8e82f8406 100644
Binary files a/airflow/airflow.db and b/airflow/airflow.db differ
diff --git a/airflow/dags/__pycache__/test_dag.cpython-312.pyc b/airflow/dags/__pycache__/test_dag.cpython-312.pyc
index 11ece8b187243d74b82c12a08c5b121e96c3687a..22946e5a85413bd83171a6d27aa855e8815f73c6 100644
Binary files a/airflow/dags/__pycache__/test_dag.cpython-312.pyc and b/airflow/dags/__pycache__/test_dag.cpython-312.pyc differ
diff --git a/docker-compose.yml b/docker-compose.yml
index a8d278d7a3235f615920ce1fc189fd0d766a5759..71a008bf5d8c86497764b0fff7dacc4ebe8d02e0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -11,11 +11,11 @@ services:
       - "8080:8080"
     networks:
       - airflow-spark-network
-    environment:
-      - AIRFLOW_CONN_SPARK_DEFAULT=spark://spark-master:7077
+    depends_on:
+      - spark-master
     command: bash -c "rm -f /opt/airflow/airflow-webserver.pid && airflow db init && (airflow scheduler & airflow webserver)"
   spark-master:
-    image: andreper/spark-master:3.0.0
+    image: bde2020/spark-master:3.3.0-hadoop3.3
     container_name: spark-master
     ports:
       - 8081:8080
@@ -25,7 +25,7 @@ services:
     volumes:
       - ./workspace:/opt/workspace
   spark-worker-1:
-    image: andreper/spark-worker:3.0.0
+    image: bde2020/spark-worker:3.3.0-hadoop3.3
     container_name: spark-worker-1
     environment:
       - SPARK_WORKER_CORES=1
@@ -39,7 +39,7 @@ services:
     depends_on:
      - spark-master
   spark-worker-2:
-    image: andreper/spark-worker:3.0.0
+    image: bde2020/spark-worker:3.3.0-hadoop3.3
     container_name: spark-worker-2
     environment:
       - SPARK_WORKER_CORES=1
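
Note: this change drops the AIRFLOW_CONN_SPARK_DEFAULT environment variable, so the Airflow container no longer receives a spark_default connection pointing at spark://spark-master:7077. A minimal sketch of recreating that connection via the Airflow CLI after the stack is up; the service name "airflow" is an assumption, since the webserver service's name is not visible in this hunk:

    # Hypothetical service name "airflow"; recreates the connection the removed
    # AIRFLOW_CONN_SPARK_DEFAULT variable used to provide.
    docker compose exec airflow \
        airflow connections add spark_default \
        --conn-uri 'spark://spark-master:7077'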