From 14086845ab753bd71537acafe0fb327a8f6316fe Mon Sep 17 00:00:00 2001
From: jrybicki-jsc <j.rybicki@fz-juelich.de>
Date: Wed, 10 Nov 2021 11:51:42 +0100
Subject: [PATCH] not beautiful yet but works, variable used for common working
 directory, dag updated accordingly, few bugfixes

---
 dags/taskflow.py            |  6 +++++-
 dockers/connections.json    | 22 ++++++++++++++++++++++
 dockers/docker-compose.yaml | 25 +++++++++++++++----------
 3 files changed, 42 insertions(+), 11 deletions(-)
 create mode 100644 dockers/connections.json

diff --git a/dags/taskflow.py b/dags/taskflow.py
index c0153ed..c86066f 100644
--- a/dags/taskflow.py
+++ b/dags/taskflow.py
@@ -2,6 +2,7 @@
 from airflow.decorators import dag, task
 from airflow.models.connection import Connection
 from airflow.providers.ssh.hooks.ssh import SSHHook
+from airflow.models import Variable
 from airflow.utils.dates import days_ago
 import os
 
@@ -39,9 +40,12 @@ def taskflow_example():
     @task(multiple_outputs=True)
     def transform(flist: dict):
         name_mappings = {}
+        tmp_dir = Variable.get("working_dir", default_var='/tmp/')
+        print(f"Local working dir is: {tmp_dir}")
+        
         for fname, url in flist.items():
             print(f"Processing: {fname} --> {url}")
-            tmpname = download_file(url=url, target_dir='/tmp/')
+            tmpname = download_file(url=url, target_dir=tmp_dir)
             name_mappings[fname] = tmpname
         return name_mappings
 
diff --git a/dockers/connections.json b/dockers/connections.json
new file mode 100644
index 0000000..232a3bf
--- /dev/null
+++ b/dockers/connections.json
@@ -0,0 +1,22 @@
+{
+  "default_b2share": {
+    "conn_type": "https",
+    "description": null,
+    "host": "b2share-testing.fz-juelich.de",
+    "login": null,
+    "password": null,
+    "schema": "",
+    "port": null,
+    "extra": null
+  },
+  "default_ssh": {
+    "conn_type": "ssh",
+    "description": null,
+    "host": "openssh-server",
+    "login": "eflows",
+    "password": "rand",
+    "schema": null,
+    "port": 2222,
+    "extra": null
+  }
+}
\ No newline at end of file
diff --git a/dockers/docker-compose.yaml b/dockers/docker-compose.yaml
index 3102c35..3f0e412 100644
--- a/dockers/docker-compose.yaml
+++ b/dockers/docker-compose.yaml
@@ -59,9 +59,9 @@ x-airflow-common:
     _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
   volumes:
     - ./dags:/opt/airflow/dags
+    - ./config/airflow.cfg:/opt/airflow/airflow.cfg
     - ./logs:/opt/airflow/logs
     - ./plugins:/opt/airflow/plugins
-    - ./config/airflow.cfg:/opt/airflow/airflow.cfg
     - ./templates/main.html:/home/airflow/.local/lib/python3.7/site-packages/airflow/www/templates/airflow/main.html
   user: "${AIRFLOW_UID:-50000}:0"
   depends_on:
@@ -134,7 +134,7 @@ services:
       test:
         - "CMD-SHELL"
         - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
-      interval: 10s
+      interval: 30s
       timeout: 10s
       retries: 5
     environment:
@@ -143,6 +143,11 @@ services:
       # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
       DUMB_INIT_SETSID: "0"
     restart: always
+    volumes:
+      - ./dags:/opt/airflow/dags
+      - ./config/airflow.cfg:/opt/airflow/airflow.cfg
+      - ./logs:/opt/airflow/logs
+      - ./tmp/:/work/
     depends_on:
       <<: *airflow-common-depends-on
       airflow-init:
@@ -153,7 +158,7 @@ services:
     command: triggerer
     healthcheck:
       test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
-      interval: 10s
+      interval: 60s
       timeout: 10s
       retries: 5
     restart: always
@@ -257,14 +262,14 @@ services:
     environment:
       <<: *airflow-common-env
       CONNECTION_CHECK_MAX_COUNT: "0"
-    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
+    entrypoint: /bin/bash
     command:
-      - airflow
-      - connections
-      - add
-      - 'my_conn'
-      - --conn-uri
-      - "http://www.ooo.aaa"
+      - -c
+      - |
+        exec /entrypoint airflow variables import /opt/airflow/variables.json
+        echo "Variables added"
+    volumes:
+      - ./dockers/variables.json:/opt/airflow/variables.json
     depends_on:
       <<: *airflow-common-depends-on
       airflow-init:
-- 
GitLab