diff --git a/dags/taskflow.py b/dags/taskflow.py
index c0153ed3d415f23bdf2adb6e9128132694919f84..c86066f98a352dd536dc1e92fbb1be129f1b29d6 100644
--- a/dags/taskflow.py
+++ b/dags/taskflow.py
@@ -2,6 +2,7 @@
 from airflow.decorators import dag, task
 from airflow.models.connection import Connection
 from airflow.providers.ssh.hooks.ssh import SSHHook
+from airflow.models import Variable
 from airflow.utils.dates import days_ago
 import os
 
@@ -39,9 +40,14 @@ def taskflow_example():
     @task(multiple_outputs=True)
     def transform(flist: dict):
         name_mappings = {}
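+        # The download target comes from the Airflow Variable "working_dir";
+        # default_var keeps the original /tmp/ behaviour when it is unset.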
+        tmp_dir = Variable.get("working_dir", default_var='/tmp/')
+        print(f"Local working dir is: {tmp_dir}")
+
         for fname, url in flist.items():
             print(f"Processing: {fname} --> {url}")
-            tmpname = download_file(url=url, target_dir='/tmp/')
+            tmpname = download_file(url=url, target_dir=tmp_dir)
             name_mappings[fname] = tmpname
         return name_mappings
 
diff --git a/dockers/connections.json b/dockers/connections.json
new file mode 100644
index 0000000000000000000000000000000000000000..232a3bfa48c3b0353d3aca016d0db544637523d9
--- /dev/null
+++ b/dockers/connections.json
@@ -0,0 +1,22 @@
+{
+  "default_b2share": {
+    "conn_type": "https",
+    "description": null,
+    "host": "b2share-testing.fz-juelich.de",
+    "login": null,
+    "password": null,
+    "schema": "",
+    "port": null,
+    "extra": null
+  },
+  "default_ssh": {
+    "conn_type": "ssh",
+    "description": null,
+    "host": "openssh-server",
+    "login": "eflows",
+    "password": "rand",
+    "schema": null,
+    "port": 2222,
+    "extra": null
+  }
+}
diff --git a/dockers/docker-compose.yaml b/dockers/docker-compose.yaml
index 3102c35e738818afff0b8ac2d4eec21ff8e51b93..3f0e412be9ce8b4d862883ec32b66225b9141794 100644
--- a/dockers/docker-compose.yaml
+++ b/dockers/docker-compose.yaml
@@ -59,9 +59,9 @@ x-airflow-common:
     _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
   volumes:
     - ./dags:/opt/airflow/dags
+    - ./config/airflow.cfg:/opt/airflow/airflow.cfg
     - ./logs:/opt/airflow/logs
     - ./plugins:/opt/airflow/plugins
-    - ./config/airflow.cfg:/opt/airflow/airflow.cfg
     - ./templates/main.html:/home/airflow/.local/lib/python3.7/site-packages/airflow/www/templates/airflow/main.html
   user: "${AIRFLOW_UID:-50000}:0"
   depends_on:
@@ -134,7 +134,7 @@ services:
       test:
         - "CMD-SHELL"
         - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
-      interval: 10s
+      interval: 30s
       timeout: 10s
       retries: 5
     environment:
@@ -143,6 +143,14 @@ services:
       # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
       DUMB_INIT_SETSID: "0"
     restart: always
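+    # A service-level volumes key replaces, rather than extends, the list
+    # inherited from x-airflow-common; ./tmp on the host backs /work in the
+    # container, a plausible target for the "working_dir" Variable.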
+    volumes:
+      - ./dags:/opt/airflow/dags
+      - ./config/airflow.cfg:/opt/airflow/airflow.cfg
+      - ./logs:/opt/airflow/logs
+      - ./tmp/:/work/
     depends_on:
       <<: *airflow-common-depends-on
       airflow-init:
@@ -153,7 +161,7 @@ services:
     command: triggerer
     healthcheck:
       test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
-      interval: 10s
+      interval: 60s
       timeout: 10s
       retries: 5
     restart: always
@@ -257,14 +265,20 @@ services:
     environment:
       <<: *airflow-common-env
       CONNECTION_CHECK_MAX_COUNT: "0"
-    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
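+    # Override the image entrypoint with bash so multiple commands can run;
+    # /entrypoint is the stock entrypoint script shipped in the Airflow image.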
+    entrypoint: /bin/bash
     command:
-      - airflow
-      - connections
-      - add
-      - 'my_conn'
-      - --conn-uri
-      - "http://www.ooo.aaa"
+      - -c
+      - |
+        /entrypoint airflow variables import /opt/airflow/variables.json
+        echo "Variables added"
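+        # dockers/connections.json could be imported the same way once mounted,
+        # e.g.: /entrypoint airflow connections import /opt/airflow/connections.json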
+    volumes:
+      - ./dockers/variables.json:/opt/airflow/variables.json
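+      # variables.json is a flat JSON object of name/value pairs, e.g.
+      # {"working_dir": "/work/"} to match the worker's /work mount.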
     depends_on:
       <<: *airflow-common-depends-on
       airflow-init: