From 5f5a6d385c9f18ae24ce36043382304870116b8d Mon Sep 17 00:00:00 2001
From: Christian Boettcher <c.boettcher@fz-juelich.de>
Date: Thu, 16 Dec 2021 12:24:17 +0100
Subject: [PATCH] working datacat integration, rough adaptation of CI/CD (not
 finished)

---
 .gitlab-ci.yml               | 21 +++++++++++++++++++++
 config/airflow.cfg           |  4 ++--
 dags/test_secrets_backend.py | 21 +++++++++++++++++++++
 dockers/docker-compose.yaml  |  2 ++
 requirements.txt             |  4 ++--
 5 files changed, 48 insertions(+), 4 deletions(-)
 create mode 100644 dags/test_secrets_backend.py

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9fd074e..7bd4a0e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -30,6 +30,24 @@ test:
    - airflow dags test testdag 2021-08-18
    - nosetests
 
+build-custom-image:
+  stage: build
+  image: docker:latest
+  services:
+    - docker:dind
+  when: manual
+  variables:
+    IMAGE_COMMIT_TAG: $CI_REGISTRY_IMAGE/eflows-airflow:$CI_COMMIT_SHORT_SHA
+    IMAGE_LATEST_TAG: $CI_REGISTRY_IMAGE/eflows-airflow:latest
+
+  script:
+    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
+    - docker build --no-cache=true --pull -t $IMAGE_COMMIT_TAG -f dockers/eflows-airflow.docker .
+    - docker push $IMAGE_COMMIT_TAG
+    - docker tag $IMAGE_COMMIT_TAG $IMAGE_LATEST_TAG
+    - docker push $IMAGE_LATEST_TAG
+
+
 deploy-test:
   stage: deploy
   environment: Testing
@@ -40,6 +58,9 @@ deploy-test:
     OS_IDENTITY_API_VERSION: 3
     OS_REGION_NAME: "HDFCloud"
     OS_INTERFACE: public
+    # TODO: set proper values for the env variables - they need to be passed to the machine (alternatively, edit airflow.cfg only inside the machine)
+    AIRFLOW__SECRETS__BACKEND_KWARGS: '{\"url\" : \"https://zam10036.zam.kfa-juelich.de\", \"user\" : \"${DATACAT_TESTING_USERNAME}\", \"password\" : \"${DATACAT_TESTING_PASSWORD}\"}'
+    AIRFLOW__SECRETS__BACKEND: datacat_integration.secrets.DatacatSecretsBackend
     FLOATING_IP: 134.94.199.220
   script:
     - echo "Starting the full testing deployment of airflows example."
diff --git a/config/airflow.cfg b/config/airflow.cfg
index ad90566..e5dba33 100644
--- a/config/airflow.cfg
+++ b/config/airflow.cfg
@@ -347,13 +347,13 @@ statsd_datadog_tags =
 [secrets]
 # Full class name of secrets backend to enable (will precede env vars and metastore in search path)
 # Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend
-backend = datacat_integration.secrets.DatacatSecretsBackend
+backend =
 
 # The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class.
 # See documentation for the secrets backend you are using. JSON is expected.
 # Example for AWS Systems Manager ParameterStore:
 # ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}``
-backend_kwargs = {"url" : "https://zam10036.zam.kfa-juelich.de", "user" : "dls-testing", "password" : "dls-testing-pass"}
+backend_kwargs =
 
 [cli]
 # In what way should the cli access the API. The LocalClient will use the
diff --git a/dags/test_secrets_backend.py b/dags/test_secrets_backend.py
new file mode 100644
index 0000000..f840966
--- /dev/null
+++ b/dags/test_secrets_backend.py
@@ -0,0 +1,21 @@
+
+from airflow.decorators import dag, task
+from airflow.utils.dates import days_ago
+from airflow.hooks.base import BaseHook
+
+default_args = {
+    'owner': 'airflow',
+}
+
+
+@dag(default_args=default_args, schedule_interval=None, start_date=days_ago(2), tags=['example'])
+def test_secrets_backend():
+    @task()
+    def get_print_and_return_connection():
+        conn = BaseHook.get_connection('860355e9-975f-4253-9421-1815e20c879b')
+        print(conn.get_extra())
+
+    get_print_and_return_connection()
+
+
+dag = test_secrets_backend()
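Note: the new test DAG only resolves a connection by its DataCat UUID and prints the extra field; with the backend enabled, Airflow consults the secrets backend before environment variables and the metastore. A rough local sketch of the same lookup outside a DAG, assuming the testing credentials that were previously hard-coded in config/airflow.cfg:

# sketch: resolve the same connection locally, configuring the backend via env vars
import os

os.environ["AIRFLOW__SECRETS__BACKEND"] = "datacat_integration.secrets.DatacatSecretsBackend"
os.environ["AIRFLOW__SECRETS__BACKEND_KWARGS"] = (
    '{"url": "https://zam10036.zam.kfa-juelich.de", '
    '"user": "dls-testing", "password": "dls-testing-pass"}'
)

from airflow.hooks.base import BaseHook  # imported after the env is set

conn = BaseHook.get_connection("860355e9-975f-4253-9421-1815e20c879b")
print(conn.conn_type, conn.host)
print(conn.get_extra())
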
diff --git a/dockers/docker-compose.yaml b/dockers/docker-compose.yaml
index 24c78d4..1bd2a0f 100644
--- a/dockers/docker-compose.yaml
+++ b/dockers/docker-compose.yaml
@@ -56,6 +56,8 @@ x-airflow-common:
     AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
     AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
     AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth'
+    AIRFLOW__SECRETS__BACKEND_KWARGS: ${AIRFLOW__SECRETS__BACKEND_KWARGS}
+    AIRFLOW__SECRETS__BACKEND: ${AIRFLOW__SECRETS__BACKEND}
     _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
   volumes:
     - ./dags:/opt/airflow/dags
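Note: docker-compose only forwards the two variables from the host shell; if they are unset, Compose substitutes empty strings and Airflow silently falls back to its default secrets search path (environment variables, then metastore). A small pre-flight sketch, assuming the variable names used above, that a wrapper around `docker-compose up` could run first:

# sketch: fail fast if the secrets backend configuration would not reach the containers
import os
import sys

required = ("AIRFLOW__SECRETS__BACKEND", "AIRFLOW__SECRETS__BACKEND_KWARGS")
unset = [name for name in required if not os.environ.get(name)]
if unset:
    sys.exit("secrets backend would be disabled, unset variables: " + ", ".join(unset))
print("secrets backend settings will be passed through to the Airflow services")
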
diff --git a/requirements.txt b/requirements.txt
index 0b4ee44..2881e27 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,8 @@
 requests
-urllib3==1.26.6
+urllib3
 plyvel
 apache-airflow-providers-ssh
 apache-airflow-providers-http
 apache-airflow-providers-sftp
 --index-url https://gitlab.jsc.fz-juelich.de/api/v4/projects/4405/packages/pypi/simple
-airflow-datacat-integration>=0.0.4
+airflow-datacat-integration>=0.0.10
-- 
GitLab