Commit 5f5a6d38 authored by Christian Boettcher's avatar Christian Boettcher

working datacat integration, rough adaptation of CI/CD (not finished)

parent fc76e6dd
1 merge request: !2 Datacat integration
@@ -30,6 +30,24 @@ test:
    - airflow dags test testdag 2021-08-18
    - nosetests
build-custom-image:
  stage: build
  image: docker:latest
  services:
    - docker:dind
  when: manual
  variables:
    IMAGE_COMMIT_TAG: $CI_REGISTRY_IMAGE/eflows-airflow:$CI_COMMIT_SHORT_SHA
    IMAGE_LATEST_TAG: $CI_REGISTRY_IMAGE/eflows-airflow:latest
  script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - docker build --no-cache=true --pull -t $IMAGE_COMMIT_TAG -f dockers/eflows-airflow.docker .
    - docker push $IMAGE_COMMIT_TAG
    - docker tag $IMAGE_COMMIT_TAG $IMAGE_LATEST_TAG
    - docker push $IMAGE_LATEST_TAG
deploy-test:
  stage: deploy
  environment: Testing
@@ -40,6 +58,9 @@ deploy-test:
    OS_IDENTITY_API_VERSION: 3
    OS_REGION_NAME: "HDFCloud"
    OS_INTERFACE: public
    # TODO: set proper values for the env variables - they need to be passed to the machine (alternatively, edit airflow.cfg only inside the machine)
    AIRFLOW__SECRETS__BACKEND_KWARGS: '{\"url\" : \"https://zam10036.zam.kfa-juelich.de\", \"user\" : \"${DATACAT_TESTING_USERNAME}\", \"password\" : \"${DATACAT_TESTING_PASSWORD}\"}'
    AIRFLOW__SECRETS__BACKEND: datacat_integration.secrets.DatacatSecretsBackend
    FLOATING_IP: 134.94.199.220
  script:
    - echo "Starting the full testing deployment of the Airflow example."
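The escaped JSON assigned to AIRFLOW__SECRETS__BACKEND_KWARGS above is easy to break, which is what the TODO hints at. A minimal, hedged sanity check (placeholder credentials, structure only) is to parse the value the deploy job ends up exporting:

# Hedged sanity check for the backend_kwargs value set in the deploy-test job:
# after the CI substitutes DATACAT_TESTING_USERNAME/PASSWORD, the resulting string
# must still be valid JSON with exactly these three keys. Values are placeholders.
import json

raw = ('{"url": "https://zam10036.zam.kfa-juelich.de", '
       '"user": "<DATACAT_TESTING_USERNAME>", '
       '"password": "<DATACAT_TESTING_PASSWORD>"}')
kwargs = json.loads(raw)  # raises json.JSONDecodeError if the escaping broke the value
assert set(kwargs) == {"url", "user", "password"}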
@@ -347,13 +347,13 @@ statsd_datadog_tags =
[secrets]
# Full class name of secrets backend to enable (will precede env vars and metastore in search path)
# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend
backend = datacat_integration.secrets.DatacatSecretsBackend
backend =
# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class.
# See documentation for the secrets backend you are using. JSON is expected.
# Example for AWS Systems Manager ParameterStore:
# ``{"connections_prefix": "/airflow/connections", "profile_name": "default"}``
backend_kwargs = {"url" : "https://zam10036.zam.kfa-juelich.de", "user" : "dls-testing", "password" : "dls-testing-pass"}
backend_kwargs =
[cli]
# In what way should the cli access the API. The LocalClient will use the
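As the [secrets] comments above state, Airflow parses backend_kwargs as JSON and passes the resulting dict to __init__ of the class named in backend. The sketch below only illustrates that contract; the real DatacatSecretsBackend ships in the airflow-datacat-integration package and its internals are not shown here.

# Illustrative sketch of the secrets-backend contract described in the [secrets]
# comments; not the actual DatacatSecretsBackend implementation.
from airflow.secrets import BaseSecretsBackend


class SketchSecretsBackend(BaseSecretsBackend):
    # backend_kwargs, e.g. {"url": ..., "user": ..., "password": ...}, lands here.
    def __init__(self, url=None, user=None, password=None, **kwargs):
        super().__init__(**kwargs)
        self.url = url
        self.user = user
        self.password = password

    def get_conn_uri(self, conn_id):
        # Query the external store for conn_id and return an Airflow connection
        # URI string, or None to fall through to environment variables/metastore.
        return None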
from airflow.decorators import dag, task
from airflow.utils.dates import days_ago
from airflow.hooks.base import BaseHook

default_args = {
    'owner': 'airflow',
}

@dag(default_args=default_args, schedule_interval=None, start_date=days_ago(2), tags=['example'])
def test_secrets_backend():

    @task()
    def get_print_and_return_connection():
        # The connection id below is resolved through the configured secrets backend
        # (datacat_integration.secrets.DatacatSecretsBackend) instead of the metastore.
        conn = BaseHook.get_connection('860355e9-975f-4253-9421-1815e20c879b')
        print(conn.get_extra())

    get_print_and_return_connection()

dag = test_secrets_backend()
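The lookup the task performs can also be reproduced outside a DAG run for debugging, assuming the machine has the datacat secrets backend configured; the UUID is simply the example connection id used in the DAG above.

# Hedged debugging sketch: repeat the task's lookup from a plain Python shell.
from airflow.hooks.base import BaseHook

conn = BaseHook.get_connection('860355e9-975f-4253-9421-1815e20c879b')
print(conn.conn_type, conn.host)
print(conn.get_extra())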
@@ -56,6 +56,8 @@ x-airflow-common:
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth'
    AIRFLOW__SECRETS__BACKEND_KWARGS: ${AIRFLOW__SECRETS__BACKEND_KWARGS}
    AIRFLOW__SECRETS__BACKEND: ${AIRFLOW__SECRETS__BACKEND}
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
  volumes:
    - ./dags:/opt/airflow/dags
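The two new environment entries above are substituted by docker-compose from the shell environment or a .env file. Below is a hedged sketch of generating those .env lines in a way that avoids the quoting issues seen in the CI file; the file name and variable sources are assumptions.

# Hedged sketch: write the two variables docker-compose substitutes above into
# a .env file; json.dumps keeps the backend_kwargs value well-formed.
import json
import os

backend = "datacat_integration.secrets.DatacatSecretsBackend"
backend_kwargs = json.dumps({
    "url": "https://zam10036.zam.kfa-juelich.de",
    "user": os.environ.get("DATACAT_TESTING_USERNAME", ""),
    "password": os.environ.get("DATACAT_TESTING_PASSWORD", ""),
})

with open(".env", "a") as env_file:
    env_file.write(f"AIRFLOW__SECRETS__BACKEND={backend}\n")
    env_file.write(f"AIRFLOW__SECRETS__BACKEND_KWARGS={backend_kwargs}\n")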
requests
urllib3==1.26.6
urllib3
plyvel
apache-airflow-providers-ssh
apache-airflow-providers-http
apache-airflow-providers-sftp
--index-url https://gitlab.jsc.fz-juelich.de/api/v4/projects/4405/packages/pypi/simple
airflow-datacat-integration>=0.0.4
airflow-datacat-integration>=0.0.10