.gitlab-ci.yml
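# CI/CD pipeline for the eflows4HPC data logistics service (Apache Airflow):
# test the DAGs, build and publish a custom Airflow image, deploy it
# (judging by the variables, to an OpenStack host on HDF Cloud), verify the
# deployment, and clean up.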
default:
  image: python:3.9-slim
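# Global variables shared by all jobs:
# - OS_*: standard OpenStack client settings for v3 application-credential
#   auth against the HDF Cloud Keystone endpoint (the credential ID/secret are
#   presumably injected as protected CI/CD variables; assumption).
# - AIRFLOW__SECRETS__*: Airflow's AIRFLOW__<SECTION>__<KEY> env-var
#   convention, pointing the secrets backend at the datacat_integration package.
# - DOCKER_TLS_CERTDIR: "" disables TLS for the docker:dind service used in
#   the build stage.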
variables:
  OS_AUTH_TYPE: v3applicationcredential
  OS_AUTH_URL: https://hdf-cloud.fz-juelich.de:5000
  OS_IDENTITY_API_VERSION: 3
  OS_REGION_NAME: "HDFCloud"
  OS_INTERFACE: public
  PRODUCTION_IP: 134.94.199.220
  OLD_PROD_NAME: old-airflow-production
  PRODUCTION_NAME: airflow-production
  PRODUCTION_URL: https://datalogistics.eflows4hpc.eu
  TESTING_DOMAIN: datalogistics.eflows4hpc.eu
  AIRFLOW_TESTUSER: "airflow"
  AIRFLOW__SECRETS__BACKEND_KWARGS: $TESTING_AIRFLOW__SECRETS__BACKEND_KWARGS
  AIRFLOW__SECRETS__BACKEND: datacat_integration.secrets.DatacatSecretsBackend
  VOLUME_ID: 6b58c3a6-691b-496a-8afd-153637c2de48
  DOCKER_TLS_CERTDIR: ""
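# `.before_script_template` below is a hidden job (leading dot), used only as
# the YAML anchor &ssh_setup: it starts an ssh-agent and loads $SSH_PRIVATE_KEY,
# presumably so that deploy jobs can reach the target hosts over SSH. A later
# job can pull it in via a YAML merge, e.g. (hypothetical sketch, not part of
# this file):
#   deploy:
#     stage: deploy
#     <<: *ssh_setup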
# before script copied from gitlab docs
.before_script_template: &ssh_setup
  before_script:
    - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client gcc libxslt-dev libffi-dev libssl-dev build-essential python3-dev -y )'
    - eval $(ssh-agent -s)
    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
    - mkdir -p ~/.ssh
    - chmod 700 ~/.ssh
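# Pipeline stages, executed in the order listed.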
stages:
  - test
  - build
  - publish
  - deploy
  - test-deployment
  - cleanup
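# Test job: runs inside the project's own eflows-airflow image; the empty
# entrypoint override lets the runner execute the commands below directly.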
test:
  stage: test
  image:
    name: $CI_REGISTRY_IMAGE/eflows-airflow:latest
    entrypoint: [""]
  before_script:
    - echo "DEBUG:"
    - pip --version
    - airflow db init
    - pip install -r requirements.txt
    - pip install nose==1.3.7
    - airflow connections add --conn-uri https://b2share-testing.fz-juelich.de/ default_b2share
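  # Copy the repository DAGs into the Airflow DAG folder, smoke-test `testdag`
  # for a fixed execution date, then run the nose test suite.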
  script:
    - ls
    - pwd
    - cp dags/* /opt/airflow/dags/
    - airflow dags list
    - airflow connections list
    - airflow dags test testdag 2021-08-18
    - nosetests
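# Manually triggered image build: uses Docker-in-Docker on the runner tagged
# "laptop" and tags the resulting image with the short commit SHA in the
# project's container registry.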
build-custom-image:
  stage: build
  image: docker:latest
  services:
    - docker:dind
  when: manual
  tags:
    - laptop
  variables:
    IMAGE_COMMIT_TAG: $CI_REGISTRY_IMAGE/eflows-airflow:$CI_COMMIT_SHORT_SHA