Commit be925487 authored by Christian Boettcher

sync dags from dag repo

parent 67208e62
Pipeline #106796 passed
.gitlab-ci.yml
@@ -14,6 +14,7 @@ variables:
PRODUCTION_DOMAIN: datalogistics.eflows4hpc.eu
AIRFLOW_TESTUSER: "airflow"
AIRFLOW__SECRETS__BACKEND: datacat_integration.secrets.DatacatSecretsBackend
+ DAG_GIT_URL: https://github.com/eflows4hpc/dls-dags
VOLUME_ID: 6b58c3a6-691b-496a-8afd-153637c2de48
DOCKER_TLS_CERTDIR: ""
@@ -102,7 +103,7 @@ full-deploy-production:
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo mkdir -p /persistent_data && sudo mount /dev/vdb1 /persistent_data"
- until ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP ls /finished_cloudinit >/dev/null 2>&1; do sleep 30; done # wait until cloudinit script is complete
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo service docker restart" # to use the configured docker data path
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY"
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY $DAG_GIT_URL"
- echo "Done"
# NOTE Light deployment did not perform well when the template/main.html file was changed (in case of the official airflow image being updated)
@@ -119,7 +120,7 @@ light-deploy-production:
environment: Production
script:
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "cd /home/airflow/data-logistics-service && git stash && git stash clear && git checkout main && git checkout -f $CI_COMMIT_TAG && git pull --all"
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS" $AIRFLOW_FERNET_KEY
- ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY $DAG_GIT_URL"
test-production-webserver:
cache: {}
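Two things change in the deploy jobs above. First, both now append $DAG_GIT_URL as a seventh argument to deployment.sh. Second, the light-deploy line also fixes a quoting bug: the closing double quote used to sit before $AIRFLOW_FERNET_KEY, so the key was handed to ssh as a separate local argument instead of reaching the remote deployment.sh. For orientation, here is the remote command both jobs now run, spelled out with the literal values from the variables block; this is an illustrative reconstruction, not a line from the repo, and the backend kwargs and Fernet key stay as variables because they are masked CI secrets:

# Illustrative reconstruction of the remote deployment call.
sudo /home/airflow/data-logistics-service/scripts/deployment.sh \
    /home/airflow \
    /home/airflow/data-logistics-service \
    datalogistics.eflows4hpc.eu \
    datacat_integration.secrets.DatacatSecretsBackend \
    "$AIRFLOW__SECRETS__BACKEND_KWARGS" \
    "$AIRFLOW_FERNET_KEY" \
    https://github.com/eflows4hpc/dls-dags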
cloud-init user data
@@ -69,6 +69,7 @@ runcmd:
- sudo -u airflow git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
- cd ./data-logistics-service
- crontab -l | { cat ; echo '@daily find /persistent_data/logs -mtime +13 -type f -delete'; } | crontab - # set up log-clearing crontab (user crontabs have no user column, so the stray user field is dropped here)
+ - crontab -l | { cat ; echo '*/5 * * * * cd /home/airflow/eflows-airflow/dags && sudo git pull >/dev/null 2>&1'; } | crontab - # set up the DAG-sync crontab
- touch /finished_cloudinit
final_message: "The system is finally up, after $UPTIME seconds"
\ No newline at end of file
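The new cron entry polls the DAG repository every five minutes, so updated DAGs reach the running Airflow instance without a redeployment. A hedged, slightly hardened variant of the same entry (not the committed line) would avoid the cd and the redundant sudo in root's crontab, and refuse non-fast-forward updates:

# Hypothetical alternative to the committed entry; git -C runs the pull
# inside the dags checkout without changing directory first.
*/5 * * * * git -C /home/airflow/eflows-airflow/dags pull --ff-only >/dev/null 2>&1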
scripts/deployment.sh
@@ -2,7 +2,7 @@
# @author Maria Petrova & Christian Böttcher
## USAGE:
#
- # deployment.sh <user_home_directory> <git_directory> [SERVER_DOMAIN] [AIRFLOW__SECRETS__BACKEND] [AIRFLOW__SECRETS__BACKEND_KWARGS]
+ # deployment.sh <user_home_directory> <git_directory> [SERVER_DOMAIN] [AIRFLOW__SECRETS__BACKEND] [AIRFLOW__SECRETS__BACKEND_KWARGS] [AIRFLOW__CORE__FERNET_KEY] [DAG_GIT_URL]
OLD_DIR=`pwd`
GIT_REPO=$HOME/data-logistics-service
@@ -15,6 +15,7 @@ if [ -z ${3+x} ]; then export SERVER_DOMAIN=dls.fz-juelich.de; else export SERVER_DOMAIN=$3; fi
if [ -z ${4+x} ]; then unset AIRFLOW__SECRETS__BACKEND; else export AIRFLOW__SECRETS__BACKEND=$4; fi
if [ -z ${5+x} ]; then unset AIRFLOW__SECRETS__BACKEND_KWARGS; else export AIRFLOW__SECRETS__BACKEND_KWARGS=$5; fi
if [ -z ${6+x} ]; then unset AIRFLOW__CORE__FERNET_KEY; else export AIRFLOW__CORE__FERNET_KEY=$6; fi
+ if [ -z ${7+x} ]; then unset DAG_GIT_URL; else export DAG_GIT_URL=$7; fi
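The ${N+x} expansions in this block are the usual bash presence test: ${6+x} expands to x only when a sixth positional parameter was passed, so [ -z ${6+x} ] is true exactly when the argument is missing. Note that the added line must test ${7+x}, since it consumes $7. A standalone sketch of the idiom, illustrative rather than taken from the repo:

#!/bin/bash
# Demonstrates the ${N+x} presence test used by deployment.sh.
set -- one two                     # simulate a script called with two arguments
if [ -z "${3+x}" ]; then
    echo "third argument absent"   # this branch runs: only $1 and $2 are set
else
    echo "third argument: $3"
fi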
@@ -22,6 +23,7 @@ echo "DEBUG values: OLD_DIR=$OLD_DIR, ENTRYPOINT_DIR=$ENTRYPOINT and GIT_REPO=$GIT_REPO"
echo "DEBUG using secrets backend: $AIRFLOW__SECRETS__BACKEND"
echo "DEBUG backend args length: ${#AIRFLOW__SECRETS__BACKEND_KWARGS}"
#echo "DEBUG fernet key: ${AIRFLOW__CORE__FERNET_KEY}"
echo "DEBUG DAG git dir: $DAG_GIT_URL"
cd $ENTRYPOINT
@@ -38,7 +40,7 @@ echo "Proceeding as user $(whoami)"
# Make the necessary folders for the airflow artefacts and copy the corresponding content
mkdir -p ./dags ./logs ./plugins ./config ./templates
cd $GIT_REPO
- cp -r dags/* $AIRFLOW_DIR/dags
+ git clone $DAG_GIT_URL $AIRFLOW_DIR/dags
cp -r plugins/* $AIRFLOW_DIR/plugins
cp config/* $AIRFLOW_DIR/config/
cp -r templates/* $AIRFLOW_DIR/templates
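One operational caveat with swapping cp for git clone: clone refuses a destination directory that already exists and is not empty, which can happen on a repeated deployment once the cron job above has populated the dags folder. A re-run-safe sketch under that assumption (the committed script clones unconditionally):

# Hypothetical clone-or-pull guard, not part of this commit.
if [ -d "$AIRFLOW_DIR/dags/.git" ]; then
    git -C "$AIRFLOW_DIR/dags" pull --ff-only    # already cloned: just update
else
    git clone "$DAG_GIT_URL" "$AIRFLOW_DIR/dags" # first deployment: clone fresh
fi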