diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6fd6f204e71a104c8f151ce34c311807235bf6aa..3b978f3a3ad6b56f3fe322ae500325b755e145ff 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -14,6 +14,7 @@ variables:
   PRODUCTION_DOMAIN: datalogistics.eflows4hpc.eu
   AIRFLOW_TESTUSER: "airflow"
   AIRFLOW__SECRETS__BACKEND: datacat_integration.secrets.DatacatSecretsBackend
+  DAG_GIT_URL: https://github.com/eflows4hpc/dls-dags
   VOLUME_ID: 6b58c3a6-691b-496a-8afd-153637c2de48
   DOCKER_TLS_CERTDIR: ""

@@ -102,7 +103,7 @@ full-deploy-production:
     - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo mkdir -p /persistent_data && sudo mount /dev/vdb1 /persistent_data"
     - until ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP ls /finished_cloudinit >/dev/null 2>&1; do sleep 30; done # wait until cloudinit script is complete
     - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo service docker restart" # to use the configured docker data path
-    - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY"
+    - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY $DAG_GIT_URL"
     - echo "Done"

 # NOTE Light deployment did not perform well when the template/main.html file was changed (in case of the official airflow image being updated)
@@ -119,7 +120,7 @@ light-deploy-production:
   environment: Production
   script:
     - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "cd /home/airflow/data-logistics-service && git stash && git stash clear && git checkout main && git checkout -f $CI_COMMIT_TAG && git pull --all"
-    - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS" $AIRFLOW_FERNET_KEY
+    - ssh -oStrictHostKeyChecking=accept-new airflow@$PRODUCTION_IP "sudo /home/airflow/data-logistics-service/scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service $PRODUCTION_DOMAIN $AIRFLOW__SECRETS__BACKEND $AIRFLOW__SECRETS__BACKEND_KWARGS $AIRFLOW_FERNET_KEY $DAG_GIT_URL"

 test-production-webserver:
   cache: {}
diff --git a/scripts/cloudinit.yml b/scripts/cloudinit.yml
index 65e09ede421ec01f6974fcd9c59a04a80b19be56..010991b03b61d40cbfd8a9a402d76b146d914e37 100644
--- a/scripts/cloudinit.yml
+++ b/scripts/cloudinit.yml
@@ -69,6 +69,7 @@ runcmd:
   - sudo -u airflow git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
   - cd ./data-logistics-service
   - crontab -l | { cat ; echo '@daily root find /persistent_data/logs -mtime +13 -type f -delete'; } | crontab - # setup log clearing crontab
+  - crontab -l | { cat ; echo '*/5 * * * * cd /home/airflow/eflows-airflow/dags && sudo git pull >/dev/null 2>&1'; } | crontab - # setup dag crontab
   - touch /finished_cloudinit

 final_message: "The system is finally up, after $UPTIME seconds"
\ No newline at end of file
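Taken together, the two crontab lines in runcmd should leave the VM's crontab in roughly the following state once cloudinit finishes. This is a sketch for orientation only; /home/airflow/eflows-airflow/dags is assumed to be the $AIRFLOW_DIR/dags checkout that deployment.sh (next file) clones from $DAG_GIT_URL.

    # crontab -l  (expected state after cloudinit completes; sketch)
    @daily root find /persistent_data/logs -mtime +13 -type f -delete
    */5 * * * * cd /home/airflow/eflows-airflow/dags && sudo git pull >/dev/null 2>&1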
diff --git a/scripts/deployment.sh b/scripts/deployment.sh
index 5f0930aef687d199ebde04a67a9ff2f28ffd0a3a..4517eca9af9ce60a07147db42cd39f208c7db1ef 100755
--- a/scripts/deployment.sh
+++ b/scripts/deployment.sh
@@ -2,7 +2,7 @@
 # @author Maria Petrova & Christian Böttcher
 ## USAGE:
 #
-# deployment.sh <user_home_directory> <git_directory> [SERVER_DOMAIN] [AIRFLOW__SECRETS__BACKEND] [AIRFLOW__SECRETS__BACKEND_KWARGS]
+# deployment.sh <user_home_directory> <git_directory> [SERVER_DOMAIN] [AIRFLOW__SECRETS__BACKEND] [AIRFLOW__SECRETS__BACKEND_KWARGS] [AIRFLOW__CORE__FERNET_KEY] [DAG_GIT_URL]

 OLD_DIR=`pwd`
 GIT_REPO=$HOME/data-logistics-service
@@ -15,6 +15,7 @@ if [ -z ${3+x} ]; then export SERVER_DOMAIN=dls.fz-juelich.de; else export SERVER_DOMAIN=$3; fi
 if [ -z ${4+x} ]; then unset AIRFLOW__SECRETS__BACKEND; else export AIRFLOW__SECRETS__BACKEND=$4; fi
 if [ -z ${5+x} ]; then unset AIRFLOW__SECRETS__BACKEND_KWARGS; else export AIRFLOW__SECRETS__BACKEND_KWARGS=$5; fi
 if [ -z ${6+x} ]; then unset AIRFLOW__CORE__FERNET_KEY; else export AIRFLOW__CORE__FERNET_KEY=$6; fi
+if [ -z ${7+x} ]; then unset DAG_GIT_URL; else export DAG_GIT_URL=$7; fi

@@ -22,6 +23,7 @@ echo "DEBUG values: OLD_DIR=$OLD_DIR, ENTRYPOINT_DIR=$ENTRYPOINT and GIT_REPO=$GIT_REPO"
 echo "DEBUG using secrets backend: $AIRFLOW__SECRETS__BACKEND"
 echo "DEBUG backend args length: ${#AIRFLOW__SECRETS__BACKEND_KWARGS}"
 #echo "DEBUG fernet key: ${AIRFLOW__CORE__FERNET_KEY}"
+echo "DEBUG DAG git dir: $DAG_GIT_URL"

 cd $ENTRYPOINT

@@ -38,7 +40,7 @@ echo "Proceeding as user $(whoami)"
 # Make the necessary folders for the airflow artefacts and copy the corresponging content
 mkdir -p ./dags ./logs ./plugins ./config ./templates
 cd $GIT_REPO
-cp -r dags/* $AIRFLOW_DIR/dags
+git clone $DAG_GIT_URL $AIRFLOW_DIR/dags
 cp -r plugins/* $AIRFLOW_DIR/plugins
 cp config/* $AIRFLOW_DIR/config/
 cp -r templates/* $AIRFLOW_DIR/templates
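For reference, the argument list that the CI jobs above now pass to deployment.sh expands to something like the following. This is a sketch: the secrets-backend kwargs and fernet key are masked CI variables, shown here as shell expansions rather than literal values.

    sudo /home/airflow/data-logistics-service/scripts/deployment.sh \
      /home/airflow \
      /home/airflow/data-logistics-service \
      datalogistics.eflows4hpc.eu \
      datacat_integration.secrets.DatacatSecretsBackend \
      "$AIRFLOW__SECRETS__BACKEND_KWARGS" \
      "$AIRFLOW_FERNET_KEY" \
      https://github.com/eflows4hpc/dls-dags

One caveat on the new clone step: git clone refuses to clone into an existing non-empty directory, so the "git clone $DAG_GIT_URL $AIRFLOW_DIR/dags" line only succeeds when $AIRFLOW_DIR/dags is fresh; after that, updates are picked up by the */5 crontab entry added in cloudinit.yml.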