Commit 9b241d90 authored by Maria Petrova-El Sayed

Run deployment as a new "airflow" user

parent e685708a
Pipeline #85587 failed
.gitlab-ci.yml
@@ -6,19 +6,23 @@ variables:
   OS_IDENTITY_API_VERSION: 3
   OS_REGION_NAME: "HDFCloud"
   OS_INTERFACE: public
-  FLOATING_IP: 134.94.199.220
+  TEST_IP: 134.94.199.220
   OLD_TEST_NAME: old-airflow-test
   TESTING_NAME: airflow-testing
+  TESTING_URL: http://134.94.199.220:7001/home
 
 # before script copied from gitlab docs
-before_script:
+before_script: &ssh_setup
   - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client gcc libxslt-dev libffi-dev libssl-dev build-essential python3-dev -y )'
+  - eval $(ssh-agent -s)
+  - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+  - mkdir -p ~/.ssh
+  - chmod 700 ~/.ssh
 
 stages:
   - test
+  - build
+  - publish
   - deploy
+  - test-deployment
   - cleanup
 
 test:
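A note on the anchor introduced above: YAML's merge key (<<:) is defined for mappings only, while &ssh_setup names the before_script list, so reusing it in another job is normally spelled "before_script: *ssh_setup" rather than "<<: *ssh_setup". The bootstrap itself is ordinary shell; a standalone sketch (the ssh-keyscan host-key pinning is an addition for illustration, not part of this commit):

    # load the CI deploy key from the SSH_PRIVATE_KEY variable into a fresh agent
    eval "$(ssh-agent -s)"
    echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
    mkdir -p ~/.ssh && chmod 700 ~/.ssh
    # optional: pin the target host key instead of trusting on first use
    ssh-keyscan -H "$TEST_IP" >> ~/.ssh/known_hosts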
@@ -43,8 +47,9 @@ test:
 
 full-deploy-test:
   stage: deploy
   environment: Testing
-  when: manual
+  # when: manual
+  only:
+    - web
   script:
     - echo "Starting the full testing deployment of airflows example."
     - pip install python-openstackclient
@@ -52,17 +57,42 @@ full-deploy-test:
     - openstack server set --name $OLD_TEST_NAME $OLD_ID
     - INSTANCE_ID=`openstack server create -f value -c id --prefix IMAGE_ --flavor m4 --image 149a65b5-aeb8-499f-aaa6-ec966bd28dd6 --user-data scripts/cloudinit.yml --security-group ssh --security-group airflows --security-group www --security-group https $TESTING_NAME`
     - while [ "`openstack server show $INSTANCE_ID -c addresses -f value`" = "{}" ]; do sleep 5; done # wait until an address is available to attach the floating ip
-    - openstack server add floating ip $INSTANCE_ID $FLOATING_IP
+    - openstack server add floating ip $INSTANCE_ID $TEST_IP
     - echo "Done"
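The while-loop above polls without any bound, so an instance that never gets an address would hang the job until the CI timeout. A bounded variant of the same wait, using the identical openstack calls (a sketch, not what the commit ships):

    # poll for up to 5 minutes before attaching the floating IP
    for i in $(seq 1 60); do
      [ "$(openstack server show "$INSTANCE_ID" -c addresses -f value)" != "{}" ] && break
      sleep 5
    done
    openstack server add floating ip "$INSTANCE_ID" "$TEST_IP"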
+test-testing_webserver:
+  cache: {}
+  stage: test-deployment
+  # only:
+  #   - master
+  except:
+    - tags
+  script:
+    - apt update && apt -y install curl
+    - echo "For now, this will be a basic health check, i.e. GET / and check for a 2xx code."
+    - sleep 150 # ensure that the docker containers are up and running before testing the airflow installation
+    - 'curl --insecure -I -H "Accept: application/json" $TESTING_URL'
+
+test-testing_dags:
+  cache: {}
+  stage: test-deployment
+  # only:
+  #   - master
+  except:
+    - tags
+  <<: *ssh_setup
+  script:
+    - echo "This is a simple check that the deployment was successful and DAGs get executed"
+    - sleep 150 # ensure that the docker containers are up and running before testing the airflow installation
+    - ssh airflow@$TEST_IP "airflow db init && airflow dags list && airflow connections list"
+    - ssh airflow@$TEST_IP "airflow dags test testdag 2021-08-18"
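Both new jobs rely on a fixed sleep 150 for the containers to come up. A polling alternative for the webserver check (hypothetical drop-in, same $TESTING_URL as above):

    # retry until the webserver answers with a 2xx instead of sleeping blindly
    for i in $(seq 1 30); do
      code=$(curl --insecure -s -o /dev/null -w '%{http_code}' "$TESTING_URL")
      case "$code" in 2*) echo "webserver is up"; break ;; esac
      sleep 10
    done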
 
 cleanup-successful-full-deployment:
   # check if there is an old prod or test instance, and delete it if present
   stage: cleanup
   when: on_success
-  # only:
-  #   - web
+  only:
+    - web
+  dependencies:
+    - full-deploy-test
   script:
     - echo "This is the cleanup for the full-redeployment of the testing or production servers"
     - echo "if this job is reached, all earlier jobs were successful, and any lingering old instances need to be removed"
...
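The cleanup script body is elided in this view; given that full-deploy-test renames the previous instance to $OLD_TEST_NAME, it presumably deletes that server. A hedged sketch with the same openstack CLI:

    # look up the renamed old instance and delete it if present (illustrative)
    OLD_ID=$(openstack server list -f value -c ID --name "$OLD_TEST_NAME")
    [ -n "$OLD_ID" ] && openstack server delete "$OLD_ID"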
dockers/docker-compose.yaml
@@ -64,7 +64,6 @@ x-airflow-common:
     - ./plugins:/opt/airflow/plugins
     - ./templates/main.html:/home/airflow/.local/lib/python3.7/site-packages/airflow/www/templates/airflow/main.html
   user: "${AIRFLOW_UID:-50000}:0"
-  # command: pip3 install -r requirements.txt
   depends_on:
     &airflow-common-depends-on
     redis:
...
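The user: "${AIRFLOW_UID:-50000}:0" line uses compose's POSIX-style default substitution: UID 50000 unless AIRFLOW_UID is provided. The deployment script pins it to the deploying user, roughly:

    # written by scripts/deployment.sh; consumed by docker-compose via dockers/.env
    echo "AIRFLOW_UID=$(id -u)" > dockers/.env
    export AIRFLOW_UID=$(id -u)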
scripts/cloudinit.yml
@@ -38,41 +38,22 @@ users:
     ssh_authorized_keys:
       - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUNFmYnaZ1raXQm04/mfdoBfn4i6xYknic2nhGDOrkhp5r6kv4F1m7wgtuL/pddRKuEoQpiXjRWciEMljFmxvVc7+9VitsAn5zBsnzY9+Sq9+si5aKe93RK8JGLX/WsfZGnPMdKPkK2GO9LFJN4TyL9hTpFdFQfxtO82NIa3WikG4RI+WQuKeQ4qr8FHNymr+gHTw/+YaM9331xnM5YqkmOC27CvVtiQx96MNMAyMQ8RJcHy1GL8donTBL+knVZdIwGt4SUy9dIF8iwTXGFkLe8V7/DIEB7RW9gvk2sG3YPo2eq56HsQKAB3yre+5QFhmH/uqUnTKVFgZLqlDUC0duFOwALCRmlEgtOeZqOzRBa6a0RveTIfccMb48ac4FpeeJdo4KId1QO1JaEZ8fYKgRVw3xRuOjDMpxCFuxELpSvx/hd1jgrK9lRizH9DXNf5/5Go2O16hj8LPufBbhX2EiChjWJEJkoRWBhQ3UHmstbqRiuNU/MsHq0FPSHMHV6BU= maria@jsc-strela
-  #TODO do a proper ssh key if needed, this has been excluded so far so that the testing of the use case goes faster
-  # - name: airflows
-  #   gecos: Common user for running the apiserver
-  #   groups: sudo
-  #   sudo: ALL=(ALL) NOPASSWD:ALL
-  #   lock_passwd: true
-  #   ssh_authorized_keys:
-  #     - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQMbfKUO3NoZspgWpzFY+SwY5Tx251oBT/F22pmnqKq3A0U1EcRooYVc11HzDmLrDTkoLSWAYPuv7I8weKqUPMlypXygu7I1kw1JoAZ4veV/TO8kBIb8+fUjD4VnD0EuU9/MD4rc0IazlInUu/5H2oDj4cj3XGoOFHAPRvo1YXF2eEbXgHcos5o52idZfvZPeWmk4wLqWUI+4q1C5o+c9xGxdWkA0Z6cErw5jSfaqIMu9GnsaPE8dDZ89vtNu8kRK97/Ax0qmJ8eLBfv3qm2HnqACRUv1MRLS/s9KsdB18DV6dTn8VuErJsn9rlpx/2oEMVS5lkUSLTJHf7oNVKDtILQ/rQ2tF/f3LakmiViA4ZsWxFspP0T/sXPhjuCgEqGWG8HrJwFj8DByMpoJUsGe1czAiMdoY5Tr7UeIgK7BGaGjoVUFaVrCKlDpDNhYsHopSTTNajVxsb0LkTRIRphGlQTHlD3nDYdHIrgZiLqA1XLtTTXtWNzQ4uE59tAkIzdTK7RSBduHunqx++IEO6Huj49Vvk1vcO33iqFTTZro1vhZ2kEGxAkxNMti+/eT2rvyfkhsXaUH1/7LXvRrR+pFKcXBpaWWeEt8cOiVrMWAPDi9VRh5QPZbJ1tyTq7XzxeaQuJhL22o2BO13ZSRzr1S+UNFcmfk3esruZoxDIiQ+Bw== apiserver@gitlab
+  - name: airflow
+    gecos: Common user for running the airflow services
+    groups: sudo
+    sudo: ALL=(ALL) NOPASSWD:ALL
+    lock_passwd: true
+    ssh_authorized_keys:
+      - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCeJcmX8ogs4/KDQwsQHdCwk5iZz7bYJdthvX0y0YIrslhkW1KfDbOJMdRRLBcKmSCIdPofRkWkgj1hFEW4MqDCNSftLfUDTsymxNdF0dZxaX0jM2K1K7sFr1JG2Lkuxao2g9AoRKqyydlVhApoY8UhwpDaNOAFx5q0Pa7FchWvFX37r0AU5JZTsDFSDM9NaSZll78vwK1wjhhHi4EEFAs2IkrySOGzJTkaHwZrqYMJgEIvtfHN14UJR9WDmnPvSIRe/GzRg1xz3Op4E+S0/jK2bR5jJTMJJka19FjShxlqMr0UbhwBgiKYRhpfqhy+MWGz9H5GvWNdiUbBV8jdlZQHjFujAlzErJ/Twmnn0YVNUX+9ZoyEa/NKMS2quKAUhhQaD04TbAIKNt0iFbOFcbrfpbBXw7PiruIeHGFgmcxug3FcOX6xmHVuPVi1Zm1CxMxaydS7QStUchZdHN0Rc9AF2MOB/ZuKQCUbJZ2pKpP5i90eLbwhZAYvH5DAn9g6w+/6CjuFj1fB4Ywgmv+TvZ0NwtrJRhARN/1nY9uxVSoFAvxrlx3FU0bw0c/3YojY9j+LBJYf+e3Y1R2ZBSVUYGn2eACF86fnGs6Bz/WmrZW6WWrSiEwxtElkQRnuAb35L/V5VFrZv+x0qtoMNl0EK0Rz6CKMD5HHrI6Z2FNqOO6bEQ== service@gitlab
 
 runcmd:
   - echo "Downloading latest version of docker-compose"
   - sudo pip3 install docker-compose
-  - cd /home/mpetrova
+  - cd /home/airflow
   - echo "Current user is $(whoami)"
-  - sudo -u mpetrova git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
+  - sudo -u airflow git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
   - cd ./data-logistics-service
   - git checkout mptest #only for testing
-  # - mkdir airflow
-  # - cd airflow
-  # - mkdir -p ./dags ./logs ./plugins ./config ./templates
-  # - cd ../data-logistics-service
-  # - cp dags/* ../airflow/dags
-  # - cp -r plugins/* ../airflow/plugins
-  # - cp config/* ../airflow/config
-  # - cp templates/* ../airflow/templates
-  # - echo -e "AIRFLOW_UID=$(id -u)" > /home/maria/data-logistics-service/dockers/.env
-  # - export AIRFLOW_UID=$(id -u)
-  # - echo "Collecting requirements"
-  # - reqs=`cat requirements.txt | tr '\n' ' '`
-  # - echo "Collected - $reqs"
-  # - sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=$reqs\" >> /home/maria/data-logistics-service/dockers/.env"
-  # - pip install -r requirements.txt
-  # - echo "Bringing up the docker containers"
-  # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow --verbose up airflow-init
-  # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow up -d
-  - sudo -u mpetrova /bin/bash ./scripts/deployment.sh /home/mpetrova /home/mpetrova/data-logistics-service
+  - sudo -u airflow /bin/bash ./scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service
 final_message: "The system is finally up, after $UPTIME seconds"
\ No newline at end of file
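cloud-init runs runcmd as root on first boot, so the clone and deployment are handed to the new airflow user with sudo -u; this works because the user, with passwordless sudo and the service@gitlab key, is created before runcmd executes. A quick post-boot sanity check (illustrative, not part of the commit):

    # confirm the user exists, has passwordless sudo, and owns the checkout
    ssh airflow@$TEST_IP 'id && sudo -n true && ls -ld ~/data-logistics-service'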
scripts/deployment.sh
@@ -19,8 +19,8 @@ echo "DEBUG_2 $0 $1 $2 $3 $4"
 echo "DEBUG values: OLD_DIR=$OLD_DIR, ENTRYPOINT_DIR=$ENTRYPOINT and GIT_REPO=$GIT_REPO"
 cd $ENTRYPOINT
-mkdir -p airflow
-cd airflow
+mkdir -p airflow_proj
+cd airflow_proj
 AIRFLOW_DIR=`pwd`
 #DEBUG prints
 echo "Project dir is set to: $AIRFLOW_DIR"
@@ -37,9 +37,10 @@ cp templates/* $AIRFLOW_DIR/templates
 echo -e "AIRFLOW_UID=$(id -u)" > $GIT_REPO/dockers/.env
 export AIRFLOW_UID=$(id -u)
 echo "Collecting requirements"
-reqs=`cat requirements.txt | tr '\n' ' '`
+reqs=`cat $GIT_REPO/requirements.txt | tr '\n' ' '`
 echo "Collected requirements: $reqs"
-sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=$reqs\" >> $GIT_REPO/dockers/.env"
+# sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=\"$reqs\"\" >> $GIT_REPO/dockers/.env"
+echo "_PIP_ADDITIONAL_REQUIREMENTS=\"$reqs\"" >> $GIT_REPO/dockers/.env
 pip install -r $GIT_REPO/requirements.txt
 # sed -i "s_datacatalog.fz-juelich.de_${SERVER_DOMAIN}_g" docker-compose.yml
...
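Dropping the sudo sh -c indirection makes sense now that the script runs as the airflow user, which owns dockers/.env, and it sidesteps the nested quoting visible in the commented-out line. In isolation the step behaves like (paths relative to the repo checkout):

    # flatten newline-separated requirements into a single quoted .env value
    reqs=$(tr '\n' ' ' < requirements.txt)
    echo "_PIP_ADDITIONAL_REQUIREMENTS=\"$reqs\"" >> dockers/.env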