diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a4e48a187b6d9a8484083077a58d07faa7d61d65..9791aabd856651595a93089dbf23a728508d0fd9 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -6,19 +6,27 @@ variables:
   OS_IDENTITY_API_VERSION: 3
   OS_REGION_NAME: "HDFCloud"
   OS_INTERFACE: public
-  FLOATING_IP: 134.94.199.220
+  TEST_IP: 134.94.199.220
   OLD_TEST_NAME: old-airflow-test
   TESTING_NAME: airflow-testing
+  TESTING_URL: http://134.94.199.220:7001/home
 
 # before script copied from gitlab docs
-before_script:
+before_script: &ssh_setup
   - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client gcc libxslt-dev libffi-dev libssl-dev build-essential python3-dev -y )'
+  - eval $(ssh-agent -s)
+  - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+  - mkdir -p ~/.ssh
+  - chmod 700 ~/.ssh
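+  # (assumption) the ssh jobs below need $TEST_IP's host key in known_hosts,
+  # or host key verification will block them; ssh-keyscan is one way to seed
+  # it (the GitLab docs variant uses an SSH_KNOWN_HOSTS variable instead)
+  - ssh-keyscan -H $TEST_IP >> ~/.ssh/known_hosts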
 
 stages:
   - test
-  - build
-  - publish
   - deploy
+  - test-deployment
   - cleanup
 
 test:
@@ -43,8 +51,9 @@ test:
 full-deploy-test:
   stage: deploy
   environment: Testing
-  when: manual
-
+  # when: manual
+  only:
+    - web
   script:
     - echo "Starting the full testing deployment of airflows example."
     - pip install python-openstackclient
@@ -52,17 +61,48 @@ full-deploy-test:
     - openstack server set --name $OLD_TEST_NAME $OLD_ID
     - INSTANCE_ID=`openstack server create -f value -c id --prefix IMAGE_ --flavor m4 --image 149a65b5-aeb8-499f-aaa6-ec966bd28dd6 --user-data scripts/cloudinit.yml --security-group ssh --security-group airflows --security-group www --security-group https $TESTING_NAME`
     - while [ "`openstack server show $INSTANCE_ID -c addresses -f value`" = "{}" ]; do sleep 5; done # wait until an address is available to attach the floating ip
-    - openstack server add floating ip $INSTANCE_ID $FLOATING_IP
+    - openstack server add floating ip $INSTANCE_ID $TEST_IP
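+    # the new instance now answers on $TEST_IP; the renamed old instance is
+    # deleted later by the cleanup stage once all jobs succeed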
     - echo "Done"
 
+test-testing_webserver:
+  cache: {}
+  stage: test-deployment
+  # only:
+  #   - master
+  except:
+    - tags
+  script:
+    - apt update && apt -y install curl
+    - echo "For now, this will be a basic health check i.e. GET / and check for 2xx code."
+    - sleep 150 # ensure that the docker containers are up and running before testing the airflow installation
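+    # (sketch, assumption) polling would be more robust than a fixed sleep:
+    #   timeout 300 sh -c 'until curl -sfo /dev/null $TESTING_URL; do sleep 10; done'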
+    - 'curl --insecure --fail -I -H "Accept: application/json" $TESTING_URL' # --fail turns HTTP errors into a non-zero exit
+
+test-testing_dags:
+  cache: {}
+  stage: test-deployment
+  # only:
+  #   - master
+  except:
+    - tags
+  before_script: *ssh_setup # reuse the ssh-agent setup anchored above; << cannot merge a list
+  script:
+    - echo "This is a simple check if the deploments was successful and dags get executed"
+    - sleep 150 # ensure that the docker containers are up and running before testing the airflow installation
+    - ssh airflow@$TEST_IP "airflow db init && airflow dags list && airflow connections list"
+    - ssh airflow@$TEST_IP "airflow dags test testdag 2021-08-18"
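+    # airflow dags test runs a single dag run in-process (no scheduler needed),
+    # so a failing task should surface as a non-zero exit code here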
+
 cleanup-successful-full-deployment:
   # check if there is an old prod or test instance, and delete it if present
   stage: cleanup
   when: on_success
-  # only:
-  #   - web
-  dependencies:
-    - full-deploy-test
+  only:
+    - web
   script:
     - echo "This is the cleanup for the full-redeployment of the testing or production servers"
     - echo "if this job is reached, all earlier jobs were successful, and any lingering old instances need to be removed"
diff --git a/dockers/docker-compose.yaml b/dockers/docker-compose.yaml
index c01c1108b3d09378b624543a0b03d83fa9e24745..24c78d4b44dc381c2382b39ca5ffea88c6c44a14 100644
--- a/dockers/docker-compose.yaml
+++ b/dockers/docker-compose.yaml
@@ -64,7 +64,6 @@ x-airflow-common:
     - ./plugins:/opt/airflow/plugins
     - ./templates/main.html:/home/airflow/.local/lib/python3.7/site-packages/airflow/www/templates/airflow/main.html
   user: "${AIRFLOW_UID:-50000}:0"
-  # command: pip3 install -r requirements.txt
   depends_on:
     &airflow-common-depends-on
     redis:
diff --git a/scripts/cloudinit.yml b/scripts/cloudinit.yml
index 9b190a7e6fe237b90112e6105ea749352f91a3c4..9c9668c142ed7e8241bc49d29eb9f37c3fde7261 100644
--- a/scripts/cloudinit.yml
+++ b/scripts/cloudinit.yml
@@ -38,41 +38,24 @@ users:
     ssh_authorized_keys:
       - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDUNFmYnaZ1raXQm04/mfdoBfn4i6xYknic2nhGDOrkhp5r6kv4F1m7wgtuL/pddRKuEoQpiXjRWciEMljFmxvVc7+9VitsAn5zBsnzY9+Sq9+si5aKe93RK8JGLX/WsfZGnPMdKPkK2GO9LFJN4TyL9hTpFdFQfxtO82NIa3WikG4RI+WQuKeQ4qr8FHNymr+gHTw/+YaM9331xnM5YqkmOC27CvVtiQx96MNMAyMQ8RJcHy1GL8donTBL+knVZdIwGt4SUy9dIF8iwTXGFkLe8V7/DIEB7RW9gvk2sG3YPo2eq56HsQKAB3yre+5QFhmH/uqUnTKVFgZLqlDUC0duFOwALCRmlEgtOeZqOzRBa6a0RveTIfccMb48ac4FpeeJdo4KId1QO1JaEZ8fYKgRVw3xRuOjDMpxCFuxELpSvx/hd1jgrK9lRizH9DXNf5/5Go2O16hj8LPufBbhX2EiChjWJEJkoRWBhQ3UHmstbqRiuNU/MsHq0FPSHMHV6BU= maria@jsc-strela
 
-#TODO do a proper ssh key if needed, this has been excluded so far so that the testing of the use case goes faster
-  # - name: airflows
-  #   gecos: Common user for running the apiserver
-  #   groups: sudo
-  #   sudo: ALL=(ALL) NOPASSWD:ALL
-  #   lock_passwd: true
-  #   ssh_authorized_keys:
-  #     - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQMbfKUO3NoZspgWpzFY+SwY5Tx251oBT/F22pmnqKq3A0U1EcRooYVc11HzDmLrDTkoLSWAYPuv7I8weKqUPMlypXygu7I1kw1JoAZ4veV/TO8kBIb8+fUjD4VnD0EuU9/MD4rc0IazlInUu/5H2oDj4cj3XGoOFHAPRvo1YXF2eEbXgHcos5o52idZfvZPeWmk4wLqWUI+4q1C5o+c9xGxdWkA0Z6cErw5jSfaqIMu9GnsaPE8dDZ89vtNu8kRK97/Ax0qmJ8eLBfv3qm2HnqACRUv1MRLS/s9KsdB18DV6dTn8VuErJsn9rlpx/2oEMVS5lkUSLTJHf7oNVKDtILQ/rQ2tF/f3LakmiViA4ZsWxFspP0T/sXPhjuCgEqGWG8HrJwFj8DByMpoJUsGe1czAiMdoY5Tr7UeIgK7BGaGjoVUFaVrCKlDpDNhYsHopSTTNajVxsb0LkTRIRphGlQTHlD3nDYdHIrgZiLqA1XLtTTXtWNzQ4uE59tAkIzdTK7RSBduHunqx++IEO6Huj49Vvk1vcO33iqFTTZro1vhZ2kEGxAkxNMti+/eT2rvyfkhsXaUH1/7LXvRrR+pFKcXBpaWWeEt8cOiVrMWAPDi9VRh5QPZbJ1tyTq7XzxeaQuJhL22o2BO13ZSRzr1S+UNFcmfk3esruZoxDIiQ+Bw== apiserver@gitlab
+  - name: airflow
+    gecos: Common user for running the airflow services
+    groups: sudo
+    sudo: ALL=(ALL) NOPASSWD:ALL
+    lock_passwd: true
+    ssh_authorized_keys:
+      - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCeJcmX8ogs4/KDQwsQHdCwk5iZz7bYJdthvX0y0YIrslhkW1KfDbOJMdRRLBcKmSCIdPofRkWkgj1hFEW4MqDCNSftLfUDTsymxNdF0dZxaX0jM2K1K7sFr1JG2Lkuxao2g9AoRKqyydlVhApoY8UhwpDaNOAFx5q0Pa7FchWvFX37r0AU5JZTsDFSDM9NaSZll78vwK1wjhhHi4EEFAs2IkrySOGzJTkaHwZrqYMJgEIvtfHN14UJR9WDmnPvSIRe/GzRg1xz3Op4E+S0/jK2bR5jJTMJJka19FjShxlqMr0UbhwBgiKYRhpfqhy+MWGz9H5GvWNdiUbBV8jdlZQHjFujAlzErJ/Twmnn0YVNUX+9ZoyEa/NKMS2quKAUhhQaD04TbAIKNt0iFbOFcbrfpbBXw7PiruIeHGFgmcxug3FcOX6xmHVuPVi1Zm1CxMxaydS7QStUchZdHN0Rc9AF2MOB/ZuKQCUbJZ2pKpP5i90eLbwhZAYvH5DAn9g6w+/6CjuFj1fB4Ywgmv+TvZ0NwtrJRhARN/1nY9uxVSoFAvxrlx3FU0bw0c/3YojY9j+LBJYf+e3Y1R2ZBSVUYGn2eACF86fnGs6Bz/WmrZW6WWrSiEwxtElkQRnuAb35L/V5VFrZv+x0qtoMNl0EK0Rz6CKMD5HHrI6Z2FNqOO6bEQ== service@gitlab
 
 runcmd:
   - echo "Downloading latest version of docker-compose"
   - sudo pip3 install docker-compose
-  - cd /home/mpetrova
+  - cd /home/airflow
   - echo "Current user is $(whoami)"
-  - sudo -u mpetrova git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
+  - sudo -u airflow git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service
   - cd ./data-logistics-service
   - git checkout mptest #only for testing
-  # - mkdir airflow
-  # - cd airflow
-  # - mkdir -p ./dags ./logs ./plugins ./config ./templates
-  # - cd ../data-logistics-service
-  # - cp dags/* ../airflow/dags
-  # - cp -r plugins/* ../airflow/plugins
-  # - cp config/* ../airflow/config
-  # - cp templates/* ../airflow/templates
-  # - echo -e "AIRFLOW_UID=$(id -u)" > /home/maria/data-logistics-service/dockers/.env
-  # - export AIRFLOW_UID=$(id -u)
-  # - echo "Collecting requirements"
-  # - reqs=`cat requirements.txt | tr '\n' ' '`
-  # - echo "Collected - $reqs"
-  # - sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=$reqs\" >> /home/maria/data-logistics-service/dockers/.env"
-  # - pip install -r requirements.txt
-  # - echo "Bringing up the docker containers"
-  # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow --verbose up airflow-init
-  # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow up -d
-  - sudo -u mpetrova /bin/bash ./scripts/deployment.sh /home/mpetrova /home/mpetrova/data-logistics-service
+  - sudo -u airflow /bin/bash ./scripts/deployment.sh /home/airflow /home/airflow/data-logistics-service
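+  # (assumption) deployment.sh takes the working directory as $1 and the git
+  # checkout as $2; see scripts/deployment.sh for the argument handling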
 
 final_message: "The system is finally up, after $UPTIME seconds"
\ No newline at end of file
diff --git a/scripts/deployment.sh b/scripts/deployment.sh
index 214169d0e145950d8212556418a593927645c27e..b17c3cd6491c949e127c6b814ffb35e935fb15f6 100755
--- a/scripts/deployment.sh
+++ b/scripts/deployment.sh
@@ -19,8 +19,8 @@ echo "DEBUG_2 $0 $1 $2 $3 $4"
 echo "DEBUG values: OLD_DIR=$OLD_DIR, ENTRYPOINT_DIR=$ENTRYPOINT and GIT_REPO=$GIT_REPO"
 
 cd $ENTRYPOINT
-mkdir -p airflow
-cd airflow
+mkdir -p airflow_proj
+cd airflow_proj
 AIRFLOW_DIR=`pwd`
 #DEBUG prints
 echo "Project dir is set to: $AIRFLOW_DIR"
@@ -37,9 +37,12 @@ cp templates/* $AIRFLOW_DIR/templates
 echo -e "AIRFLOW_UID=$(id -u)" > $GIT_REPO/dockers/.env
 export AIRFLOW_UID=$(id -u)
 echo "Collecting requirements"
-reqs=`cat requirements.txt | tr '\n' ' '`
+reqs=`cat $GIT_REPO/requirements.txt | tr '\n' ' '`
 echo "Collected requirements: $reqs"
-sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=$reqs\" >> $GIT_REPO/dockers/.env"
+# sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=\"$reqs\"\" >> $GIT_REPO/dockers/.env"
+echo "_PIP_ADDITIONAL_REQUIREMENTS=\"$reqs\"" >> $GIT_REPO/dockers/.env
 pip install -r $GIT_REPO/requirements.txt
 
 # sed -i "s_datacatalog.fz-juelich.de_${SERVER_DOMAIN}_g" docker-compose.yml