diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 428f2bc2f09653753af4c43122b6dbf24db34c79..a66d784130d35cb6faea4b5eac2d7ee45efca025 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -59,8 +59,8 @@ cleanup-successful-full-deployment:
   # check if there is an old prod or test instance, and delete it if present
   stage: cleanup
   when: on_success
-  only:
-    - web
+  # only:
+  #   - web
   script:
     - echo "This is the cleanup for the full-redeployment of the testing or production servers"
     - echo "if this job is reached, all earlier jobs were successful, and any lingering old instances need to be removed"
diff --git a/dockers/docker-compose.yaml b/dockers/docker-compose.yaml
index 4c60262567560bae7e54cfa5d6a9d9dd33ffecfa..c01c1108b3d09378b624543a0b03d83fa9e24745 100644
--- a/dockers/docker-compose.yaml
+++ b/dockers/docker-compose.yaml
@@ -64,7 +64,7 @@ x-airflow-common:
     - ./plugins:/opt/airflow/plugins
     - ./templates/main.html:/home/airflow/.local/lib/python3.7/site-packages/airflow/www/templates/airflow/main.html
   user: "${AIRFLOW_UID:-50000}:0"
-  command: pip3 install -r requirements.txt
+  # command: pip3 install -r requirements.txt
   depends_on:
     &airflow-common-depends-on
     redis:
diff --git a/scripts/cloudinit.yml b/scripts/cloudinit.yml
index 702a03b08023dfdf4d975d2d414356d865d5bcdb..55534b1cbea7880b02e44c84e99da3f4885b9439 100644
--- a/scripts/cloudinit.yml
+++ b/scripts/cloudinit.yml
@@ -51,6 +51,9 @@ runcmd:
   - sudo pip3 install docker-compose
   - cd /home/maria
   - 'git clone https://gitlab.jsc.fz-juelich.de/eflows4hpc-wp2/data-logistics-service.git ./data-logistics-service'
+  - cd ./data-logistics-service
+  - sudo git checkout mptest
+  - cd ..
   # - mkdir airflow
   # - cd airflow
   # - mkdir -p ./dags ./logs ./plugins ./config ./templates
@@ -69,6 +72,6 @@ runcmd:
   # - echo "Bringing up the docker containers"
   # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow --verbose up airflow-init
   # - docker-compose -f ./dockers/docker-compose.yaml --project-directory ../airflow up -d
-  - /bin/bash ../data-logistics-service/scripts/deployment.sh /home/maria /home/maria/data-logistics-service
+  - /bin/bash data-logistics-service/scripts/deployment.sh /home/maria /home/maria/data-logistics-service
 
 final_message: "The system is finally up, after $UPTIME seconds"
\ No newline at end of file
diff --git a/scripts/deployment.sh b/scripts/deployment.sh
index 98aca0f89ca0823f3f4849214227ef1e5d791fe0..7a6036f48fe6112185d69b6760bbb64e2cd27458 100755
--- a/scripts/deployment.sh
+++ b/scripts/deployment.sh
@@ -1,49 +1,53 @@
 #!/bin/bash
-# From Christian B.
+# @author Maria Petrova & Christian Böttcher
 ## USAGE:
 #
-# deployment.sh <git_directory> [API_URL] [SERVER_DOMAIN]
+# deployment.sh <project_directory> <git_directory> [API_URL] [SERVER_DOMAIN]
 
 OLD_DIR=`pwd`
 GIT_REPO=$OLD_DIR/data-logistics-service
 
-echo "DEBUG_1 $0 $1 $2 $3"
+echo "DEBUG_1 $0 $1 $2 $3 $4"
 
 if [ -z ${1+x} ]; then NEW_DIR=`pwd`; else NEW_DIR=$1; fi
-if [ -z ${2+x} ]; then GIT_REPO else GIT_REPO=$2; fi
+if [ ! -z ${2+x} ]; then GIT_REPO=$2; fi
 # if [ -z ${2+x} ]; then API_URL=https://datacatalog.fz-juelich.de/; else API_URL=$2; fi
 # if [ -z ${3+x} ]; then SERVER_DOMAIN=datacatalog.fz-juelich.de; else SERVER_DOMAIN=$3; fi
 
-echo "DEBUG_2 $0 $1 $2 $3"
+echo "DEBUG_2 $0 $1 $2 $3 $4"
+echo "DEBUG values: OLD_DIR=$OLD_DIR, NEW_DIR=$NEW_DIR and GIT_REPO=$GIT_REPO"
 
 cd $NEW_DIR
-`mkdir airflow`
+mkdir -p airflow
 cd airflow
-AIRFLOW_DIR =`pwd`
+AIRFLOW_DIR=`pwd`
+echo "Project dir is: $AIRFLOW_DIR"
+echo "User in use is: $(whoami)"
+
+# Make the necessary folders for the airflow artefacts and copy the corresponding content
 `mkdir -p ./dags ./logs ./plugins ./config ./templates`
 cd $GIT_REPO
 `cp dags/* $AIRFLOW_DIR/dags`
 `cp -r plugins/* $AIRFLOW_DIR/plugins`
 `cp config/* $AIRFLOW_DIR/config`
 `cp templates/* $AIRFLOW_DIR/templates`
+# Setup environment variables and install requirements
 echo -e "AIRFLOW_UID=$(id -u)" > $GIT_REPO/dockers/.env
 export AIRFLOW_UID=$(id -u)
 echo "Collecting requirements"
 reqs=`cat requirements.txt | tr '\n' ' '`
-echo "Collected - $reqs"
+echo "Collected requirements: $reqs"
 sudo sh -c "echo \"_PIP_ADDITIONAL_REQUIREMENTS=$reqs\" >> $GIT_REPO/dockers/.env"
 pip install -r $GIT_REPO/requirements.txt
-echo "Bringing up the docker containers"
-
 
 # sed -i "s_datacatalog.fz-juelich.de_${SERVER_DOMAIN}_g" docker-compose.yml
 
 # it is at this point assumed that ip and volume are correctly assigned, and that dns is working properly
+echo "-----------Bringing up the docker containers-----------"
+docker-compose -f $GIT_REPO/dockers/docker-compose.yaml pull #  pull changed images (e.g. new latest, or specific tag)
 
-docker-compose pull #  pull changed images (e.g. new latest, or specific tag)
-
-docker-compose -f $GIT_REPO/docker-compose.yaml --project-directory $AIRFLOW_DIR --verbose up airflow-init
-docker-compose -f $GIT_REPO/docker-compose.yaml --project-directory $AIRFLOW_DIR up -d
+docker-compose -f $GIT_REPO/dockers/docker-compose.yaml --project-directory $AIRFLOW_DIR --verbose up airflow-init
+docker-compose -f $GIT_REPO/dockers/docker-compose.yaml --project-directory $AIRFLOW_DIR up -d
 
 # docker-compose up -d # should only restart changed images, which will also update nginx and reverse-proxy image if needed