diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index aabf3f99fbae196c09b5e0428261e798771b4aa1..560b3776bbac9ccf269642195725936c80e14ac0 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,10 +22,13 @@ variables:
   ROLLBACK_COMMIT_TAG: aa1f8345d322f2532977643043df18eb4aff3bcf # stable version on master, also tagged as 0.18
 
 # before script copied from gitlab docs
-before_script:
-  - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client gcc libxslt-dev libffi-dev libssl-dev build-essential python3-dev -y )'
-  - 'echo "Protection: ${CI_COMMIT_REF_PROTECTED}"'
-  - (if [ "$CI_COMMIT_REF_PROTECTED" == "true" ]; then eval $(ssh-agent -s) && echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - && mkdir -p ~/.ssh && chmod 700 ~/.ssh; else echo "Protected branch"; fi);
+.before_script_template: &ssh_setup
+  before_script:
+    - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client gcc libxslt-dev libffi-dev libssl-dev build-essential python3-dev -y )'
+    - eval $(ssh-agent -s)
+    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+    - mkdir -p ~/.ssh
+    - chmod 700 ~/.ssh
 
 stages:
   - test
@@ -52,9 +55,10 @@ light-deploy-testing:
   except:
     - tags
     - web
+  <<: *ssh_setup
   environment: Testing
   script:
-    - ssh -oStrictHostKeyChecking=accept-new apiserver@$TESTING_DOMAIN "cd /home/apiserver/datacatalog && sudo git fetch --all && sudo git checkout -f $CI_COMMIT_TAG"
+    - ssh -oStrictHostKeyChecking=accept-new apiserver@$TESTING_DOMAIN "cd /home/apiserver/datacatalog && sudo git stash && sudo git pull --all && sudo git checkout -f $CI_COMMIT_TAG && sudo git stash clear"
     - ssh -oStrictHostKeyChecking=accept-new apiserver@$TESTING_DOMAIN "sudo /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $TESTING_URL $TESTING_DOMAIN"
 
 light-deploy-production:
@@ -65,10 +69,11 @@ light-deploy-production:
   except:
     - web
   tags: [stable]
+  <<: *ssh_setup
   environment: Production
   script:
-    - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "cd /home/apiserver/datacatalog && sudo git fetch --all && sudo git checkout -f $CI_COMMIT_TAG"
-    - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "sudo /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $PRODUCTION_URL $PRODUCTION_DOMAIN"
+    - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "cd /home/apiserver/datacatalog && sudo git stash && sudo git pull --all && sudo git checkout -f $CI_COMMIT_TAG && sudo git stash clear"
+    - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "sudo SECRETS_ENCRYPTION_KEY=$SECRETS_ENCRYPTION_KEY, /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $PRODUCTION_URL $PRODUCTION_DOMAIN"
 
 full-deploy-production:
   stage: deploy
@@ -76,6 +81,7 @@
   only:
     variables:
       - ($CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_TAG =~ /stable/)
+  <<: *ssh_setup
   environment: Production
   script:
     - echo "Starting the full production deployment for tag $CI_COMMIT_TAG."
@@ -93,6 +99,7 @@ full-deploy-production:
     - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "sudo mkdir -p /app/mnt"
     - ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN "sudo mount /dev/vdb1 /app/mnt"
     - until ssh -oStrictHostKeyChecking=accept-new apiserver@$PRODUCTION_DOMAIN ls /finished_cloudinit >/dev/null 2>&1; do sleep 30; done # wait until cloudinit script is complete
+    - SECRETS_ENCRYPTION_KEY=$SECRETS_ENCRYPTION_KEY, /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $PRODUCTION_URL $PRODUCTION_DOMAIN
 
 full-deploy-testing:
   stage: deploy
@@ -100,10 +107,10 @@ full-deploy-testing:
   only:
     variables:
       - ($CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_REF_NAME == "master")
+  <<: *ssh_setup
   environment: Testing
   script:
     - echo "Starting the full testing deployment."
-    - sed -i 's_datacatalog.fz_zam10036.zam.kfa_g' deploy_scripts/cloudinit.yml
     - pip install python-openstackclient
     - OLD_ID=`openstack server show $TESTING_NAME -f value -c id`
     - openstack server set --name $OLD_TEST_NAME $OLD_ID
@@ -114,6 +121,7 @@ full-deploy-testing:
     - sleep 10 # ensure that next command reaches the new server, prevents host key problems
     # TODO move local zip of certificate-docker-volume to server once startup is complete
     - until ssh -oStrictHostKeyChecking=accept-new apiserver@$TESTING_DOMAIN ls /finished_cloudinit >/dev/null 2>&1; do sleep 30; done # wait until cloudinit script is complete
+    - /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $TESTING_URL $TESTING_DOMAIN
 
 
 cleanup-failed-full-deployment:
@@ -124,6 +132,7 @@ cleanup-failed-full-deployment:
   when: on_failure
   only:
     - web
+  <<: *ssh_setup
   script:
     - echo "This is the cleanup for the full-redeployment of the testing or production servers"
    - echo "if this job is reached, some earlier job had to have failed, this will return to the previous instance (if available)"
@@ -157,6 +166,7 @@ cleanup-successful-full-deployment:
   when: on_success
   only:
     - web
+  <<: *ssh_setup
   script:
     - echo "This is the cleanup for the full-redeployment of the testing or production servers"
     - echo "if this job is reached, all earlier jobs were successful, and any lingering old instances need to be removed"
@@ -173,6 +183,7 @@ cleanup-failed-light-test-deployment:
   except:
     - tags
     - web
+  <<: *ssh_setup
   script:
     - echo "This is the cleanup for the light-redeployment of the testing servers"
     - echo "if this job is reached, some earlier job had to have failed, this will return to a previous commit"
@@ -191,6 +202,7 @@ cleanup-failed-light-production-deployment:
   except:
     - web
   tags: [stable]
+  <<: *ssh_setup
   script:
     - echo "This is the cleanup for the light-redeployment of the production servers"
     - echo "if this job is reached, some earlier job had to have failed, this will return to a previous commit"
@@ -206,7 +218,6 @@ test-testing:
     - master
   except:
     - tags
-  variables:
   script:
     - apt update && apt -y install curl
     - echo "For now, this will be a basic health check i.e. GET / and check for 2xx code."
diff --git a/apiserver/config/settings.py b/apiserver/config/settings.py
index 54dfd702ba062eeb8442a350eb9b1f522d9bbafa..d666d357181c69f389422eab7530ca0f36a3aa80 100644
--- a/apiserver/config/settings.py
+++ b/apiserver/config/settings.py
@@ -8,6 +8,7 @@ DEFAULT_JSON_FILEPATH: str = "./app/data"
 class ApiserverSettings(BaseSettings):
     json_storage_path: str = DEFAULT_JSON_FILEPATH
     userdb_path: str = None
+    encryption_key: str = None
 
     class Config:
         env_prefix: str = "datacatalog_apiserver_"
diff --git a/apiserver/main.py b/apiserver/main.py
index 8ab16bd8874c70aa790897e176b3cf5e0ef8ae65..6f79e2e5a5478e9abba9d293785f7978fefee40a 100644
--- a/apiserver/main.py
+++ b/apiserver/main.py
@@ -22,7 +22,7 @@ from .config import ApiserverSettings
 from .security import (ACCESS_TOKEN_EXPIRES_MINUTES, JsonDBInterface, Token,
                        User, authenticate_user, create_access_token,
                        get_current_user)
-from .storage import JsonFileStorageAdapter, LocationData, LocationDataType
+from .storage import JsonFileStorageAdapter, LocationData, LocationDataType, EncryptedJsonFileStorageAdapter
 
 log = logging.getLogger(__name__)
 
@@ -43,8 +43,19 @@ app = FastAPI(
 
 # if env variable is set, get config .env filepath from it, else use default
 dotenv_file_path = os.getenv(DOTENV_FILE_PATH_VARNAME, DOTENV_FILE_PATH_DEFAULT)
+
 settings = ApiserverSettings(_env_file=dotenv_file_path)
-adapter = JsonFileStorageAdapter(settings)
+
+if settings.encryption_key is not None and settings.encryption_key:
+    log.debug("Using encrypted secrets backend.")
+    try:
+        adapter = EncryptedJsonFileStorageAdapter(settings)
+    except Exception:
+        log.error("Setting up the encrypted secrets backend failed. Falling back to unencrypted.")
+        adapter = JsonFileStorageAdapter(settings)
+else:
+    adapter = JsonFileStorageAdapter(settings)
+
 userdb = JsonDBInterface(settings)
 
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl=ReservedPaths.TOKEN)
@@ -169,6 +180,17 @@ async def list_dataset_secrets(location_data_type: LocationDataType,
     log.debug("Authenticed User: '%s' listed the secrets of /%s/%s", user.username, location_data_type.value, dataset_id)
     return adapter.list_secrets(location_data_type, dataset_id, user)
 
+@app.get("/{location_data_type}/{dataset_id}/secrets_values")
+async def list_dataset_secret_values(location_data_type: LocationDataType,
+                                     dataset_id: UUID4,
+                                     user: User = Depends(my_user)):
+    """list the secrets (keys and values) of a specific dataset"""
+    if user.has_secrets_access:
+        log.debug("Authenticed User: '%s' listed the secrets (key and value) of /%s/%s", user.username, location_data_type.value, dataset_id)
+        return adapter.get_secret_values(location_data_type, dataset_id, user)
+    else:
+        raise HTTPException(403)
+
 @app.get("/{location_data_type}/{dataset_id}/secrets/{key}")
 @secrets_required
 async def get_dataset_secret(location_data_type: LocationDataType,
diff --git a/apiserver/storage/EncryptedJsonFileStorageAdapter.py b/apiserver/storage/EncryptedJsonFileStorageAdapter.py
new file mode 100644
index 0000000000000000000000000000000000000000..f580c32842571a4caf3df6b9cabbfeef90b8fc5b
--- /dev/null
+++ b/apiserver/storage/EncryptedJsonFileStorageAdapter.py
@@ -0,0 +1,41 @@
+from fastapi.exceptions import HTTPException
+from .JsonFileStorageAdapter import JsonFileStorageAdapter, LocationDataType
+from cryptography.fernet import Fernet
+
+from apiserver.config.settings import ApiserverSettings
+
+class EncryptedJsonFileStorageAdapter(JsonFileStorageAdapter):
+
+    def encrypt(self, string: str):
+        f = Fernet(self.encryption_key)
+        return f.encrypt(string.encode()).decode("utf-8")
+
+    def decrypt(self, string: str):
+        f = Fernet(self.encryption_key)
+        return f.decrypt(string.encode()).decode("utf-8")
+
+    def __init__(self, settings: ApiserverSettings) -> None:
+        self.encryption_key = settings.encryption_key
+        super().__init__(settings)
+
+
+    def get_secret_values(self, n_type: LocationDataType, oid:str, usr: str):
+        """ get all available secrets (key + value) for this object"""
+        encrypted_dict = super().get_secret_values(n_type, oid, usr)
+        decrypted_dict = {}
+        for key in encrypted_dict:
+            decrypted_dict[key] = self.decrypt(encrypted_dict[key])
+        return decrypted_dict
+
+    def add_update_secret(self, n_type: LocationDataType, oid:str, key: str, value: str, usr: str):
+        """ add new secrets to an existing object"""
+        super().add_update_secret(n_type, oid, key, self.encrypt(value), usr)
+
+    def get_secret(self, n_type: LocationDataType, oid:str, key: str, usr: str):
+        """ return the value of the requested secret for the given object"""
+        encrypted_secret = super().get_secret(n_type, oid, key, usr)
+        return self.decrypt(encrypted_secret)
+
+    def delete_secret(self, n_type: LocationDataType, oid:str, key: str, usr: str):
+        """ delete and return the value of the requested secret for the given object"""
+        return self.decrypt(super().delete_secret(n_type, oid, key, usr))
\ No newline at end of file
diff --git a/apiserver/storage/JsonFileStorageAdapter.py b/apiserver/storage/JsonFileStorageAdapter.py
index 64a2df6ae3bc60d12f5fb2679fad68c6079159e2..537e3f0f4f205d72b6c38fed09734c68842d5e5f 100644
--- a/apiserver/storage/JsonFileStorageAdapter.py
+++ b/apiserver/storage/JsonFileStorageAdapter.py
@@ -134,15 +134,24 @@ class JsonFileStorageAdapter(AbstractLocationDataStorageAdapter):
 
     def delete(self, n_type: LocationDataType, oid: str, usr: str):
         full_path = self.__get_object_path(value=n_type.value, oid=oid)
-        log.debug("Deleted object %s by user '%s'.", oid, usr)
+        secrets_path = self.__get_secrets_path(n_type.value, oid)
+        log.debug("Deleted object %s/%s by user '%s'.", n_type, oid, usr)
         os.remove(full_path)
-
+        if (os.path.isfile(secrets_path)):
+            log.debug("Deleted secrets from object %s/%s by user '%s'.", n_type, oid, usr)
+            os.remove(secrets_path)
+
     def list_secrets(self, n_type: LocationDataType, oid:str, usr: str):
         """ list all available secrets for this object"""
         secrets_path = self.__get_secrets_path(value=n_type.value, oid=oid)
         secrets = self.__load_secrets(secrets_path)
         return list(secrets.keys())
 
+    def get_secret_values(self, n_type: LocationDataType, oid:str, usr: str):
+        """ get all available secrets (key + value) for this object"""
+        secrets_path = self.__get_secrets_path(value=n_type.value, oid=oid)
+        return self.__load_secrets(secrets_path)
+
     def add_update_secret(self, n_type: LocationDataType, oid:str, key: str, value: str, usr: str):
         """ add new secrets to an existing object"""
         secrets_path = self.__get_secrets_path(value=n_type.value, oid=oid)
@@ -159,7 +168,7 @@ class JsonFileStorageAdapter(AbstractLocationDataStorageAdapter):
         try:
             return secrets[key]
         except KeyError:
-            raise HTTPException(404, f"Secret with key {key} does not exist for the object {n_type}/{oid}")
+            raise HTTPException(404, f"Secret with key {key} does not exist for the object {n_type.value}/{oid}")
 
     def delete_secret(self, n_type: LocationDataType, oid:str, key: str, usr: str):
         """ delete and return the value of the requested secret for the given object"""
@@ -167,7 +176,7 @@ class JsonFileStorageAdapter(AbstractLocationDataStorageAdapter):
         secrets = self.__load_secrets(secrets_path)
         val = secrets.pop(key, None)
         if not val:
-            raise HTTPException(404, f"Secret with key {key} does not exist for the object {n_type}/{oid}")
+            raise HTTPException(404, f"Secret with key {key} does not exist for the object {n_type.value}/{oid}")
         # TODO log
         self.__store_secrets(secrets_path, secrets)
         return val
diff --git a/apiserver/storage/LocationStorage.py b/apiserver/storage/LocationStorage.py
index 1b7b0d0fbe5a1b7497222c246c668db22a000ac7..c28b3f25069d7ec1b85dae3440e1570bdda69c14 100644
--- a/apiserver/storage/LocationStorage.py
+++ b/apiserver/storage/LocationStorage.py
@@ -66,6 +66,9 @@ class AbstractLocationDataStorageAdapter:  # pragma: no cover
         """ list all available secrets for this object"""
         raise NotImplementedError()
 
+    def get_secret_values(self, n_type: LocationDataType, oid:str, usr: str):
+        raise NotImplementedError()
+
     def add_update_secret(self, n_type: LocationDataType, oid:str, key: str, value: str, usr: str):
         """ add new secrets to an existing object"""
         raise NotImplementedError()
diff --git a/apiserver/storage/__init__.py b/apiserver/storage/__init__.py
index 8c48a896dd34600a875076a9603eeb6f52574e5c..d14af4fbc9c5248d46f91925e463dfa793f5a9d0 100644
--- a/apiserver/storage/__init__.py
+++ b/apiserver/storage/__init__.py
@@ -1,3 +1,5 @@
 from .JsonFileStorageAdapter import JsonFileStorageAdapter, verify_oid
 
-from .LocationStorage import LocationDataType, LocationData, AbstractLocationDataStorageAdapter
\ No newline at end of file
+from .LocationStorage import LocationDataType, LocationData, AbstractLocationDataStorageAdapter
+
+from .EncryptedJsonFileStorageAdapter import EncryptedJsonFileStorageAdapter
\ No newline at end of file
diff --git a/deploy_scripts/cloudinit.yml b/deploy_scripts/cloudinit.yml
index 0b739dd1d9bff4aa2ee3a85c1c0f79d64e851a4a..97a9d59686210a0b5ca555e7512bac6050da16f7 100644
--- a/deploy_scripts/cloudinit.yml
+++ b/deploy_scripts/cloudinit.yml
@@ -51,7 +51,4 @@ runcmd:
   - 'git clone https://gitlab.jsc.fz-juelich.de/rybicki1/datacatalog.git /home/apiserver/datacatalog'
   - docker network create net
   # general startup takes long enough that no delay should bee needed - 5 to 6 minutes until these commands are executed
-  - 'export API_URL=https://datacatalog.fz-juelich.de/'
-  - 'export SERVER_DOMAIN=datacatalog.fz-juelich.de'
-  - /home/apiserver/datacatalog/deploy_scripts/deployment.sh /home/apiserver/datacatalog $API_URL $SERVER_DOMAIN
   - touch /finished_cloudinit
diff --git a/deploy_scripts/deployment.sh b/deploy_scripts/deployment.sh
index 3e3ba1f3528c1e612a051993999945805fb31f86..63523eca224b59fda024bbe08df9ddd1ad27eb59 100755
--- a/deploy_scripts/deployment.sh
+++ b/deploy_scripts/deployment.sh
@@ -25,8 +25,8 @@ sed -i "s_datacatalog.fz-juelich.de_${SERVER_DOMAIN}_g" docker-compose.yml
 
 # it is at this point assumed that ip and volume are correctly assigned, and that dns is working properly
 docker-compose pull # pull changed images (e.g. new latest, or specific tag)
-TIME=`date +%Y-%m-%d-%H-%M`
-mv /app/mnt/docker.log "/app/mnt/docker.log.${TIME}"
+# TIME=`date +%Y-%m-%d-%H-%M`
+# mv /app/mnt/docker.log "/app/mnt/docker.log.${TIME}"
 
 docker-compose up -d # should only restart changed images, which will also update nginx and reverse-proxy image if needed
 
diff --git a/docker-compose.yml b/docker-compose.yml
index fa59b7d892d225432a6af7ab259d2bdddef6571f..09bdeb4c4a21dc671561781df7c24605a19bd4c5 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -46,6 +46,7 @@ services:
       VIRTUAL_HOST: datacatalog.fz-juelich.de
       LETSENCRYPT_HOST: datacatalog.fz-juelich.de
       VIRTUAL_PORT: 8000
+      DATACATALOG_APISERVER_ENCRYPTION_KEY: ${SECRETS_ENCRYPTION_KEY}
     networks:
       - "net"
     volumes:
diff --git a/frontend/templates/base.html.jinja b/frontend/templates/base.html.jinja
index 1729b9bb75c6db0c86f9625c94225775ee494d6c..1f0851f111abbe47df1db9a70357e1f29b401026 100644
--- a/frontend/templates/base.html.jinja
+++ b/frontend/templates/base.html.jinja
@@ -42,13 +42,11 @@
         </ul>
 
         <!--SPECIFIC PAGE CONTENT-->
-        <div class="d-flex flex-column min-vh-100">
+        <div class="d-flex flex-column min-vh-75">
         {% block content%}
         {% endblock %}
         </div>
 
-        <div class="jumbotron"></div>
-
         <!--FOOTER-->
         {% block footer %}
         {% include 'footer.html.jinja' %}
diff --git a/proxy_image/default_location b/proxy_image/default_location
index ed060b3d1f999604fb6970393719319dcafdfc57..4c2332449123368c0038840b63cab929fec8e70c 100644
--- a/proxy_image/default_location
+++ b/proxy_image/default_location
@@ -1,5 +1,5 @@
 location ~ \.(html|css|js|ico|jpg|jpeg|png|webp)$ {
     root /var/www/html/static/;
-    error_page 404 404.html
+    error_page 404 404.html;
 }
 
diff --git a/requirements.txt b/requirements.txt
index 7415b00a67633a140fb24b62002afbbec7508832..cad50dfeb3e89266b9da1f6381a710655dd859af 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,3 +6,4 @@ python-multipart==0.0.5
 python-jose[cryptography]==3.2.0
 passlib[bcrypt]==1.7.4
 jinja2==3.0.1
+cryptography
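
Note (not part of the diff above): the new EncryptedJsonFileStorageAdapter expects settings.encryption_key to hold a Fernet key, supplied to the CI jobs as SECRETS_ENCRYPTION_KEY and surfaced to the container as DATACATALOG_APISERVER_ENCRYPTION_KEY in docker-compose.yml. A minimal sketch of generating such a key and of the encrypt/decrypt round trip the adapter performs, assuming only the cryptography package added to requirements.txt (the secret value is illustrative):

from cryptography.fernet import Fernet

# Generate a key once and store it in the SECRETS_ENCRYPTION_KEY CI variable;
# Fernet keys are 32 url-safe base64-encoded bytes.
key = Fernet.generate_key()
print(key.decode())

# Round trip as performed by EncryptedJsonFileStorageAdapter.encrypt()/decrypt():
# the token is what ends up in the secrets JSON file, the plaintext is what the
# secrets endpoints return to authorized users.
f = Fernet(key)
token = f.encrypt("example-secret".encode()).decode("utf-8")
plain = f.decrypt(token.encode()).decode("utf-8")
assert plain == "example-secret"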