diff --git a/tests/get_sample_data.ipynb b/tests/get_sample_data.ipynb
index 35bc0f2ee29494ddc7a9a74a24d1a18f4c4fc83d..0e5a9b30dfe09d3573fe275c44a096d8d88c2c51 100644
--- a/tests/get_sample_data.ipynb
+++ b/tests/get_sample_data.ipynb
@@ -2,17 +2,9 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "2024-05-07 16:46:21.140327\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "from datetime import datetime, timedelta\n",
     "\n",
@@ -35,20 +27,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "2010-01-01 00:00:00\n",
-      "2011-01-01 00:00:00\n",
-      "Info: removed columns 2011-01-02 to match data range of 2010-01-01 00:00:00 to 2011-01-01 00:00:00\n",
-      "0:00:00.036132\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "from pathlib import Path\n",
     "from toargridding.toar_rest_client import AnalysisServiceDownload\n",
diff --git a/tests/produce_data.ipynb b/tests/produce_data.ipynb
index b9d0f9c20a3448e22beb388feefff0b8cfd55a86..b3b7cf7284f2deaaded8b6d3f70472bb82b833f3 100644
--- a/tests/produce_data.ipynb
+++ b/tests/produce_data.ipynb
@@ -38,7 +38,8 @@
     "    RegularGrid( lat_resolution=1.9, lon_resolution=2.5, ),\n",
     "    TimeSample( start=dt(2000,1,1), end=dt(2019,12,31), sampling=\"daily\"),#possibly adopt range:-)\n",
     "    [\"mole_fraction_of_ozone_in_air\"],#variable name\n",
-    "    [\"dma8epax\", \"mean\" ],# will start one request after another other...\n",
+    "    #[ \"mean\", \"dma8epax\"],# will start one request after another other...\n",
+    "    [ \"dma8epax\", \"mean\" ],# will start one request after another other...\n",
     "    details4Query\n",
     ")\n",
     "\n",
diff --git a/toargridding/metadata.py b/toargridding/metadata.py
index 6ace341523dda3f8738f35d491656e3ab5a6c39e..6584952be0ed3491f92050ad9f51f3c593642682 100644
--- a/toargridding/metadata.py
+++ b/toargridding/metadata.py
@@ -185,6 +185,8 @@ class AnalysisRequestResult:
 
 def get_global_attributes(metadata: Metadata) -> Dict:
     """combination of global metadata with request specific values.
+    Also adds all additional options passed to the request as metadata.
+    Throws an exception if moreOptions contains a key already in use by the metadata.
     """
     dynamic_cf_attributes = {
         "id": metadata.get_id(),
@@ -202,6 +204,12 @@ def get_global_attributes(metadata: Metadata) -> Dict:
         # "time_coverage_duration": 0, # TODO insert durations
         # "time_coverage_resolution": 0,
     }
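+    # merge the additional request options (moreOptions) into the global attributes; a clash with an existing attribute is treated as an error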
+    for key, value in metadata.moreOptions.items():
+        if key not in dynamic_cf_attributes:
+            dynamic_cf_attributes[key] = value
+        else:
+            raise ValueError(f"{key} already has the value {dynamic_cf_attributes[key]}. Overriding it with \"{value}\" is prohibited!")
     cf_attributes = dynamic_cf_attributes | global_cf_attributes
     return cf_attributes
 
diff --git a/toargridding/toar_rest_client.py b/toargridding/toar_rest_client.py
index 7c2406787e9190824af61d8021c611098e9d89f2..7fdc82041d78203122aca27423158ece87c82cbb 100644
--- a/toargridding/toar_rest_client.py
+++ b/toargridding/toar_rest_client.py
@@ -210,8 +210,16 @@ class Connection:
 
             try:  # test for stale cache
                 self.wait_and_get(status_endpoint).raise_for_status()
-            except requests.exceptions.HTTPError:
-                self.cache.remove(query_options.cache_key)
+            except requests.exceptions.HTTPError as e:
+                #TODO add detailed processing: what was the reason for the error? Do we really need to create a new request, or is there another problem that might resolve itself by simply waiting?
+                print("An HTTP error occurred while checking the cached status endpoint:")
+                print(f"Status Code: {e.response.status_code}")
+                print(f"Reason: {e.response.reason}")
+                print(f"Text: {e.response.text}")
+                #the cache entry will be overwritten in the next step...
+                #self.cache.remove(query_options.cache_key)
+            except Exception as e:
+                raise RuntimeError("An error occurred while accessing a cached request") from e
             else:
                 print("load status endpoint from cache")
                 return status_endpoint
@@ -225,6 +233,8 @@ class Connection:
         """create and new request to the TOAR DB.
 
         Adds the status endpoint of the request to the cache. 
+
+        Throws an exception if the TOAR DB returns an error.
         
         Parameters:
         ----------