diff --git a/tests/conversionOfTimestamps.py b/tests/conversionOfTimestamps.py
new file mode 100644
index 0000000000000000000000000000000000000000..df885abfdd252ae47a11cbfe54bbaf94f241a713
--- /dev/null
+++ b/tests/conversionOfTimestamps.py
@@ -0,0 +1,72 @@
+
+from datetime import datetime as dt
+from collections import namedtuple
+from pathlib import Path
+
+from toargridding.toar_rest_client import AnalysisServiceDownload, Connection
+from toargridding.grids import RegularGrid
+from toargridding.gridding import get_gridded_toar_data
+from toargridding.metadata import TimeSample
+
+# Creation of the request: grid, time range, variables, statistics, and additional request options.
+
+Config = namedtuple("Config", ["grid", "time", "variables", "stats", "moreOptions"])
+
+varName = "country"
+grid = RegularGrid( lat_resolution=1.9, lon_resolution=2.5, )
+
+configs = dict()
+country="AL"
+valid_data = Config(
+    grid,
+    TimeSample(start=dt(2000, 1, 1), end=dt(2018, 12, 31), sampling="daily"),  # possibly adapt the time range
+    ["mole_fraction_of_ozone_in_air"],  # variable name
+    ["dma8epa_strict"],  # statistic
+    {varName: country},  # here: restrict the request to a single country
+)
+
+configs[f"test_ta{country}"] = valid_data
+
+# CAVEAT: this request takes about 45 minutes per requested year, so we increase the waiting duration to 1 h per request.
+# The processing is done on the servers of the TOAR database.
+# Restarting this script resumes the request to the REST API; once the requested data are ready, they are downloaded.
+# The download itself can also take a few minutes.
+
+stats_endpoint = "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/"
+cache_basepath = Path("cache")
+result_basepath = Path("results")
+cache_basepath.mkdir(exist_ok=True)
+result_basepath.mkdir(exist_ok=True)
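+# With use_downloaded=True, results that have already been downloaded are reused after a restart instead of being requested again.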
+analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)
+
+Connection.DEBUG = True
+
+# If needed, adapt the polling interval and the total duration before the client stops waiting for results.
+# As the requests take about 45 min, waiting 60 min before timing out is more suitable than the original 30 min.
+analysis_service.connection.setRequestTimes(interval_min=5, maxWait_min=60)
+
+createdFiles = []
+
+for person, config in configs.items():
+    print(f"\nProcessing {person}:")
+    print(f"--------------------")
+    try:
+        datasets, metadatas = get_gridded_toar_data(
+            analysis_service=analysis_service,
+            grid=config.grid,
+            time=config.time,
+            variables=config.variables,
+            stats=config.stats,
+            **config.moreOptions
+        )
+    except KeyError:
+        print(f"Failed for {person}")
+        continue
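+
+    # Persist the gridded results, mirroring the saving step in produce_data_withOptional_country.ipynb;
+    # this assumes the returned datasets are xarray Datasets, so to_netcdf is available.
+    for dataset, metadata in zip(datasets, metadatas):
+        outName = result_basepath / f"{metadata.get_id()}_{config.grid.get_id()}.nc"
+        dataset.to_netcdf(outName)
+        createdFiles.append(outName)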
diff --git a/tests/produce_data_withOptional.ipynb b/tests/produce_data_withOptional.ipynb
index 0963958a8e61604697671f6b550699048c47b17d..03b719523bb3f5e971ab3a83bd967b95eb2a8e86 100644
--- a/tests/produce_data_withOptional.ipynb
+++ b/tests/produce_data_withOptional.ipynb
@@ -114,7 +114,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.7"
+   "version": "3.11.5"
   }
  },
  "nbformat": 4,
diff --git a/tests/produce_data_withOptional_country.ipynb b/tests/produce_data_withOptional_country.ipynb
index 3de253e63433f42f310164a00f6c9a7df81a368c..521218fcfca515bdeb9dee552aa085fb251c6450 100644
--- a/tests/produce_data_withOptional_country.ipynb
+++ b/tests/produce_data_withOptional_country.ipynb
@@ -78,14 +78,18 @@
     "for person, config in configs.items():\n",
     "    print(f\"\\nProcessing {person}:\")\n",
     "    print(f\"--------------------\")\n",
-    "    datasets, metadatas = get_gridded_toar_data(\n",
-    "        analysis_service=analysis_service,\n",
-    "        grid=config.grid,\n",
-    "        time=config.time,\n",
-    "        variables=config.variables,\n",
-    "        stats=config.stats,\n",
-    "        **config.moreOptions\n",
-    "    )\n",
+    "    try:\n",
+    "        datasets, metadatas = get_gridded_toar_data(\n",
+    "            analysis_service=analysis_service,\n",
+    "            grid=config.grid,\n",
+    "            time=config.time,\n",
+    "            variables=config.variables,\n",
+    "            stats=config.stats,\n",
+    "            **config.moreOptions\n",
+    "        )\n",
+    "    except KeyError as e:\n",
+    "        print(\"failed for \", person)\n",
+    "        continue\n",
     "\n",
     "    for dataset, metadata in zip(datasets, metadatas):\n",
     "        outName = result_basepath / f\"{metadata.get_id()}_{config.grid.get_id()}.nc\"\n",
diff --git a/toargridding/metadata.py b/toargridding/metadata.py
index 3689e04f4093b8e30bb74c6531aa426ccaaca717..5567dd0b85fa188064f70ea0e5e3b98358aaf079 100644
--- a/toargridding/metadata.py
+++ b/toargridding/metadata.py
@@ -152,6 +152,8 @@ class Metadata:
         For example, used for saving link to results of a request in the cache.
         """
         addition = "_".join(f"{key}-{val}" for key, val in sorted(self.moreOptions.items()))
+        addition = addition.replace("/", "%2F")
         return "_".join(str(i) for i in [self.variable.name, self.statistic, self.time.daterange_option, self.time.frequency, addition, "at", datetime.now().date().isoformat()])
 
     def get_title(self) -> str:
diff --git a/toargridding/toar_rest_client.py b/toargridding/toar_rest_client.py
index e6a43de6e77e9ee98caecc83571261b87ecfb31f..cef1b093f69c29b16a1b8b8346c3455335067a04 100644
--- a/toargridding/toar_rest_client.py
+++ b/toargridding/toar_rest_client.py
@@ -450,6 +450,11 @@ class AnalysisService:
         """
         zip_stream = io.BytesIO(content)
         with ZipFile(zip_stream) as myzip:
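+            # a successful request yields an archive containing both the statistics and the metadata file;
+            # a single entry therefore indicates that no timeseries data were returned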
+            if len(myzip.namelist()) == 1:
+                print("Downloaded data do not contain a timeseries.")
+                raise KeyError("Data file is empty")  # TODO: replace this with a custom exception
             timeseries = self.extract_data(myzip, metadata.statistic)
             timeseries_metadata = self.extract_data(myzip, AnalysisService.METADATA)
 
@@ -533,4 +538,6 @@ class AnalysisServiceDownload(AnalysisService):
             metadata for the request.
         """
         addition = "_".join(f"{key}{val}" for key,val in sorted(metadata.moreOptions.items()))
+        addition = addition.replace("/", "%2F")
         return "_".join(str(i) for i in [metadata.statistic, metadata.time.sampling, metadata.variable.cf_standardname, metadata.time.start.date(), metadata.time.end.date(), addition]) + ".zip"