diff --git a/.gitignore b/.gitignore
index 97e49d4dbb3e05e59ec5d256f4a1599b87f1b9c7..d2a41f322a41f6c51d5daecf291eafacf7fde4ef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,5 @@ examples/results
 tests/temp_data_cache/
 notTracked/
 *.swp
+log/
+*/log/
diff --git a/README.md b/README.md
index 36e48b7fac6f2899a6953e255109e9655ae32fa3..25ba0b6317a1ce6c465d73ad9b21b92a2d8ab647 100644
--- a/README.md
+++ b/README.md
@@ -80,6 +80,25 @@ The TOAR database has only a limited number of workers for performing a statisti
 The gridding uses a user defined grid to combine all stations in a cell.
 Per cell mean, standard deviation and the number of stations are reported in the resulting xarray dataset.
 
+## Logging
+
+Output created by the different modules and classes of this package uses the Python logging module.
+There is also an auxiliary class that provides the same logger setup for the examples and scripts of this package.
+It can be used to configure logging to the shell as well as to the system log of a Linux system.
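+
+A minimal usage sketch (the class and its methods are defined in `toargridding/defaultLogging.py`; the logfile name is a placeholder):
+```python
+import logging
+from pathlib import Path
+
+from toargridding.defaultLogging import toargridding_defaultLogging
+
+logger = toargridding_defaultLogging()
+logger.addShellLogger(logging.INFO)  # formatted log output to the shell
+logger.addRotatingLogFile(Path("log/myScript.log"))  # logfile rotating at midnight, last 7 files are kept
+logger.logExceptions()  # forward uncaught exceptions to the logger
+```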
+
 # Example
 
 There are at the moment five example provided as jupyter notebooks (https://jupyter.org/).
diff --git a/examples/produce_data_manyStations.ipynb b/examples/produce_data_manyStations.ipynb
index 3b4788cac1683b1f3176748493a8e00437e7e4c0..c4462354d8b66e997aa6a7c161a89d3bc661c0ab 100644
--- a/examples/produce_data_manyStations.ipynb
+++ b/examples/produce_data_manyStations.ipynb
@@ -6,6 +6,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "import logging\n",
     "from datetime import datetime as dt\n",
     "from collections import namedtuple\n",
     "from pathlib import Path\n",
@@ -13,7 +14,15 @@
     "from toargridding.toar_rest_client import AnalysisServiceDownload, Connection\n",
     "from toargridding.grids import RegularGrid\n",
     "from toargridding.gridding import get_gridded_toar_data\n",
-    "from toargridding.metadata import TimeSample"
+    "from toargridding.metadata import TimeSample\n",
+    "\n",
+    "from toargridding.defaultLogging import toargridding_defaultLogging\n",
+    "\n",
+    "\n",
+    "from toargridding.defaultLogging import toargridding_defaultLogging\n",
+    "#setup of logging\n",
+    "logger = toargridding_defaultLogging()\n",
+    "logger.addShellLogger(logging.DEBUG)\n",
+    "logger.logExceptions()\n",
+    "logger.addRotatingLogFile(Path(\"log/produce_data_manyStations.log\"))#we need to explicitly set a logfile"
    ]
   },
   {
@@ -59,7 +70,6 @@
     "result_basepath.mkdir(exist_ok=True)\n",
     "analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)\n",
     "\n",
-    "Connection.DEBUG=True\n",
     "\n",
     "#here we adopt the durations before, a request is stopped.\n",
     "#the default value is 30 minutes. \n",
diff --git a/examples/produce_data_withOptional.ipynb b/examples/produce_data_withOptional.ipynb
index 03b719523bb3f5e971ab3a83bd967b95eb2a8e86..8ee718492713214a7e30e5789c0ada29703e5a17 100644
--- a/examples/produce_data_withOptional.ipynb
+++ b/examples/produce_data_withOptional.ipynb
@@ -6,6 +6,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "import logging\n",
     "from datetime import datetime as dt\n",
     "from collections import namedtuple\n",
     "from pathlib import Path\n",
@@ -13,7 +14,16 @@
     "from toargridding.toar_rest_client import AnalysisServiceDownload, Connection\n",
     "from toargridding.grids import RegularGrid\n",
     "from toargridding.gridding import get_gridded_toar_data\n",
-    "from toargridding.metadata import TimeSample"
+    "from toargridding.metadata import TimeSample\n",
+    "\n",
+    "from toargridding.defaultLogging import toargridding_defaultLogging\n",
+    "\n",
+    "#setup of logging\n",
+    "logger = toargridding_defaultLogging()\n",
+    "logger.addShellLogger(logging.DEBUG)\n",
+    "logger.logExceptions()\n",
+    "logger.addRotatingLogFile(Path(\"log/produce_data_withOptional.log\"))#we need to explicitly set a logfile\n",
+    "#logger.addSysLogger(logging.DEBUG)"
    ]
   },
   {
@@ -46,10 +56,12 @@
     "for year in range(0,19):\n",
     "    valid_data = Config(\n",
     "        grid,\n",
-    "        TimeSample( start=dt(2000+year,1,1), end=dt(2000+year,12,31), sampling=\"daily\"),#possibly adopt range:-)\n",
+    "        #TimeSample( start=dt(2000+year,1,1), end=dt(2000+year,12,31), sampling=\"daily\"),#possibly adopt range:-)\n",
+    "        TimeSample( start=dt(2000+year,1,1), end=dt(2000+year,12,31), sampling=\"monthly\"),#possibly adopt range:-)\n",
     "        [\"mole_fraction_of_ozone_in_air\"],#variable name\n",
     "        #[ \"mean\", \"dma8epax\"],# will start one request after another other...\n",
-    "        [ \"dma8epa_strict\" ],\n",
+    "        #[ \"dma8epa_strict\" ],\n",
+    "        [ \"mean\" ],\n",
     "        details4Query\n",
     "    )\n",
     "    \n",
@@ -74,7 +86,6 @@
     "result_basepath.mkdir(exist_ok=True)\n",
     "analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)\n",
     "\n",
-    "Connection.DEBUG=True\n",
     "\n",
     "# maybe adopt the interval for requesting the results and the total duration, before the client pauses the requests.\n",
     "# as the requests take about 45min, it is more suitable to wait 60min before timing out the requests than the original 30min.\n",
@@ -114,7 +125,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.5"
+   "version": "3.11.7"
   }
  },
  "nbformat": 4,
diff --git a/examples/produce_data_withOptional_country.ipynb b/examples/produce_data_withOptional_country.ipynb
index c63bf2e17d1a2fa39256867e7af5134c78281732..1e8dc2ea9fbdac1e77c2cf2122a4f8bc3179ce2f 100644
--- a/examples/produce_data_withOptional_country.ipynb
+++ b/examples/produce_data_withOptional_country.ipynb
@@ -15,7 +15,16 @@
     "from toargridding.gridding import get_gridded_toar_data\n",
     "from toargridding.metadata import TimeSample\n",
     "\n",
-    "from toargridding.metadata_utilities import countryCodes"
+    "from toargridding.metadata_utilities import countryCodes\n",
+    "\n",
+    "from toargridding.defaultLogging import toargridding_defaultLogging\n",
+    "#setup of logging\n",
+    "logger = toargridding_defaultLogging()\n",
+    "logger.addShellLogger(logging.DEBUG)\n",
+    "logger.logExceptions()\n",
+    "logger.addRotatingLogFile_scriptName(__file__)\n",
+    "logger.addRotatingLogFile(Path(\"log/produce_data_withOptional_country.log\"))#we need to explicitly set a logfile\n",
+    "#logger.addSysLogger(logging.DEBUG)"
    ]
   },
   {
@@ -67,7 +76,6 @@
     "result_basepath.mkdir(exist_ok=True)\n",
     "analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)\n",
     "\n",
-    "Connection.DEBUG=True\n",
     "\n",
     "# maybe adopt the interval for requesting the results and the total duration, before the client pauses the requests.\n",
     "# as the requests take about 45min, it is more suitable to wait 60min before timing out the requests than the original 30min.\n",
diff --git a/tests/conversionOfTimestamps.py b/tests/conversionOfTimestamps.py
index 623beb39bd3f6372639b4690f395a2e8f989a2b2..f6a03aaf51f2f95d57bea237f4b759000b402304 100644
--- a/tests/conversionOfTimestamps.py
+++ b/tests/conversionOfTimestamps.py
@@ -1,3 +1,4 @@
+import logging
 
 from datetime import datetime as dt
 from collections import namedtuple
@@ -8,6 +9,14 @@ from toargridding.grids import RegularGrid
 from toargridding.gridding import get_gridded_toar_data
 from toargridding.metadata import TimeSample
 
+#setup of logging
+from toargridding.defaultLogging import toargridding_defaultLogging
+logger = toargridding_defaultLogging()
+logger.addShellLogger(logging.DEBUG)
+logger.logExceptions()
+#logger.addRotatingLogFile_scriptName(__file__)
+#logger.addSysLogger(logging.DEBUG)
+
 #creation of request.
 
 Config = namedtuple("Config", ["grid", "time", "variables", "stats","moreOptions"])
@@ -39,8 +50,6 @@ cache_basepath.mkdir(exist_ok=True)
 result_basepath.mkdir(exist_ok=True)
 analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)
 
-Connection.DEBUG=True
-
 # maybe adopt the interval for requesting the results and the total duration, before the client pauses the requests.
 # as the requests take about 45min, it is more suitable to wait 60min before timing out the requests than the original 30min.
 analysis_service.connection.setRequestTimes(interval_min=5, maxWait_min=60)
diff --git a/toargridding/defaultLogging.py b/toargridding/defaultLogging.py
new file mode 100644
index 0000000000000000000000000000000000000000..36ca102c23de13c4556a12677ffba302ed4524f6
--- /dev/null
+++ b/toargridding/defaultLogging.py
@@ -0,0 +1,154 @@
+import sys
+import logging
+from collections import namedtuple
+from pathlib import Path
+from logging.handlers import SysLogHandler, TimedRotatingFileHandler
+
+
+handlerPair = namedtuple("handlerPair", ["handler", "formatter"])
+
+class toargridding_defaultLogging:
+    """! class to setup default loggers for toargridding
+
+    The added handler and their formatters are stored by this class for further configuration.
+    Parameters:
+    ----------
+    loggername:
+        name of the logger to be used. default: toargridding
+    
+    Methods:
+    -------
+    registerHandler:
+        register a handler. Adds it to the logger and stores references in this class
+    addShellLogger:
+        adds formatted logging to the shell. Default level is INFO
+    addSysLogger:
+        adds formatted logging to the system log (on a Linux system). Default level is WARNING
+    addRotatingLogFile:
+        adds formatted logging to a logfile that rotates at midnight. Default level is INFO
+    addRotatingLogFile_scriptName:
+        derives the logfile name from a given script name and calls addRotatingLogFile
+    logExceptions:
+        enables passing of any uncaught exception to the logger
+    getHandler:
+        get a registered handler
+    getFormatter:
+        get a registered formatter
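+
+    A minimal usage sketch:
+        logger = toargridding_defaultLogging()
+        logger.addShellLogger(logging.INFO)
+        logger.logExceptions()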
+    """
+    def __init__(self, loggername : str = "toargridding"):
+        self.logger = logging.getLogger(loggername)
+        self.logger.setLevel(logging.DEBUG)
+        self.registeredHandlers = {}  # name : handlerPair
+
+    def registerHandler(self, name : str, handler, formatter = None):
+        """register a handler. Adds it to the logger and stores references in this class.
+        The given formatter (if not None) is added to the handler.
+        The handler is added to the logger.
+        Handler and logger are stored for a possible later access.
+        
+        Throws an exception if the name of the handler is already known.
+        """
+        if name in self.registeredHandlers:
+            raise ValueError(f"There is already a registered handler with the name {name}.")
+        if formatter is not None:
+            handler.setFormatter(formatter)
+        self.logger.addHandler(handler)
+        self.registeredHandlers[name] = handlerPair(handler, formatter)
+
+    def getHandler(self, name : str):
+        """get a registered handler by the name used at registration
+        """
+        if name in self.registeredHandlers:
+            return self.registeredHandlers[name].handler
+        return None
+
+    def getFormatter(self, name : str):
+        """get a registered formatter by the name used at registration
+        """
+        if name in self.registeredHandlers:
+            return self.registeredHandlers[name].formatter
+        return None
+
+    def addShellLogger(self, level=logging.INFO):
+        """!adds formatted logging to the shell for the "toargridding" logger.
+        The handler is registered as "shell".
+
+        Parameters:
+        ----------
+        level:
+            set the verbosity level of the logger (default: info)
+        """
+        shell_handler = logging.StreamHandler()
+        shell_handler.setLevel(level)
+        shell_formatter = logging.Formatter(fmt="%(asctime)s [%(levelname)s] - %(filename)s:%(lineno)d: '%(message)s'", datefmt="%Y-%m-%d %H:%M:%S")
+        self.registerHandler("shell", shell_handler, shell_formatter)
+    def addSysLogger(self, level=logging.WARNING):
+        """!adds a formatted logging to the system log of a linux system to the "toargridding" logger.
+        This logging is registered as "syslog".
+        
+        Parameters:
+        ----------
+        level:
+            set the verbosity level of the logger (default: warning)
+        """
+        syslog_handler = SysLogHandler(facility=SysLogHandler.LOG_DAEMON, address='/dev/log')
+        syslog_formatter = logging.Formatter(fmt="TOARGRIDDING [%(levelname)s] - %(filename)s:%(lineno)d: '%(message)s'")
+        syslog_handler.setLevel(level)
+        self.registerHandler("syslog",syslog_handler, syslog_formatter)
+    def addRotatingLogFile(self, filename : Path, level=logging.INFO):
+        """creation of a rotating file handler, that will change the files at midnight.
+        The last 7 files logfiles are stored.
+        
+        Parameters:
+        ----------
+        filename:
+            basename of the file. The parent path will be created, if required.
+        level:
+            output level for this handler
+        """
+        filename.parent.mkdir(parents=True, exist_ok=True)
+        handler = TimedRotatingFileHandler(filename, when="midnight", backupCount=7)
+        handler.setLevel(level)
+        formatter = logging.Formatter(fmt="%(asctime)s [%(levelname)s] - %(filename)s:%(lineno)d: '%(message)s'", datefmt="%Y-%m-%d %H:%M:%S")
+        self.registerHandler("rotatingFile", handler, formatter)
+    def addRotatingLogFile_scriptName(self, scriptName : str | Path, level=logging.INFO):
+        """creation of an rotating log file by using the script name.
+        In the /path/to/script a subdirectory log will be created. The logfile will be name [script basename].log
+        
+        Parameters:
+        ----------
+        scriptName:
+            name of the script, including its path
+        level:
+            verbosity level.
+        """
+        sn = Path(scriptName)
+        if not sn.is_file():
+            raise ValueError(f"Expecting the name of a script. {sn} is not a file.")
+        path = sn.parent / "log"
+        path.mkdir(exist_ok=True)
+        fn = path / f"{sn.stem}.log"
+        self.addRotatingLogFile(fn, level)
+
+    def logExceptions(self):
+        """calling this function redirects all uncaught exceptions to the logger.
+        This is especially useful to write the exceptions to the system log.
+        """
+        sys.excepthook = self.handle_exception
+
+    def handle_exception(self, exc_type, exc_value, exc_traceback):
+        """passes uncaught exceptions to the logger.
+        """
+        if issubclass(exc_type, KeyboardInterrupt):
+            sys.__excepthook__(exc_type, exc_value, exc_traceback)
+            return
+
+        self.logger.error("Program terminated by the following exception:", exc_info=(exc_type, exc_value, exc_traceback))
diff --git a/toargridding/grids.py b/toargridding/grids.py
index c9f85d41880d8112af0c361f18a5e705a5cac7f3..c7deed8679b757e0d79c54401f03e305f7d43969 100644
--- a/toargridding/grids.py
+++ b/toargridding/grids.py
@@ -1,3 +1,4 @@
+import logging
 from enum import Enum
 from abc import ABC, abstractmethod
 from collections import namedtuple
@@ -18,6 +19,8 @@ from toargridding.metadata import (
 )
 from toargridding.variables import Variable, Coordinate
 
+logger = logging.getLogger(__name__)
+
 GridType = Enum("GridType", ["regular"])
 """list of available grids.
 """
diff --git a/toargridding/metadata.py b/toargridding/metadata.py
index d6739a29599a08bc21932a2a45a4a391bca1b164..6997ce10a857b97ed1093fe76778f4984f739aee 100644
--- a/toargridding/metadata.py
+++ b/toargridding/metadata.py
@@ -1,3 +1,5 @@
+import logging
+
 from datetime import datetime, timedelta
 from enum import Enum
 from dataclasses import dataclass, field
@@ -11,6 +13,7 @@ from toargridding.static_metadata import global_cf_attributes, TOARVariable
 from typing import Dict
 
 import importlib.metadata
+logger = logging.getLogger(__name__)
 
 date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
 #date_created = datetime.now(datetime.UTC).strftime("%Y-%m-dT%H:%M:%SZ") # fix as utcnow will be removed in the future
diff --git a/toargridding/metadata_utilities.py b/toargridding/metadata_utilities.py
index 4e595f94be4b00ee6264b273d679300bc4be5b30..73bfc3eb92db7b001bfed6d1d8cde5f3278f923b 100644
--- a/toargridding/metadata_utilities.py
+++ b/toargridding/metadata_utilities.py
@@ -1,6 +1,8 @@
+import logging
 
 import requests
 from collections import namedtuple
+logger = logging.getLogger(__name__)
 
 ControlVoc = namedtuple("ControlVoc", ["ID", "short", "long"])
 
diff --git a/toargridding/setupFunctions.py b/toargridding/setupFunctions.py
index e3e3aeb72c6498720a4516e82cc682e2e4cb3000..4a841ff5c4f13acc691e781c6a89cdb832761fb2 100644
--- a/toargridding/setupFunctions.py
+++ b/toargridding/setupFunctions.py
@@ -1,7 +1,10 @@
+import logging
+
 from toargridding.static_metadata import TOAR_VARIABLES_METADATA_PATH
 import requests
 import json
 
+logger = logging.getLogger(__name__)
 
 def updateTOARVariables():
     """Download the most recent list of variables from the TOAR database
diff --git a/toargridding/static_metadata.py b/toargridding/static_metadata.py
index dfca7efc9c1ac2a99f4177071a57ed7a19f5dc1c..04ce79d23e00f894739fcd00b2b274c2a977bde8 100644
--- a/toargridding/static_metadata.py
+++ b/toargridding/static_metadata.py
@@ -1,7 +1,11 @@
+import logging
+
 from pathlib import Path
 from dataclasses import dataclass
 import json
 
+logger = logging.getLogger(__name__)
+
 STATIC_METADATA_PATH = Path(__file__).parent / "static_metadata"
 TOAR_VARIABLES_METADATA_PATH = STATIC_METADATA_PATH / "toar_variables.json"
 GLOABAL_CF_ATTRIBUTES_PATH = STATIC_METADATA_PATH / "global_cf_attributes.json"
diff --git a/toargridding/toar_rest_client.py b/toargridding/toar_rest_client.py
index ee6862fe8f32a1e9a63750fcf5bb84dd0bb94080..dafb710a20656f19765a12bd2a6452a770f4898e 100644
--- a/toargridding/toar_rest_client.py
+++ b/toargridding/toar_rest_client.py
@@ -1,3 +1,5 @@
+import logging
+
 import time
 from datetime import datetime
 import io
@@ -14,6 +16,7 @@ from typing import Dict
 
 from toargridding.metadata import Metadata, AnalysisRequestResult, Coordinates
 
+logger = logging.getLogger(__name__)
 
 STATION_LAT = "station_coordinates_lat"
 STATION_LON = "station_coordinates_lng"
@@ -87,7 +90,6 @@ class QueryOptions:
 
 
 def quarryToDict(data : QueryOptions):
-    #print(data)
     out = { field : value for field, value in data[:-1] }
     extraVals = data[-1][1]
     for field, value in extraVals.items():
@@ -202,7 +204,6 @@ class Cache:
 
 
 class Connection:
-    DEBUG = False
     def __init__(self, endpoint, cache_dir):
         """connection to the rest API of the TOAR database
 
@@ -258,24 +259,25 @@ class Connection:
         status_endpoint = self.get_status_endpoint(query_options)
 
         for i, wait_time in enumerate(self.wait_seconds):
-            print(f"[{datetime.now().strftime('%Y-%m-%dT%H:%M')}] try: {i+1}, wait_time: {wait_time}")
+            logger.info(f"try: {i+1}, wait_time: {wait_time}")
             response = self.wait_and_get(status_endpoint, wait_secs=wait_time)
             #do error handling i.e. look for connection issues
             try:
                 response.raise_for_status()
             except requests.exceptions.HTTPError as e: 
-                print(f"\tconnection error ({e.response.status_code}: {e.response.reason}).")
+                logger.warning(f"\tconnection error ({e.response.status_code}: {e.response.reason}).")
                 self.printExecption(e, response)
                 #a Status Code 500 seems indicated an aborted request -> restart the request and continue with new status endpoint
                 if e.response.status_code == 500:
+                    logger.warning("Received internal server error. Restarting request.")
                     self.cache.remove(query_options.cache_key)
                     status_endpoint = self.get_status_endpoint(query_options)
                 else:
-                    print("\t Trying again later.")
+                    logger.warning("\tTrying again later.")
                 continue
             #are our results ready to obtain?
             if response.headers["Content-Type"] == "application/zip":
-                print("Results are available for download")
+                logger.info("Results are available for download")
                 return response
         else:
             raise RuntimeError(
@@ -303,26 +305,26 @@ class Connection:
                 response = self.wait_and_get(status_endpoint)
                 response.raise_for_status()
             except requests.exceptions.ReadTimeout as e:
-                print("Caught read timeout.")
+                logger.critical("Caught read timeout.")
                 raise RuntimeError("Connection to TAORDB timed out (ReadTimeout) while checking cached status point. Please try again later.")
             except requests.exceptions.HTTPError as e:
                 #TODO add detailed processing: What was the reason for the error? Do we really need to create a new request or is there another problem, that might resolve by simply waiting
-                print(f"A HTTP error occurred:")
+                logger.warning(f"A HTTP error occurred:")
                 self.printExecption(e, response)
-                print(f"Status Endpoint: {status_endpoint}")
+                logger.debug(f"Status Endpoint: {status_endpoint}")
                 #use inverse order for saving. the status endpoint should be more unique
                 self.cache_backup.put(status_endpoint, query_options.cache_key)
                 #will be overwritten in the next step.
                 self.cache.remove(query_options.cache_key)
-                print("Removing status endpoint from cache and submitting new request.")
+                logger.warning("Removing status endpoint from cache and submitting new request.")
                 pass
             except:
                 raise RuntimeError(f"An error occurred during accessing a cached request")
             else:
-                print("load status endpoint from cache")
+                logger.info("load status endpoint from cache")
                 return status_endpoint
         else:
-            print("query not in cache")
+            logger.info("query not in cache")
 
         status_endpoint = self.query_for_status_endpoint(query_options)
         return status_endpoint
@@ -342,18 +344,17 @@ class Connection:
         try:
             response = self.wait_and_get(self.endpoint, asdict(query_options, dict_factory=quarryToDict))
             url = response.history[0].url if response.history else response.url
-            if Connection.DEBUG:
-                print(f"[DEBUG] Original request: {url}")
+            logger.debug(f"Original request: {url}")
 
             if response.headers["Content-Type"] == "application/json":
                 status_endpoint = response.json()["status"]
             response.raise_for_status()
         except requests.exceptions.HTTPError as e:
-            print(f"An HTTP error occurred:")
+            logger.warning(f"An HTTP error occurred:")
             self.printExecption(e, response)
             raise e
         except requests.exceptions.ReadTimeout as e:
-            print("Caught read timeout.")
+            logger.critical("Caught read timeout.")
             self.printExecption(e, response)
             raise RuntimeError("Read timeout while querying for status endpoint")
         except:
@@ -388,16 +389,15 @@ class Connection:
     def printExecption(self, e : requests.exceptions.HTTPError, response : requests.Response):
         """!output different infos from an exception and the corresponding response.
         """
-        if Connection.DEBUG:
-            print(f"Status Code: {e.response.status_code}")
-            print(f"Reason: {e.response.reason}")
-            print(f"Text: {e.response.text}")
-            print(f"{response=}")
-            print(f"{response.content=}")
-            try:
-                print(response.json())
-            except Exception as e:
-                print("Decoding as json failed.")
+        logger.debug(f"Status Code: {e.response.status_code}")
+        logger.debug(f"Reason: {e.response.reason}")
+        logger.debug(f"Text: {e.response.text}")
+        logger.debug(f"{response=}")
+        logger.debug(f"{response.content=}")
+        try:
+            logger.debug(response.json())
+        except Exception:
+            logger.debug("Decoding as json failed.")
 
 
 class AnalysisService:
@@ -494,7 +494,7 @@ class AnalysisService:
                 break
 
         if len(col2Drop):
-            print(f"[Info:] Dropping columns ({col2Drop}) from TOAR data to match requested date range [{metadata.time.start}, {metadata.time.end}]")
+            logger.info(f"Dropping columns ({col2Drop}) from TOAR data to match requested date range [{metadata.time.start}, {metadata.time.end}]")
             timeseries.drop(columns=col2Drop, inplace=True)
             
         all_na = timeseries.isna().all(axis=1)
@@ -581,12 +581,12 @@ class AnalysisServiceDownload(AnalysisService):
         needs_fresh_download = (not self.use_downloaded) or (not filename.is_file())
 
         if needs_fresh_download:
-            print("Performing request to TOAR DB")
+            logger.info("Performing request to TOAR DB")
             response = self.connection.get(query_options)
             with open(filename, "w+b") as downloaded_file:
                 downloaded_file.write(response.content)
         else:
-            print(f"Loading already downloaded data from {filename}")
+            logger.info(f"Loading already downloaded data to file {filename}")
 
         with open(filename, "r+b") as data_file:
             content = data_file.read()
diff --git a/toargridding/variables.py b/toargridding/variables.py
index 0440e0bf5de35a5cc14575943e0f419eb1d2344f..0dc47219579e5bc386b799e5a5c35d22c7da3bb0 100644
--- a/toargridding/variables.py
+++ b/toargridding/variables.py
@@ -1,3 +1,4 @@
+import logging
 from dataclasses import dataclass
 
 import numpy as np
@@ -7,6 +8,8 @@ from toargridding.metadata import Variables, get_cf_metadata, Metadata
 
 from typing import Dict
 
+logger = logging.getLogger(__name__)
+
 @dataclass
 class Variable:
     """full variable including data and information according to CF 
@@ -107,10 +110,10 @@ class Coordinate(Variable):
         span = max - min
         n = int(span / resolution)  #TODO: raise error if invalid inputs ?
         if n*resolution != span:
-            print(f"[DEBUG:] Resolution {resolution} does not provide an equidistant division of the span [{min},{max}]")
+            logger.warning(f"Resolution {resolution} does not provide an equidistant division of the span [{min},{max}]")
             n+=1
             step = span / n
-            print(f"[DEBUG:] Adoption resolution {resolution} to {step}")
+            logger.warning(f"Adoption resolution {resolution} to {step}")
         else:
             step = resolution
         data = np.linspace(min, max, n + 1)