diff --git a/toargridding/grids.py b/toargridding/grids.py
index c9f85d41880d8112af0c361f18a5e705a5cac7f3..c7deed8679b757e0d79c54401f03e305f7d43969 100644
--- a/toargridding/grids.py
+++ b/toargridding/grids.py
@@ -1,3 +1,4 @@
+import logging
 from enum import Enum
 from abc import ABC, abstractmethod
 from collections import namedtuple
@@ -18,6 +19,8 @@ from toargridding.metadata import (
 )
 from toargridding.variables import Variable, Coordinate
 
+logger = logging.getLogger(__name__)
+
 GridType = Enum("GridType", ["regular"])
 """list of available grids.
 """
diff --git a/toargridding/metadata.py b/toargridding/metadata.py
index d6739a29599a08bc21932a2a45a4a391bca1b164..6997ce10a857b97ed1093fe76778f4984f739aee 100644
--- a/toargridding/metadata.py
+++ b/toargridding/metadata.py
@@ -1,3 +1,5 @@
+import logging
+
 from datetime import datetime, timedelta
 from enum import Enum
 from dataclasses import dataclass, field
@@ -11,6 +13,7 @@ from toargridding.static_metadata import global_cf_attributes, TOARVariable
 from typing import Dict
 
 import importlib.metadata
+logger = logging.getLogger(__name__)
 
 date_created = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
 #date_created = datetime.now(datetime.UTC).strftime("%Y-%m-dT%H:%M:%SZ") # fix as utcnow will be removed in the future
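
The commented-out replacement above for the deprecated datetime.utcnow() would not run as written: UTC lives on the datetime module (or as datetime.timezone.utc), not on the datetime class imported here, and the format string dropped the percent sign from %d. A sketch of a working timezone-aware equivalent for when that TODO is picked up:

    from datetime import datetime, timezone

    # timezone-aware "now" in UTC; output format matches the existing
    # date_created string, e.g. "2024-01-31T12:00:00Z"
    date_created = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
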
diff --git a/toargridding/metadata_utilities.py b/toargridding/metadata_utilities.py
index 4e595f94be4b00ee6264b273d679300bc4be5b30..73bfc3eb92db7b001bfed6d1d8cde5f3278f923b 100644
--- a/toargridding/metadata_utilities.py
+++ b/toargridding/metadata_utilities.py
@@ -1,6 +1,8 @@
+import logging
 
 import requests
 from collections import namedtuple
+logger = logging.getLogger(__name__)
 
 ControlVoc = namedtuple("ControlVoc", ["ID", "short", "long"])
 
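
Because the logger names mirror the package hierarchy, verbosity can be tuned per module without touching library code. A short sketch (the levels chosen here are illustrative):

    import logging

    logging.basicConfig(level=logging.WARNING)   # keep most modules quiet
    # but show full request diagnostics from the REST client:
    logging.getLogger("toargridding.toar_rest_client").setLevel(logging.DEBUG)
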
diff --git a/toargridding/setupFunctions.py b/toargridding/setupFunctions.py
index e3e3aeb72c6498720a4516e82cc682e2e4cb3000..4a841ff5c4f13acc691e781c6a89cdb832761fb2 100644
--- a/toargridding/setupFunctions.py
+++ b/toargridding/setupFunctions.py
@@ -1,7 +1,10 @@
+import logging
+
 from toargridding.static_metadata import TOAR_VARIABLES_METADATA_PATH
 import requests
 import json
 
+logger = logging.getLogger(__name__)
 
 def updateTOARVariables():
     """Download the most recent list of variables from the TOAR database
diff --git a/toargridding/static_metadata.py b/toargridding/static_metadata.py
index dfca7efc9c1ac2a99f4177071a57ed7a19f5dc1c..04ce79d23e00f894739fcd00b2b274c2a977bde8 100644
--- a/toargridding/static_metadata.py
+++ b/toargridding/static_metadata.py
@@ -1,7 +1,11 @@
+import logging
+
 from pathlib import Path
 from dataclasses import dataclass
 import json
 
+logger = logging.getLogger(__name__)
+
 STATIC_METADATA_PATH = Path(__file__).parent / "static_metadata"
 TOAR_VARIABLES_METADATA_PATH = STATIC_METADATA_PATH / "toar_variables.json"
 GLOABAL_CF_ATTRIBUTES_PATH = STATIC_METADATA_PATH / "global_cf_attributes.json"
diff --git a/toargridding/toar_rest_client.py b/toargridding/toar_rest_client.py
index ee6862fe8f32a1e9a63750fcf5bb84dd0bb94080..89d03f0f1ac926abefe7a80e8bc40c2ef654d2a1 100644
--- a/toargridding/toar_rest_client.py
+++ b/toargridding/toar_rest_client.py
@@ -1,3 +1,5 @@
+import logging
+
 import time
 from datetime import datetime
 import io
@@ -14,6 +16,7 @@ from typing import Dict
 
 from toargridding.metadata import Metadata, AnalysisRequestResult, Coordinates
 
+logger = logging.getLogger(__name__)
 
 STATION_LAT = "station_coordinates_lat"
 STATION_LON = "station_coordinates_lng"
@@ -87,7 +90,6 @@ class QueryOptions:
 
 
 def quarryToDict(data : QueryOptions):
-    #print(data)
     out = { field : value for field, value in data[:-1] }
     extraVals = data[-1][1]
     for field, value in extraVals.items():
@@ -202,7 +204,6 @@ class Cache:
 
 
 class Connection:
-    DEBUG = False
     def __init__(self, endpoint, cache_dir):
         """connection to the rest API of the TOAR database
 
@@ -258,24 +259,25 @@ class Connection:
         status_endpoint = self.get_status_endpoint(query_options)
 
         for i, wait_time in enumerate(self.wait_seconds):
-            print(f"[{datetime.now().strftime('%Y-%m-%dT%H:%M')}] try: {i+1}, wait_time: {wait_time}")
+            logger.info(f"try: {i+1}, wait_time: {wait_time}")
             response = self.wait_and_get(status_endpoint, wait_secs=wait_time)
             #do error handling i.e. look for connection issues
             try:
                 response.raise_for_status()
             except requests.exceptions.HTTPError as e: 
-                print(f"\tconnection error ({e.response.status_code}: {e.response.reason}).")
+                logger.warning(f"\tconnection error ({e.response.status_code}: {e.response.reason}).")
                 self.printExecption(e, response)
                 #a Status Code 500 seems indicated an aborted request -> restart the request and continue with new status endpoint
                 if e.response.status_code == 500:
+                    logger.warning("Received internal server error. Restarting request.")
                     self.cache.remove(query_options.cache_key)
                     status_endpoint = self.get_status_endpoint(query_options)
                 else:
-                    print("\t Trying again later.")
+                    logger.warning("\tTrying again later.")
                 continue
             #are our results ready to obtain?
             if response.headers["Content-Type"] == "application/zip":
-                print("Results are available for download")
+                logger.info("Results are available for download")
                 return response
         else:
             raise RuntimeError(
@@ -303,26 +305,26 @@ class Connection:
                 response = self.wait_and_get(status_endpoint)
                 response.raise_for_status()
             except requests.exceptions.ReadTimeout as e:
-                print("Caught read timeout.")
+                logger.error("Caught read timeout.")
                 raise RuntimeError("Connection to TAORDB timed out (ReadTimeout) while checking cached status point. Please try again later.")
             except requests.exceptions.HTTPError as e:
                 #TODO add detailed processing: What was the reason for the error? Do we really need to create a new request or is there another problem, that might resolve by simply waiting
-                print(f"A HTTP error occurred:")
+                logger.warning(f"A HTTP error occurred:")
                 self.printExecption(e, response)
-                print(f"Status Endpoint: {status_endpoint}")
+                logger.debug(f"Status Endpoint: {status_endpoint}")
                 #use inverse order for saving. the status endpoint should be more unique
                 self.cache_backup.put(status_endpoint, query_options.cache_key)
                 #will be overwritten in the next step.
                 self.cache.remove(query_options.cache_key)
-                print("Removing status endpoint from cache and submitting new request.")
+                logger.warning("Removing status endpoint from cache and submitting new request.")
                 pass
             except:
                 raise RuntimeError(f"An error occurred during accessing a cached request")
             else:
-                print("load status endpoint from cache")
+                logger.info("load status endpoint from cache")
                 return status_endpoint
         else:
-            print("query not in cache")
+            logger.info("query not in cache")
 
         status_endpoint = self.query_for_status_endpoint(query_options)
         return status_endpoint
@@ -342,18 +344,17 @@ class Connection:
         try:
             response = self.wait_and_get(self.endpoint, asdict(query_options, dict_factory=quarryToDict))
             url = response.history[0].url if response.history else response.url
-            if Connection.DEBUG:
-                print(f"[DEBUG] Original request: {url}")
+            logger.debug(f"Original request: {url}")
 
             if response.headers["Content-Type"] == "application/json":
                 status_endpoint = response.json()["status"]
             response.raise_for_status()
         except requests.exceptions.HTTPError as e:
-            print(f"An HTTP error occurred:")
+            logger.warning(f"An HTTP error occurred:")
             self.printExecption(e, response)
             raise e
         except requests.exceptions.ReadTimeout as e:
-            print("Caught read timeout.")
+            logger.error("Caught read timeout.")
             self.printExecption(e, response)
             raise RuntimeError("Read timeout while querying for status endpoint")
         except:
@@ -388,16 +389,15 @@ class Connection:
     def printExecption(self, e : requests.exceptions.HTTPError, response : requests.Response):
         """!output different infos from an exception and the corresponding response.
         """
-        if Connection.DEBUG:
-            print(f"Status Code: {e.response.status_code}")
-            print(f"Reason: {e.response.reason}")
-            print(f"Text: {e.response.text}")
-            print(f"{response=}")
-            print(f"{response.content=}")
-            try:
-                print(response.json())
-            except Exception as e:
-                print("Decoding as json failed.")
+        logger.debug(f"Status Code: {e.response.status_code}")
+        logger.debug(f"Reason: {e.response.reason}")
+        logger.debug(f"Text: {e.response.text}")
+        logger.debug(f"{response=}")
+        logger.debug(f"{response.content=}")
+        try:
+            logger.debug(response.json())
+        except Exception:
+            logger.debug("Decoding the response as JSON failed.")
 
 
 class AnalysisService:
@@ -494,7 +494,7 @@ class AnalysisService:
                 break
 
         if len(col2Drop):
-            print(f"[Info:] Dropping columns ({col2Drop}) from TOAR data to match requested date range [{metadata.time.start}, {metadata.time.end}]")
+            logger.info(f"Dropping columns ({col2Drop}) from TOAR data to match requested date range [{metadata.time.start}, {metadata.time.end}]")
             timeseries.drop(columns=col2Drop, inplace=True)
             
         all_na = timeseries.isna().all(axis=1)
@@ -581,12 +581,12 @@ class AnalysisServiceDownload(AnalysisService):
         needs_fresh_download = (not self.use_downloaded) or (not filename.is_file())
 
         if needs_fresh_download:
-            print("Performing request to TOAR DB")
+            logger.info("Performing request to TOAR DB")
             response = self.connection.get(query_options)
             with open(filename, "w+b") as downloaded_file:
                 downloaded_file.write(response.content)
         else:
-            print(f"Loading already downloaded data from {filename}")
+            logger.info(f"Loading already downloaded data from {filename}")
 
         with open(filename, "r+b") as data_file:
             content = data_file.read()
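
The removed prints in the retry loop stamped their own timestamp into the message; with logging, the formatter supplies it, and the diagnostics formerly gated by the deleted Connection.DEBUG flag now sit behind the standard DEBUG level. A sketch that restores timestamped console output and additionally keeps the DEBUG-level request/response details in a file (path and format are illustrative):

    import logging

    console = logging.StreamHandler()
    console.setLevel(logging.INFO)

    logfile = logging.FileHandler("toargridding.log")  # illustrative path
    logfile.setLevel(logging.DEBUG)  # captures the old Connection.DEBUG output

    logging.basicConfig(
        level=logging.DEBUG,
        format="[%(asctime)s] %(levelname)s %(name)s: %(message)s",
        handlers=[console, logfile],
    )
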
diff --git a/toargridding/toarstats_constants.py b/toargridding/toarstats_constants.py
index 3ff9c1c98accceb2e0e155ffa3510afa163a0d45..9b22c849fda063ebc700c787cb0af53463128efd 100644
--- a/toargridding/toarstats_constants.py
+++ b/toargridding/toarstats_constants.py
@@ -1,3 +1,4 @@
+import logging
 # taken from https://gitlab.jsc.fz-juelich.de/esde/toar-public/toarstats/-/blob/master/toarstats/metrics/constants.py#L12-21
 
 ALLOWED_SAMPLING_VALUES = [
diff --git a/toargridding/variables.py b/toargridding/variables.py
index 0440e0bf5de35a5cc14575943e0f419eb1d2344f..0dc47219579e5bc386b799e5a5c35d22c7da3bb0 100644
--- a/toargridding/variables.py
+++ b/toargridding/variables.py
@@ -1,3 +1,4 @@
+import logging
 from dataclasses import dataclass
 
 import numpy as np
@@ -7,6 +8,8 @@ from toargridding.metadata import Variables, get_cf_metadata, Metadata
 
 from typing import Dict
 
+logger = logging.getLogger(__name__)
+
 @dataclass
 class Variable:
     """full variable including data and information according to CF 
@@ -107,10 +110,10 @@ class Coordinate(Variable):
         span = max - min
         n = int(span / resolution)  #TODO: raise error if invalid inputs ?
         if n*resolution != span:
-            print(f"[DEBUG:] Resolution {resolution} does not provide an equidistant division of the span [{min},{max}]")
+            logger.warning(f"Resolution {resolution} does not provide an equidistant division of the span [{min},{max}]")
             n+=1
             step = span / n
-            print(f"[DEBUG:] Adoption resolution {resolution} to {step}")
+            logger.warning(f"Adoption resolution {resolution} to {step}")
         else:
             step = resolution
         data = np.linspace(min, max, n + 1)
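
For concreteness, the adjusted branch above with min=0, max=10, resolution=3: span is 10, n = int(10/3) = 3, and since 3*3 != 10 the count is bumped to n=4 with step 10/4 = 2.5, so np.linspace(0, 10, 5) yields the equidistant points 0, 2.5, 5, 7.5, 10. A standalone sketch of that logic (equidistant_axis is a hypothetical helper, not part of the package):

    import numpy as np

    def equidistant_axis(lo: float, hi: float, resolution: float) -> np.ndarray:
        # mirrors the span/step adjustment in Coordinate above
        span = hi - lo
        n = int(span / resolution)
        if n * resolution != span:  # resolution does not evenly divide the span
            n += 1                  # one more interval, slightly finer step
        return np.linspace(lo, hi, n + 1)

    print(equidistant_axis(0.0, 10.0, 3.0))  # [ 0.   2.5  5.   7.5 10. ]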