Skip to content
Snippets Groups Projects
Commit 52ee7e31 authored by lukas leufen's avatar lukas leufen
Browse files

logfile logging

See merge request toar/machinelearningtools!74
parents 0f645b8d 5232769e
No related branches found
No related tags found
3 merge requests!90WIP: new release update,!89Resolve "release branch / CI on gpu",!74Resolve "Write logfile"
Pipeline #32145 failed
......@@ -3,8 +3,8 @@ __date__ = '2019-11-14'
import argparse
import logging
from src.helpers import Logger
from src.run_modules.experiment_setup import ExperimentSetup
from src.run_modules.model_setup import ModelSetup
from src.run_modules.post_processing import PostProcessing
......@@ -12,6 +12,8 @@ from src.run_modules.pre_processing import PreProcessing
from src.run_modules.run_environment import RunEnvironment
from src.run_modules.training import Training
Logger()
def main(parser_args):
......@@ -29,10 +31,6 @@ def main(parser_args):
if __name__ == "__main__":
formatter = '%(asctime)s - %(levelname)s: %(message)s [%(filename)s:%(funcName)s:%(lineno)s]'
logging.basicConfig(format=formatter, level=logging.INFO)
# logging.basicConfig(format=formatter, level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('--experiment_date', metavar='--exp_date', type=str, default=None,
help="set experiment date as string")
......
......@@ -4,12 +4,13 @@ __author__ = 'Lukas Leufen, Felix Kleinert'
__date__ = '2019-10-21'
import datetime as dt
import logging
import math
import os
import time
import socket
import datetime as dt
import sys
import time
import keras.backend as K
import xarray as xr
......@@ -218,3 +219,59 @@ def list_pop(list_full: list, pop_items):
def dict_pop(dict_orig: Dict, pop_keys):
    """
    Return a shallow copy of `dict_orig` without the entries whose keys are listed in `pop_keys`.

    :param dict_orig: source dictionary (left unmodified)
    :param pop_keys: single key or list of keys to exclude
    :return: new dict containing all remaining key/value pairs
    """
    keys_to_remove = to_list(pop_keys)
    remaining = {}
    for key, value in dict_orig.items():
        if key not in keys_to_remove:
            remaining[key] = value
    return remaining
class Logger:
    """
    Basic logger class to unify all logging outputs. Logs are saved in local file and returned to std output. In default
    settings, logging level of file logger is DEBUG, logging level of stream logger is INFO. Class must be imported
    and initialised in starting script, all subscripts should log with logging.info(), debug, ...
    """

    def __init__(self, log_path=None, level_file=logging.DEBUG, level_stream=logging.INFO):
        """
        Configure the root logger with a file handler (level `level_file`) and a console
        stream handler (level `level_stream`).

        :param log_path: directory for the logfile; defaults to a "logging" folder next to the main script
        :param level_file: logging level of the file handler (default DEBUG)
        :param level_stream: logging level of the console handler (default INFO)
        """
        # shared format used by both the file and the console handler
        self.formatter = '%(asctime)s - %(levelname)s: %(message)s [%(filename)s:%(funcName)s:%(lineno)s]'
        # resolve (and create, if required) the logfile location
        log_file = self.setup_logging_path(log_path)
        # root logger writes everything >= level_file to the logfile (append mode)
        logging.basicConfig(level=level_file,
                            format=self.formatter,
                            filename=log_file,
                            filemode='a')
        # additionally mirror messages >= level_stream to the console
        logging.getLogger('').addHandler(self.logger_console(level_stream))

    @staticmethod
    def setup_logging_path(path: str = None) -> str:
        """
        Check if given path exists and creates if not. If path is None, use path from main. The logging file is named
        like `logging_<runtime>.log` where runtime=`%Y-%m-%d_%H-%M-%S` of current run.

        :param path: path to logfile
        :return: path of logfile
        """
        if not path:  # set default path: <directory of __main__>/logging
            # NOTE(review): assumes sys.modules["__main__"] has a __file__ attribute — not true in
            # interactive sessions; confirm all intended entry points run as scripts.
            path = os.path.dirname(sys.modules["__main__"].__file__)
            path = os.path.join(path, "logging")
        # exist_ok avoids the check-then-create race of `if not exists: makedirs` when
        # several runs start at the same time
        os.makedirs(path, exist_ok=True)
        runtime = time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime())
        log_file = os.path.join(path, f'logging_{runtime}.log')
        return log_file

    def logger_console(self, level: int) -> logging.StreamHandler:
        """
        Defines a stream handler which writes messages of given level or higher to std out

        :param level: logging level as integer, e.g. logging.DEBUG or 10
        :return: configured stream handler
        """
        # define Handler
        console = logging.StreamHandler()
        # set level of Handler
        console.setLevel(level)
        # console output uses the same format as the logfile
        formatter = logging.Formatter(self.formatter)
        # tell the handler to use this format
        console.setFormatter(formatter)
        return console
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment