From 1852f7f4c31e827554baabbcdf172c309d13c61f Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Fri, 5 Jan 2018 10:01:45 +0100 Subject: [PATCH 1/6] Reconfigure the logging for each periodic celery task This is an attempted solution to fix the broken pipe errors in the celery worker logging. It probably is not the correct way of solving it and is merely patching the result of an incorrect configuration elsewhere. The logging for AiiDA is configured from a dictionary in aiida/__init__.py, where for the 'aiida' logger and all its children a database and console handler are configured. The console handler is set to print to stdout. Once this logger is triggered in the code executed by one of the celery workers of the daemon process, the pipe to this file descriptor is broken and an IOError exception is thrown. The botch in this commit reconfigures the logging at each task call, which ensures that the file descriptor of the StreamHandler is pointing to the correct stdout of the worker. 
--- aiida/__init__.py | 5 +++++ aiida/daemon/tasks.py | 17 ++++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/aiida/__init__.py b/aiida/__init__.py index 531644cf93..06aabd8997 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -98,6 +98,11 @@ def filter(self, record): 'propagate': False, 'qualname': 'sqlalchemy.engine', }, + 'celery': { + 'handlers': ['console'], + 'level': get_property('logging.aiida_loglevel'), + 'propagate': True, + }, }, } diff --git a/aiida/daemon/tasks.py b/aiida/daemon/tasks.py index fd09e60c89..aceba83d15 100644 --- a/aiida/daemon/tasks.py +++ b/aiida/daemon/tasks.py @@ -50,6 +50,16 @@ app = Celery('tasks', broker=broker) +def setup_logging(): + """ + Setup the logging from the dictionary configuration in aiida.LOGGING + This is necessary for the console handlers to be properly configured in the celery workers + that handle the various tasks below + """ + import logging + from aiida import LOGGING + logging.config.dictConfig(LOGGING) + # the tasks as taken from the djsite.db.tasks, same tasks and same functionalities # will now of course fail because set_daemon_timestep has not be implementd for SA @@ -59,6 +69,7 @@ ) ) def submitter(): + setup_logging() from aiida.daemon.execmanager import submit_jobs print "aiida.daemon.tasks.submitter: Checking for calculations to submit" set_daemon_timestamp(task_name='submitter', when='start') @@ -72,6 +83,7 @@ def submitter(): ) ) def updater(): + setup_logging() from aiida.daemon.execmanager import update_jobs print "aiida.daemon.tasks.update: Checking for calculations to update" set_daemon_timestamp(task_name='updater', when='start') @@ -86,6 +98,7 @@ def updater(): ) ) def retriever(): + setup_logging() from aiida.daemon.execmanager import retrieve_jobs print "aiida.daemon.tasks.retrieve: Checking for calculations to retrieve" set_daemon_timestamp(task_name='retriever', when='start') @@ -100,6 +113,7 @@ def retriever(): ) ) def tick_work(): + setup_logging() 
from aiida.work.daemon import tick_workflow_engine print "aiida.daemon.tasks.tick_workflows: Ticking workflows" tick_workflow_engine() @@ -109,7 +123,8 @@ def tick_work(): ) ) ) -def workflow_stepper(): # daemon for legacy workflow +def workflow_stepper(): # daemon for legacy workflow + setup_logging() from aiida.daemon.workflowmanager import execute_steps print "aiida.daemon.tasks.workflowmanager: Checking for workflows to manage" # RUDIMENTARY way to check if this task is already running (to avoid acting From dcf9284e507b648586170f66a5f3f0e3a45234be Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 9 Jan 2018 18:23:04 +0100 Subject: [PATCH 2/6] Improve the solution to fix aiida logging in celery worker tasks We define a new function aiida.common.log.setup_logging that will configure the logging from the aiida.LOGGING dictionary. When the argument 'daemon' is set to True, the logging will be configured for a daemon process, which is currently being run by celery workers. The logging configuration is adapted, such that all loggers will get a RotatingFileHandler handler that writes to the daemon log file, which is the same as the log file of the master celery process that is launched when the daemon is started. This ensures that both the celery log messages, with celery's own formatting, and the log messages from all the loggers configured by aiida itself show up in that log file. 
--- aiida/__init__.py | 30 ++++++++++++------------ aiida/cmdline/commands/daemon.py | 4 ++-- aiida/common/__init__.py | 10 +------- aiida/common/log.py | 39 ++++++++++++++++++++++++++++++++ aiida/common/setup.py | 19 ++++++++++------ aiida/daemon/tasks.py | 37 +++++++++++------------------- 6 files changed, 83 insertions(+), 56 deletions(-) create mode 100644 aiida/common/log.py diff --git a/aiida/__init__.py b/aiida/__init__.py index 06aabd8997..092f8a52a4 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -10,7 +10,7 @@ import logging import warnings from logging import config -from aiida.common.setup import get_property +from aiida.common import setup __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." @@ -35,7 +35,6 @@ def filter(self, record): from aiida import settings return not settings.TESTING_MODE - # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. @@ -66,50 +65,53 @@ def filter(self, record): 'formatter': 'halfverbose', 'filters': ['testing'] }, + 'daemon_logfile': { + 'level': 'DEBUG', + 'formatter': 'halfverbose', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': setup.DAEMON_LOG_FILE, + 'encoding': 'utf8', + 'maxBytes': 100000, + }, 'dblogger': { - # get_property takes the property from the config json file + # setup.get_property takes the property from the config json file # The key used in the json, and the default value, are # specified in the _property_table inside aiida.common.setup # NOTE: To modify properties, use the 'verdi devel setproperty' # command and similar ones (getproperty, describeproperties, ...) 
- 'level': get_property('logging.db_loglevel'), + 'level': setup.get_property('logging.db_loglevel'), 'class': 'aiida.utils.logger.DBLogHandler', }, }, 'loggers': { 'aiida': { 'handlers': ['console', 'dblogger'], - 'level': get_property('logging.aiida_loglevel'), + 'level': setup.get_property('logging.aiida_loglevel'), 'propagate': False, }, 'paramiko': { 'handlers': ['console'], - 'level': get_property('logging.paramiko_loglevel'), + 'level': setup.get_property('logging.paramiko_loglevel'), 'propagate': False, }, 'alembic': { 'handlers': ['console'], - 'level': get_property('logging.alembic_loglevel'), + 'level': setup.get_property('logging.alembic_loglevel'), 'propagate': False, }, 'sqlalchemy': { 'handlers': ['console'], - 'level': get_property('logging.sqlalchemy_loglevel'), + 'level': setup.get_property('logging.sqlalchemy_loglevel'), 'propagate': False, 'qualname': 'sqlalchemy.engine', }, - 'celery': { - 'handlers': ['console'], - 'level': get_property('logging.aiida_loglevel'), - 'propagate': True, - }, }, } # Configure the global logger through the LOGGING dictionary logging.config.dictConfig(LOGGING) -if get_property("warnings.showdeprecations"): +if setup.get_property("warnings.showdeprecations"): # print out the warnings coming from deprecation # in Python 2.7 it is suppressed by default warnings.simplefilter('default', DeprecationWarning) diff --git a/aiida/cmdline/commands/daemon.py b/aiida/cmdline/commands/daemon.py index 98ab958726..4a077c81d4 100644 --- a/aiida/cmdline/commands/daemon.py +++ b/aiida/cmdline/commands/daemon.py @@ -81,8 +81,8 @@ def __init__(self): 'configureuser': (self.configure_user, self.complete_none), } - self.logfile = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.LOG_SUBDIR, "celery.log") - self.pidfile = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.LOG_SUBDIR, "celery.pid") + self.logfile = setup.DAEMON_LOG_FILE + self.pidfile = setup.DAEMON_PID_FILE self.workdir = 
os.path.join(os.path.split(os.path.abspath(aiida.__file__))[0], "daemon") self.celerybeat_schedule = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.DAEMON_SUBDIR, "celerybeat-schedule") diff --git a/aiida/common/__init__.py b/aiida/common/__init__.py index fc821121b3..ef38d0a034 100644 --- a/aiida/common/__init__.py +++ b/aiida/common/__init__.py @@ -7,12 +7,4 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Internal functionality that is needed by multiple modules of AiiDA""" -import logging -import sys - - -aiidalogger = logging.getLogger("aiida") -# aiidalogger.addHandler(logging.StreamHandler(sys.stderr)) -#FORMAT = '[%(name)s@%(levelname)s] %(message)s' -#logging.basicConfig(format=FORMAT) +from aiida.common.log import aiidalogger \ No newline at end of file diff --git a/aiida/common/log.py b/aiida/common/log.py new file mode 100644 index 0000000000..b6d24c3396 --- /dev/null +++ b/aiida/common/log.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +import logging + +aiidalogger = logging.getLogger('aiida') + + +def setup_logging(daemon=False, daemon_handler='daemon_logfile'): + """ + Setup the logging by retrieving the LOGGING dictionary from aiida and passing it to + the python module logging.config.dictConfig. If the logging needs to be setup for the + daemon running a task for one of the celery workers, set the argument 'daemon' to True. 
+ This will cause the 'daemon_handler' to be added to all the configured loggers. This + handler needs to be defined in the LOGGING dictionary and is 'daemon_logfile' by + default. If this changes in the dictionary, be sure to pass the correct handle name. + The daemon handler should be a RotatingFileHandler that writes to the daemon log file. + + :param daemon: configure the logging for a daemon task by adding a file handler instead + of the default 'console' StreamHandler + :param daemon_handler: name of the file handler in the LOGGING dictionary + """ + from copy import deepcopy + from aiida import LOGGING + + config = deepcopy(LOGGING) + + # Add the daemon file handler to all loggers if daemon=True + if daemon is True: + for name, logger in config.get('loggers', {}).iteritems(): + logger.setdefault('handlers', []).append(daemon_handler) + + logging.config.dictConfig(config) \ No newline at end of file diff --git a/aiida/common/setup.py b/aiida/common/setup.py index c169788b38..16968f1ca8 100644 --- a/aiida/common/setup.py +++ b/aiida/common/setup.py @@ -35,11 +35,16 @@ CONFIG_FNAME = 'config.json' SECRET_KEY_FNAME = 'secret_key.dat' -DAEMON_SUBDIR = "daemon" -LOG_SUBDIR = "daemon/log" -DAEMON_CONF_FILE = "aiida_daemon.conf" +DAEMON_SUBDIR = 'daemon' +LOG_SUBDIR = 'daemon/log' +DAEMON_CONF_FILE = 'aiida_daemon.conf' -WORKFLOWS_SUBDIR = "workflows" +CELERY_LOG_FILE = 'celery.log' +CELERY_PID_FILE = 'celery.pid' +DAEMON_LOG_FILE = os.path.join(AIIDA_CONFIG_FOLDER, LOG_SUBDIR, CELERY_LOG_FILE) +DAEMON_PID_FILE = os.path.join(AIIDA_CONFIG_FOLDER, LOG_SUBDIR, CELERY_PID_FILE) + +WORKFLOWS_SUBDIR = 'workflows' # The key inside the configuration file DEFAULT_USER_CONFIG_FIELD = 'default_user_email' @@ -51,17 +56,17 @@ DEFAULT_UMASK = 0o0077 # Profile keys -aiidadb_backend_key = "AIIDADB_BACKEND" +aiidadb_backend_key = 'AIIDADB_BACKEND' # Profile values -aiidadb_backend_value_django = "django" +aiidadb_backend_value_django = 'django' # Repository for tests 
TEMP_TEST_REPO = None # Keyword that is used in test profiles, databases and repositories to # differentiate them from non-testing ones. -TEST_KEYWORD = "test_" +TEST_KEYWORD = 'test_' def get_aiida_dir(): diff --git a/aiida/daemon/tasks.py b/aiida/daemon/tasks.py index aceba83d15..4cedf2e1d1 100644 --- a/aiida/daemon/tasks.py +++ b/aiida/daemon/tasks.py @@ -20,6 +20,7 @@ if not is_dbenv_loaded(): load_dbenv(process="daemon") +from aiida.common.log import setup_logging from aiida.common.setup import get_profile_config from aiida.common.exceptions import ConfigurationError from aiida.daemon.timestamps import set_daemon_timestamp,get_last_daemon_timestamp @@ -50,16 +51,6 @@ app = Celery('tasks', broker=broker) -def setup_logging(): - """ - Setup the logging from the dictionary configuration in aiida.LOGGING - This is necessary for the console handlers to be properly configured in the celery workers - that handle the various tasks below - """ - import logging - from aiida import LOGGING - logging.config.dictConfig(LOGGING) - # the tasks as taken from the djsite.db.tasks, same tasks and same functionalities # will now of course fail because set_daemon_timestep has not be implementd for SA @@ -69,7 +60,7 @@ def setup_logging(): ) ) def submitter(): - setup_logging() + setup_logging(daemon=True) from aiida.daemon.execmanager import submit_jobs print "aiida.daemon.tasks.submitter: Checking for calculations to submit" set_daemon_timestamp(task_name='submitter', when='start') @@ -83,7 +74,7 @@ def submitter(): ) ) def updater(): - setup_logging() + setup_logging(daemon=True) from aiida.daemon.execmanager import update_jobs print "aiida.daemon.tasks.update: Checking for calculations to update" set_daemon_timestamp(task_name='updater', when='start') @@ -93,12 +84,11 @@ def updater(): @periodic_task( run_every=timedelta( - seconds=config.get("DAEMON_INTERVALS_RETRIEVE", - DAEMON_INTERVALS_RETRIEVE) + seconds=config.get("DAEMON_INTERVALS_RETRIEVE", DAEMON_INTERVALS_RETRIEVE) ) 
) def retriever(): - setup_logging() + setup_logging(daemon=True) from aiida.daemon.execmanager import retrieve_jobs print "aiida.daemon.tasks.retrieve: Checking for calculations to retrieve" set_daemon_timestamp(task_name='retriever', when='start') @@ -108,23 +98,22 @@ def retriever(): @periodic_task( run_every=timedelta( - seconds=config.get("DAEMON_INTERVALS_TICK_WORKFLOWS", - DAEMON_INTERVALS_TICK_WORKFLOWS) + seconds=config.get("DAEMON_INTERVALS_TICK_WORKFLOWS", DAEMON_INTERVALS_TICK_WORKFLOWS) ) ) def tick_work(): - setup_logging() + setup_logging(daemon=True) from aiida.work.daemon import tick_workflow_engine print "aiida.daemon.tasks.tick_workflows: Ticking workflows" tick_workflow_engine() -@periodic_task(run_every=timedelta(seconds=config.get("DAEMON_INTERVALS_WFSTEP", - DAEMON_INTERVALS_WFSTEP - ) - ) - ) +@periodic_task( + run_every=timedelta( + seconds=config.get("DAEMON_INTERVALS_WFSTEP", DAEMON_INTERVALS_WFSTEP) + ) +) def workflow_stepper(): # daemon for legacy workflow - setup_logging() + setup_logging(daemon=True) from aiida.daemon.workflowmanager import execute_steps print "aiida.daemon.tasks.workflowmanager: Checking for workflows to manage" # RUDIMENTARY way to check if this task is already running (to avoid acting From b23a1bab278617f5c1a273f6f9e68d42ed0cd9c2 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 9 Jan 2018 18:27:16 +0100 Subject: [PATCH 3/6] Rename setup_logging to configure_logging --- aiida/common/log.py | 2 +- aiida/daemon/tasks.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/aiida/common/log.py b/aiida/common/log.py index b6d24c3396..ffa63b717b 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -12,7 +12,7 @@ aiidalogger = logging.getLogger('aiida') -def setup_logging(daemon=False, daemon_handler='daemon_logfile'): +def configure_logging(daemon=False, daemon_handler='daemon_logfile'): """ Setup the logging by retrieving the LOGGING dictionary from aiida and passing it to 
the python module logging.config.dictConfig. If the logging needs to be setup for the diff --git a/aiida/daemon/tasks.py b/aiida/daemon/tasks.py index 4cedf2e1d1..8a67de685e 100644 --- a/aiida/daemon/tasks.py +++ b/aiida/daemon/tasks.py @@ -20,7 +20,7 @@ if not is_dbenv_loaded(): load_dbenv(process="daemon") -from aiida.common.log import setup_logging +from aiida.common.log import configure_logging from aiida.common.setup import get_profile_config from aiida.common.exceptions import ConfigurationError from aiida.daemon.timestamps import set_daemon_timestamp,get_last_daemon_timestamp @@ -60,7 +60,7 @@ ) ) def submitter(): - setup_logging(daemon=True) + configure_logging(daemon=True) from aiida.daemon.execmanager import submit_jobs print "aiida.daemon.tasks.submitter: Checking for calculations to submit" set_daemon_timestamp(task_name='submitter', when='start') @@ -74,7 +74,7 @@ def submitter(): ) ) def updater(): - setup_logging(daemon=True) + configure_logging(daemon=True) from aiida.daemon.execmanager import update_jobs print "aiida.daemon.tasks.update: Checking for calculations to update" set_daemon_timestamp(task_name='updater', when='start') @@ -88,7 +88,7 @@ def updater(): ) ) def retriever(): - setup_logging(daemon=True) + configure_logging(daemon=True) from aiida.daemon.execmanager import retrieve_jobs print "aiida.daemon.tasks.retrieve: Checking for calculations to retrieve" set_daemon_timestamp(task_name='retriever', when='start') @@ -102,7 +102,7 @@ def retriever(): ) ) def tick_work(): - setup_logging(daemon=True) + configure_logging(daemon=True) from aiida.work.daemon import tick_workflow_engine print "aiida.daemon.tasks.tick_workflows: Ticking workflows" tick_workflow_engine() @@ -113,7 +113,7 @@ def tick_work(): ) ) def workflow_stepper(): # daemon for legacy workflow - setup_logging(daemon=True) + configure_logging(daemon=True) from aiida.daemon.workflowmanager import execute_steps print "aiida.daemon.tasks.workflowmanager: Checking for workflows to 
manage" # RUDIMENTARY way to check if this task is already running (to avoid acting From f232bc4ee15cab40c62a1e7c254ba1ed19130112 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 9 Jan 2018 18:37:03 +0100 Subject: [PATCH 4/6] Move all logging related code from aiida/__init__.py to aiida.common.log With the new configure_logging function, in the global init file we simply call this function, which will setup all the necessary code for logging and configures all the loggers from the default dictionary --- aiida/__init__.py | 100 ++------------------------------ aiida/backends/tests/orm/log.py | 2 +- aiida/common/log.py | 90 +++++++++++++++++++++++++++- aiida/utils/logger.py | 2 +- aiida/work/process.py | 2 +- 5 files changed, 95 insertions(+), 101 deletions(-) diff --git a/aiida/__init__.py b/aiida/__init__.py index 092f8a52a4..6befab5194 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -7,10 +7,9 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -import logging import warnings -from logging import config -from aiida.common import setup +from aiida.common.log import configure_logging +from aiida.common.setup import get_property __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." @@ -19,99 +18,10 @@ __paper__ = """G. Pizzi, A. Cepellotti, R. Sabatini, N. Marzari, and B. Kozinsky, "AiiDA: automated interactive infrastructure and database for computational science", Comp. Mat. Sci 111, 218-230 (2016); http://dx.doi.org/10.1016/j.commatsci.2015.09.013 - http://www.aiida.net.""" __paper_short__ = """G. Pizzi et al., Comp. Mat. 
Sci 111, 218 (2016).""" +# Configure the default logging +configure_logging() -# Custom logging level, intended specifically for informative log messages -# reported during WorkChains and Workflows. We want the level between INFO(20) -# and WARNING(30) such that it will be logged for the default loglevel, however -# the value 25 is already reserved for SUBWARNING by the multiprocessing module. -LOG_LEVEL_REPORT = 23 -logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT') - - -# A logging filter that can be used to disable logging -class NotInTestingFilter(logging.Filter): - - def filter(self, record): - from aiida import settings - return not settings.TESTING_MODE - -# A sample logging configuration. The only tangible logging -# performed by this configuration is to send an email to -# the site admins on every HTTP 500 error when DEBUG=False. -# See http://docs.djangoproject.com/en/dev/topics/logging for -# more details on how to customize your logging configuration. -LOGGING = { - 'version': 1, - 'disable_existing_loggers': True, - 'formatters': { - 'verbose': { - 'format': '%(levelname)s %(asctime)s %(module)s %(process)d ' - '%(thread)d %(message)s', - }, - 'halfverbose': { - 'format': '%(asctime)s, %(name)s: [%(levelname)s] %(message)s', - 'datefmt': '%m/%d/%Y %I:%M:%S %p', - }, - }, - 'filters': { - 'testing': { - '()': NotInTestingFilter - } - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'halfverbose', - 'filters': ['testing'] - }, - 'daemon_logfile': { - 'level': 'DEBUG', - 'formatter': 'halfverbose', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': setup.DAEMON_LOG_FILE, - 'encoding': 'utf8', - 'maxBytes': 100000, - }, - 'dblogger': { - # setup.get_property takes the property from the config json file - # The key used in the json, and the default value, are - # specified in the _property_table inside aiida.common.setup - # NOTE: To modify properties, use the 'verdi devel setproperty' - # 
command and similar ones (getproperty, describeproperties, ...) - 'level': setup.get_property('logging.db_loglevel'), - 'class': 'aiida.utils.logger.DBLogHandler', - }, - }, - 'loggers': { - 'aiida': { - 'handlers': ['console', 'dblogger'], - 'level': setup.get_property('logging.aiida_loglevel'), - 'propagate': False, - }, - 'paramiko': { - 'handlers': ['console'], - 'level': setup.get_property('logging.paramiko_loglevel'), - 'propagate': False, - }, - 'alembic': { - 'handlers': ['console'], - 'level': setup.get_property('logging.alembic_loglevel'), - 'propagate': False, - }, - 'sqlalchemy': { - 'handlers': ['console'], - 'level': setup.get_property('logging.sqlalchemy_loglevel'), - 'propagate': False, - 'qualname': 'sqlalchemy.engine', - }, - }, -} - -# Configure the global logger through the LOGGING dictionary -logging.config.dictConfig(LOGGING) - -if setup.get_property("warnings.showdeprecations"): +if get_property("warnings.showdeprecations"): # print out the warnings coming from deprecation # in Python 2.7 it is suppressed by default warnings.simplefilter('default', DeprecationWarning) diff --git a/aiida/backends/tests/orm/log.py b/aiida/backends/tests/orm/log.py index f67da2801d..1a8313e067 100644 --- a/aiida/backends/tests/orm/log.py +++ b/aiida/backends/tests/orm/log.py @@ -10,7 +10,7 @@ import logging import unittest from aiida.utils.timezone import now -from aiida import LOG_LEVEL_REPORT +from aiida.common.log import LOG_LEVEL_REPORT from aiida.orm.log import OrderSpecifier, ASCENDING, DESCENDING from aiida.orm.backend import construct from aiida.orm.calculation import Calculation diff --git a/aiida/common/log.py b/aiida/common/log.py index ffa63b717b..e2b61c4211 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -8,9 +8,96 @@ # For further information please visit http://www.aiida.net # ########################################################################### import logging +from copy import deepcopy +from logging import config +from 
aiida.common import setup +# Custom logging level, intended specifically for informative log messages +# reported during WorkChains and Workflows. We want the level between INFO(20) +# and WARNING(30) such that it will be logged for the default loglevel, however +# the value 25 is already reserved for SUBWARNING by the multiprocessing module. +LOG_LEVEL_REPORT = 23 +logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT') + +# The AiiDA logger aiidalogger = logging.getLogger('aiida') +# A logging filter that can be used to disable logging +class NotInTestingFilter(logging.Filter): + + def filter(self, record): + from aiida import settings + return not settings.TESTING_MODE + +# The default logging dictionary for AiiDA that can be used in conjunction +# with the config.dictConfig method of python's logging module +LOGGING = { + 'version': 1, + 'disable_existing_loggers': True, + 'formatters': { + 'verbose': { + 'format': '%(levelname)s %(asctime)s %(module)s %(process)d ' + '%(thread)d %(message)s', + }, + 'halfverbose': { + 'format': '%(asctime)s, %(name)s: [%(levelname)s] %(message)s', + 'datefmt': '%m/%d/%Y %I:%M:%S %p', + }, + }, + 'filters': { + 'testing': { + '()': NotInTestingFilter + } + }, + 'handlers': { + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'halfverbose', + 'filters': ['testing'] + }, + 'daemon_logfile': { + 'level': 'DEBUG', + 'formatter': 'halfverbose', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': setup.DAEMON_LOG_FILE, + 'encoding': 'utf8', + 'maxBytes': 100000, + }, + 'dblogger': { + # setup.get_property takes the property from the config json file + # The key used in the json, and the default value, are + # specified in the _property_table inside aiida.common.setup + # NOTE: To modify properties, use the 'verdi devel setproperty' + # command and similar ones (getproperty, describeproperties, ...) 
+ 'level': setup.get_property('logging.db_loglevel'), + 'class': 'aiida.utils.logger.DBLogHandler', + }, + }, + 'loggers': { + 'aiida': { + 'handlers': ['console', 'dblogger'], + 'level': setup.get_property('logging.aiida_loglevel'), + 'propagate': False, + }, + 'paramiko': { + 'handlers': ['console'], + 'level': setup.get_property('logging.paramiko_loglevel'), + 'propagate': False, + }, + 'alembic': { + 'handlers': ['console'], + 'level': setup.get_property('logging.alembic_loglevel'), + 'propagate': False, + }, + 'sqlalchemy': { + 'handlers': ['console'], + 'level': setup.get_property('logging.sqlalchemy_loglevel'), + 'propagate': False, + 'qualname': 'sqlalchemy.engine', + }, + }, +} def configure_logging(daemon=False, daemon_handler='daemon_logfile'): """ @@ -26,9 +113,6 @@ def configure_logging(daemon=False, daemon_handler='daemon_logfile'): of the default 'console' StreamHandler :param daemon_handler: name of the file handler in the LOGGING dictionary """ - from copy import deepcopy - from aiida import LOGGING - config = deepcopy(LOGGING) # Add the daemon file handler to all loggers if daemon=True diff --git a/aiida/utils/logger.py b/aiida/utils/logger.py index 3a1fa2a8d9..ceb91e12da 100644 --- a/aiida/utils/logger.py +++ b/aiida/utils/logger.py @@ -9,7 +9,7 @@ ########################################################################### import logging -from aiida import LOG_LEVEL_REPORT +from aiida.common.log import LOG_LEVEL_REPORT from aiida.backends.utils import is_dbenv_loaded LOG_LEVELS = { diff --git a/aiida/work/process.py b/aiida/work/process.py index c62dd2d11a..91f3b70142 100644 --- a/aiida/work/process.py +++ b/aiida/work/process.py @@ -24,6 +24,7 @@ from aiida.common.extendeddicts import FixedFieldsAttributeDict import aiida.common.exceptions as exceptions from aiida.common.lang import override, protected +from aiida.common.log import LOG_LEVEL_REPORT from aiida.common.links import LinkType from aiida.utils.calculation import add_source_info from 
aiida.work.defaults import class_loader @@ -32,7 +33,6 @@ from aiida.orm.calculation import Calculation from aiida.orm.data.parameter import ParameterData from aiida.orm.calculation.work import WorkCalculation -from aiida import LOG_LEVEL_REPORT From e23248862e1e2214ebce6d144486cc6833479a5e Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 9 Jan 2018 18:46:58 +0100 Subject: [PATCH 5/6] Move contents of aiida.utils.logger to aiida.common.log The module aiida.common.log now provides all the logging related concepts within AiiDA. --- aiida/backends/djsite/cmdline.py | 2 +- aiida/backends/djsite/utils.py | 2 +- aiida/backends/sqlalchemy/cmdline.py | 2 +- aiida/common/log.py | 66 +++++++++++++++++- aiida/common/setup.py | 4 +- aiida/daemon/execmanager.py | 12 ++-- aiida/orm/implementation/django/workflow.py | 2 +- .../general/calculation/__init__.py | 2 +- aiida/orm/implementation/general/workflow.py | 2 +- .../orm/implementation/sqlalchemy/workflow.py | 2 +- aiida/parsers/parser.py | 2 +- aiida/transport/__init__.py | 6 +- aiida/utils/logger.py | 67 ------------------- 13 files changed, 83 insertions(+), 88 deletions(-) delete mode 100644 aiida/utils/logger.py diff --git a/aiida/backends/djsite/cmdline.py b/aiida/backends/djsite/cmdline.py index 1d2db2678b..246fc18562 100644 --- a/aiida/backends/djsite/cmdline.py +++ b/aiida/backends/djsite/cmdline.py @@ -16,7 +16,7 @@ from aiida.common.datastructures import wf_states from aiida.utils import timezone -from aiida.utils.logger import get_dblogger_extra +from aiida.common.log import get_dblogger_extra diff --git a/aiida/backends/djsite/utils.py b/aiida/backends/djsite/utils.py index 6c4cf3c54f..20a3b09c23 100644 --- a/aiida/backends/djsite/utils.py +++ b/aiida/backends/djsite/utils.py @@ -10,7 +10,7 @@ import os import django -from aiida.utils.logger import get_dblogger_extra +from aiida.common.log import get_dblogger_extra def load_dbenv(process=None, profile=None): diff --git 
a/aiida/backends/sqlalchemy/cmdline.py b/aiida/backends/sqlalchemy/cmdline.py index c5ec651ce5..5c3c6a56c3 100644 --- a/aiida/backends/sqlalchemy/cmdline.py +++ b/aiida/backends/sqlalchemy/cmdline.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### -from aiida.utils.logger import get_dblogger_extra +from aiida.common.log import get_dblogger_extra diff --git a/aiida/common/log.py b/aiida/common/log.py index e2b61c4211..bf62d552a1 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -11,6 +11,7 @@ from copy import deepcopy from logging import config from aiida.common import setup +from aiida.backends.utils import is_dbenv_loaded # Custom logging level, intended specifically for informative log messages # reported during WorkChains and Workflows. We want the level between INFO(20) @@ -19,9 +20,22 @@ LOG_LEVEL_REPORT = 23 logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT') + +# Convenience dictionary of available log level names and their log level integer +LOG_LEVELS = { + logging.getLevelName(logging.DEBUG): logging.DEBUG, + logging.getLevelName(logging.INFO): logging.INFO, + logging.getLevelName(LOG_LEVEL_REPORT): LOG_LEVEL_REPORT, + logging.getLevelName(logging.WARNING): logging.WARNING, + logging.getLevelName(logging.ERROR): logging.ERROR, + logging.getLevelName(logging.CRITICAL): logging.CRITICAL, +} + + # The AiiDA logger aiidalogger = logging.getLogger('aiida') + # A logging filter that can be used to disable logging class NotInTestingFilter(logging.Filter): @@ -29,6 +43,35 @@ def filter(self, record): from aiida import settings return not settings.TESTING_MODE + +# A logging handler that will store the log record in the database DbLog table +class DBLogHandler(logging.Handler): + + def emit(self, record): + # If this is reached before a backend is defined, simply pass + if not is_dbenv_loaded(): + return + + from aiida.orm.backend import construct + 
from django.core.exceptions import ImproperlyConfigured + + try: + backend = construct() + backend.log.create_entry_from_record(record) + + except ImproperlyConfigured: + # Probably, the logger was called without the + # Django settings module loaded. Then, + # This ignore should be a no-op. + pass + except Exception: + # To avoid loops with the error handler, I just print. + # Hopefully, though, this should not happen! + import traceback + + traceback.print_exc() + + # The default logging dictionary for AiiDA that can be used in conjunction # with the config.dictConfig method of python's logging module LOGGING = { @@ -71,7 +114,7 @@ def filter(self, record): # NOTE: To modify properties, use the 'verdi devel setproperty' # command and similar ones (getproperty, describeproperties, ...) 'level': setup.get_property('logging.db_loglevel'), - 'class': 'aiida.utils.logger.DBLogHandler', + 'class': 'aiida.common.log.DBLogHandler', }, }, 'loggers': { @@ -120,4 +163,23 @@ def configure_logging(daemon=False, daemon_handler='daemon_logfile'): for name, logger in config.get('loggers', {}).iteritems(): logger.setdefault('handlers', []).append(daemon_handler) - logging.config.dictConfig(config) \ No newline at end of file + logging.config.dictConfig(config) + + +def get_dblogger_extra(obj): + """ + Given an object (Node, Calculation, ...) return a dictionary to be passed + as extra to the aiidalogger in order to store the exception also in the DB. + If no such extra is passed, the exception is only logged on file. + """ + from aiida.orm import Node + + if isinstance(obj, Node): + if obj._plugin_type_string: + objname = "node." + obj._plugin_type_string + else: + objname = "node" + else: + objname = obj.__class__.__module__ + "." 
+ obj.__class__.__name__ + objpk = obj.pk + return {'objpk': objpk, 'objname': objname} \ No newline at end of file diff --git a/aiida/common/setup.py b/aiida/common/setup.py index 16968f1ca8..e2ace7af0a 100644 --- a/aiida/common/setup.py +++ b/aiida/common/setup.py @@ -952,7 +952,7 @@ def get_property(name, default=_NoDefaultValue()): no default value is given or provided in _property_table. """ from aiida.common.exceptions import MissingConfigurationError - import aiida.utils.logger as logger + from aiida.common.log import LOG_LEVELS try: key, _, _, table_defval, _ = _property_table[name] @@ -979,7 +979,7 @@ def get_property(name, default=_NoDefaultValue()): # will return the corresponding integer, even though a string is stored in # the config. if name.startswith('logging.') and name.endswith('loglevel'): - value = logger.LOG_LEVELS[value] + value = LOG_LEVELS[value] return value diff --git a/aiida/daemon/execmanager.py b/aiida/daemon/execmanager.py index 58379f1e5d..860ba1d7b4 100644 --- a/aiida/daemon/execmanager.py +++ b/aiida/daemon/execmanager.py @@ -36,7 +36,7 @@ def update_running_calcs_status(authinfo): """ from aiida.orm import JobCalculation, Computer from aiida.scheduler.datastructures import JobInfo - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra from aiida.backends.utils import QueryFactory if not authinfo.enabled: @@ -267,7 +267,7 @@ def submit_jobs(): Submit all jobs in the TOSUBMIT state. """ from aiida.orm import JobCalculation, Computer, User - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra from aiida.backends.utils import get_authinfo, QueryFactory @@ -336,7 +336,7 @@ def submit_jobs_with_authinfo(authinfo): to user and machine as defined in the 'dbauthinfo' table. 
""" from aiida.orm import JobCalculation - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra from aiida.backends.utils import QueryFactory @@ -384,7 +384,7 @@ def submit_jobs_with_authinfo(authinfo): # because any other exception is caught and skipped above except Exception as e: import traceback - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra for calc in calcs_to_inquire: logger_extra = get_dblogger_extra(calc) @@ -423,7 +423,7 @@ def submit_calc(calc, authinfo, transport=None): from aiida.common.exceptions import ( InputValidationError) from aiida.orm.data.remote import RemoteData - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra if not authinfo.enabled: return @@ -664,7 +664,7 @@ def retrieve_computed_for_authinfo(authinfo): from aiida.orm import JobCalculation from aiida.common.folders import SandboxFolder from aiida.orm.data.folder import FolderData - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra from aiida.orm import DataFactory from aiida.backends.utils import QueryFactory diff --git a/aiida/orm/implementation/django/workflow.py b/aiida/orm/implementation/django/workflow.py index 02830be9fb..be95bd566f 100644 --- a/aiida/orm/implementation/django/workflow.py +++ b/aiida/orm/implementation/django/workflow.py @@ -254,7 +254,7 @@ def logger(self): the 'extra' embedded """ import logging - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra return logging.LoggerAdapter(logger=self._logger, extra=get_dblogger_extra(self)) diff --git a/aiida/orm/implementation/general/calculation/__init__.py b/aiida/orm/implementation/general/calculation/__init__.py index c20f37b0d9..57c1098c06 100644 --- a/aiida/orm/implementation/general/calculation/__init__.py +++ b/aiida/orm/implementation/general/calculation/__init__.py @@ 
-129,7 +129,7 @@ def logger(self): the 'extra' embedded """ import logging - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra return logging.LoggerAdapter( logger=self._logger, extra=get_dblogger_extra(self)) diff --git a/aiida/orm/implementation/general/workflow.py b/aiida/orm/implementation/general/workflow.py index 17ed732e1d..c6d17e6413 100644 --- a/aiida/orm/implementation/general/workflow.py +++ b/aiida/orm/implementation/general/workflow.py @@ -26,7 +26,7 @@ from aiida.backends.utils import get_automatic_user from aiida.utils import timezone -from aiida.utils.logger import get_dblogger_extra +from aiida.common.log import get_dblogger_extra from aiida.common.utils import abstractclassmethod logger = aiidalogger.getChild('Workflow') diff --git a/aiida/orm/implementation/sqlalchemy/workflow.py b/aiida/orm/implementation/sqlalchemy/workflow.py index 54ec82b25f..141aacf076 100644 --- a/aiida/orm/implementation/sqlalchemy/workflow.py +++ b/aiida/orm/implementation/sqlalchemy/workflow.py @@ -27,7 +27,7 @@ from aiida.orm.implementation.general.workflow import AbstractWorkflow from aiida.orm.implementation.sqlalchemy.utils import django_filter from aiida.utils import timezone -from aiida.utils.logger import get_dblogger_extra +from aiida.common.log import get_dblogger_extra logger = aiidalogger.getChild('Workflow') diff --git a/aiida/parsers/parser.py b/aiida/parsers/parser.py index a09e4eda51..cad6fd6854 100644 --- a/aiida/parsers/parser.py +++ b/aiida/parsers/parser.py @@ -38,7 +38,7 @@ def logger(self): extras of the calculation """ import logging - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra return logging.LoggerAdapter(logger=self._logger, extra=get_dblogger_extra(self._calc)) diff --git a/aiida/transport/__init__.py b/aiida/transport/__init__.py index 97a6b8d422..e4de01fce8 100644 --- a/aiida/transport/__init__.py +++ b/aiida/transport/__init__.py @@ 
-126,12 +126,12 @@ def _set_logger_extra(self, logger_extra): """ Pass the data that should be passed automatically to self.logger as 'extra' keyword. This is typically useful if you pass data - obtained using get_dblogger_extra in aiida.backends.djsite.utils, to automatically + obtained using get_dblogger_extra in aiida.common.log, to automatically log also to the DbLog table. :param logger_extra: data that you want to pass as extra to the self.logger. To write to DbLog, it should be created by the - aiida.backends.djsite.utils.get_dblogger_extra function. Pass None if you + aiida.common.log.get_dblogger_extra function. Pass None if you do not want to have extras passed. """ self._logger_extra = logger_extra @@ -181,7 +181,7 @@ def logger(self): """ try: import logging - from aiida.utils.logger import get_dblogger_extra + from aiida.common.log import get_dblogger_extra if self._logger_extra is not None: return logging.LoggerAdapter(logger=self._logger, diff --git a/aiida/utils/logger.py b/aiida/utils/logger.py deleted file mode 100644 index ceb91e12da..0000000000 --- a/aiida/utils/logger.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### - -import logging -from aiida.common.log import LOG_LEVEL_REPORT -from aiida.backends.utils import is_dbenv_loaded - -LOG_LEVELS = { - logging.getLevelName(logging.DEBUG): logging.DEBUG, - logging.getLevelName(logging.INFO): logging.INFO, - logging.getLevelName(LOG_LEVEL_REPORT): LOG_LEVEL_REPORT, - logging.getLevelName(logging.WARNING): logging.WARNING, - logging.getLevelName(logging.ERROR): logging.ERROR, - logging.getLevelName(logging.CRITICAL): logging.CRITICAL, -} - - -def get_dblogger_extra(obj): - """ - Given an object (Node, Calculation, ...) return a dictionary to be passed - as extra to the aiidalogger in order to store the exception also in the DB. - If no such extra is passed, the exception is only logged on file. - """ - from aiida.orm import Node - - if isinstance(obj, Node): - if obj._plugin_type_string: - objname = "node." + obj._plugin_type_string - else: - objname = "node" - else: - objname = obj.__class__.__module__ + "." + obj.__class__.__name__ - objpk = obj.pk - return {'objpk': objpk, 'objname': objname} - - -class DBLogHandler(logging.Handler): - def emit(self, record): - # If this is reached before a backend is defined, simply pass - if not is_dbenv_loaded(): - return - - from aiida.orm.backend import construct - from django.core.exceptions import ImproperlyConfigured - - try: - backend = construct() - backend.log.create_entry_from_record(record) - - except ImproperlyConfigured: - # Probably, the logger was called without the - # Django settings module loaded. Then, - # This ignore should be a no-op. - pass - except Exception: - # To avoid loops with the error handler, I just print. - # Hopefully, though, this should not happen! 
- import traceback - - traceback.print_exc() \ No newline at end of file From 14765cccaddf349c25ac0b6bbd617d8826d8ba20 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 9 Jan 2018 19:13:12 +0100 Subject: [PATCH 6/6] Remove the daemon_logfile handler from the default logging dict In the aiida initialization the configure_logging() function will be called, which will use the aiida.common.log.LOGGING dictionary to configure the logger. It defined the 'daemon_logfile' RotatingFileHandler, with a filename in the daemon log directory of the AiiDA config directory. The problem is that this directory does not necessarily exist, but when the logging is configured the file handler setup will break if it doesn't. To solve this we remove the handler from the default dictionary and only add it when configure_logging is called with daemon=True --- aiida/common/log.py | 37 +++++++++++++++++++++---------------- aiida/daemon/tasks.py | 12 ++++++------ 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/aiida/common/log.py b/aiida/common/log.py index bf62d552a1..4619dc2798 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -99,14 +99,6 @@ def emit(self, record): 'formatter': 'halfverbose', 'filters': ['testing'] }, - 'daemon_logfile': { - 'level': 'DEBUG', - 'formatter': 'halfverbose', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': setup.DAEMON_LOG_FILE, - 'encoding': 'utf8', - 'maxBytes': 100000, - }, 'dblogger': { # setup.get_property takes the property from the config json file # The key used in the json, and the default value, are @@ -142,26 +134,39 @@ def emit(self, record): }, } -def configure_logging(daemon=False, daemon_handler='daemon_logfile'): +def configure_logging(daemon=False, daemon_log_file=None): """ Setup the logging by retrieving the LOGGING dictionary from aiida and passing it to the python module logging.config.dictConfig.
If the logging needs to be setup for the - daemon running a task for one of the celery workers, set the argument 'daemon' to True. - This will cause the 'daemon_handler' to be added to all the configured loggers. This - handler needs to be defined in the LOGGING dictionary and is 'daemon_logfile' by - default. If this changes in the dictionary, be sure to pass the correct handle name. - The daemon handler should be a RotatingFileHandler that writes to the daemon log file. + daemon running a task for one of the celery workers, set the argument 'daemon' to True + and specify the path to the log file. This will cause a 'daemon_handler' to be added + to all the configured loggers, that is a RotatingFileHandler that writes to the log file. :param daemon: configure the logging for a daemon task by adding a file handler instead of the default 'console' StreamHandler - :param daemon_handler: name of the file handler in the LOGGING dictionary + :param daemon_log_file: absolute filepath of the log file for the RotatingFileHandler """ config = deepcopy(LOGGING) + daemon_handler_name = 'daemon_log_file' # Add the daemon file handler to all loggers if daemon=True if daemon is True: + + if daemon_log_file is None: + raise ValueError('daemon_log_file has to be defined when configuring for the daemon') + + config.setdefault('handlers', {}) + config['handlers'][daemon_handler_name] = { + 'level': 'DEBUG', + 'formatter': 'halfverbose', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': daemon_log_file, + 'encoding': 'utf8', + 'maxBytes': 100000, + } + for name, logger in config.get('loggers', {}).iteritems(): - logger.setdefault('handlers', []).append(daemon_handler) + logger.setdefault('handlers', []).append(daemon_handler_name) logging.config.dictConfig(config) diff --git a/aiida/daemon/tasks.py b/aiida/daemon/tasks.py index 8a67de685e..600744678e 100644 --- a/aiida/daemon/tasks.py +++ b/aiida/daemon/tasks.py @@ -21,7 +21,7 @@ load_dbenv(process="daemon") from 
aiida.common.log import configure_logging -from aiida.common.setup import get_profile_config +from aiida.common.setup import get_profile_config, DAEMON_LOG_FILE from aiida.common.exceptions import ConfigurationError from aiida.daemon.timestamps import set_daemon_timestamp,get_last_daemon_timestamp @@ -60,7 +60,7 @@ ) ) def submitter(): - configure_logging(daemon=True) + configure_logging(daemon=True, daemon_log_file=DAEMON_LOG_FILE) from aiida.daemon.execmanager import submit_jobs print "aiida.daemon.tasks.submitter: Checking for calculations to submit" set_daemon_timestamp(task_name='submitter', when='start') @@ -74,7 +74,7 @@ def submitter(): ) ) def updater(): - configure_logging(daemon=True) + configure_logging(daemon=True, daemon_log_file=DAEMON_LOG_FILE) from aiida.daemon.execmanager import update_jobs print "aiida.daemon.tasks.update: Checking for calculations to update" set_daemon_timestamp(task_name='updater', when='start') @@ -88,7 +88,7 @@ def updater(): ) ) def retriever(): - configure_logging(daemon=True) + configure_logging(daemon=True, daemon_log_file=DAEMON_LOG_FILE) from aiida.daemon.execmanager import retrieve_jobs print "aiida.daemon.tasks.retrieve: Checking for calculations to retrieve" set_daemon_timestamp(task_name='retriever', when='start') @@ -102,7 +102,7 @@ def retriever(): ) ) def tick_work(): - configure_logging(daemon=True) + configure_logging(daemon=True, daemon_log_file=DAEMON_LOG_FILE) from aiida.work.daemon import tick_workflow_engine print "aiida.daemon.tasks.tick_workflows: Ticking workflows" tick_workflow_engine() @@ -113,7 +113,7 @@ def tick_work(): ) ) def workflow_stepper(): # daemon for legacy workflow - configure_logging(daemon=True) + configure_logging(daemon=True, daemon_log_file=DAEMON_LOG_FILE) from aiida.daemon.workflowmanager import execute_steps print "aiida.daemon.tasks.workflowmanager: Checking for workflows to manage" # RUDIMENTARY way to check if this task is already running (to avoid acting