Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Fix the broken celery logging #1030

Merged
merged 6 commits into from
Jan 10, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 3 additions & 86 deletions aiida/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,8 @@
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
import logging
import warnings
from logging import config
from aiida.common.log import configure_logging
from aiida.common.setup import get_property

__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved."
Expand All @@ -19,90 +18,8 @@
__paper__ = """G. Pizzi, A. Cepellotti, R. Sabatini, N. Marzari, and B. Kozinsky, "AiiDA: automated interactive infrastructure and database for computational science", Comp. Mat. Sci 111, 218-230 (2016); http://dx.doi.org/10.1016/j.commatsci.2015.09.013 - http://www.aiida.net."""
__paper_short__ = """G. Pizzi et al., Comp. Mat. Sci 111, 218 (2016)."""


# Custom logging level, intended specifically for informative log messages
# reported during WorkChains and Workflows. We want the level between INFO(20)
# and WARNING(30) such that it will be logged for the default loglevel, however
# the value 25 is already reserved for SUBWARNING by the multiprocessing module.
LOG_LEVEL_REPORT = 23
logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT')


# A logging filter that can be used to disable logging
class NotInTestingFilter(logging.Filter):

def filter(self, record):
from aiida import settings
return not settings.TESTING_MODE


# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d '
'%(thread)d %(message)s',
},
'halfverbose': {
'format': '%(asctime)s, %(name)s: [%(levelname)s] %(message)s',
'datefmt': '%m/%d/%Y %I:%M:%S %p',
},
},
'filters': {
'testing': {
'()': NotInTestingFilter
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'halfverbose',
'filters': ['testing']
},
'dblogger': {
# get_property takes the property from the config json file
# The key used in the json, and the default value, are
# specified in the _property_table inside aiida.common.setup
# NOTE: To modify properties, use the 'verdi devel setproperty'
# command and similar ones (getproperty, describeproperties, ...)
'level': get_property('logging.db_loglevel'),
'class': 'aiida.utils.logger.DBLogHandler',
},
},
'loggers': {
'aiida': {
'handlers': ['console', 'dblogger'],
'level': get_property('logging.aiida_loglevel'),
'propagate': False,
},
'paramiko': {
'handlers': ['console'],
'level': get_property('logging.paramiko_loglevel'),
'propagate': False,
},
'alembic': {
'handlers': ['console'],
'level': get_property('logging.alembic_loglevel'),
'propagate': False,
},
'sqlalchemy': {
'handlers': ['console'],
'level': get_property('logging.sqlalchemy_loglevel'),
'propagate': False,
'qualname': 'sqlalchemy.engine',
},
},
}

# Configure the global logger through the LOGGING dictionary
logging.config.dictConfig(LOGGING)
# Configure the default logging
configure_logging()

if get_property("warnings.showdeprecations"):
# print out the warnings coming from deprecation
Expand Down
2 changes: 1 addition & 1 deletion aiida/backends/djsite/cmdline.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

from aiida.common.datastructures import wf_states
from aiida.utils import timezone
from aiida.utils.logger import get_dblogger_extra
from aiida.common.log import get_dblogger_extra



Expand Down
2 changes: 1 addition & 1 deletion aiida/backends/djsite/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

import os
import django
from aiida.utils.logger import get_dblogger_extra
from aiida.common.log import get_dblogger_extra


def load_dbenv(process=None, profile=None):
Expand Down
2 changes: 1 addition & 1 deletion aiida/backends/sqlalchemy/cmdline.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
# For further information please visit http://www.aiida.net #
###########################################################################

from aiida.utils.logger import get_dblogger_extra
from aiida.common.log import get_dblogger_extra



Expand Down
2 changes: 1 addition & 1 deletion aiida/backends/tests/orm/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import logging
import unittest
from aiida.utils.timezone import now
from aiida import LOG_LEVEL_REPORT
from aiida.common.log import LOG_LEVEL_REPORT
from aiida.orm.log import OrderSpecifier, ASCENDING, DESCENDING
from aiida.orm.backend import construct
from aiida.orm.calculation import Calculation
Expand Down
4 changes: 2 additions & 2 deletions aiida/cmdline/commands/daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,8 @@ def __init__(self):
'configureuser': (self.configure_user, self.complete_none),
}

self.logfile = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.LOG_SUBDIR, "celery.log")
self.pidfile = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.LOG_SUBDIR, "celery.pid")
self.logfile = setup.DAEMON_LOG_FILE
self.pidfile = setup.DAEMON_PID_FILE
self.workdir = os.path.join(os.path.split(os.path.abspath(aiida.__file__))[0], "daemon")
self.celerybeat_schedule = os.path.join(setup.AIIDA_CONFIG_FOLDER, setup.DAEMON_SUBDIR, "celerybeat-schedule")

Expand Down
10 changes: 1 addition & 9 deletions aiida/common/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,4 @@
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""Internal functionality that is needed by multiple modules of AiiDA"""
import logging
import sys


aiidalogger = logging.getLogger("aiida")
# aiidalogger.addHandler(logging.StreamHandler(sys.stderr))
#FORMAT = '[%(name)s@%(levelname)s] %(message)s'
#logging.basicConfig(format=FORMAT)
from aiida.common.log import aiidalogger
190 changes: 190 additions & 0 deletions aiida/common/log.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,190 @@
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
import logging
from copy import deepcopy
from logging import config
from aiida.common import setup
from aiida.backends.utils import is_dbenv_loaded

# Custom logging level, intended specifically for informative log messages
# reported during WorkChains and Workflows. We want the level between INFO(20)
# and WARNING(30) such that it will be logged for the default loglevel, however
# the value 25 is already reserved for SUBWARNING by the multiprocessing module.
LOG_LEVEL_REPORT = 23
# Register the custom level with the logging module so 'REPORT' is a valid level name
logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT')


# Convenience dictionary of available log level names and their log level integer
# (keys are the human readable names, e.g. 'DEBUG', 'REPORT', 'WARNING', ...)
LOG_LEVELS = {
    logging.getLevelName(logging.DEBUG): logging.DEBUG,
    logging.getLevelName(logging.INFO): logging.INFO,
    logging.getLevelName(LOG_LEVEL_REPORT): LOG_LEVEL_REPORT,
    logging.getLevelName(logging.WARNING): logging.WARNING,
    logging.getLevelName(logging.ERROR): logging.ERROR,
    logging.getLevelName(logging.CRITICAL): logging.CRITICAL,
}


# The AiiDA logger: all loggers in AiiDA should be children of this one,
# obtained through logging.getLogger('aiida.<submodule>')
aiidalogger = logging.getLogger('aiida')


# A logging filter that can be used to disable logging
class NotInTestingFilter(logging.Filter):
    """Logging filter that drops all records while AiiDA is running in testing mode."""

    def filter(self, record):
        """
        Decide whether the given record should be emitted.

        :param record: the logging.LogRecord under consideration
        :return: False when AiiDA is in testing mode (suppress), True otherwise
        """
        # Imported lazily to avoid a circular import at module load time
        from aiida import settings

        if settings.TESTING_MODE:
            return False
        return True


# A logging handler that will store the log record in the database DbLog table
class DBLogHandler(logging.Handler):
    """Custom logging handler that persists emitted log records to the database."""

    def emit(self, record):
        """
        Store the log record through the backend's log collection.

        This handler is deliberately best-effort: any failure to persist the
        record is swallowed (or merely printed) so that logging itself can
        never crash the calling code or cause recursive error handling.

        :param record: the logging.LogRecord instance to persist
        """
        # If this is reached before a backend is defined, simply pass
        if not is_dbenv_loaded():
            return

        # Imported here, not at module level, because they require a loaded
        # database environment / configured settings to be importable safely
        from aiida.orm.backend import construct
        from django.core.exceptions import ImproperlyConfigured

        try:
            backend = construct()
            backend.log.create_entry_from_record(record)

        except ImproperlyConfigured:
            # The logger was probably called before the Django settings module
            # was loaded; in that case silently ignoring the record is the
            # intended no-op behavior.
            pass
        except Exception:
            # To avoid loops with the error handler we just print the traceback
            # instead of logging it. Hopefully this should never happen!
            import traceback

            traceback.print_exc()


# The default logging dictionary for AiiDA that can be used in conjunction
# with the config.dictConfig method of python's logging module
LOGGING = {
    'version': 1,
    # Silence loggers that are not explicitly configured in this dictionary
    'disable_existing_loggers': True,
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s %(process)d '
                      '%(thread)d %(message)s',
        },
        'halfverbose': {
            'format': '%(asctime)s, %(name)s: [%(levelname)s] %(message)s',
            'datefmt': '%m/%d/%Y %I:%M:%S %p',
        },
    },
    'filters': {
        # Suppresses console output while the test suite is running
        'testing': {
            '()': NotInTestingFilter
        }
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'halfverbose',
            'filters': ['testing']
        },
        'dblogger': {
            # setup.get_property takes the property from the config json file
            # The key used in the json, and the default value, are
            # specified in the _property_table inside aiida.common.setup
            # NOTE: To modify properties, use the 'verdi devel setproperty'
            # command and similar ones (getproperty, describeproperties, ...)
            'level': setup.get_property('logging.db_loglevel'),
            'class': 'aiida.common.log.DBLogHandler',
        },
    },
    'loggers': {
        'aiida': {
            'handlers': ['console', 'dblogger'],
            'level': setup.get_property('logging.aiida_loglevel'),
            'propagate': False,
        },
        'paramiko': {
            'handlers': ['console'],
            'level': setup.get_property('logging.paramiko_loglevel'),
            'propagate': False,
        },
        'alembic': {
            'handlers': ['console'],
            'level': setup.get_property('logging.alembic_loglevel'),
            'propagate': False,
        },
        'sqlalchemy': {
            'handlers': ['console'],
            'level': setup.get_property('logging.sqlalchemy_loglevel'),
            'propagate': False,
            'qualname': 'sqlalchemy.engine',
        },
    },
}

def configure_logging(daemon=False, daemon_log_file=None):
    """
    Setup the logging by retrieving the LOGGING dictionary from aiida and passing it to
    the python module logging.config.dictConfig. If the logging needs to be setup for the
    daemon running a task for one of the celery workers, set the argument 'daemon' to True
    and specify the path to the log file. This will cause a 'daemon_log_file' handler to be
    added to all the configured loggers, that is a RotatingFileHandler that writes to the
    log file.

    :param daemon: configure the logging for a daemon task by adding a file handler instead
        of the default 'console' StreamHandler
    :param daemon_log_file: absolute filepath of the log file for the RotatingFileHandler
    :raises ValueError: if 'daemon' is True but 'daemon_log_file' was not specified
    """
    # Validate the arguments before doing any work
    if daemon is True and daemon_log_file is None:
        raise ValueError('daemon_log_file has to be defined when configuring for the daemon')

    # Work on a deep copy so that repeated calls never mutate the module level template.
    # The local is named 'config_dict' to avoid shadowing the 'logging.config' submodule
    # that is imported at the top of this module.
    config_dict = deepcopy(LOGGING)
    daemon_handler_name = 'daemon_log_file'

    # Add the daemon file handler to all loggers if daemon=True
    if daemon is True:

        config_dict.setdefault('handlers', {})
        config_dict['handlers'][daemon_handler_name] = {
            'level': 'DEBUG',
            'formatter': 'halfverbose',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': daemon_log_file,
            'encoding': 'utf8',
            'maxBytes': 100000,
        }

        # Use items() instead of the python 2 only iteritems() so that this
        # module keeps working under python 3 as well
        for name, logger in config_dict.get('loggers', {}).items():
            logger.setdefault('handlers', []).append(daemon_handler_name)

    logging.config.dictConfig(config_dict)


def get_dblogger_extra(obj):
    """
    Given an object (Node, Calculation, ...) return a dictionary to be passed
    as extra to the aiidalogger in order to store the exception also in the DB.
    If no such extra is passed, the exception is only logged on file.

    :param obj: the object (e.g. a Node) the log message pertains to
    :return: a dictionary with the keys 'objpk' and 'objname'
    """
    # Imported lazily to avoid a circular import at module load time
    from aiida.orm import Node

    if isinstance(obj, Node):
        if obj._plugin_type_string:
            objname = "node." + obj._plugin_type_string
        else:
            objname = "node"
    else:
        klass = obj.__class__
        objname = klass.__module__ + "." + klass.__name__

    return {'objpk': obj.pk, 'objname': objname}
Loading