Rework log (setup and filtering) implementation
Overview
========
Motivated by issue getpelican#3037 (Ensure proper log line/filename in
warning/errors).

``pelican.log`` is no longer required to be early in the import order,
as the package no longer relies on a custom logging class.

Logging setup is now decoupled so that formatting and filters can be
applied independently, which makes both setup and testing more flexible.
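
In rough terms, the decoupled flow looks like this (an illustrative sketch;
the real call sites are in the ``pelican/__init__.py`` diff below)::

    import logging

    from pelican import log
    from pelican.settings import read_settings

    log.init(level=logging.INFO)      # attach the console handler, set verbosity
    settings = read_settings('pelicanconf.py')
    log.configure(settings)           # apply filter-related settings separately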

Behavior of command-line calls has not changed, apart from some
modifications to help text.

``LOG_FATAL`` and ``LOG_ONCE_LEVEL`` are new configuration keys.
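
For illustration, the new keys could be set in a settings file roughly as
follows (the value types are an assumption based on this commit's use of
``log.severity_from_name``, not spelled out in the summary)::

    import logging

    # Abort the build once a record at this level (or above) is emitted.
    LOG_FATAL = logging.ERROR

    # De-duplicate repeated log records at this level and above.
    LOG_ONCE_LEVEL = logging.WARNING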

Summary of changes
==================

:pelican/__init__.py:

    * Parsing the command line:
        * Unify type conversion from strings to logging levels (see the
          sketch below this list).
        * Tweak some help text in ``parse_arguments``.
    * Preserve the ``log`` namespace throughout (for clarity).
    * FIX: ``pelican.listen`` verbosity: no longer assume the logger's
      level is NOTSET.
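
A minimal sketch of what the string-to-level helper used above
(``log.severity_from_name``) might look like; this is illustrative only,
not the actual implementation::

    import logging
    from typing import Optional

    def severity_from_name(name: Optional[str]) -> Optional[int]:
        """Map names like 'warnings', 'errors' or 'DEBUG' to logging levels."""
        if not name:
            return None
        aliases = {'errors': 'ERROR', 'warnings': 'WARNING'}
        return getattr(logging, aliases.get(name.lower(), name.upper()))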

:pelican/log.py:

    * FIX (getpelican#3037): warnings & errors now display the correct line numbers.
    * Split logging filters so that each focuses on a single aspect of
      behavior (an illustrative example follows this list). This allows for
        a) more independence in testing and
        b) more powerful logger configuration, with fewer side effects.
    * Isolate configuration *reading* from configuration *application*
      (see ``pelican/settings.py``).
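
As a rough illustration of the "one filter, one concern" idea (this is not
one of the actual filter classes in ``pelican/log.py``)::

    import logging

    class OnceFilter(logging.Filter):
        """Drop records whose (level, message) pair has already been seen."""

        def __init__(self, name=''):
            super().__init__(name)
            self._seen = set()

        def filter(self, record):
            key = (record.levelno, record.getMessage())
            if key in self._seen:
                return False
            self._seen.add(key)
            return True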

:pelican/settings.py:

    * Remove ``pelican.log`` dependency.
    * Isolate configuration *reading* from configuration *application*,
      to remove the dependency on global state when modifying log filters.
    * Add validation for ``LOG_LIMIT`` values.

:pelican/tests/__init__.py:

    * Remove the dependency on the ``pelican.log`` module, as test runners
      set up their own console handler anyway.
    * Only add a ``NullHandler`` in the absence of a configured logger.
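
The ``NullHandler`` change follows the usual stdlib pattern of silencing
output only when nothing else is configured (a sketch of the idea, not the
exact code)::

    import logging

    root = logging.getLogger()
    if not root.handlers:
        # Keep logging quiet by default without clobbering a test
        # runner's own console handler.
        root.addHandler(logging.NullHandler())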

:pelican/tests/support.py:

    * Remove the ``LoggedTestCase`` class; tests requiring logging analysis
      now use a context manager (``LogCountHandler.examine``) instead.
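
A hypothetical usage sketch; the exact signature and counting API of
``LogCountHandler.examine`` are assumptions based on the description
above::

    import logging
    import unittest

    from pelican.tests.support import LogCountHandler

    class TestWarnings(unittest.TestCase):
        def test_emits_warning(self):
            logger = logging.getLogger('pelican')
            with LogCountHandler.examine(logger) as handler:
                logger.warning('something looks off')
            # The counting method is illustrative; the real handler may differ.
            self.assertEqual(handler.count_logs('something looks off'), 1)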

:pelican/tests/test_log.py:

    * Rewrite to reflect changes in pelican's logging module.
    * Increase code coverage.

:pelican/tests/test_contents.py:
:pelican/tests/test_pelican.py:
:pelican/tests/test_utils.py:

    * Import ``unittest`` directly, rather than from other modules.
    * Switch to the ``LogCountHandler.examine`` context manager for
      test-specific log inspection.
    * Remove the dependency on a custom TestCase (used for logging analysis).
    * Remove code that temporarily disabled log filters; the filters in
      question are now only enabled in tests that exercise them directly
      (e.g. in ``test_log.py``).

:pelican/tests/test_settings.py:

    * Add a test for ``LOG_FILTER`` values read from configs.
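
``LOG_FILTER`` entries are ``(level, message)`` pairs, e.g. in a settings
file::

    import logging

    LOG_FILTER = [
        (logging.WARNING, 'TAG_SAVE_AS is set to False'),
    ]
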
copperchin committed Oct 5, 2023
1 parent 3be0703 commit e21bdc7
Showing 10 changed files with 1,031 additions and 397 deletions.
118 changes: 78 additions & 40 deletions pelican/__init__.py
@@ -14,10 +14,7 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)

# pelican.log has to be the first pelican module to be loaded
# because logging.setLoggerClass has to be called before logging.getLogger
from pelican.log import console
from pelican.log import init as init_logging
from pelican import log
from pelican.generators import (ArticlesGenerator, # noqa: I100
PagesGenerator, SourceFileGenerator,
StaticGenerator, TemplatePagesGenerator)
@@ -164,15 +161,18 @@ def run(self):
'draft page',
'draft pages')

console.print('Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.'
.format(
pluralized_articles,
pluralized_drafts,
pluralized_hidden_articles,
pluralized_pages,
pluralized_hidden_pages,
pluralized_draft_pages,
time.time() - start_time))
log.console.print(
'Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.'
.format(
pluralized_articles,
pluralized_drafts,
pluralized_hidden_articles,
pluralized_pages,
pluralized_hidden_pages,
pluralized_draft_pages,
time.time() - start_time
)
)

def _get_generator_classes(self):
discovered_generators = [
@@ -229,13 +229,13 @@ def _get_writer(self):

class PrintSettings(argparse.Action):
def __call__(self, parser, namespace, values, option_string):
init_logging(name=__name__)
log.init(level=logging.WARNING)

try:
instance, settings = get_instance(namespace)
except Exception as e:
logger.critical("%s: %s", e.__class__.__name__, e)
console.print_exception()
log.console.print_exception()
sys.exit(getattr(e, 'exitcode', 1))

if values:
@@ -247,15 +247,17 @@ def __call__(self, parser, namespace, values, option_string):
setting_format = '\n{}:\n{}'
else:
setting_format = '\n{}: {}'
console.print(setting_format.format(
log.console.print(setting_format.format(
setting,
pprint.pformat(settings[setting])))
else:
console.print('\n{} is not a recognized setting.'.format(setting))
log.console.print(
'\n{} is not a recognized setting.'.format(setting)
)
break
else:
# No argument was given to --print-settings, so print all settings
console.print(settings)
log.console.print(settings)

parser.exit()

@@ -358,15 +360,17 @@ def parse_arguments(argv=None):
dest='selected_paths', default=None,
help='Comma separated list of selected paths to write')

parser.add_argument('--fatal', metavar='errors|warnings',
choices=('errors', 'warnings'), default='',
parser.add_argument('--fatal', choices=('errors', 'warnings'),
default=None,
# type conversion deferred (see: [FATAL] section below)
help=('Exit the program with non-zero status if any '
'errors/warnings encountered.'))

parser.add_argument('--logs-dedup-min-level', default='WARNING',
choices=('DEBUG', 'INFO', 'WARNING', 'ERROR'),
help=('Only enable log de-duplication for levels equal'
' to or above the specified value'))
metavar=('logging severity level'),
type=log.severity_from_name, dest='once_lvl',
help=('Enable log de-duplication for this severity'
' level and above.'))

parser.add_argument('-l', '--listen', dest='listen', action='store_true',
help='Serve content files via HTTP and port 8000.')
@@ -390,6 +394,10 @@

args = parser.parse_args(argv)

# [FATAL] value type is converted *after* parsing, to get around argparse
# complaining that converted type does not match the "choices" param type.
args.fatal = log.severity_from_name(args.fatal)

if args.port is not None and not args.listen:
logger.warning('--port without --listen has no effect')
if args.bind is not None and not args.listen:
@@ -424,6 +432,10 @@ def get_config(args):
config['PORT'] = args.port
if args.bind is not None:
config['BIND'] = args.bind
if args.fatal is not None:
config['LOG_FATAL'] = args.fatal
if args.once_lvl is not None:
config['LOG_ONCE_LEVEL'] = args.once_lvl
config['DEBUG'] = args.verbosity == logging.DEBUG
config.update(args.overrides)

@@ -449,8 +461,11 @@ def get_instance(args):


def autoreload(args, excqueue=None):
console.print(' --- AutoReload Mode: Monitoring `content`, `theme` and'
' `settings` for changes. ---')
log.console.print(
' --- '
'AutoReload Mode: Monitoring `content`, `theme` and `settings` for changes.'
' ---'
)
pelican, settings = get_instance(args)
watcher = FileSystemWatcher(args.settings, Readers, settings)
sleep = False
@@ -469,8 +484,11 @@ def autoreload(args, excqueue=None):
watcher.update_watchers(settings)

if any(modified.values()):
console.print('\n-> Modified: {}. re-generating...'.format(
', '.join(k for k, v in modified.items() if v)))
log.console.print(
'\n-> Modified: {}. re-generating...'.format(
', '.join(k for k, v in modified.items() if v)
)
)
pelican.run()

except KeyboardInterrupt:
@@ -492,9 +510,14 @@ def autoreload(args, excqueue=None):


def listen(server, port, output, excqueue=None):
# set logging level to at least "INFO" (so we can see the server requests)
if logger.level < logging.INFO:
logger.setLevel(logging.INFO)
# elevate logging to *at least* "INFO" level to see the server requests:
server_logger = logging.getLogger('pelican.server')
if server_logger.getEffectiveLevel() > logging.INFO:
logObj = server_logger
while logObj:
if logObj.level and logObj.level > logging.INFO:
logObj.setLevel(logging.INFO)
logObj = logObj.parent

RootedHTTPServer.allow_reuse_address = True
try:
@@ -507,8 +530,8 @@ def listen(server, port, output, excqueue=None):
return

try:
console.print("Serving site at: http://{}:{} - Tap CTRL-C to stop".format(
server, port))
log.console.print("Serving site at: http://{}:{} - Tap CTRL-C to stop".format(
server, port))
httpd.serve_forever()
except Exception as e:
if excqueue is not None:
@@ -524,15 +547,28 @@

def main(argv=None):
args = parse_arguments(argv)
logs_dedup_min_level = getattr(logging, args.logs_dedup_min_level)
init_logging(level=args.verbosity, fatal=args.fatal,
name=__name__, logs_dedup_min_level=logs_dedup_min_level)

# logging: add rich handler & set verbosity
debug_mode = logging.DEBUG == args.verbosity
log.init(level=args.verbosity)
logger.debug('Pelican version: %s', __version__)
logger.debug('Python version: %s', sys.version.split()[0])

try:
pelican, settings = get_instance(args)
logger.info("Reading config...")

# temp handler to catch possible "fatal" logs while applying settings
with log.temp_handler(
logging.getLogger(),
log.AbortHandler(args.fatal or logging.CRITICAL)
):
pelican, settings = get_instance(args)

if debug_mode:
# LOG_FILTER is disabled in --debug
settings.pop('LOG_FILTER')

log.configure(settings)

if args.autoreload and args.listen:
excqueue = multiprocessing.Queue()
@@ -560,13 +596,15 @@ else:
else:
watcher = FileSystemWatcher(args.settings, Readers, settings)
watcher.check()
with console.status("Generating..."):
with log.console.status("Generating..."):
pelican.run()
except KeyboardInterrupt:
logger.warning('Keyboard interrupt received. Exiting.')
except Exception as e:
logger.critical("%s: %s", e.__class__.__name__, e)
if debug_mode: # include traceback:
log.AbortHandler.trim_traceback(e)
log.console.print_exception()
else:
log.console.print(e)

if args.verbosity == logging.DEBUG:
console.print_exception()
sys.exit(getattr(e, 'exitcode', 1))
