Log also summaries and reports
Use a new `override_log_formatter` decorator to temporarily override the
formatter for all AiiDA log handlers, then log console output at the
custom AiiDA REPORT level.

If `silent` is given, disable logging up to and including the CRITICAL level.
CasperWA committed May 13, 2020
1 parent e789c78 commit 7e2547e
Showing 9 changed files with 95 additions and 55 deletions.
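
A minimal sketch of the pattern described in the commit message, using the names introduced by the diff below (`override_log_formatter`, `LOG_LEVEL_REPORT`, `AIIDA_LOGGER`); the `report_progress` function itself is hypothetical and only illustrates how the export/import code adopts the pattern:

import logging

from aiida.common.log import AIIDA_LOGGER, LOG_LEVEL_REPORT, override_log_formatter


@override_log_formatter('%(message)s')  # bare messages on the console while this runs
def report_progress(silent=False):
    """Hypothetical function following the pattern adopted by the export/import code."""
    if silent:
        # Silence everything at CRITICAL and below instead of skipping the log calls
        logging.disable(level=logging.CRITICAL)

    # Former `print(...)` calls become log records at the custom AiiDA REPORT level
    AIIDA_LOGGER.log(msg='Exporting a total of 42 database entries.', level=LOG_LEVEL_REPORT)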
8 changes: 5 additions & 3 deletions aiida/cmdline/commands/cmd_import.py
@@ -142,7 +142,7 @@ def _try_import(migration_performed, file_to_import, archive, group, migration,
return migrate_archive


def _migrate_archive(ctx, temp_folder, file_to_import, archive, non_interactive, more_archives, **kwargs): # pylint: disable=unused-argument
def _migrate_archive(ctx, temp_folder, file_to_import, archive, non_interactive, more_archives, silent, **kwargs): # pylint: disable=unused-argument
"""Utility function for `verdi import` to migrate archive
Invoke click command `verdi export migrate`, passing in the archive,
outputting the migrated archive in a temporary SandboxFolder.
@@ -154,6 +154,7 @@ def _migrate_archive(ctx, temp_folder, file_to_import, archive, non_interactive,
:param archive: Filename of archive to be migrated, and later attempted imported.
:param non_interactive: Whether or not the user should be asked for input for any reason.
:param more_archives: Whether or not there are more archives to be imported.
:param silent: Suppress console messages.
:return: Absolute path to migrated archive within SandboxFolder.
"""
from aiida.cmdline.commands.cmd_export import migrate
@@ -167,7 +168,7 @@ def _migrate_archive(ctx, temp_folder, file_to_import, archive, non_interactive,
# Migration
try:
ctx.invoke(
migrate, input_file=file_to_import, output_file=temp_folder.get_abs_path(temp_out_file), silent=False
migrate, input_file=file_to_import, output_file=temp_folder.get_abs_path(temp_out_file), silent=silent
)
except Exception as exception:
_echo_error(
@@ -288,7 +289,8 @@ def cmd_import(
'extras_mode_existing': ExtrasImportCode[extras_mode_existing].value,
'extras_mode_new': extras_mode_new,
'comment_mode': comment_mode,
'non_interactive': non_interactive
'non_interactive': non_interactive,
'silent': False,
}

# Import local archives
29 changes: 28 additions & 1 deletion aiida/common/log.py
@@ -13,10 +13,11 @@
import logging
import types
from contextlib import contextmanager
from wrapt import decorator

from aiida.manage.configuration import get_config_option

__all__ = ('AIIDA_LOGGER', 'override_log_level')
__all__ = ('AIIDA_LOGGER', 'override_log_level', 'override_log_formatter')

# Custom logging level, intended specifically for informative log messages reported during WorkChains.
# We want the level between INFO(20) and WARNING(30) such that it will be logged for the default loglevel, however
@@ -209,3 +210,29 @@ def override_log_level(level=logging.CRITICAL):
yield
finally:
logging.disable(level=logging.NOTSET)


def override_log_formatter(fmt: str):
"""Temporarily use a different formatter for all handlers.
NOTE: One can _only_ set `fmt` (not `datefmt` or `style`).
Be aware! This may fail if the number of handlers is changed within the decorated function/method.
"""

@decorator
def wrapper(wrapped, instance, args, kwargs): # pylint: disable=unused-argument
temp_formatter = logging.Formatter(fmt=fmt)

cached_formatters = []
for handler in AIIDA_LOGGER.handlers:
cached_formatters.append(handler.formatter)

try:
for handler in AIIDA_LOGGER.handlers:
handler.setFormatter(temp_formatter)
return wrapped(*args, **kwargs)
finally:
for index, handler in enumerate(AIIDA_LOGGER.handlers):
handler.setFormatter(cached_formatters[index])

return wrapper
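
A brief usage sketch of the decorator above, including the reason for the docstring's warning about changing handlers inside the decorated function (the `demo` function is illustrative):

from aiida.common.log import AIIDA_LOGGER, override_log_formatter


@override_log_formatter('%(name)s: %(message)s')
def demo():
    # While `demo` runs, every handler on AIIDA_LOGGER uses the temporary format;
    # the cached formatters are restored in the `finally` block, even on errors.
    AIIDA_LOGGER.warning('uses the temporary formatter')

    # Caveat: adding a handler here would make `AIIDA_LOGGER.handlers` longer than
    # the cached formatter list, so the positional restore would raise IndexError,
    # while removing one could pair the remaining handlers with the wrong formatters.
    # AIIDA_LOGGER.addHandler(logging.StreamHandler())


demo()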
10 changes: 5 additions & 5 deletions aiida/tools/importexport/common/archive.py
@@ -239,7 +239,7 @@ def update_description(path, refresh: bool = False):
progress_bar.set_description_str(description, refresh=refresh)


def get_file_iterator(file_handle, folderpath, silent=False, **kwargs): # pylint: disable=unused-argument
def get_file_iterator(file_handle, folderpath, silent=True, **kwargs): # pylint: disable=unused-argument
"""Go through JSON files and then return new file_iterator
:param file_handle: A file handle returned from `with open() as file_handle:`.
@@ -284,7 +284,7 @@ def get_file_iterator(file_handle, folderpath, silent=False, **kwargs): # pylin
return get_progress_bar(iterable=file_handle.namelist(), unit='files', leave=False, disable=silent)


def extract_zip(infile, folder, nodes_export_subfolder=None, silent=False, **kwargs):
def extract_zip(infile, folder, nodes_export_subfolder=None, **kwargs):
"""Extract the nodes to be imported from a zip file.
:param infile: file path
@@ -316,7 +316,7 @@ def extract_zip(infile, folder, nodes_export_subfolder=None, silent=False, **kwa
if not handle.namelist():
raise CorruptArchive('no files detected in archive')

file_iterator = get_file_iterator(file_handle=handle, folderpath=folder.abspath, silent=silent, **kwargs)
file_iterator = get_file_iterator(file_handle=handle, folderpath=folder.abspath, **kwargs)

for membername in file_iterator:
# Check that we are only exporting nodes within the subfolder!
@@ -333,7 +333,7 @@ def extract_tar(infile, folder, nodes_export_subfolder=None, silent=False, **kwa
close_progress_bar(leave=False)


def extract_tar(infile, folder, nodes_export_subfolder=None, silent=False, **kwargs):
def extract_tar(infile, folder, nodes_export_subfolder=None, **kwargs):
"""
Extract the nodes to be imported from a (possibly zipped) tar file.
@@ -366,7 +366,7 @@ def extract_tar(infile, folder, nodes_export_subfolder=None, silent=False, **kwa
if len(handle.getmembers()) == 1 and handle.getmembers()[0].size == 0:
raise CorruptArchive('no files detected in archive')

file_iterator = get_file_iterator(file_handle=handle, folderpath=folder.abspath, silent=silent, **kwargs)
file_iterator = get_file_iterator(file_handle=handle, folderpath=folder.abspath, **kwargs)

for member in file_iterator:
if member.isdev():
2 changes: 1 addition & 1 deletion aiida/tools/importexport/common/utils.py
@@ -10,9 +10,9 @@
""" Utility functions for import/export of AiiDA entities """
# pylint: disable=inconsistent-return-statements,too-many-branches,too-many-return-statements
# pylint: disable=too-many-nested-blocks,too-many-locals
from html.parser import HTMLParser
import urllib.request
import urllib.parse
from html.parser import HTMLParser

from aiida.tools.importexport.common.config import (
NODE_ENTITY_NAME, GROUP_ENTITY_NAME, COMPUTER_ENTITY_NAME, USER_ENTITY_NAME, LOG_ENTITY_NAME, COMMENT_ENTITY_NAME
44 changes: 24 additions & 20 deletions aiida/tools/importexport/dbexport/__init__.py
@@ -9,6 +9,7 @@
###########################################################################
# pylint: disable=fixme,too-many-branches,too-many-locals,too-many-statements,too-many-arguments
"""Provides export functionalities."""
import logging
import os
import tarfile
import time
@@ -18,6 +19,7 @@
from aiida.common.exceptions import LicensingException
from aiida.common.folders import RepositoryFolder, SandboxFolder, Folder
from aiida.common.lang import type_check
from aiida.common.log import override_log_formatter, LOG_LEVEL_REPORT
from aiida.orm.utils.repository import Repository

from aiida.tools.importexport.common import exceptions, get_progress_bar, close_progress_bar
@@ -67,7 +69,7 @@ def export(
:py:class:`~aiida.tools.importexport.common.exceptions.ArchiveExportError` if the output file already exists.
:type overwrite: bool
:param silent: suppress progress bar.
:param silent: suppress console prints and progress bar.
:type silent: bool
:param use_compression: Whether or not to compress the archive file (only valid for the zip file format).
@@ -135,15 +137,17 @@
if not overwrite and os.path.exists(filename):
raise exceptions.ArchiveExportError("The output file '{}' already exists".format(filename))

if not silent:
if file_format == ExportFileFormat.TAR_GZIPPED:
file_format_verbose = 'Gzipped tarball (compressed)'
# Must be a zip then
elif use_compression:
file_format_verbose = 'Zip (compressed)'
else:
file_format_verbose = 'Zip (uncompressed)'
summary(file_format_verbose, filename, **kwargs)
if silent:
logging.disable(level=logging.CRITICAL)

if file_format == ExportFileFormat.TAR_GZIPPED:
file_format_verbose = 'Gzipped tarball (compressed)'
# Must be a zip then
elif use_compression:
file_format_verbose = 'Zip (compressed)'
else:
file_format_verbose = 'Zip (uncompressed)'
summary(file_format_verbose, filename, **kwargs)

try:
if file_format == ExportFileFormat.TAR_GZIPPED:
@@ -276,6 +280,7 @@ def export_tar(entities=None, filename=None, **kwargs):
return (time_export_start, time_export_end, time_compress_start, time_compress_end)


@override_log_formatter('%(message)s')
def export_tree(
entities=None,
folder=None,
@@ -307,7 +312,7 @@
otherwise.
:type forbidden_licenses: list
:param silent: suppress progress bar.
:param silent: suppress console prints and progress bar.
:type silent: bool
:param include_comments: In-/exclude export of comments for given node(s) in ``entities``.
@@ -328,6 +333,9 @@
from collections import defaultdict
from aiida.tools.graph.graph_traversers import get_nodes_export

if silent:
logging.disable(level=logging.CRITICAL)

EXPORT_LOGGER.debug('STARTING EXPORT...')

# Backwards-compatibility
@@ -580,16 +588,12 @@ def export_tree(

model_data = sum(len(model_data) for model_data in export_data.values())
if not model_data:
msg = 'Nothing to store, exiting...'
if not silent:
print(msg)
EXPORT_LOGGER.debug(msg)
EXPORT_LOGGER.log(msg='Nothing to store, exiting...', level=LOG_LEVEL_REPORT)
return

msg = 'Exporting a total of {} database entries, of which {} are Nodes.'.format(model_data, len(all_node_pks))
if not silent:
print(msg)
EXPORT_LOGGER.debug(msg)
EXPORT_LOGGER.log(
msg='Exporting a total of {} database entries, of which {} are Nodes.'.format(model_data, len(all_node_pks)),
level=LOG_LEVEL_REPORT
)

# Instantiate new progress bar
progress_bar = get_progress_bar(total=1, leave=False, disable=silent)
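
A hedged usage sketch of the changed `export` behaviour (the parameter names come from the diff above; the `aiida.tools.importexport` import path, the `Int` test node, and a configured AiiDA profile are assumptions):

from aiida import load_profile
from aiida.orm import Int
from aiida.tools.importexport import export

load_profile()

node = Int(5).store()

# With silent=False (the default), the summary and the 'Exporting a total of ...'
# message now go through the AiiDA logger at the REPORT level instead of print().
export(entities=[node], filename='example.aiida')

# With silent=True, logging is disabled up to CRITICAL, so neither summary,
# reports nor progress bars reach the console.
export(entities=[node], filename='example.aiida', silent=True, overwrite=True)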
19 changes: 10 additions & 9 deletions aiida/tools/importexport/dbexport/utils.py
@@ -13,7 +13,7 @@
import warnings

from aiida.orm import QueryBuilder, ProcessNode
from aiida.common.log import AIIDA_LOGGER
from aiida.common.log import AIIDA_LOGGER, LOG_LEVEL_REPORT, override_log_formatter
from aiida.common.warnings import AiidaDeprecationWarning

from aiida.tools.importexport.common import exceptions
@@ -285,10 +285,10 @@ def check_process_nodes_sealed(nodes):
)


@override_log_formatter('%(message)s')
def summary(file_format, outfile, **kwargs):
"""Print summary for export"""
from tabulate import tabulate
from aiida.cmdline.utils import echo
from aiida.tools.importexport.common.config import EXPORT_VERSION

parameters = [['Archive', outfile], ['Format', file_format], ['Export version', EXPORT_VERSION]]
@@ -311,7 +311,7 @@ def summary(file_format, outfile, **kwargs):
['Follow CALL Links backwards', call_reversed]]
result += '\n\n{}\n'.format(tabulate(traversal_rules, headers=['Traversal rules', '']))

echo.echo(result)
EXPORT_LOGGER.log(msg=result, level=LOG_LEVEL_REPORT)


def deprecated_parameters(old, new):
@@ -325,11 +325,12 @@ def deprecated_parameters(old, new):
:return: New parameter's value (if not defined, then old parameter's value)
"""
if new.get('value', None) is not None:
message = '`{}` is deprecated, the supplied `{}` input will be used'.format(old['name'], new['name'])
else:
message = '`{}` is deprecated, please use `{}` instead'.format(old['name'], new['name'])
new['value'] = old['value']
warnings.warn(message, AiidaDeprecationWarning) # pylint: disable=no-member
if old.get('value', None) is not None:
if new.get('value', None) is not None:
message = '`{}` is deprecated, the supplied `{}` input will be used'.format(old['name'], new['name'])
else:
message = '`{}` is deprecated, please use `{}` instead'.format(old['name'], new['name'])
new['value'] = old['value']
warnings.warn(message, AiidaDeprecationWarning) # pylint: disable=no-member

return new['value']
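
A minimal sketch of how the guarded helper above behaves after this change (the keyword names `what`/`entities` are illustrative):

from aiida.tools.importexport.dbexport.utils import deprecated_parameters

# Deprecated keyword not supplied: with the new guard, no warning is emitted and
# the value of the replacement keyword is returned unchanged.
value = deprecated_parameters(
    old={'name': 'what', 'value': None},
    new={'name': 'entities', 'value': ['some-node']},
)
assert value == ['some-node']

# Deprecated keyword supplied, replacement missing: warn and fall back to the old value.
value = deprecated_parameters(
    old={'name': 'what', 'value': ['some-node']},
    new={'name': 'entities', 'value': None},
)
assert value == ['some-node']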
14 changes: 9 additions & 5 deletions aiida/tools/importexport/dbimport/backends/django/__init__.py
@@ -11,6 +11,7 @@
""" Django-specific import of AiiDA entities """

from distutils.version import StrictVersion
import logging
import os
import tarfile
import zipfile
@@ -19,6 +20,7 @@
from aiida.common import timezone, json
from aiida.common.folders import SandboxFolder, RepositoryFolder
from aiida.common.links import LinkType, validate_link_label
from aiida.common.log import override_log_formatter
from aiida.common.utils import grouper, get_object_from_string
from aiida.manage.configuration import get_config_option
from aiida.orm.utils.repository import Repository
@@ -37,6 +39,7 @@
)


@override_log_formatter('%(message)s')
def import_data_dj(
in_path,
group=None,
@@ -118,6 +121,9 @@ def import_data_dj(
elif not group.is_stored:
group.store()

if silent:
logging.disable(level=logging.CRITICAL)

################
# EXTRACT DATA #
################
@@ -163,8 +169,7 @@ def import_data_dj(

raise exceptions.IncompatibleArchiveVersionError(msg)

if not silent:
start_summary(in_path, comment_mode, extras_mode_new, extras_mode_existing)
start_summary(in_path, comment_mode, extras_mode_new, extras_mode_existing)

##########################################################################
# CREATE UUID REVERSE TABLES AND CHECK IF I HAVE ALL NODES FOR THE LINKS #
@@ -769,8 +774,7 @@ def import_data_dj(
# Finalize Progress bar
close_progress_bar(leave=False)

if not silent:
# Summarize import
result_summary(ret_dict, getattr(group, 'label', None))
# Summarize import
result_summary(ret_dict, getattr(group, 'label', None))

return ret_dict