Merge branch 'develop' into querbuilder-abstract
sphuber authored Aug 25, 2021
2 parents aec9ce5 + eae3c50 commit e18b22c
Showing 57 changed files with 902 additions and 847 deletions.
12 changes: 6 additions & 6 deletions aiida/backends/djsite/db/migrations/0024_dblog_update.py
@@ -10,7 +10,6 @@
# Generated by Django 1.11.16 on 2018-12-21 10:56
# pylint: disable=invalid-name
"""Migration for the update of the DbLog table. Addition of uuids"""

import sys
import click

@@ -20,6 +19,7 @@
from aiida.backends.djsite.db.migrations import upgrade_schema_version
from aiida.backends.general.migrations.utils import dumps_json
from aiida.common.utils import get_new_uuid
+ from aiida.cmdline.utils import echo
from aiida.manage import configuration

REVISION = '1.0.24'
@@ -157,11 +157,11 @@ def export_and_clean_workflow_logs(apps, schema_editor):
return

if not configuration.PROFILE.is_test_profile:
- click.echo(
+ echo.echo_warning(
'We found {} log records that correspond to legacy workflows and {} log records to correspond '
'to an unknown entity.'.format(lwf_number, other_number)
)
- click.echo(
+ echo.echo_warning(
'These records will be removed from the database and exported to JSON files to the current directory).'
)
proceed = click.confirm('Would you like to proceed?', default=True)
@@ -181,7 +181,7 @@ def export_and_clean_workflow_logs(apps, schema_editor):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo(f'Exported legacy workflow logs to {filename}')
+ echo.echo(f'Exported legacy workflow logs to {filename}')

# Now delete the records
DbLog.objects.filter(objname__startswith=leg_workflow_prefix).delete()
@@ -205,7 +205,7 @@ def export_and_clean_workflow_logs(apps, schema_editor):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo(f'Exported unexpected entity logs to {filename}')
+ echo.echo(f'Exported unexpected entity logs to {filename}')

# Now delete the records
DbLog.objects.exclude(objname__startswith=node_prefix).exclude(objname__startswith=leg_workflow_prefix).delete()
@@ -229,7 +229,7 @@ def export_and_clean_workflow_logs(apps, schema_editor):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename))
+ echo.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename))

# Now delete the records
with schema_editor.connection.cursor() as cursor:
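Note on the pattern above: the hunks in this migration replace bare click.echo calls with the severity-aware helpers from aiida.cmdline.utils.echo, so warnings are styled and filtered consistently with the CLI's log level. A minimal, self-contained sketch of such helpers follows; the bodies are assumptions for illustration, not AiiDA's actual implementation (which also handles exit codes and log-level integration):

# Illustrative sketch of severity-aware echo helpers in the spirit of
# aiida.cmdline.utils.echo; the styling choices below are assumptions.
import click


def echo_warning(message: str) -> None:
    """Print a highlighted warning so it stands out from plain output."""
    click.secho(f'Warning: {message}', fg='yellow', bold=True)


def echo_success(message: str) -> None:
    """Print a highlighted success message."""
    click.secho(f'Success: {message}', fg='green', bold=True)


if __name__ == '__main__':
    echo_warning('these records will be removed from the database')
    echo_success('exported legacy workflow logs to legacy.json')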
@@ -68,7 +68,7 @@ def export_workflow_data(apps, _):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- echo.echo_info(f'Exported workflow data to {filename}')
+ echo.echo_report(f'Exported workflow data to {filename}')


class Migration(migrations.Migration):
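Note on echo_report: the echo_info calls in these migrations become echo_report, matching a custom REPORT log level that sits between INFO and WARNING in AiiDA's logging setup. A standalone sketch of registering such a level with the standard library follows; the numeric value 23 mirrors aiida.common.log but should be treated as an assumption here:

# Sketch: register a custom REPORT level with stdlib logging.
# REPORT = 23 (between INFO = 20 and WARNING = 30) is an assumption
# mirroring aiida.common.log, not something stated in this diff.
import logging

REPORT = 23
logging.addLevelName(REPORT, 'REPORT')
logging.basicConfig(format='%(levelname)s: %(message)s', level=REPORT)

logger = logging.getLogger('sketch')
logger.log(REPORT, 'Exported workflow data to data.json')  # emitted at REPORT
logger.info('suppressed: INFO sits below the REPORT threshold')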
14 changes: 7 additions & 7 deletions aiida/backends/sqlalchemy/manage.py
@@ -9,10 +9,10 @@
# For further information please visit http://www.aiida.net #
###########################################################################
"""Simple wrapper around the alembic command line tool that first loads an AiiDA profile."""

import alembic
import click

+ from aiida.cmdline import is_verbose
from aiida.cmdline.params import options


@@ -51,18 +51,18 @@ def alembic_revision(message):


@alembic_cli.command('current')
- @options.VERBOSE()
- def alembic_current(verbose):
+ @options.VERBOSITY()
+ def alembic_current():
"""Show the current revision."""
- execute_alembic_command('current', verbose=verbose)
+ execute_alembic_command('current', verbose=is_verbose())


@alembic_cli.command('history')
@click.option('-r', '--rev-range')
- @options.VERBOSE()
- def alembic_history(rev_range, verbose):
+ @options.VERBOSITY()
+ def alembic_history(rev_range):
"""Show the history for the given revision range."""
- execute_alembic_command('history', rev_range=rev_range, verbose=verbose)
+ execute_alembic_command('history', rev_range=rev_range, verbose=is_verbose())


@alembic_cli.command('upgrade')
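Note on the manage.py change: the per-command @options.VERBOSE() flag and its explicit verbose parameter give way to the global @options.VERBOSITY() option plus the is_verbose() helper, so verbosity is read from the logging configuration instead of being threaded through every command. A sketch of such a helper, assuming it simply compares the effective level of a shared CLI logger against logging.INFO (the real helper in aiida.cmdline may differ):

# Sketch of an is_verbose() helper, under the assumption that it checks
# whether a shared CLI logger would emit INFO-level messages.
import logging

CLI_LOGGER = logging.getLogger('cli')


def is_verbose() -> bool:
    """Return True when the logger admits INFO-level messages or below."""
    return CLI_LOGGER.getEffectiveLevel() <= logging.INFO


CLI_LOGGER.setLevel(logging.WARNING)
assert not is_verbose()  # WARNING filters INFO out

CLI_LOGGER.setLevel(logging.DEBUG)
assert is_verbose()  # DEBUG admits INFO and below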
@@ -27,6 +27,7 @@
from sqlalchemy.sql import text

from aiida.backends.general.migrations.utils import dumps_json
+ from aiida.cmdline.utils import echo
from aiida.manage import configuration

# revision identifiers, used by Alembic.
@@ -152,11 +153,11 @@ def export_and_clean_workflow_logs(connection):
return

if not configuration.PROFILE.is_test_profile:
- click.echo(
+ echo.echo_warning(
'We found {} log records that correspond to legacy workflows and {} log records to correspond '
'to an unknown entity.'.format(lwf_no_number, other_number)
)
- click.echo(
+ echo.echo_warning(
'These records will be removed from the database and exported to JSON files to the current directory).'
)
proceed = click.confirm('Would you like to proceed?', default=True)
@@ -178,7 +179,7 @@ def export_and_clean_workflow_logs(connection):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo(f'Exported legacy workflow logs to {filename}')
+ echo.echo(f'Exported legacy workflow logs to {filename}')

# Now delete the records
connection.execute(
@@ -203,7 +204,7 @@ def export_and_clean_workflow_logs(connection):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo(f'Exported unexpected entity logs to {filename}')
+ echo.echo(f'Exported unexpected entity logs to {filename}')

# Now delete the records
connection.execute(
@@ -228,7 +229,7 @@ def export_and_clean_workflow_logs(connection):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- click.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename))
+ echo.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename))

# Now delete the records
connection.execute(
@@ -91,7 +91,7 @@ def export_workflow_data(connection):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
- echo.echo_info(f'Exported workflow data to {filename}')
+ echo.echo_report(f'Exported workflow data to {filename}')


def upgrade():
1 change: 1 addition & 0 deletions aiida/cmdline/__init__.py
@@ -46,6 +46,7 @@
'WorkflowParamType',
'dbenv',
'format_call_graph',
+ 'is_verbose',
'only_if_daemon_running',
'with_dbenv',
)
80 changes: 24 additions & 56 deletions aiida/cmdline/commands/cmd_archive.py
@@ -10,6 +10,7 @@
# pylint: disable=too-many-arguments,import-error,too-many-locals,broad-except
"""`verdi archive` command."""
from enum import Enum
+ import logging
from typing import List, Tuple
import traceback
import urllib.request
@@ -22,6 +23,7 @@
from aiida.cmdline.params.types import GroupParamType, PathOrUrl
from aiida.cmdline.utils import decorators, echo
from aiida.common.links import GraphTraversalRules
+ from aiida.common.log import AIIDA_LOGGER

EXTRAS_MODE_EXISTING = ['keep_existing', 'update_existing', 'mirror', 'none', 'ask']
EXTRAS_MODE_NEW = ['import', 'none']
@@ -82,13 +84,6 @@ def inspect(archive, version, meta_data):
type=click.Choice(['zip', 'zip-uncompressed', 'zip-lowmemory', 'tar.gz', 'null']),
)
@options.FORCE(help='Overwrite output file if it already exists.')
- @click.option(
- '-v',
- '--verbosity',
- default='INFO',
- type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'CRITICAL']),
- help='Control the verbosity of console logging'
- )
@options.graph_traversal_rules(GraphTraversalRules.EXPORT.value)
@click.option(
'--include-logs/--exclude-logs',
@@ -113,7 +108,7 @@ def inspect(archive, version, meta_data):
@decorators.with_dbenv()
def create(
output_file, codes, computers, groups, nodes, archive_format, force, input_calc_forward, input_work_forward,
- create_backward, return_backward, call_calc_backward, call_work_backward, include_comments, include_logs, verbosity
+ create_backward, return_backward, call_calc_backward, call_work_backward, include_comments, include_logs
):
"""
Export subsets of the provenance graph to file for sharing.
@@ -125,9 +120,8 @@ def create(
You can modify some of those rules using options of this command.
"""
# pylint: disable=too-many-branches
- from aiida.common.log import override_log_formatter_context
from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
- from aiida.tools.importexport import export, ExportFileFormat, EXPORT_LOGGER
+ from aiida.tools.importexport import export, ExportFileFormat
from aiida.tools.importexport.common.exceptions import ArchiveExportError

entities = []
@@ -170,15 +164,13 @@ def create(
elif archive_format == 'null':
export_format = 'null'

- if verbosity in ['DEBUG', 'INFO']:
- set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
+ if AIIDA_LOGGER.level <= logging.REPORT: # pylint: disable=no-member
+ set_progress_bar_tqdm(leave=(AIIDA_LOGGER.level == logging.DEBUG))
else:
set_progress_reporter(None)
- EXPORT_LOGGER.setLevel(verbosity)

try:
- with override_log_formatter_context('%(message)s'):
- export(entities, filename=output_file, file_format=export_format, **kwargs)
+ export(entities, filename=output_file, file_format=export_format, **kwargs)
except ArchiveExportError as exception:
echo.echo_critical(f'failed to write the archive file. Exception: {exception}')
else:
@@ -202,18 +194,11 @@ def create(
# version inside the function when needed.
help='Archive format version to migrate to (defaults to latest version).',
)
- @click.option(
- '--verbosity',
- default='INFO',
- type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'CRITICAL']),
- help='Control the verbosity of console logging'
- )
- def migrate(input_file, output_file, force, in_place, archive_format, version, verbosity):
+ def migrate(input_file, output_file, force, in_place, archive_format, version):
"""Migrate an export archive to a more recent format version."""
- from aiida.common.log import override_log_formatter_context
from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
from aiida.tools.importexport import detect_archive_type, EXPORT_VERSION
- from aiida.tools.importexport.archive.migrators import get_migrator, MIGRATE_LOGGER
+ from aiida.tools.importexport.archive.migrators import get_migrator

if in_place:
if output_file:
@@ -225,11 +210,10 @@ def migrate(input_file, output_file, force, in_place, archive_format, version, v
'no output file specified. Please add --in-place flag if you would like to migrate in place.'
)

- if verbosity in ['DEBUG', 'INFO']:
- set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
+ if AIIDA_LOGGER.level <= logging.REPORT: # pylint: disable=no-member
+ set_progress_bar_tqdm(leave=(AIIDA_LOGGER.level == logging.DEBUG))
else:
set_progress_reporter(None)
- MIGRATE_LOGGER.setLevel(verbosity)

if version is None:
version = EXPORT_VERSION
@@ -238,18 +222,16 @@ def migrate(input_file, output_file, force, in_place, archive_format, version, v
migrator = migrator_cls(input_file)

try:
- with override_log_formatter_context('%(message)s'):
- migrator.migrate(version, output_file, force=force, out_compression=archive_format)
+ migrator.migrate(version, output_file, force=force, out_compression=archive_format)
except Exception as error: # pylint: disable=broad-except
- if verbosity == 'DEBUG':
+ if AIIDA_LOGGER.level <= logging.DEBUG:
raise
echo.echo_critical(
'failed to migrate the archive file (use `--verbosity DEBUG` to see traceback): '
f'{error.__class__.__name__}:{error}'
)

- if verbosity in ['DEBUG', 'INFO']:
- echo.echo_success(f'migrated the archive to version {version}')
+ echo.echo_success(f'migrated the archive to version {version}')


class ExtrasImportCode(Enum):
@@ -313,36 +295,23 @@ class ExtrasImportCode(Enum):
show_default=True,
help='Force migration of archive file archives, if needed.'
)
- @click.option(
- '-v',
- '--verbosity',
- default='INFO',
- type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'CRITICAL']),
- help='Control the verbosity of console logging'
- )
@options.NON_INTERACTIVE()
@decorators.with_dbenv()
@click.pass_context
def import_archive(
- ctx, archives, webpages, group, extras_mode_existing, extras_mode_new, comment_mode, migration, non_interactive,
- verbosity
+ ctx, archives, webpages, group, extras_mode_existing, extras_mode_new, comment_mode, migration, non_interactive
):
"""Import data from an AiiDA archive file.
The archive can be specified by its relative or absolute file path, or its HTTP URL.
"""
# pylint: disable=unused-argument
- from aiida.common.log import override_log_formatter_context
from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
- from aiida.tools.importexport.dbimport.utils import IMPORT_LOGGER
- from aiida.tools.importexport.archive.migrators import MIGRATE_LOGGER

- if verbosity in ['DEBUG', 'INFO']:
- set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
+ if AIIDA_LOGGER.level <= logging.REPORT: # pylint: disable=no-member
+ set_progress_bar_tqdm(leave=(AIIDA_LOGGER.level == logging.DEBUG))
else:
set_progress_reporter(None)
- IMPORT_LOGGER.setLevel(verbosity)
- MIGRATE_LOGGER.setLevel(verbosity)

all_archives = _gather_imports(archives, webpages)

@@ -358,9 +327,8 @@ def import_archive(
'comment_mode': comment_mode,
}

- with override_log_formatter_context('%(message)s'):
- for archive, web_based in all_archives:
- _import_archive(archive, web_based, import_kwargs, migration)
+ for archive, web_based in all_archives:
+ _import_archive(archive, web_based, import_kwargs, migration)


def _echo_exception(msg: str, exception, warn_only: bool = False):
@@ -401,7 +369,7 @@ def _gather_imports(archives, webpages) -> List[Tuple[str, bool]]:
if webpages is not None:
for webpage in webpages:
try:
- echo.echo_info(f'retrieving archive URLS from {webpage}')
+ echo.echo_report(f'retrieving archive URLS from {webpage}')
urls = get_valid_import_links(webpage)
except Exception as error:
echo.echo_critical(
@@ -434,7 +402,7 @@ def _import_archive(archive: str, web_based: bool, import_kwargs: dict, try_migr
archive_path = archive

if web_based:
- echo.echo_info(f'downloading archive: {archive}')
+ echo.echo_report(f'downloading archive: {archive}')
try:
with urllib.request.urlopen(archive) as response:
temp_folder.create_file_from_filelike(response, 'downloaded_archive.zip')
@@ -444,13 +412,13 @@ def _import_archive(archive: str, web_based: bool, import_kwargs: dict, try_migr
archive_path = temp_folder.get_abs_path('downloaded_archive.zip')
echo.echo_success('archive downloaded, proceeding with import')

- echo.echo_info(f'starting import: {archive}')
+ echo.echo_report(f'starting import: {archive}')
try:
import_data(archive_path, **import_kwargs)
except IncompatibleArchiveVersionError as exception:
if try_migration:

- echo.echo_info(f'incompatible version detected for {archive}, trying migration')
+ echo.echo_report(f'incompatible version detected for {archive}, trying migration')
try:
migrator = get_migrator(detect_archive_type(archive_path))(archive_path)
archive_path = migrator.migrate(
@@ -459,7 +427,7 @@ def _import_archive(archive: str, web_based: bool, import_kwargs: dict, try_migr
except Exception as exception:
_echo_exception(f'an exception occurred while migrating the archive {archive}', exception)

- echo.echo_info('proceeding with import of migrated archive')
+ echo.echo_report('proceeding with import of migrated archive')
try:
import_data(archive_path, **import_kwargs)
except Exception as exception:
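Note on the progress-bar gating in cmd_archive.py: with the per-command --verbosity flags removed, the tqdm progress bars are now keyed off AIIDA_LOGGER.level, shown at REPORT verbosity or chattier and kept on screen (leave=True) only at DEBUG. A standalone sketch of that gating, assuming tqdm is installed and reusing the assumed REPORT = 23 from the earlier note:

# Sketch of level-gated progress reporting, following the pattern above.
# Assumes tqdm is available; REPORT = 23 mirrors aiida.common.log.
import logging

from tqdm import tqdm

REPORT = 23
logging.addLevelName(REPORT, 'REPORT')
LOGGER = logging.getLogger('archive-sketch')
LOGGER.setLevel(REPORT)


def progress(iterable):
    """Wrap the iterable in a progress bar only when logging is chatty enough."""
    if LOGGER.level <= REPORT:
        # leave=True keeps the finished bar visible, useful when debugging.
        return tqdm(iterable, leave=LOGGER.level == logging.DEBUG)
    return iterable  # silent pass-through at WARNING and above


for _ in progress(range(100)):
    pass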
2 changes: 1 addition & 1 deletion aiida/cmdline/commands/cmd_calcjob.py
@@ -46,7 +46,7 @@ def calcjob_gotocomputer(calcjob):
echo.echo_critical('no remote work directory for this calcjob, maybe the daemon did not submit it yet')

command = transport.gotocomputer_command(remote_workdir)
- echo.echo_info('going to the remote work directory...')
+ echo.echo_report('going to the remote work directory...')
os.system(command)

