diff --git a/src/aiida/cmdline/commands/cmd_process.py b/src/aiida/cmdline/commands/cmd_process.py
index ed0b3ccdd0..842232fc8e 100644
--- a/src/aiida/cmdline/commands/cmd_process.py
+++ b/src/aiida/cmdline/commands/cmd_process.py
@@ -558,7 +558,7 @@ def process_repair(manager, broker, dry_run):
         echo.echo_report(f'Revived process `{pid}`')
 
 
-@verdi_process.command("dump")
+@verdi_process.command('dump')
 @arguments.PROCESS()
 @options.PATH()
 @options.OVERWRITE()
@@ -613,30 +613,30 @@ def process_dump(
     node data for further inspection.
     """
 
+    from aiida.tools.archive.exceptions import ExportValidationError
     from aiida.tools.dumping.data import DataDumper
     from aiida.tools.dumping.processes import ProcessDumper
-    from aiida.tools.archive.exceptions import ExportValidationError
 
     # from aiida.tools.dumping.utils import validate_rich_options
     from aiida.tools.dumping.rich import rich_from_cli
 
     processdumper_kwargs = {
-        "include_inputs": include_inputs,
-        "include_outputs": include_outputs,
-        "include_attributes": include_attributes,
-        "include_extras": include_extras,
-        "flat": flat,
-        "dump_unsealed": dump_unsealed,
-        "incremental": incremental,
+        'include_inputs': include_inputs,
+        'include_outputs': include_outputs,
+        'include_attributes': include_attributes,
+        'include_extras': include_extras,
+        'flat': flat,
+        'dump_unsealed': dump_unsealed,
+        'incremental': incremental,
     }
 
     rich_kwargs = {
-        "rich_dump_all": rich_dump_all,
+        'rich_dump_all': rich_dump_all,
     }
 
     datadumper_kwargs = {
-        "also_raw": also_raw,
-        "also_rich": also_rich,
+        'also_raw': also_raw,
+        'also_rich': also_rich,
     }
 
     # if also_rich:
@@ -672,15 +672,13 @@ def process_dump(
             output_path=path,
         )
         echo.echo_success(
-            f"Raw files for {process.__class__.__name__} <{process.pk}> dumped into folder `{dump_path}`."
+            f'Raw files for {process.__class__.__name__} <{process.pk}> dumped into folder `{dump_path}`.'
         )
     except FileExistsError:
         echo.echo_critical(
-            "Dumping directory exists and overwrite is False. Set overwrite to True, or delete directory manually."
+            'Dumping directory exists and overwrite is False. Set overwrite to True, or delete directory manually.'
        )
     except ExportValidationError as e:
-        echo.echo_critical(f"{e!s}")
+        echo.echo_critical(f'{e!s}')
     except Exception as e:
-        echo.echo_critical(
-            f"Unexpected error while dumping {process.__class__.__name__} <{process.pk}>:\n ({e!s})."
-        )
+        echo.echo_critical(f'Unexpected error while dumping {process.__class__.__name__} <{process.pk}>:\n ({e!s}).')
diff --git a/src/aiida/cmdline/commands/cmd_profile.py b/src/aiida/cmdline/commands/cmd_profile.py
index f035977f2c..62f9901fab 100644
--- a/src/aiida/cmdline/commands/cmd_profile.py
+++ b/src/aiida/cmdline/commands/cmd_profile.py
@@ -325,7 +325,6 @@ def storage_mirror(
 ):
     """Dump all data in an AiiDA profile's storage to disk."""
 
-    from aiida import orm
 
     from aiida.tools.dumping.parser import DumpConfigParser
     from aiida.tools.dumping.rich import (
@@ -550,4 +549,4 @@ def storage_mirror(
         if dump_data:
             echo.echo_report(f'Dumping data for group `{group.label}`...')
             collection_dumper.dump_data_rich()
-            # collection_dumper.dump_plugin_data()
\ No newline at end of file
+            # collection_dumper.dump_plugin_data()
diff --git a/src/aiida/tools/dumping/processes.py b/src/aiida/tools/dumping/processes.py
index 29fbef07c9..6c800c26c7 100644
--- a/src/aiida/tools/dumping/processes.py
+++ b/src/aiida/tools/dumping/processes.py
@@ -354,22 +354,19 @@ def _dump_calculation(
                 output_path=output_path / io_dump_mapping.inputs, link_triples=input_links
             )
 
-
         if self.data_dumper.also_rich:
             rich_data_output_path = output_path / io_dump_mapping.inputs
-                # if not self.data_dumper.data_hidden:
-                #     rich_data_output_path = output_path / io_dump_mapping.inputs
-                # else:
-                #     # TODO: Currently, when dumping only one selected workflow, if rich dumping is activated, but
-                #     # TODO: `data-hidden` is set, no data nodes were actually being dumped
-                #     # TODO: With the current implementation below, they are dumped, but not in the same structure as for the
-                #     # TODO: `dump_rich_core` function. Quick fix for now
-                #     pass
+            # if not self.data_dumper.data_hidden:
+            #     rich_data_output_path = output_path / io_dump_mapping.inputs
+            # else:
+            #     # TODO: Currently, when dumping only one selected workflow, if rich dumping is activated, but
+            #     # TODO: `data-hidden` is set, no data nodes were actually being dumped
+            #     # TODO: With the current implementation below, they are dumped, but not in the same structure as for the
+            #     # TODO: `dump_rich_core` function. Quick fix for now
+            #     pass
             # Only dump the rich data output files in the process directories if data_hidden is False
-            self._dump_calculation_io_files_rich(
-                output_path=rich_data_output_path, link_triples=input_links
-            )
+            self._dump_calculation_io_files_rich(output_path=rich_data_output_path, link_triples=input_links)
 
         # Dump the node_outputs apart from `retrieved`
         if self.include_outputs:
             output_links = list(calculation_node.base.links.get_outgoing(link_type=LinkType.CREATE))