diff --git a/.gitignore b/.gitignore index fbba9f5..de3ce0b 100644 --- a/.gitignore +++ b/.gitignore @@ -208,3 +208,5 @@ __marimo__/ # MyST documentation build docs/_build + +dev/ \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 7e17841..6fd2cbd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,11 +31,15 @@ dependencies = [ Source = "https://github.com/aiidaplugins/aiida-epw" [project.entry-points.'aiida.calculations'] -'quantumespresso.epw' = 'aiida_quantumespresso.calculations.epw:EpwCalculation' +'epw.epw' = 'aiida_epw.calculations.epw:EpwCalculation' [project.entry-points.'aiida.parsers'] -'quantumespresso.epw' = 'aiida_quantumespresso.parsers.epw:EpwParser' +'epw.epw' = 'aiida_epw.parsers.epw:EpwParser' +[project.entry-points.'aiida.workflows'] +'epw.base' = 'aiida_epw.workflows.base:EpwBaseWorkChain' +'epw.epw_prep' = 'aiida_epw.workflows.prep:EpwPrepWorkChain' +'epw.supercon' = 'aiida_epw.workflows.supercon:SuperConWorkChain' [tool.hatch.version] path = "src/aiida_epw/__about__.py" diff --git a/src/aiida_epw/__init__.py b/src/aiida_epw/__init__.py index 2a208c7..280142f 100644 --- a/src/aiida_epw/__init__.py +++ b/src/aiida_epw/__init__.py @@ -1 +1,3 @@ """AiiDA plugin package for interfacing with the Electron Phonon Wannier (EPW) code.""" + +__version__ = '0.1.0' \ No newline at end of file diff --git a/src/aiida_epw/calculations/epw.py b/src/aiida_epw/calculations/epw.py index 3d430a5..3e7df1a 100644 --- a/src/aiida_epw/calculations/epw.py +++ b/src/aiida_epw/calculations/epw.py @@ -57,11 +57,22 @@ def define(cls, spec): spec.input('settings', valid_type=orm.Dict, required=False, help='') spec.input('parent_folder_nscf', required=False, valid_type=orm.RemoteData, help='the folder of a completed nscf `PwCalculation`') + spec.input('parent_folder_chk', required=False, valid_type=orm.RemoteData, + help='the folder of a completed wannier90 `Wannier90Calculation`') spec.input('parent_folder_ph', required=False, 
valid_type=orm.RemoteData, help='the folder of a completed `PhCalculation`') spec.input('parent_folder_epw', required=False, valid_type=(orm.RemoteData, orm.RemoteStashFolderData), help='folder that contains all files required to restart an `EpwCalculation`') - spec.inputs['metadata']['options']['parser_name'].default = 'quantumespresso.epw' + spec.input( + 'w90_chk_to_ukk_script', + valid_type=orm.RemoteData, + required=False, + help=( + "The script to convert the chk file to a ukk file" + ) + ) + + spec.inputs['metadata']['options']['parser_name'].default = 'epw.epw' spec.output('output_parameters', valid_type=orm.Dict, help='The `output_parameters` output node of the successful calculation.') @@ -131,6 +142,28 @@ def test_offset(offset): Path(parent_folder_nscf.get_remote_path(), PwCalculation._OUTPUT_SUBFOLDER).as_posix(), self._OUTPUT_SUBFOLDER, )) + + # If parent_folder_chk is provided, we need to copy the .chk, .bvec, and .mmn files to the epw folder. + # We can do symlink for .chk and .bvec. .mmn file is already a symlink as defined in wannier workflow. + # Note that we do some modification to the .mmn file in site so here we rename it to avoid overwriting. + if 'parent_folder_chk' in self.inputs: + parent_folder_chk = self.inputs.parent_folder_chk + + for suffix in ['chk', 'bvec']: + remote_list.append( + ( + parent_folder_chk.computer.uuid, + Path(parent_folder_chk.get_remote_path(), self._PREFIX + '.' + suffix).as_posix(), + self._PREFIX + '.' 
+ suffix + ) + ) + remote_list.append( + ( + parent_folder_chk.computer.uuid, + Path(parent_folder_chk.get_remote_path(), self._PREFIX + '.mmn').as_posix(), + self._PREFIX + '.wannier90.mmn' + ) + ) if 'parent_folder_ph' in self.inputs: parent_folder_ph = self.inputs.parent_folder_ph @@ -207,6 +240,21 @@ def test_offset(offset): remote_list.append( (parent_folder_epw.computer.uuid, Path(epw_path, filename).as_posix(), Path(filename).as_posix()) ) + # check if wannierize is True and if parent_folder_epw or parent_folder_chk is provided + wannierize = parameters['INPUTEPW'].get('wannierize', False) + + if wannierize and any( + _ in self.inputs + for _ in ["parent_folder_epw", "parent_folder_chk"] + ): + self.report("Should not have a parent folder of epw or chk if wannierize is True") + return self.exit_codes.ERROR_PARAMETERS_NOT_VALID + + # check if nstemp is too large + nstemp = parameters['INPUTEPW'].get('nstemp', None) + if nstemp and nstemp > self._MAX_NSTEMP: + self.report(f'nstemp too large, reset it to maximum allowed: {self._MAX_NSTEMP}') + parameters['INPUTEPW']['nstemp'] = self._MAX_NSTEMP parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER parameters['INPUTEPW']['dvscf_dir'] = self._FOLDER_SAVE diff --git a/src/aiida_epw/parsers/__init__.py b/src/aiida_epw/parsers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/aiida_epw/parsers/epw.py b/src/aiida_epw/parsers/epw.py index 3357160..aba88f0 100644 --- a/src/aiida_epw/parsers/epw.py +++ b/src/aiida_epw/parsers/epw.py @@ -3,7 +3,7 @@ from aiida import orm import numpy -from aiida_quantumespresso.calculations.epw import EpwCalculation +from aiida_epw.calculations.epw import EpwCalculation from aiida_quantumespresso.parsers.base import BaseParser from aiida_quantumespresso.utils.mapping import get_logging_container diff --git a/src/aiida_epw/tools/calculators.py b/src/aiida_epw/tools/calculators.py index d7389d0..8684f5a 100644 --- a/src/aiida_epw/tools/calculators.py +++ 
b/src/aiida_epw/tools/calculators.py @@ -27,3 +27,7 @@ def calculate_lambda_omega(frequency: ArrayLike, spectrum: ArrayLike) -> tuple: omega_log = omega_log * meV_to_Kelvin return lambda_, omega_log + +# This function is taken from https://www.sciencedirect.com/science/article/pii/S0010465516302260 eq.81 +def bcs_gap_function(T, Tc, p, Delta_0): + return Delta_0 * numpy.sqrt(1 - (T/Tc)**p) \ No newline at end of file diff --git a/src/aiida_epw/tools/kpoints.py b/src/aiida_epw/tools/kpoints.py new file mode 100644 index 0000000..b2e1e6c --- /dev/null +++ b/src/aiida_epw/tools/kpoints.py @@ -0,0 +1,23 @@ +def check_kpoints_qpoints_compatibility( + kpoints, + qpoints, + ) -> tuple[bool, str ]: + """Check if the kpoints and qpoints are compatible.""" + + kpoints_mesh, kpoints_shift = kpoints.get_kpoints_mesh() + qpoints_mesh, qpoints_shift = qpoints.get_kpoints_mesh() + + multiplicities = [] + remainder = [] + + for k, q in zip(kpoints_mesh, qpoints_mesh): + multiplicities.append(k // q) + remainder.append(k % q) + + if kpoints_shift != [0.0, 0.0, 0.0] or qpoints_shift != [0.0, 0.0, 0.0]: + return (False, "Shift grid is not supported.") + else: + if remainder == [0, 0, 0]: + return (True, f"The kpoints and qpoints are compatible with multiplicities {multiplicities}.") + else: + return (False, "The kpoints and qpoints are not compatible.") diff --git a/src/aiida_epw/workflows/__init__.py b/src/aiida_epw/workflows/__init__.py index e69de29..4cd5536 100644 --- a/src/aiida_epw/workflows/__init__.py +++ b/src/aiida_epw/workflows/__init__.py @@ -0,0 +1,10 @@ +"""Workflows for the EPW code.""" +from .base import EpwBaseWorkChain +from .prep import EpwPrepWorkChain +from .supercon import SuperConWorkChain + +__all__ = [ + 'EpwBaseWorkChain', + 'EpwPrepWorkChain', + 'SuperConWorkChain', +] \ No newline at end of file diff --git a/src/aiida_epw/workflows/base.py b/src/aiida_epw/workflows/base.py new file mode 100644 index 0000000..16ee372 --- /dev/null +++ 
b/src/aiida_epw/workflows/base.py @@ -0,0 +1,392 @@ +# -*- coding: utf-8 -*- +from aiida import orm +from aiida.common import AttributeDict, NotExistent + +from aiida.engine import BaseRestartWorkChain, ProcessHandlerReport, process_handler, while_ +from aiida.plugins import CalculationFactory +from aiida.common.lang import type_check + +from aiida_quantumespresso.calculations.functions.create_kpoints_from_distance import create_kpoints_from_distance +from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin +from aiida.orm.nodes.data.base import to_aiida_type + +from aiida_epw.tools.kpoints import check_kpoints_qpoints_compatibility + +EpwCalculation = CalculationFactory('epw.epw') + +def get_kpoints_from_chk_folder(chk_folder): + """ + This method tries different strategies to find the k-point mesh from a parent nscf folder. + + :param chk_folder: A RemoteData node from a Wannier90Calculation (chk). + :return: A KpointsData node that has mesh information. + :raises ValueError: If the mesh cannot be found through any strategy. + """ + + wannier_params = chk_folder.creator.inputs.parameters + + if 'mp_grid' in wannier_params: + mp_grid = wannier_params['mp_grid'] + kpoints = orm.KpointsData() + kpoints.set_kpoints_mesh(mp_grid) + return kpoints + else: + raise ValueError("Could not deduce mesh from the parent folder of the nscf calculation.") + +def validate_inputs( # pylint: disable=unused-argument,inconsistent-return-statements + inputs, ctx=None + ): + """Validate the inputs of the entire input namespace of `EpwBaseWorkChain`.""" + # Usually at the creation of the inputs, the coarse and fine k/q grid is already determined. + # + # Cannot specify both `kfpoints` and `kfpoints_factor` + if ( + all( + _ in inputs + for _ in ["kfpoints", "kfpoints_factor"] + ) + ): + return "Can only specify one of the `kfpoints`, `kfpoints_factor`." 
+ + if not any( + [_ in inputs for _ in ["kfpoints", "kfpoints_factor"]] + ): + return "Either `kfpoints` or `kfpoints_factor` must be specified." + + # Cannot specify both `kfpoints` and `kfpoints_factor` + if ( + all( + _ in inputs + for _ in ["qfpoints", "qfpoints_distance"] + ) + ): + return "Can only specify one of the `qfpoints`, `qfpoints_distance`." + + if not any( + [_ in inputs for _ in ["qfpoints", "qfpoints_distance"]] + ): + return "Either `qfpoints` or `qfpoints_distance` must be specified." + + return None + +class EpwBaseWorkChain(ProtocolMixin, BaseRestartWorkChain): + """BaseWorkchain to run a epw.x calculation.""" + _process_class = EpwCalculation + + @classmethod + def define(cls, spec): + """Define the process specification.""" + # yapf: disable + super().define(spec) + + # Here we exclude the `metadata` input of the EpwCalculation + # to avoid the conflict between it with the `metadata` of this WorkChain. + # We also exclude the `qfpoints` and `kfpoints` inputs of the EpwCalculation + # because they are marked as required in the EpwCalculation. + # but will only be provided when the EpwBaseWorkChain is run. + spec.expose_inputs( + EpwCalculation, + exclude=('metadata', 'qfpoints', 'kfpoints') + ) + + spec.input( + 'options', + valid_type=orm.Dict, + required=True, + serializer=to_aiida_type, + help=( + "The options dictionary for the calculation." + "It must be defined in the top-level as a solution of the conflict between `metadata_calculation` of the WorkChain and `metadata` of the Calculation." + ) + ) + + spec.input( + 'structure', + valid_type=orm.StructureData, + required=False, + help=( + "The structure data to use for the generation of k/q points by `create_kpoints_from_distance` calcfunction." + "In principle, we should take the structure as the one we used in the previous calculation." + "However, it is a bit difficult to take all the restart cases into account if we have a long chain of EPW calculations." 
+ "Therefore, for now we just provide it manually as an input." + "But in the future, it will be removed." + "In cases that the coarse and fine k/q points are explicitly speficied, this input is not necessary anymore." + ) + ) + + spec.input( + 'qfpoints_distance', + valid_type=orm.Float, + serializer=to_aiida_type, + required=False, + help=( + "The q-points distance to generate the find qpoints" + "If specified, the fine qpoints will be generated from `create_kpoints_from_distance` calcfunction." + "If not specified, the fine qpoints will be read from the inputs.qfpoints input." + ) + ) + + + spec.input( + 'kfpoints_factor', + valid_type=orm.Int, + serializer=to_aiida_type, + required=False, + help=( + "The factor to multiply the q-point mesh to get the fine k-point mesh" + "If not specified, the fine kpoints will be generated from the parent folder of the nscf calculation." + ) + ) + + spec.inputs.validator = validate_inputs + + spec.outline( + cls.setup, + cls.validate_kpoints, + while_(cls.should_run_process)( + cls.prepare_process, + cls.run_process, + cls.inspect_process, + ), + cls.results, + ) + + spec.expose_outputs(EpwCalculation) + + spec.exit_code(202, 'ERROR_COARSE_GRID_NOT_VALID', + message='The specification of coarse k/q grid is not valid.') + spec.exit_code(300, 'ERROR_UNRECOVERABLE_FAILURE', + message='The calculation failed with an unidentified unrecoverable error.') + spec.exit_code(310, 'ERROR_KNOWN_UNRECOVERABLE_FAILURE', + message='The calculation failed with a known unrecoverable error.') + + @classmethod + def get_protocol_filepath(cls): + """Return ``pathlib.Path`` to the ``.yaml`` file that defines the protocols.""" + from importlib_resources import files + + from . 
import protocols + return files(protocols) / 'base.yaml' + + @classmethod + def get_builder_from_protocol( + cls, + code, + structure, + protocol=None, + overrides=None, + options=None, + w90_chk_to_ukk_script=None, + **_ + ): + """Return a builder prepopulated with inputs selected according to the chosen protocol. + + :param code: the ``Code`` instance configured for the ``quantumespresso.epw`` plugin. + :param protocol: protocol to use, if not specified, the default will be used. + :param overrides: optional dictionary of inputs to override the defaults of the protocol. + :param w90_chk_to_ukk_script: a julia script to convert the prefix.chk file (generated by wannier90.x) to a prefix.ukk file (to be used by epw.x) + :return: a process builder instance with all inputs defined ready for launch. + """ + from aiida_quantumespresso.workflows.protocols.utils import recursive_merge + + type_check(code, orm.Code) + type_check(structure, orm.StructureData) + + inputs = cls.get_protocol_inputs(protocol, overrides) + + # Update the parameters based on the protocol inputs + parameters = inputs['parameters'] + + # If overrides are provided, they are considered absolute + if overrides: + parameter_overrides = overrides.get('parameters', {}) + parameters = recursive_merge(parameters, parameter_overrides) + + metadata = inputs.pop('metadata') + + if options: + metadata['options'] = recursive_merge(metadata['options'], options) + + # pylint: disable=no-member + builder = cls.get_builder() + builder.structure = structure + builder.code = code + builder.parameters = orm.Dict(parameters) + ## Must firstly pop the options from the metadata dictionary. 
+ builder.options = metadata.pop('options') + + if w90_chk_to_ukk_script: + type_check(w90_chk_to_ukk_script, orm.RemoteData) + builder.w90_chk_to_ukk_script = w90_chk_to_ukk_script + + if 'settings' in inputs: + builder.settings = orm.Dict(inputs['settings']) + if 'parallelization' in inputs: + builder.parallelization = orm.Dict(inputs['parallelization']) + + builder.clean_workdir = orm.Bool(inputs['clean_workdir']) + + builder.qfpoints_distance = orm.Float(inputs['qfpoints_distance']) + builder.kfpoints_factor = orm.Int(inputs['kfpoints_factor']) + builder.max_iterations = orm.Int(inputs['max_iterations']) + # pylint: enable=no-member + + return builder + + def setup(self): + """Call the ``setup`` of the ``BaseRestartWorkChain`` and create the inputs dictionary in ``self.ctx.inputs``. + This ``self.ctx.inputs`` dictionary will be used by the ``EpwCalculation`` in the internal loop. + """ + super().setup() + + self.ctx.inputs = AttributeDict( + self.exposed_inputs(EpwCalculation) + ) + + # Initialize here an empty metadata dictionary. + # Now there is only options in it. + + # TODO: Should check if we need to append more + # information into it. + metadata = {} + + metadata['options'] = self.inputs.options.get_dict() + + ## Didn't find the way to modify `metadata` in EpwCalculation. + ## It should be migrated into EpwCalculation in the future. 
+ if 'w90_chk_to_ukk_script' in self.inputs and 'parent_folder_chk' in self.inputs: + prepend_text = metadata['options'].get('prepend_text', '') + prepend_text += f'\n{self.inputs.w90_chk_to_ukk_script.get_remote_path()} {EpwCalculation._PREFIX}.chk {EpwCalculation._OUTPUT_SUBFOLDER}{EpwCalculation._PREFIX}.xml {EpwCalculation._PREFIX}.ukk {EpwCalculation._PREFIX}.wannier90.mmn {EpwCalculation._PREFIX}.mmn' + + metadata['options']['prepend_text'] = prepend_text + + self.ctx.inputs.metadata = metadata + + # Update of the parameters should be done here instead of in EpwCalculation + # so that all the changes of parameters are saved! + # IMPORTANT: I notice that since now EpwCalculation is not encapsulated, the parameters exposed to + # the EpwBaseWorkChain and the parameters inside EpwCalculation are not the same. + parameters = self.ctx.inputs.parameters.get_dict() + + if 'parent_folder_chk' in self.inputs: + w90_params = self.inputs.parent_folder_chk.creator.inputs.parameters.get_dict() + exclude_bands = w90_params.get('exclude_bands', None) #TODO check this! + + if exclude_bands: + parameters['INPUTEPW']['bands_skipped'] = f'exclude_bands = {exclude_bands[0]}:{exclude_bands[-1]}' + + parameters['INPUTEPW']['nbndsub'] = w90_params['num_wann'] + + if 'parent_folder_epw' in self.inputs: + epw_params = self.inputs.parent_folder_epw.creator.inputs.parameters.get_dict() + parameters['INPUTEPW']['use_ws'] = epw_params['INPUTEPW'].get("use_ws", False) + parameters['INPUTEPW']['nbndsub'] = epw_params['INPUTEPW']['nbndsub'] + if 'bands_skipped' in epw_params['INPUTEPW']: + parameters['INPUTEPW']['bands_skipped'] = epw_params['INPUTEPW'].get('bands_skipped') + + self.ctx.inputs.parameters = orm.Dict(parameters) + + # We should validate the kpoints and qpoints on the fly + # because they are usually not determined at the creation of the inputs. + def validate_kpoints(self): + """ + Validate the inputs related to k-points. 
+ `epw.x` requires coarse k-points and q-points to be compatible, which means the kpoints should be multiple of qpoints. + e.g. if qpoints are [2,2,2], kpoints should be [2*l,2*m,2*n] for integer l,m,n. + We firstly construct qpoints. Either an explicit `KpointsData` with given mesh/path, or a desired qpoints distance should be specified. + In the case of the latter, the `KpointsData` will be constructed for the input `StructureData` using the `create_kpoints_from_distance` calculation function. + Then we construct kpoints by multiplying the qpoints mesh by the `kpoints_factor`. + """ + + # If there is already the parent folder of a previous EPW calculation, the coarse k/q grid is already there and must be valid. + # We only need to take the kpointsdata from it and continue to generate the find grid. + + if 'parent_folder_epw' in self.inputs: + epw_calc = self.inputs.parent_folder_epw.creator + kpoints = epw_calc.inputs.kpoints + qpoints = epw_calc.inputs.qpoints + + # If there is no parent folder of a previous EPW calculation, it must be the case that we are running the transition from coarse BLoch representation to Wannier representation. 
+ # This means that we are using coarse k grid from a previous nscf calculation and + else: + if 'kpoints' in self.inputs: + kpoints = self.inputs.kpoints + elif 'parent_folder_chk' in self.inputs: + kpoints = get_kpoints_from_chk_folder(self.inputs.parent_folder_chk) + else: + self.report("Could not determine the coarse k-points from the inputs or the parent folder of the wannier90 calculation.") + return self.exit_codes.ERROR_COARSE_GRID_NOT_VALID + + if 'qpoints' in self.inputs: + qpoints = self.inputs.qpoints + elif 'parent_folder_ph' in self.inputs: + qpoints = self.inputs.parent_folder_ph.creator.inputs.qpoints + else: + self.report("Could not determine the coarse q-points from the inputs or the parent folder of the ph calculation.") + return self.exit_codes.ERROR_COARSE_GRID_NOT_VALID + + self.report(f"Successfully determined coarse k-points from the inputs: {kpoints.get_kpoints_mesh()[0]}") + self.report(f"Successfully determined coarse q-points from the inputs: {qpoints.get_kpoints_mesh()[0]}") + + + is_compatible, message = check_kpoints_qpoints_compatibility(kpoints, qpoints) + + self.ctx.inputs.kpoints = kpoints + self.ctx.inputs.qpoints = qpoints + + if not is_compatible: + self.report(message) + return self.exit_codes.ERROR_COARSE_GRID_NOT_VALID + + ## TODO: If we are restarting from .ephmat folder, we should use the same + ## qfpoints and kfpoints as the creator of 'parent_folder_epw'. 
+ if 'qfpoints' in self.inputs: + qfpoints = self.inputs.qfpoints + else: + inputs = { + 'structure': self.inputs.structure, + 'distance': self.inputs.qfpoints_distance, + 'force_parity': self.inputs.get('qfpoints_force_parity', orm.Bool(False)), + 'metadata': { + 'call_link_label': 'create_qfpoints_from_distance' + } + } + qfpoints = create_kpoints_from_distance(**inputs) # pylint: disable=unexpected-keyword-arg + + if 'kfpoints' in self.inputs: + kfpoints = self.inputs.kfpoints + else: + qfpoints_mesh = qfpoints.get_kpoints_mesh()[0] + kfpoints = orm.KpointsData() + kfpoints.set_kpoints_mesh([v * self.inputs.kfpoints_factor.value for v in qfpoints_mesh]) + + self.ctx.inputs.qfpoints = qfpoints + self.ctx.inputs.kfpoints = kfpoints + + def prepare_process(self): + """ + Prepare inputs for the next calculation. + + Currently, no modifications to `self.ctx.inputs` are needed before + submission. We rely on the parent `run_process` to create the builder. + """ + pass + + def report_error_handled(self, calculation, action): + """Report an action taken for a calculation that has failed. + + This should be called in a registered error handler if its condition is met and an action was taken. 
+ + :param calculation: the failed calculation node + :param action: a string message with the action taken + """ + arguments = [calculation.process_label, calculation.pk, calculation.exit_status, calculation.exit_message] + self.report('{}<{}> failed with exit status {}: {}'.format(*arguments)) + self.report(f'Action taken: {action}') + + @process_handler(priority=600) + def handle_unrecoverable_failure(self, calculation): + """Handle calculations with an exit status below 400 which are unrecoverable, so abort the work chain.""" + if calculation.is_failed and calculation.exit_status < 400: + self.report_error_handled(calculation, 'unrecoverable error, aborting...') + return ProcessHandlerReport(True, self.exit_codes.ERROR_UNRECOVERABLE_FAILURE) diff --git a/src/aiida_epw/workflows/epw.py b/src/aiida_epw/workflows/prep.py similarity index 71% rename from src/aiida_epw/workflows/epw.py rename to src/aiida_epw/workflows/prep.py index 45c7f28..a8240b7 100644 --- a/src/aiida_epw/workflows/epw.py +++ b/src/aiida_epw/workflows/prep.py @@ -4,11 +4,10 @@ from aiida import orm from aiida.common import AttributeDict -from aiida.engine import WorkChain, ToContext +from aiida.engine import WorkChain, ToContext, if_ from aiida_quantumespresso.workflows.ph.base import PhBaseWorkChain from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin -from aiida_quantumespresso.calculations.epw import EpwCalculation from aiida_quantumespresso.calculations.functions.create_kpoints_from_distance import create_kpoints_from_distance from aiida_wannier90_workflows.workflows import Wannier90BandsWorkChain, Wannier90OptimizeWorkChain @@ -16,8 +15,9 @@ from aiida_wannier90_workflows.utils.workflows.builder.setter import set_kpoints from aiida_wannier90_workflows.common.types import WannierProjectionType +from aiida_epw.workflows.base import EpwBaseWorkChain -class EpwWorkChain(ProtocolMixin, WorkChain): +class EpwPrepWorkChain(ProtocolMixin, WorkChain): """Main work chain to start 
calculating properties using EPW. Has support for both the selected columns of the density matrix (SCDM) and @@ -50,17 +50,29 @@ def define(cls, spec): 'clean_workdir', 'ph.parent_folder', 'qpoints', 'qpoints_distance' ), namespace_options={ - 'help': 'Inputs for the `PwBaseWorkChain` that does the `ph.x` calculation.' + 'help': 'Inputs for the `PhBaseWorkChain` that does the `ph.x` calculation.' } ) spec.expose_inputs( - EpwCalculation, namespace='epw', exclude=( - 'parent_folder_ph', 'parent_folder_nscf', 'kpoints', 'qpoints', 'kfpoints', 'qfpoints' + EpwBaseWorkChain, namespace='epw_base', exclude=( + 'structure', + 'clean_workdir', + 'kpoints', + 'qpoints', + 'kfpoints', + 'qfpoints', + 'qfpoints_distance', + 'kfpoints_factor', + 'parent_folder_ph', + 'parent_folder_nscf', + 'parent_folder_epw', + 'parent_folder_chk' ), namespace_options={ - 'help': 'Inputs for the `EpwCalculation`.' + 'help': 'Inputs for the `EpwBaseWorkChain`.' } ) + spec.output('retrieved', valid_type=orm.FolderData) spec.output('epw_folder', valid_type=orm.RemoteStashFolderData) @@ -86,7 +98,7 @@ def get_protocol_filepath(cls): """Return ``pathlib.Path`` to the ``.yaml`` file that defines the protocols.""" from importlib_resources import files from . 
import protocols - return files(protocols) / 'epw.yaml' + return files(protocols) / 'prep.yaml' @classmethod def get_builder_from_protocol(cls, codes, structure, protocol=None, overrides=None, @@ -104,6 +116,9 @@ def get_builder_from_protocol(cls, codes, structure, protocol=None, overrides=No """ inputs = cls.get_protocol_inputs(protocol, overrides) + builder = cls.get_builder() + builder.structure = structure + w90_bands_inputs = inputs.get('w90_bands', {}) pseudo_family = w90_bands_inputs.pop('pseudo_family', None) @@ -135,39 +150,47 @@ def get_builder_from_protocol(cls, codes, structure, protocol=None, overrides=No w90_bands.pop('structure', None) w90_bands.pop('open_grid', None) + builder.w90_bands = w90_bands + args = (codes['ph'], None, protocol) ph_base = PhBaseWorkChain.get_builder_from_protocol(*args, overrides=inputs.get('ph_base', None), **kwargs) ph_base.pop('clean_workdir', None) ph_base.pop('qpoints_distance') - epw_builder = EpwCalculation.get_builder() - - epw_builder.code = codes['epw'] - epw_inputs = inputs.get('epw', None) - - epw_builder.parameters = orm.Dict(epw_inputs['parameters']) + builder.ph_base = ph_base - if 'target_base' not in epw_builder.metadata['options']['stash']: - epw_computer = codes['epw'].computer - if epw_computer.transport_type == 'core.local': - target_basepath = Path(epw_computer.get_workdir(), 'stash').as_posix() - elif epw_computer.transport_type == 'core.ssh': - target_basepath = Path( - epw_computer.get_workdir().format(username=epw_computer.get_configuration()['username']), 'stash' - ).as_posix() - epw_inputs['metadata']['options']['stash']['target_base'] = target_basepath + # TODO: Here I have a loop for the epw builders for future extension of another epw bands interpolation + for namespace in ['epw_base',]: + epw_inputs = inputs.get(namespace, None) + if namespace == 'epw_base': + if 'target_base' not in epw_inputs['metadata']['options']['stash']: + epw_computer = codes['epw'].computer + if 
epw_computer.transport_type == 'core.local': + target_basepath = Path(epw_computer.get_workdir(), 'stash').as_posix() + elif epw_computer.transport_type == 'core.ssh': + target_basepath = Path( + epw_computer.get_workdir().format(username=epw_computer.get_configuration()['username']), 'stash' + ).as_posix() + + epw_inputs['metadata']['options']['stash']['target_base'] = target_basepath + + epw_builder = EpwBaseWorkChain.get_builder_from_protocol( + code=codes['epw'], + structure=structure, + protocol=protocol, + overrides=epw_inputs, + **kwargs + ) - epw_builder.metadata = epw_inputs['metadata'] - epw_builder.settings = orm.Dict(epw_inputs['settings']) + if 'settings' in epw_inputs: + epw_builder.settings = orm.Dict(epw_inputs['settings']) + if 'parallelization' in epw_inputs: + epw_builder.parallelization = orm.Dict(epw_inputs['parallelization']) + builder[namespace] = epw_builder - builder = cls.get_builder() builder.qpoints_distance = orm.Float(inputs['qpoints_distance']) builder.kpoints_distance_scf = orm.Float(inputs['kpoints_distance_scf']) builder.kpoints_factor_nscf = orm.Int(inputs['kpoints_factor_nscf']) - builder.structure = structure - builder.w90_bands = w90_bands - builder.ph_base = ph_base - builder.epw = epw_builder builder.clean_workdir = orm.Bool(inputs['clean_workdir']) return builder @@ -205,12 +228,13 @@ def generate_reciprocal_points(self): def run_wannier90(self): """Run the wannier90 workflow.""" if 'projwfc' in self.inputs.w90_bands: - self.report('Running a Wannier90BandsWorkChain.') w90_class = Wannier90BandsWorkChain else: - self.report('Running a Wannier90OptimizeWorkChain.') w90_class = Wannier90OptimizeWorkChain + self.ctx.w90_class_name = w90_class.get_name() + self.report(f'Running a {self.ctx.w90_class_name}.') + inputs = AttributeDict( self.exposed_inputs(Wannier90OptimizeWorkChain, namespace='w90_bands') ) @@ -221,7 +245,7 @@ def run_wannier90(self): inputs['scf']['kpoints'] = self.ctx.kpoints_scf workchain_node = 
self.submit(w90_class, **inputs) - self.report(f'launching wannier90 work chain {workchain_node.pk}') + self.report(f'launching {w90_class.get_name()}<{workchain_node.pk}>') return ToContext(workchain_w90_bands=workchain_node) @@ -230,7 +254,7 @@ def inspect_wannier90(self): workchain = self.ctx.workchain_w90_bands if not workchain.is_finished_ok: - self.report(f'`Wannier90BandsWorkChain` failed with exit status {workchain.exit_status}') + self.report(f'{self.ctx.w90_class_name}<{workchain.pk}> failed with exit status {workchain.exit_status}') return self.exit_codes.ERROR_SUB_PROCESS_FAILED_WANNIER90 def run_ph(self): @@ -244,7 +268,7 @@ def run_ph(self): inputs.metadata.call_link_label = 'ph_base' workchain_node = self.submit(PhBaseWorkChain, **inputs) - self.report(f'launching `ph` {workchain_node.pk}') + self.report(f'launching PhBaseWorkChain<{workchain_node.pk}>') return ToContext(workchain_ph=workchain_node) @@ -253,18 +277,27 @@ def inspect_ph(self): workchain = self.ctx.workchain_ph if not workchain.is_finished_ok: - self.report(f'Electron-phonon PhBaseWorkChain failed with exit status {workchain.exit_status}') + self.report(f'PhBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}') return self.exit_codes.ERROR_SUB_PROCESS_FAILED_PHONON def run_epw(self): - """Run the `epw.x` calculation.""" - inputs = AttributeDict(self.exposed_inputs(EpwCalculation, namespace='epw')) + """Run the `EpwBaseWorkChain`.""" + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain), namespace='epw_base') + # The EpwBaseWorkChain will take the parent folder of the previous + # PhCalculation, PwCalculation, and Wannier90Calculation. 
inputs.parent_folder_ph = self.ctx.workchain_ph.outputs.remote_folder - nscf_base_wc = self.ctx.workchain_w90_bands.base.links.get_outgoing(link_label_filter='nscf').first().node - inputs.parent_folder_nscf = nscf_base_wc.outputs.remote_folder + w90_workchain = self.ctx.workchain_w90_bands + inputs.parent_folder_nscf = w90_workchain.outputs.nscf.remote_folder + if self.ctx.w90_class_name == 'Wannier90OptimizeWorkChain' and w90_workchain.inputs.optimize_disproj: + inputs.parent_folder_chk = w90_workchain.outputs.wannier90_optimal__remote_folder + else: + inputs.parent_folder_chk = w90_workchain.outputs.wannier90.remote_folder + # Here we explicitly specify the coarse k/q grid so the EpwBaseWorkChain will not deduce it from the parent + # folders. This EpwBaseWorkChain is only used for the transition from coarse Bloch representation to Wannier + # representation. Thus the fine grid is always [1, 1, 1]. fine_points = orm.KpointsData() fine_points.set_kpoints_mesh([1, 1, 1]) @@ -273,47 +306,25 @@ def run_epw(self): inputs.qpoints = self.ctx.qpoints inputs.qfpoints = fine_points - parameters = inputs.parameters.get_dict() - - wannier_params = self.ctx.workchain_w90_bands.inputs.wannier90.wannier90.parameters.get_dict() - exclude_bands = wannier_params.get('exclude_bands') #TODO check this! 
- if exclude_bands: - parameters['INPUTEPW']['bands_skipped'] = f'exclude_bands = {exclude_bands[0]}:{exclude_bands[-1]}' - - parameters['INPUTEPW']['nbndsub'] = wannier_params['num_wann'] - inputs.parameters = orm.Dict(parameters) - - if 'projwfc' in self.inputs.w90_bands: - w90_remote_data = self.ctx.workchain_w90_bands.outputs.wannier90__remote_folder - else: - w90_remote_data = self.ctx.workchain_w90_bands.outputs.wannier90_optimal__remote_folder - - wannier_chk_path = Path(w90_remote_data.get_remote_path(), 'aiida.chk') - nscf_xml_path = Path(self.ctx.workchain_w90_bands.outputs.nscf.remote_folder.get_remote_path(), 'out/aiida.xml') + inputs.metadata.call_link_label = 'epw_base' - prepend_text = inputs.metadata.options.get('prepend_text', '') - prepend_text += f'\n{self.inputs.w90_chk_to_ukk_script.get_remote_path()} {wannier_chk_path} {nscf_xml_path} aiida.ukk' - inputs.metadata.options.prepend_text = prepend_text + workchain_node = self.submit(EpwBaseWorkChain, **inputs) + self.report(f'launching EpwBaseWorkChain<{workchain_node.pk}> in transformation mode') - inputs.metadata.call_link_label = 'epw' - - calcjob_node = self.submit(EpwCalculation, **inputs) - self.report(f'launching `epw` {calcjob_node.pk}') - - return ToContext(calcjob_epw=calcjob_node) + return ToContext(workchain_epw=workchain_node) def inspect_epw(self): - """Verify that the `epw.x` calculation finished successfully.""" - calcjob = self.ctx.calcjob_epw + """Verify that the `EpwBaseWorkChain` finished successfully.""" + workchain = self.ctx.workchain_epw - if not calcjob.is_finished_ok: - self.report(f'`EpwCalculation` failed with exit status {calcjob.exit_status}') + if not workchain.is_finished_ok: + self.report(f'EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}') return self.exit_codes.ERROR_SUB_PROCESS_FAILED_EPW def results(self): """Add the most important results to the outputs of the work chain.""" - self.out('retrieved', 
self.ctx.calcjob_epw.outputs.retrieved) - self.out('epw_folder', self.ctx.calcjob_epw.outputs.remote_stash) + self.out('retrieved', self.ctx.workchain_epw.outputs.retrieved) + self.out('epw_folder', self.ctx.workchain_epw.outputs.remote_stash) def on_terminated(self): """Clean the working directories of all child calculations if `clean_workdir=True` in the inputs.""" diff --git a/src/aiida_epw/workflows/protocols/base.yaml b/src/aiida_epw/workflows/protocols/base.yaml new file mode 100644 index 0000000..f8d79ae --- /dev/null +++ b/src/aiida_epw/workflows/protocols/base.yaml @@ -0,0 +1,29 @@ +default_inputs: + clean_workdir: True + max_iterations: 5 + qfpoints_distance: 0.1 + kfpoints_factor: 2 + metadata: + options: + withmpi: True + parameters: + INPUTEPW: + degaussw: 0.04 + eps_acoustic: 0.1 + muc: 0.13 + temps: 300 + vme: 'dipole' + use_ws: True + +default_protocol: moderate +protocols: + moderate: + description: 'Protocol to perform an electron-phonon calculation at normal precision at moderate computational cost.' + precise: + description: 'Protocol to perform an electron-phonon calculation at high precision at higher computational cost.' + qfpoints_distance: 0.06 + kfpoints_factor: 2 + fast: + description: 'Protocol to perform an electron-phonon calculation at low precision at minimal computational cost for testing purposes.'
+ qfpoints_distance: 0.2 + kfpoints_factor: 1 \ No newline at end of file diff --git a/src/aiida_epw/workflows/protocols/epw.yaml b/src/aiida_epw/workflows/protocols/prep.yaml similarity index 99% rename from src/aiida_epw/workflows/protocols/epw.yaml rename to src/aiida_epw/workflows/protocols/prep.yaml index 9eaf95f..944537c 100644 --- a/src/aiida_epw/workflows/protocols/epw.yaml +++ b/src/aiida_epw/workflows/protocols/prep.yaml @@ -24,7 +24,7 @@ default_inputs: ph: settings: PREPARE_FOR_EPW: True - epw: + epw_base: metadata: options: resources: diff --git a/src/aiida_epw/workflows/protocols/supercon.yaml b/src/aiida_epw/workflows/protocols/supercon.yaml index b8e0f98..b853ad2 100644 --- a/src/aiida_epw/workflows/protocols/supercon.yaml +++ b/src/aiida_epw/workflows/protocols/supercon.yaml @@ -31,7 +31,7 @@ default_inputs: epbread: False epbwrite: False ephwrite: True - eps_acustic: 1 + eps_acoustic: 1 epwread: True epwwrite: False etf_mem: 1 @@ -45,13 +45,13 @@ default_inputs: nqstep: 500 nsiter: 1 nstemp: 1 - restart: True + restart: False selecqread: False temps: 10.0 vme: 'dipole' wannierize: False wscut: 0.5 - epw_final: + epw_final_iso: metadata: options: resources: @@ -68,16 +68,16 @@ default_inputs: ep_coupling: False epbread: False epbwrite: False - ephwrite: False + ephwrite: False epwread: True epwwrite: False - eps_acustic: 1 # cm^{-1} + eps_acoustic: 0.1 # cm^{-1} etf_mem: 1 fsthick: 0.8 laniso: False limag: True liso: True - # mp_mesh_k: True + mp_mesh_k: True muc: 0.13 nqstep: 500 nsiter: 500 @@ -90,6 +90,43 @@ default_inputs: vme: 'dipole' wannierize: False wscut: 0.5 + epw_final_aniso: + metadata: + options: + resources: + num_machines: 1 + max_wallclock_seconds: 43200 # Twelve hours + withmpi: True + parameters: + INPUTEPW: + conv_thr_iaxis: 0.01 + degaussq: 0.5 + degaussw: 0.1 + elecselfen: False + eliashberg: True + ep_coupling: False + epbread: False + epbwrite: False + ephwrite: False + epwread: True + epwwrite: False + eps_acoustic: 0.1 # 
cm^{-1} + etf_mem: 1 + fsthick: 0.8 + laniso: True + limag: True + liso: False + # mp_mesh_k: True + muc: 0.13 + nqstep: 500 + nsiter: 500 + nstemp: 20 + restart: True + selecqread: False + temps: 5 43 + vme: 'dipole' + wannierize: False + wscut: 0.5 default_protocol: moderate protocols: moderate: diff --git a/src/aiida_epw/workflows/supercon.py b/src/aiida_epw/workflows/supercon.py index b3f8fa8..30472a4 100644 --- a/src/aiida_epw/workflows/supercon.py +++ b/src/aiida_epw/workflows/supercon.py @@ -1,11 +1,11 @@ -"""Work chain for computing the critical temperature based off an `EpwWorkChain`.""" +"""Work chain for computing the critical temperature based on an `EpwWorkChain`.""" from aiida import orm from aiida.common import AttributeDict from aiida.engine import WorkChain, ToContext, while_, if_, append_ from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin -from aiida_quantumespresso.calculations.epw import EpwCalculation +from aiida_epw.workflows.base import EpwBaseWorkChain from aiida_quantumespresso.calculations.functions.create_kpoints_from_distance import create_kpoints_from_distance from aiida.engine import calcfunction @@ -44,7 +44,10 @@ def calculate_tc(max_eigenvalue: orm.XyData) -> orm.Float: class SuperConWorkChain(ProtocolMixin, WorkChain): - """Work chain to compute the electron-phonon coupling.""" + """This workchain will run a series of `EpwBaseWorkChain`s in interpolation mode to converge + the Allen-Dynes Tc according to the interpolation distance. If converged or forced by `always_run_final`, + it will then run the final isotropic and anisotropic `EpwBaseWorkChain`s to compute the + critical temperature solving the isotropic and anisotropic Migdal-Eliashberg equations.""" @classmethod def define(cls, spec): @@ -53,37 +56,48 @@ def define(cls, spec): spec.input('structure', valid_type=orm.StructureData) spec.input('clean_workdir', valid_type=orm.Bool, default=lambda: orm.Bool(False)) - spec.input('epw_folder',
valid_type=(orm.RemoteData, orm.RemoteStashFolderData)) + spec.input('parent_folder_epw', valid_type=(orm.RemoteData, orm.RemoteStashFolderData)) spec.input('interpolation_distance', valid_type=(orm.Float, orm.List)) spec.input('convergence_threshold', valid_type=orm.Float, required=False) spec.input('always_run_final', valid_type=orm.Bool, default=lambda: orm.Bool(False)) spec.expose_inputs( - EpwCalculation, namespace='epw_interp', exclude=( - 'parent_folder_ph', 'parent_folder_nscf', 'kfpoints', 'qfpoints' + EpwBaseWorkChain, namespace='epw_interp', exclude=( + 'clean_workdir', 'parent_folder_ph', 'parent_folder_nscf', 'parent_folder_chk', 'qfpoints', 'kfpoints' ), namespace_options={ - 'help': 'Inputs for the interpolation `EpwCalculation`s.' + 'help': 'Inputs for the interpolation `EpwBaseWorkChain`s.' } ) spec.expose_inputs( - EpwCalculation, namespace='epw_final', exclude=( - 'parent_folder_ph', 'parent_folder_nscf', 'kfpoints', 'qfpoints' + EpwBaseWorkChain, namespace='epw_final_iso', exclude=( + 'clean_workdir', 'parent_folder_ph', 'parent_folder_nscf', 'parent_folder_chk', 'qfpoints_distance', 'kfpoints_factor' ), namespace_options={ - 'help': 'Inputs for the final `EpwCalculation`.' + 'help': 'Inputs for the final isotropic `EpwBaseWorkChain`.' + } + ) + spec.expose_inputs( + EpwBaseWorkChain, namespace='epw_final_aniso', exclude=( + 'clean_workdir', 'parent_folder_ph', 'parent_folder_nscf', 'parent_folder_chk', 'qfpoints_distance', 'kfpoints_factor' + ), + namespace_options={ + 'help': 'Inputs for the final anisotropic `EpwBaseWorkChain`.' 
} ) spec.outline( cls.setup, while_(cls.should_run_conv)( - cls.generate_reciprocal_points, - cls.interp_epw, - cls.inspect_epw, + cls.run_conv, + cls.inspect_conv, ), if_(cls.should_run_final)( - cls.final_epw, + cls.run_final_epw_iso, + cls.inspect_final_epw_iso, + cls.run_final_epw_aniso, + cls.inspect_final_epw_aniso, ), + cls.results ) spec.output('parameters', valid_type=orm.Dict, @@ -92,11 +106,17 @@ def define(cls, spec): help='The temperature dependence of the max eigenvalue for the final EPW.') spec.output('a2f', valid_type=orm.XyData, help='The contents of the `.a2f` file for the final EPW.') - spec.output('Tc', valid_type=orm.Float, - help='The isotropic linearised Eliashberg Tc interpolated from the max eigenvalue curve.') + spec.output('Tc_iso', valid_type=orm.Float, + help='The critical temperature.') spec.exit_code(401, 'ERROR_SUB_PROCESS_EPW_INTERP', - message='The interpolation `epw.x` sub process failed') + message='The interpolation `EpwBaseWorkChain` sub process failed') + spec.exit_code(402, 'ERROR_ALLEN_DYNES_NOT_CONVERGED', + message='Allen-Dynes Tc is not converged.') + spec.exit_code(403, 'ERROR_SUB_PROCESS_EPW_ISO', + message='The isotropic `EpwBaseWorkChain` sub process failed') + spec.exit_code(404, 'ERROR_SUB_PROCESS_EPW_ANISO', + message='The anisotropic `EpwBaseWorkChain` sub process failed') @classmethod def get_protocol_filepath(cls): @@ -107,7 +127,14 @@ def get_protocol_filepath(cls): @classmethod def get_builder_from_protocol( - cls, epw_code, parent_epw, protocol=None, overrides=None, scon_epw_code=None, epw_folder=None, **kwargs + cls, + epw_code, + parent_epw, + protocol=None, + overrides=None, + scon_epw_code=None, + parent_folder_epw=None, + **kwargs ): """Return a builder prepopulated with inputs selected according to the chosen protocol. 
@@ -117,30 +144,34 @@ def get_builder_from_protocol( builder = cls.get_builder() - epw_source = parent_epw.base.links.get_outgoing(link_label_filter='epw').first().node + if parent_epw.process_label == 'EpwPrepWorkChain': + epw_source = parent_epw.base.links.get_outgoing(link_label_filter='epw_base').first().node + elif parent_epw.process_label == 'EpwBaseWorkChain': + epw_source = parent_epw + else: + raise ValueError(f'Invalid parent_epw process: {parent_epw.process_label}') - if epw_folder is None: + if parent_folder_epw is None: - if epw_source.inputs.code.computer.hostname != epw_code.computer.hostname: + if epw_source.inputs.epw.code.computer.hostname != epw_code.computer.hostname: raise ValueError( 'The `epw_code` must be configured on the same computer as that where the `parent_epw` was run.' ) - epw_folder = parent_epw.outputs.epw_folder + parent_folder_epw = parent_epw.outputs.epw_folder else: - # TODO: Add check to make sure epw_folder is on same computer as epw_code + # TODO: Add check to make sure parent_folder_epw is on same computer as epw_code pass - for epw_namespace in ('epw_interp', 'epw_final'): + for epw_namespace in ('epw_interp', 'epw_final_iso', 'epw_final_aniso'): epw_inputs = inputs.get(epw_namespace, None) - parameters = epw_inputs['parameters'] - parameters["INPUTEPW"]["use_ws"] = epw_source.inputs.parameters["INPUTEPW"].get("use_ws", False) - parameters['INPUTEPW']['nbndsub'] = epw_source.inputs.parameters['INPUTEPW']['nbndsub'] - if 'bands_skipped' in epw_source.inputs.parameters['INPUTEPW']: - parameters['INPUTEPW']['bands_skipped'] = epw_source.inputs.parameters['INPUTEPW'].get('bands_skipped') - - epw_builder = EpwCalculation.get_builder() + epw_builder = EpwBaseWorkChain.get_builder_from_protocol( + code=epw_code, + structure=epw_source.inputs.structure, + protocol=protocol, + overrides=epw_inputs + ) if epw_namespace == 'epw_interp' and scon_epw_code is not None: epw_builder.code = scon_epw_code @@ -150,8 +181,6 @@ def 
get_builder_from_protocol( epw_builder.kpoints = epw_source.inputs.kpoints epw_builder.qpoints = epw_source.inputs.qpoints - epw_builder.parameters = orm.Dict(parameters) - epw_builder.metadata = epw_inputs['metadata'] if 'settings' in epw_inputs: epw_builder.settings = orm.Dict(epw_inputs['settings']) @@ -160,15 +189,12 @@ def get_builder_from_protocol( if isinstance(inputs['interpolation_distance'], float): builder.interpolation_distance = orm.Float(inputs['interpolation_distance']) if isinstance(inputs['interpolation_distance'], list): - qpoints_distance = parent_epw.inputs.qpoints_distance - interpolation_distance = [v for v in inputs['interpolation_distance'] if v < qpoints_distance / 2] - builder.interpolation_distance = orm.List(interpolation_distance) + builder.interpolation_distance = orm.List(inputs['interpolation_distance']) builder.convergence_threshold = orm.Float(inputs['convergence_threshold']) builder.always_run_final = orm.Bool(inputs.get('always_run_final', False)) builder.structure = parent_epw.inputs.structure - builder.epw_folder = epw_folder - # builder.epw_folder = epw_source.outputs.remote_folder + builder.parent_folder_epw = parent_folder_epw builder.clean_workdir = orm.Bool(inputs['clean_workdir']) return builder @@ -182,6 +208,7 @@ def setup(self): self.ctx.interpolation_list = [intp] self.ctx.interpolation_list.sort() + self.ctx.iteration = 0 self.ctx.final_interp = None self.ctx.allen_dynes_values = [] self.ctx.is_converged = False @@ -201,126 +228,128 @@ def should_run_conv(self): self.report(f'Checking convergence: old {prev_allen_dynes}; new {new_allen_dynes} -> Converged = {self.ctx.is_converged.value}') except (AttributeError, IndexError, KeyError): self.report('Not enough data to check convergence.') - + # + if ( + len(self.ctx.interpolation_list) == 0 and + not self.ctx.is_converged and + self.inputs.always_run_final.value + ): + self.report( + 'Allen-Dynes Tc is not converged, ' + 'but will run the subsequent isotropic and 
anisotropic workchains as required.' + ) else: self.report('No `convergence_threshold` input was provided, convergence automatically achieved.') self.ctx.is_converged = True return len(self.ctx.interpolation_list) > 0 and not self.ctx.is_converged - def generate_reciprocal_points(self): - """Generate the qpoints and kpoints meshes for the interpolation.""" - - inputs = { - 'structure': self.inputs.structure, - 'distance': self.ctx.interpolation_list.pop(), - 'force_parity': orm.Bool(False), - 'metadata': { - 'call_link_label': 'create_kpoints_from_distance' - } - } - inter_points = create_kpoints_from_distance(**inputs) # pylint: disable=unexpected-keyword-arg - - self.ctx.inter_points = inter_points - - def interp_epw(self): - """Run the ``restart`` EPW calculation for the current interpolation distance.""" - inputs = AttributeDict(self.exposed_inputs(EpwCalculation, namespace='epw_interp')) + def run_conv(self): + """Run the EpwBaseWorkChain in interpolation mode for the current interpolation distance.""" + + self.ctx.iteration += 1 - inputs.parent_folder_epw = self.inputs.epw_folder - inputs.kfpoints = self.ctx.inter_points - inputs.qfpoints = self.ctx.inter_points + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain, namespace='epw_interp')) - try: - settings = inputs.settings.get_dict() - except AttributeError: - settings = {} - - settings['ADDITIONAL_RETRIEVE_LIST'] = ['aiida.a2f'] - inputs.settings = orm.Dict(settings) + inputs.parent_folder_epw = self.inputs.parent_folder_epw + inputs.kfpoints_factor = self.inputs.epw_interp.kfpoints_factor + inputs.qfpoints_distance = self.ctx.interpolation_list.pop() if self.ctx.degaussq: parameters = inputs.parameters.get_dict() parameters['INPUTEPW']['degaussq'] = self.ctx.degaussq inputs.parameters = orm.Dict(parameters) - inputs.metadata.call_link_label = 'epw_interp' - calcjob_node = self.submit(EpwCalculation, **inputs) - mesh = 'x'.join(str(i) for i in self.ctx.inter_points.get_kpoints_mesh()[0]) - 
self.report(f'launching interpolation `epw` with PK {calcjob_node.pk} and interpolation mesh {mesh}') + inputs.setdefault('metadata', {})['call_link_label'] = f'conv_{self.ctx.iteration:02d}' + workchain_node = self.submit(EpwBaseWorkChain, **inputs) + + self.report(f'launching EpwBaseWorkChain<{workchain_node.pk}> in a2f mode: convergence #{self.ctx.iteration}') - return ToContext(epw_interp=append_(calcjob_node)) + return ToContext(epw_interp=append_(workchain_node)) - def inspect_epw(self): - """Verify that the epw.x workflow finished successfully.""" - epw_calculation = self.ctx.epw_interp[-1] + def inspect_conv(self): + """Verify that the EpwBaseWorkChain in interpolation mode finished successfully.""" + workchain = self.ctx.epw_interp[-1] - if not epw_calculation.is_finished_ok: - self.report(f'`epw.x` failed with exit status {epw_calculation.exit_status}') + if not workchain.is_finished_ok: + self.report(f'EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}') self.ctx.epw_interp.pop() - # return self.exit_codes.ERROR_SUB_PROCESS_EPW_INTERP else: - self.ctx.final_interp = self.ctx.inter_points try: - self.report(f"Allen-Dynes: {epw_calculation.outputs.output_parameters['allen_dynes']}") + self.report(f"Allen-Dynes: {workchain.outputs.output_parameters['Allen_Dynes_Tc']}") except KeyError: self.report(f"Could not find Allen-Dynes temperature in parsed output parameters!") if self.ctx.degaussq is None: - frequency = epw_calculation.outputs.a2f.get_array('frequency') + frequency = workchain.outputs.a2f.get_array('frequency') self.ctx.degaussq = frequency[-1] / 100 def should_run_final(self): - """Check if the final ``epw.x`` calculation should be run.""" - # if not self.inputs.always_run_final and 'convergence_threshold' in self.inputs: - # return self.ctx.is_converged - if self.ctx.final_interp is None: - return False - - return True - - def final_epw(self): - """Run the final ``epw.x`` calculation.""" - inputs = 
AttributeDict(self.exposed_inputs(EpwCalculation, namespace='epw_final')) - - inputs.parent_folder_epw = self.ctx.epw_interp[-1].outputs.remote_folder - inputs.kfpoints = self.ctx.final_interp - inputs.qfpoints = self.ctx.final_interp + """Check if the final EpwBaseWorkChain should be run.""" + if self.ctx.is_converged or self.inputs.always_run_final.value: + return True + else: + self.report(f'Allen-Dynes Tc is not converged.') + return self.exit_codes.ERROR_ALLEN_DYNES_NOT_CONVERGED - try: - settings = inputs.settings.get_dict() - except AttributeError: - settings = {} + def run_final_epw_iso(self): + """Run the final EpwBaseWorkChain in isotropic mode.""" + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain, namespace='epw_final_iso')) - settings['ADDITIONAL_RETRIEVE_LIST'] = ['aiida.a2f'] - inputs.settings = orm.Dict(settings) + parent_folder_epw = self.ctx.epw_interp[-1].outputs.remote_folder + inputs.parent_folder_epw = parent_folder_epw + inputs.kfpoints = parent_folder_epw.creator.inputs.kfpoints + inputs.qfpoints = parent_folder_epw.creator.inputs.qfpoints if self.ctx.degaussq: parameters = inputs.parameters.get_dict() parameters['INPUTEPW']['degaussq'] = self.ctx.degaussq inputs.parameters = orm.Dict(parameters) - inputs.metadata.call_link_label = 'epw_final' + inputs.metadata.call_link_label = 'epw_final_iso' + + workchain_node = self.submit(EpwBaseWorkChain, **inputs) + self.report(f'launching EpwBaseWorkChain<{workchain_node.pk}> in isotropic mode') + + return ToContext(final_epw_iso=workchain_node) + + def inspect_final_epw_iso(self): + """Verify that the final EpwBaseWorkChain in isotropic mode finished successfully.""" + workchain = self.ctx.final_epw_iso + + if not workchain.is_finished_ok: + self.report(f'EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}') + return self.exit_codes.ERROR_SUB_PROCESS_EPW_ISO + + def run_final_epw_aniso(self): + """Run the final EpwBaseWorkChain in anisotropic mode using the converged
interpolation distance.""" + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain, namespace='epw_final_aniso')) + + parent_folder_epw = self.ctx.epw_interp[-1].outputs.remote_folder + inputs.parent_folder_epw = parent_folder_epw + inputs.kfpoints = parent_folder_epw.creator.inputs.kfpoints + inputs.qfpoints = parent_folder_epw.creator.inputs.qfpoints - calcjob_node = self.submit(EpwCalculation, **inputs) - self.report(f'launching final `epw` {calcjob_node.pk}') + inputs.metadata.call_link_label = 'epw_final_aniso' + workchain_node = self.submit(EpwBaseWorkChain, **inputs) + self.report(f'launching EpwBaseWorkChain<{workchain_node.pk}> in anisotropic mode') - return ToContext(final_epw=calcjob_node) + return ToContext(final_epw_aniso=workchain_node) - def inspect_final_epw(self): - """Verify that the final epw.x workflow finished successfully.""" - epw_calculation = self.ctx.final_epw + def inspect_final_epw_aniso(self): + """Verify that the final EpwBaseWorkChain in anisotropic mode finished successfully.""" + workchain = self.ctx.final_epw_aniso - if not epw_calculation.is_finished_ok: - self.report(f'Final `epw.x` failed with exit status {epw_calculation.exit_status}') - return self.exit_codes.ERROR_SUB_PROCESS_EPW_INTERP + if not workchain.is_finished_ok: + self.report(f'EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}') + return self.exit_codes.ERROR_SUB_PROCESS_EPW_ANISO def results(self): """TODO""" - self.out('Tc', calculate_tc(self.ctx.final_epw.outputs.max_eigenvalue)) - self.out('parameters', self.ctx.final_epw.outputs.output_parameters) - self.out('max_eigenvalue', self.ctx.final_epw.outputs.max_eigenvalue) - self.out('a2f', self.ctx.final_epw.outputs.a2f) + self.out('Tc_iso', calculate_tc(self.ctx.final_epw_iso.outputs.max_eigenvalue)) + self.out('parameters', self.ctx.final_epw_iso.outputs.output_parameters) + self.out('max_eigenvalue', self.ctx.final_epw_iso.outputs.max_eigenvalue) + self.out('a2f', 
self.ctx.final_epw_iso.outputs.a2f) def on_terminated(self): """Clean the working directories of all child calculations if `clean_workdir=True` in the inputs."""