diff --git a/pyproject.toml b/pyproject.toml index 005b6c86b4..1fb7425409 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,6 +69,7 @@ forcefields = [ torchsim = [ "torch-sim-atomistic==0.5.0; python_version >= '3.12'" ] +jdftx = ["pymatgen==2025.10.7"] approxneb = ["pymatgen-analysis-diffusion>=2024.7.15"] ase = ["ase>=3.26.0"] ase-ext = ["tblite>=0.3.0; platform_system=='Linux'"] @@ -173,6 +174,7 @@ atomate2 = ["py.typed"] "atomate2.vasp.sets" = ["*.yaml"] "atomate2.cp2k.sets" = ["*.yaml"] "atomate2.cp2k.schemas.calc_types" = ["*.yaml"] +"atomate2.jdftx.sets" = ["*.yaml"] [tool.versioningit.vcs] method = "git" diff --git a/src/atomate2/jdftx/__init__.py b/src/atomate2/jdftx/__init__.py new file mode 100644 index 0000000000..b9af3b34b0 --- /dev/null +++ b/src/atomate2/jdftx/__init__.py @@ -0,0 +1 @@ +"""Module for JDFTx workflows.""" diff --git a/src/atomate2/jdftx/files.py b/src/atomate2/jdftx/files.py new file mode 100644 index 0000000000..7edd36d767 --- /dev/null +++ b/src/atomate2/jdftx/files.py @@ -0,0 +1,38 @@ +"""File operations and default JDFTx file names.""" + +import logging + +# if TYPE_CHECKING: +from pathlib import Path + +from pymatgen.core import Structure + +from atomate2.jdftx.sets.base import JdftxInputGenerator + +logger = logging.getLogger(__name__) + + +def write_jdftx_input_set( + structure: Structure, + input_set_generator: JdftxInputGenerator, + directory: str | Path = ".", + **kwargs, +) -> None: + """ + Write JDFTx input set. + + Parameters + ---------- + structure : .Structure + A structure. + input_set_generator : .JdftxInputGenerator + A JDFTx input set generator. + directory : str or Path + The directory to write the input files to. + **kwargs + Keyword arguments to pass to :obj:`.JdftxInputSet.write_input`. 
+ """ + cis = input_set_generator.get_input_set(structure) + + logger.info("Writing JDFTx input set.") + cis.write_input(directory, **kwargs) diff --git a/src/atomate2/jdftx/jobs/__init__.py b/src/atomate2/jdftx/jobs/__init__.py new file mode 100644 index 0000000000..3472cb94f5 --- /dev/null +++ b/src/atomate2/jdftx/jobs/__init__.py @@ -0,0 +1 @@ +"""Module for JDFTx jobs.""" diff --git a/src/atomate2/jdftx/jobs/adsorption.py b/src/atomate2/jdftx/jobs/adsorption.py new file mode 100644 index 0000000000..bf754a627a --- /dev/null +++ b/src/atomate2/jdftx/jobs/adsorption.py @@ -0,0 +1,42 @@ +"""Core jobs for running JDFTx calculations.""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from atomate2.jdftx.jobs.base import BaseJdftxMaker +from atomate2.jdftx.sets.core import IonicMinSetGenerator + +if TYPE_CHECKING: + from atomate2.jdftx.sets.core import JdftxInputGenerator + +logger = logging.getLogger(__name__) + + +@dataclass +class SurfaceMinMaker(BaseJdftxMaker): + """Maker to create surface relaxation job.""" + + name: str = "surface_ionic_min" + input_set_generator: JdftxInputGenerator = field( + default_factory=lambda: IonicMinSetGenerator( + coulomb_truncation=True, + auto_kpoint_density=1000, + calc_type="surface", + ) + ) + + +@dataclass +class MolMinMaker(BaseJdftxMaker): + """Maker to create molecule relaxation job.""" + + name: str = "surface_ionic_min" + input_set_generator: JdftxInputGenerator = field( + default_factory=IonicMinSetGenerator( + coulomb_truncation=True, + calc_type="molecule", + ) + ) diff --git a/src/atomate2/jdftx/jobs/base.py b/src/atomate2/jdftx/jobs/base.py new file mode 100644 index 0000000000..ecdf6e0ea0 --- /dev/null +++ b/src/atomate2/jdftx/jobs/base.py @@ -0,0 +1,140 @@ +"""Definition of base JDFTx job maker.""" + +from __future__ import annotations + +import logging +import os +from dataclasses import dataclass, field +from typing import 
TYPE_CHECKING + +from jobflow import Maker, Response, job +from pymatgen.core.trajectory import Trajectory +from pymatgen.electronic_structure.bandstructure import ( + BandStructure, + BandStructureSymmLine, +) + +from atomate2.jdftx.files import write_jdftx_input_set +from atomate2.jdftx.run import run_jdftx, should_stop_children +from atomate2.jdftx.schemas.task import TaskDoc +from atomate2.jdftx.sets.base import JdftxInputGenerator + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + + from pymatgen.core import Structure + + +logger = logging.getLogger(__name__) + +_DATA_OBJECTS = [ # TODO update relevant list for JDFTx + BandStructure, + BandStructureSymmLine, + Trajectory, + "force_constants", + "normalmode_eigenvecs", + "bandstructure", # FIX: BandStructure is not currently MSONable +] + +_INPUT_FILES = [ + "init.in", + "init.lattice", + "init.ionpos", +] + +# Output files. +_OUTPUT_FILES = [ # TODO finish this list + "output.out", + "Ecomponents", + "wfns", + "bandProjections", + "boundCharge", + "lattice", + "ionpos", +] + + +def jdftx_job(method: Callable) -> job: + """ + Decorate the ``make`` method of JDFTx job makers. + + Parameters + ---------- + method : callable + A BaseJdftxMaker.make method. This should not be specified directly and is + implied by the decorator. + + Returns + ------- + callable + A decorated version of the make function that will generate JDFTx jobs. + """ + return job(method, data=_DATA_OBJECTS, output_schema=TaskDoc) + + +@dataclass +class BaseJdftxMaker(Maker): + """ + Base JDFTx job maker. + + Parameters + ---------- + name : str + The job name. + input_set_generator : .JdftxInputGenerator + A generator used to make the input set. + write_input_set_kwargs : dict + Keyword arguments that will get passed to :obj:`.write_jdftx_input_set`. + run_jdftx_kwargs : dict + Keyword arguments that will get passed to :obj:`.run_jdftx`. 
+ task_document_kwargs : dict + Keyword arguments that will get passed to :obj:`.TaskDoc.from_directory`. + + """ + + name: str = "base JDFTx job" + input_set_generator: JdftxInputGenerator = field( + default_factory=JdftxInputGenerator + ) + write_input_set_kwargs: dict = field(default_factory=dict) + run_jdftx_kwargs: dict = field(default_factory=dict) + task_document_kwargs: dict = field(default_factory=dict) + + @jdftx_job + def make(self, structure: Structure) -> Response: + """Run a JDFTx calculation. + + Parameters + ---------- + structure : Structure + A pymatgen structure object. + + Returns + ------- + Response: A response object containing the output, detours and stop + commands of the JDFTx run. + """ + # write jdftx input files + write_jdftx_input_set( + structure, self.input_set_generator, **self.write_input_set_kwargs + ) + logger.info("Wrote JDFTx input files.") + # run jdftx + run_jdftx(**self.run_jdftx_kwargs) + + current_dir = os.getcwd() + task_doc = get_jdftx_task_document(current_dir, **self.task_document_kwargs) + + stop_children = should_stop_children(task_doc) + + return Response( + stop_children=stop_children, + stored_data={}, + output=task_doc, + ) + + +def get_jdftx_task_document(path: Path | str, **kwargs) -> TaskDoc: + """Get JDFTx Task Document using atomate2 settings.""" + return TaskDoc.from_directory(path, **kwargs) diff --git a/src/atomate2/jdftx/jobs/core.py b/src/atomate2/jdftx/jobs/core.py new file mode 100644 index 0000000000..93f0222ab2 --- /dev/null +++ b/src/atomate2/jdftx/jobs/core.py @@ -0,0 +1,50 @@ +"""Core jobs for running JDFTx calculations.""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from atomate2.jdftx.jobs.base import BaseJdftxMaker +from atomate2.jdftx.sets.core import ( + IonicMinSetGenerator, + LatticeMinSetGenerator, + SinglePointSetGenerator, +) + +if TYPE_CHECKING: + from atomate2.jdftx.sets.base import 
JdftxInputGenerator + + +logger = logging.getLogger(__name__) + + +@dataclass +class SinglePointMaker(BaseJdftxMaker): + """Maker to create JDFTx ionic optimization job.""" + + name: str = "single_point" + input_set_generator: JdftxInputGenerator = field( + default_factory=SinglePointSetGenerator + ) + + +@dataclass +class IonicMinMaker(BaseJdftxMaker): + """Maker to create JDFTx ionic optimization job.""" + + name: str = "ionic_min" + input_set_generator: JdftxInputGenerator = field( + default_factory=IonicMinSetGenerator + ) + + +@dataclass +class LatticeMinMaker(BaseJdftxMaker): + """Maker to create JDFTx lattice optimization job.""" + + name: str = "lattice_min" + input_set_generator: JdftxInputGenerator = field( + default_factory=LatticeMinSetGenerator + ) diff --git a/src/atomate2/jdftx/run.py b/src/atomate2/jdftx/run.py new file mode 100644 index 0000000000..611d012461 --- /dev/null +++ b/src/atomate2/jdftx/run.py @@ -0,0 +1,59 @@ +"""Functions to run JDFTx.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from custodian.jdftx.jobs import JDFTxJob +from jobflow.utils import ValueEnum +from pymatgen.io.jdftx.sets import FILE_NAMES + +from atomate2 import SETTINGS +from atomate2.jdftx.schemas.enums import JDFTxStatus + +if TYPE_CHECKING: + from atomate2.jdftx.schemas.task import TaskDoc + + +class JobType(ValueEnum): + """Type of JDFTx job.""" + + NORMAL = "normal" + # Only running through Custodian now, can add DIRECT method later. 
+ + +def get_jdftx_cmd() -> str: + """Get the JDFTx run command.""" + return SETTINGS.JDFTX_CMD + + +def run_jdftx( + job_type: JobType | str = JobType.NORMAL, + jdftx_cmd: str = None, + jdftx_job_kwargs: dict[str, Any] = None, +) -> None: + """Run JDFTx.""" + jdftx_job_kwargs = jdftx_job_kwargs or {} + if jdftx_cmd is None: + jdftx_cmd = get_jdftx_cmd() + + if job_type == JobType.NORMAL: + job = JDFTxJob( + jdftx_cmd, + input_file=FILE_NAMES["in"], + output_file=FILE_NAMES["out"], + **jdftx_job_kwargs, + ) + + job.run() + + +def should_stop_children( + task_document: TaskDoc, +) -> bool: + """ + Parse JDFTx TaskDoc and decide whether to stop child processes. + + If JDFTx failed, stop child processes. + """ + return task_document.state != JDFTxStatus.SUCCESS diff --git a/src/atomate2/jdftx/schemas/__init__.py b/src/atomate2/jdftx/schemas/__init__.py new file mode 100644 index 0000000000..f14bc9a4a0 --- /dev/null +++ b/src/atomate2/jdftx/schemas/__init__.py @@ -0,0 +1 @@ +"""Module for JDFTx database schemas.""" diff --git a/src/atomate2/jdftx/schemas/calculation.py b/src/atomate2/jdftx/schemas/calculation.py new file mode 100644 index 0000000000..734367f271 --- /dev/null +++ b/src/atomate2/jdftx/schemas/calculation.py @@ -0,0 +1,331 @@ +"""Core definitions of a JDFTx calculation document.""" + +import logging +from pathlib import Path + +from pydantic import BaseModel, Field +from pymatgen.core.structure import Structure +from pymatgen.core.trajectory import Trajectory +from pymatgen.io.jdftx.inputs import JDFTXInfile +from pymatgen.io.jdftx.joutstructure import JOutStructure +from pymatgen.io.jdftx.outputs import JDFTXOutfile + +from atomate2.jdftx.schemas.enums import CalcType, SolvationType, TaskType + +__author__ = "Cooper Tezak " +logger = logging.getLogger(__name__) + + +class Convergence(BaseModel): + """Schema for calculation convergence.""" + + converged: bool = Field( + default=True, description="Whether the JDFTx calculation converged" + ) + 
geom_converged: bool | None = Field( + default=True, description="Whether the ionic/lattice optimization converged" + ) + elec_converged: bool | None = Field( + default=True, description="Whether the last electronic optimization converged" + ) + geom_converged_reason: str | None = Field( + None, description="Reason ionic/lattice convergence was reached" + ) + elec_converged_reason: str | None = Field( + None, description="Reason electronic convergence was reached" + ) + + @classmethod + def from_jdftxoutput(cls, jdftxoutput: JDFTXOutfile) -> "Convergence": + """Initialize Convergence from JDFTxOutfile.""" + converged = jdftxoutput.converged + jstrucs = jdftxoutput.jstrucs + geom_converged = jstrucs.geom_converged + geom_converged_reason = jstrucs.geom_converged_reason + elec_converged = jstrucs.elec_converged + elec_converged_reason = jstrucs.elec_converged_reason + return cls( + converged=converged, + geom_converged=geom_converged, + geom_converged_reason=geom_converged_reason, + elec_converged=elec_converged, + elec_converged_reason=elec_converged_reason, + ) + + +class RunStatistics(BaseModel): + """JDFTx run statistics.""" + + total_time: float | None = Field( + 0, description="Total wall time for this calculation" + ) + + @classmethod + def from_jdftxoutput(cls, jdftxoutput: JDFTXOutfile) -> "RunStatistics": + """Initialize RunStatistics from JDFTXOutfile.""" + t_s = jdftxoutput.t_s if hasattr(jdftxoutput, "t_s") else None + + return cls(total_time=t_s) + + +class CalculationInput(BaseModel): + """Document defining JDFTx calculation inputs.""" + + structure: Structure = Field( + None, description="input structure to JDFTx calculation" + ) + jdftxinfile: dict = Field(None, description="input tags in JDFTx in file") + + @classmethod + def from_jdftxinput(cls, jdftxinput: JDFTXInfile) -> "CalculationInput": + """ + Create a JDFTx InputDoc schema from a JDFTXInfile object. + + Parameters + ---------- + jdftxinput + A JDFTXInfile object. 
+ + Returns + ------- + CalculationInput + The input document. + """ + return cls( + structure=jdftxinput.structure, + jdftxinfile=jdftxinput.as_dict(), + ) + + +class CalculationOutput(BaseModel): + """Document defining JDFTx calculation outputs.""" + + structure: Structure | None = Field( + None, + description="optimized geometry of the structure after calculation", + ) + parameters: dict | None = Field( + None, + description="JDFTXOutfile dictionary from last JDFTx run", + ) + forces: list | None = Field(None, description="forces from last ionic step") + energy: float = Field(None, description="Final energy") + energy_type: str = Field( + "F", description="Type of energy returned by JDFTx (e.g., F, G)" + ) + mu: float = Field(None, description="Fermi level of last electronic step") + lowdin_charges: list | None = Field( + None, description="Lowdin charges from last electronic optimization" + ) + total_charge: float = Field( + None, + description=( + "Total system charge from last electronic step in number of electrons" + ), + ) + stress: list[list] | None = Field( + None, description="Stress from last lattice optimization step" + ) + cbm: float | None = Field( + None, + description="Conduction band minimum / LUMO from last electronic optimization", + ) + vbm: float | None = Field( + None, description="Valence band maximum / HOMO from last electronic optimization" + ) + trajectory: Trajectory | None = Field( + None, description="Ionic trajectory from last JDFTx run" + ) + + @classmethod + def from_jdftxoutput( + cls, jdftxoutput: JDFTXOutfile, **kwargs + ) -> "CalculationOutput": + """ + Create a JDFTx output document from a JDFTXOutfile object. + + Parameters + ---------- + jdftxoutput + A JDFTXOutfile object. + + Returns + ------- + CalculationOutput + The output document. 
+ """ + optimized_structure: Structure = jdftxoutput.structure + if hasattr(jdftxoutput, "forces"): + forces = None if jdftxoutput.forces is None else jdftxoutput.forces.tolist() + if hasattr(jdftxoutput, "stress"): + stress = None if jdftxoutput.stress is None else jdftxoutput.stress.tolist() + else: + stress = None + energy = jdftxoutput.e + energy_type = jdftxoutput.eopt_type + mu = jdftxoutput.mu + lowdin_charges = optimized_structure.site_properties.get("charges", None) + # total charge in number of electrons (negative of oxidation state) + total_charge = ( + jdftxoutput.total_electrons_uncharged - jdftxoutput.total_electrons + ) + cbm = jdftxoutput.lumo + vbm = jdftxoutput.homo + structure = joutstruct_to_struct(joutstruct=optimized_structure) + + return cls( + structure=structure, + forces=forces, + energy=energy, + energy_type=energy_type, + mu=mu, + lowdin_charges=lowdin_charges, + total_charge=total_charge, + stress=stress, + cbm=cbm, + vbm=vbm, + trajectory=( + jdftxoutput.trajectory.as_dict() + if kwargs.get("store_trajectory", True) + else None + ), + parameters=jdftxoutput.to_dict(), + ) + + +class Calculation(BaseModel): + """Full JDFTx calculation inputs and outputs.""" + + dir_name: str = Field(None, description="The directory for this JDFTx calculation") + input: CalculationInput = Field( + None, description="JDFTx input settings for the calculation" + ) + output: CalculationOutput = Field( + None, description="The JDFTx calculation output document" + ) + converged: Convergence = Field(None, description="JDFTx job conversion information") + run_stats: RunStatistics = Field(0, description="Statistics for the JDFTx run") + calc_type: CalcType = Field(None, description="Calculation type (e.g. PBE)") + task_type: TaskType = Field( + None, description="Task type (e.g. Lattice Optimization)" + ) + solvation_type: SolvationType = Field( + None, description="Type of solvation model used (e.g. 
LinearPCM CANDLE)" + ) + + @classmethod + def from_files( + cls, + dir_name: Path | str, + jdftxinput_file: Path | str, + jdftxoutput_file: Path | str, + jdftxinput_kwargs: dict | None = None, + jdftxoutput_kwargs: dict | None = None, + # **jdftx_calculation_kwargs, #TODO implement optional calcdoc kwargs + ) -> "Calculation": + """ + Create a JDFTx calculation document from a directory and file paths. + + Parameters + ---------- + dir_name + The directory containing the JDFTx calculation outputs. + jdftxinput_file + Path to the JDFTx in file relative to dir_name. + jdftxoutput_file + Path to the JDFTx out file relative to dir_name. + jdftxinput_kwargs + Additional keyword arguments that will be passed to the + :obj:`.JDFTXInFile.from_file` method + jdftxoutput_kwargs + Additional keyword arguments that will be passed to the + :obj:`.JDFTXOutFile.from_file` method + + Returns + ------- + Calculation + A JDFTx calculation document. + """ + jdftxinput_file = Path(dir_name) / jdftxinput_file + jdftxoutput_file = Path(dir_name) / jdftxoutput_file + + jdftxinput_kwargs = jdftxinput_kwargs or {} + jdftxinput = JDFTXInfile.from_file(jdftxinput_file) + + jdftxoutput_kwargs = jdftxoutput_kwargs or {} + jdftxoutput = JDFTXOutfile.from_file(jdftxoutput_file) + + input_doc = CalculationInput.from_jdftxinput(jdftxinput, **jdftxinput_kwargs) + output_doc = CalculationOutput.from_jdftxoutput( + jdftxoutput, **jdftxoutput_kwargs + ) + logger.log(logging.DEBUG, f"{output_doc}") + converged = Convergence.from_jdftxoutput(jdftxoutput) + run_stats = RunStatistics.from_jdftxoutput(jdftxoutput) + + calc_type = _calc_type(output_doc) + task_type = _task_type(output_doc) + solvation_type = _solvation_type(input_doc) + + return cls( + dir_name=str(dir_name), + input=input_doc, + output=output_doc, + converged=converged, + run_stats=run_stats, + calc_type=calc_type, + task_type=task_type, + solvation_type=solvation_type, + ) + + +def _task_type( + outputdoc: CalculationOutput, +) -> 
TaskType: + """Return TaskType for JDFTx calculation.""" + jdftxoutput: dict = outputdoc.parameters + if not jdftxoutput.get("geom_opt"): + return TaskType("Single Point") + if jdftxoutput.get("geom_opt_type") == "lattice": + return TaskType("Lattice Optimization") + if jdftxoutput.get("geom_opt_type") == "ionic": + return TaskType("Ionic Optimization") + # TODO implement MD and frequency task types. Waiting on output parsers + + return TaskType("Unknown") + + +def _calc_type( + outputdoc: CalculationOutput, +) -> CalcType: + jdftxoutput = outputdoc.parameters + xc = jdftxoutput.get("xc_func", None) + return CalcType(xc) + + +def _solvation_type(inputdoc: CalculationInput) -> SolvationType: + jdftxinput: JDFTXInfile = inputdoc.jdftxinfile + fluid = jdftxinput.get("fluid", None) + if fluid is None: + return SolvationType("None") + fluid_solvent = jdftxinput.get("pcm-variant") + fluid_type = fluid.get("type") + solvation_type = f"{fluid_type} {fluid_solvent}" + return SolvationType(solvation_type) + + +def joutstruct_to_struct(joutstruct: JOutStructure) -> Structure: + """Convert JOutStructre to Structure.""" + lattice = joutstruct.lattice + cart_coords = joutstruct.cart_coords + species = joutstruct.species + struct = Structure( + lattice=lattice, + coords=cart_coords, + species=species, + coords_are_cartesian=True, + ) + for prop, values in joutstruct.site_properties.items(): + for isite, site in enumerate(struct): + site.properties[prop] = values[isite] + return struct diff --git a/src/atomate2/jdftx/schemas/enums.py b/src/atomate2/jdftx/schemas/enums.py new file mode 100644 index 0000000000..e1903f3d42 --- /dev/null +++ b/src/atomate2/jdftx/schemas/enums.py @@ -0,0 +1,66 @@ +"""Enums for constants across JDFTx schemas.""" + +from emmet.core.types.enums import ValueEnum + + +class JDFTxStatus(ValueEnum): + """JDFTx Calculation State.""" + + SUCCESS = "successful" + FAILED = "unsuccessful" + + +class CalcType(ValueEnum): + """JDFTx calculation type.""" + + GGA = 
"gga" + GGA_PBE = "gga-PBE" + GGA_PBESOL = "gga-PBEsol" + GGA_PW91 = "gga-PW91" + HARTREE_FOCK = "Hartree-Fock" + HYB_HSE06 = "hyb-HSE06" + HYB_HSE12 = "hyb-HSE12" + HYB_HSE12S = "hyb-HSE12s" + HYB_PBE0 = "hyb-PBE0" + LDA = "lda" + LDA_PW = "lda-PW" + LDA_PW_PREC = "lda-PW-prec" + LDA_PZ = "lda-PZ" + LDA_TETER = "lda-Teter" + LDA_VWN = "lda-VWN" + MGGA_REVTPSS = "mgga-revTPSS" + MGGA_TPSS = "mgga-TPSS" + ORB_GLLBSC = "orb-GLLBsc" + POT_LB94 = "pot-LB94" + + +class TaskType(ValueEnum): + """JDFTx task type.""" + + SINGLEPOINT = "Single Point" + LATTICEOPT = "Lattice Optimization" + IONOPT = "Ionic Optimization" + FREQ = "Frequency" + SOFTSPHERE = "SoftSphere" + DYNAMICS = "Molecular Dynamics" + + +class SolvationType(ValueEnum): + """JDFTx solvent type.""" + + NONE = "None" + SALSA = "SaLSA" + CDFT = "Classical DFT" + CANON = "CANON" + LINEAR_CANDLE = "LinearPCM CANDLE" + LINEAR_SCCS_ANION = "LinearPCM SCCS_anion" + LINEAR_SCCS_CATION = "LinearPCM SCCS_anion" + LINEAR_SCCS_G03 = "LinearPCM SCCS_g03" + LINEAR_SCCS_G03BETA = "LinearPCM SCCS_g03beta" + LINEAR_SCCS_G03P = "LinearPCM SCCS_g03p" + LINEAR_SCCS_G03PBETA = "LinearPCM SCCS_g03pbeta" + LINEAR_SCCS_G09 = "LinearPCM SCCS_g09" + LINEAR_SCCS_G09BETA = "LinearPCM SCCS_g09beta" + LINEAR_SGA13 = "LinearPCM SGA13" + LINEAR_SOFTSPHERE = "LinearPCM SoftSphere" + NONLINEAR_SGA13 = "NonlinearPCM SGA13" diff --git a/src/atomate2/jdftx/schemas/task.py b/src/atomate2/jdftx/schemas/task.py new file mode 100644 index 0000000000..1558c5ee3f --- /dev/null +++ b/src/atomate2/jdftx/schemas/task.py @@ -0,0 +1,121 @@ +"""Core definition of a JDFTx Task Document.""" + +import logging +from pathlib import Path +from typing import Any + +from custodian.jdftx.jobs import JDFTxJob +from emmet.core.structure import StructureMetadata +from pydantic import BaseModel, Field +from pymatgen.io.jdftx.sets import FILE_NAMES +from typing_extensions import Self + +from atomate2.jdftx.schemas.calculation import ( + Calculation, + CalculationInput, 
+ CalculationOutput, + RunStatistics, +) +from atomate2.jdftx.schemas.enums import JDFTxStatus, TaskType +from atomate2.utils.datetime import datetime_str + +__author__ = "Cooper Tezak " + +logger = logging.getLogger(__name__) +# _DERIVATIVE_FILES = ("GRAD", "HESS") + + +class CustodianDoc(BaseModel): + """Custodian data for JDFTx calculations.""" + + corrections: list[Any] | None = Field( + None, + title="Custodian Corrections", + description="list of custodian correction data for calculation.", + ) + + job: dict[str, Any] | JDFTxJob | None = Field( + None, + title="Custodian Job Data", + description="Job data logged by custodian.", + ) + + +class TaskDoc(StructureMetadata): + """Calculation-level details about JDFTx calculations.""" + + dir_name: str | Path | None = Field( + None, description="The directory for this JDFTx task" + ) + last_updated: str = Field( + default_factory=datetime_str, + description="Timestamp when this task document was last updated", + ) + completed_at: str | None = Field( + None, description="Timestamp for when this task was completed" + ) + calc_inputs: CalculationInput | None = Field( + None, description="JDFTx calculation inputs" + ) + run_stats: dict[str, RunStatistics] | None = Field( + None, + description="Summary of runtime statistics for each calculation in this task", + ) + calc_outputs: CalculationOutput | None = Field( + None, + description="JDFTx calculation outputs", + ) + state: JDFTxStatus | None = Field( + None, description="State of this JDFTx calculation" + ) + task_type: TaskType | None = Field( + None, description="The type of task this calculation is" + ) + + @classmethod + def from_directory( + cls, + dir_name: Path | str, + additional_fields: dict[str, Any] = None, + # **jdftx_calculation_kwargs, #TODO implement + ) -> Self: + """ + Create a task document from a directory containing JDFTx files. + + Parameters + ---------- + dir_name + The path to the folder containing the calculation outputs. 
+ store_additional_json + Whether to store additional json files in the calculation directory. + additional_fields + dictionary of additional fields to add to output document. + **jdftx_calculation_kwargs + Additional parsing options that will be passed to the + :obj:`.Calculation.from_qchem_files` function. + + Returns + ------- + TaskDoc + A task document for the JDFTx calculation + """ + logger.info(f"Getting task doc in: {dir_name}") + + additional_fields = additional_fields or {} + dir_name = Path(dir_name) + calc_doc = Calculation.from_files( + dir_name=dir_name, + jdftxinput_file=FILE_NAMES["in"], + jdftxoutput_file=FILE_NAMES["out"], + # **jdftx_calculation_kwargs, # still need to implement + ) + + doc = cls.from_structure( + meta_structure=calc_doc.output.structure, + dir_name=dir_name, + calc_outputs=calc_doc.output, + calc_inputs=calc_doc.input, + task_type=calc_doc.task_type, + ) + + return doc.model_copy(update=additional_fields) diff --git a/src/atomate2/jdftx/sets/BaseJdftxSet.yaml b/src/atomate2/jdftx/sets/BaseJdftxSet.yaml new file mode 100644 index 0000000000..f6aa8f6bf6 --- /dev/null +++ b/src/atomate2/jdftx/sets/BaseJdftxSet.yaml @@ -0,0 +1,67 @@ +# Default JDFTx settings for atomate2 calculations. 
+### Functional ### +elec-ex-corr: gga +van-der-waals: D3 + +### Electronic Parameters ### +elec-cutoff: + Ecut: 20 + EcutRho: 100 +electronic-minimize: + nIterations: 100 + energyDiffThreshold: 1.0e-07 +elec-smearing: + smearingType: Fermi + smearingWidth: 0.001 +# elec-initial-magnetization: +# M: 0 +# constrain: False +spintype: z-spin +core-overlap-check: none +converge-empty-states: True +band-projection-params: + ortho: True + norm: False + +### Lattice / Unit Cell ### +latt-move-scale: + s0: 0 + s1: 0 + s2: 0 +lattice-minimize: + nIterations: 00 +symmetries: none +#coulomb-interaction: slab 001 +#coords-type Lattice + +### Solvation & Bias ### +# fluid: LinearPCM +# pcm-variant: CANDLE +# fluid-solvent: H2O +# fluid-cation: +# name: Na+ +# concentration: 0.5 +# fluid-anion: +# name: F- +# concentration: 0.5 + +### Pseudopotential ### +ion-species: GBRV_v1.5/$ID_pbe_v1.uspp + + +### Output Files ### +dump-name: jdftx.$VAR +dump: + - End: + Dtot: True + State: True + BoundCharge: True + Forces: True + Ecomponents: True + VfluidTot: True + ElecDensity: True + KEdensity: True + EigStats: True + BandEigs: True + BandProjections: True + DOS: True diff --git a/src/atomate2/jdftx/sets/GenerationConfig.yaml b/src/atomate2/jdftx/sets/GenerationConfig.yaml new file mode 100644 index 0000000000..bf114d4200 --- /dev/null +++ b/src/atomate2/jdftx/sets/GenerationConfig.yaml @@ -0,0 +1,5 @@ +kpoint-density: 1000 +coulomb-truncation: True +bands_multiplier: 1.2 +ASHEP: # absolute SHE potential in V + CANDLE: -4.66 diff --git a/src/atomate2/jdftx/sets/PseudosConfig.yaml b/src/atomate2/jdftx/sets/PseudosConfig.yaml new file mode 100644 index 0000000000..9c55dca934 --- /dev/null +++ b/src/atomate2/jdftx/sets/PseudosConfig.yaml @@ -0,0 +1,200 @@ +# Number of electrons for each element in each pseudopotential +GBRV: + suffixes: + - _pbe.uspp + Cd: 12 + Be: 4 + Br: 7 + Fe: 16 + K: 9 + Rb: 9 + Os: 16 + La: 11 + Tc: 15 + Ni: 18 + Te: 6 + Ti: 12 + Rh: 15 + Ga: 19 + Se: 6 + Au: 11 + 
Mn: 15 + Ru: 16 + Zr: 12 + Pd: 16 + Re: 15 + F: 7 + N: 5 + Cs: 9 + Sn: 14 + Hg: 12 + Ta: 13 + Ir: 15 + Hf: 12 + Ca: 10 + Si: 4 + Sr: 10 + Bi: 15 + Li: 3 + W: 14 + B: 3 + P: 5 + As: 5 + Ge: 14 + V: 13 + Zn: 20 + Mg: 10 + Y: 11 + Pb: 14 + Sb: 15 + Al: 3 + Ba: 10 + Cr: 14 + Mo: 14 + I: 7 + O: 6 + Nb: 13 + Ag: 19 + Cu: 19 + Tl: 13 + C: 4 + Co: 17 + Pt: 16 + S: 6 + Na: 9 + Sc: 11 + Cl: 7 + In: 13 + H: 1 + +GBRV_v1.5: + Cd: 12 + Be: 4 + Br: 7 + Fe: 16 + K: 9 + Rb: 9 + Os: 16 + La: 11 + Tc: 15 + Ni: 18 + Te: 6 + Ti: 12 + Rh: 15 + Ga: 19 + Se: 6 + Au: 11 + Mn: 15 + Ru: 16 + Zr: 12 + Pd: 16 + Re: 15 + F: 7 + N: 5 + Cs: 9 + Sn: 14 + Hg: 12 + Ta: 13 + Ir: 15 + Hf: 12 + Ca: 10 + Si: 4 + Sr: 10 + Bi: 15 + Li: 3 + W: 14 + B: 3 + P: 5 + As: 5 + Ge: 14 + V: 13 + Zn: 20 + Mg: 10 + Y: 11 + Pb: 14 + Sb: 15 + Al: 3 + Ba: 10 + Cr: 14 + Mo: 14 + I: 7 + O: 6 + Nb: 13 + Ag: 19 + Cu: 19 + Tl: 13 + C: 4 + Co: 17 + Pt: 16 + S: 6 + Na: 9 + Sc: 11 + Cl: 7 + In: 13 + H: 1 + +SG15: + Cd: 12 + Be: 4 + Br: 7 + Fe: 16 + K: 9 + Rb: 9 + Os: 16 + La: 11 + Tc: 15 + Ni: 18 + Te: 6 + Ti: 12 + Rh: 15 + Ga: 19 + Se: 6 + Au: 11 + Mn: 15 + Ru: 16 + Zr: 12 + Pd: 16 + Re: 15 + F: 7 + N: 5 + Cs: 9 + Sn: 14 + Hg: 12 + Ta: 13 + Ir: 15 + Hf: 12 + Ca: 10 + Si: 4 + Sr: 10 + Bi: 15 + Li: 3 + W: 14 + B: 3 + P: 5 + As: 5 + Ge: 14 + V: 13 + Zn: 20 + Mg: 10 + Y: 11 + Pb: 14 + Sb: 15 + Al: 3 + Ba: 10 + Cr: 14 + Mo: 14 + I: 7 + O: 6 + Nb: 13 + Ag: 19 + Cu: 19 + Tl: 13 + C: 4 + Co: 17 + Pt: 16 + S: 6 + Na: 9 + Sc: 11 + Cl: 7 + In: 13 + H: 1 diff --git a/src/atomate2/jdftx/sets/__init__.py b/src/atomate2/jdftx/sets/__init__.py new file mode 100644 index 0000000000..373e641cdd --- /dev/null +++ b/src/atomate2/jdftx/sets/__init__.py @@ -0,0 +1 @@ +"""Module for JDFTx input sets.""" diff --git a/src/atomate2/jdftx/sets/base.py b/src/atomate2/jdftx/sets/base.py new file mode 100644 index 0000000000..1fb415172b --- /dev/null +++ b/src/atomate2/jdftx/sets/base.py @@ -0,0 +1,315 @@ +"""Module defining base JDFTx input set and 
@dataclass
class JdftxInputGenerator(InputGenerator):
    """A class to generate JDFTx input sets.

    Args:
        user_settings (dict): User JDFTx settings. This allows the user to
            override the default JDFTx settings loaded in the
            default_settings argument.
        coulomb_truncation (bool) = False:
            Whether to use coulomb truncation and calculate the coulomb
            truncation center. Only works for molecules and slabs.
        auto_kpoint_density (int) = 1000:
            Reciprocal k-point density for automatic k-point calculation. If
            k-points are specified in user_settings, they will not be
            overridden.
        potential (None, float) = None:
            Potential vs SHE for GC-DFT calculation.
        calc_type (str) = "bulk":
            Type of calculation used for setting input parameters. Options
            are: ["bulk", "surface", "molecule"].
        pseudopotentials (str) = "GBRV":
            Name of the pseudopotential set.
        config_dict (dict): The config dictionary used to set input
            parameters used in the calculation of JDFTx tags.
        default_settings: Default JDFTx settings.
    """

    # copy _BASE_JDFTX_SET to ensure each class instance has its own copy
    # otherwise in-place changes can affect other instances
    user_settings: dict = field(default_factory=dict)
    coulomb_truncation: bool = False
    auto_kpoint_density: int = 1000
    potential: None | float = None
    calc_type: str = "bulk"
    pseudopotentials: str = "GBRV"
    config_dict: dict = field(default_factory=lambda: _GENERATION_CONFIG)
    default_settings: dict = field(default_factory=lambda: _BASE_JDFTX_SET)

    def __post_init__(self) -> None:
        """Validate calc_type and merge user settings over the defaults."""
        calc_type_options = ("bulk", "surface", "molecule")
        if self.calc_type not in calc_type_options:
            # BUG FIX: the second segment of this message was a plain string,
            # so "{calc_type_options}" was printed literally, and a stray "f"
            # preceded the calc type (e.g. "calc type fbulk ...").
            raise ValueError(
                f"calc type {self.calc_type} not in list of supported calc "
                f"types: {calc_type_options}."
            )
        # copy so in-place updates never mutate the shared module-level default
        self.settings = self.default_settings.copy()
        self.settings.update(self.user_settings)
        # set default coords-type to Cartesian
        if "coords-type" not in self.settings:
            self.settings["coords-type"] = "Cartesian"
        self._apply_settings(self.settings)

    def _apply_settings(self, settings: dict[str, Any]) -> None:
        """Mirror each settings entry as an attribute on the generator."""
        for key, value in settings.items():
            setattr(self, key, value)

    def get_input_set(
        self,
        structure: Structure = None,
    ) -> JdftxInputSet:
        """Get a JDFTx input set for a structure.

        Parameters
        ----------
        structure
            A Pymatgen Structure.

        Returns
        -------
        JdftxInputSet
            A JDFTx input set.
        """
        # re-apply user settings so post-init edits to user_settings win
        self.settings.update(self.user_settings)
        self.set_kgrid(structure=structure)
        self.set_coulomb_interaction(structure=structure)
        self.set_nbands(structure=structure)
        self.set_mu()
        self.set_pseudos()
        self.set_magnetic_moments(structure=structure)
        self._apply_settings(self.settings)

        jdftxinput = JDFTXInfile.from_dict(self.settings)
        return JdftxInputSet(jdftxinput=jdftxinput, structure=structure)

    def set_kgrid(self, structure: Structure) -> None:
        """Set the "kpoint-folding" tag from the automatic k-point density.

        Surfaces are forced to a single k-point along the surface normal and
        molecules to a Gamma-only grid.

        Parameters
        ----------
        structure
            A pymatgen structure.

        Returns
        -------
        None
            Updates ``self.settings`` in place.
        """
        # never override k grid definition in user settings
        if "kpoint-folding" in self.user_settings:
            return
        # calculate k-grid with k-point density
        kpoints = Kpoints.automatic_density(
            structure=structure, kppa=self.auto_kpoint_density
        )
        kpoints = kpoints.kpts[0]
        if self.calc_type == "surface":
            kpoints = (kpoints[0], kpoints[1], 1)
        elif self.calc_type == "molecule":
            kpoints = (1, 1, 1)
        self.settings.update(
            {
                "kpoint-folding": {
                    "n0": kpoints[0],
                    "n1": kpoints[1],
                    "n2": kpoints[2],
                }
            }
        )

    def set_coulomb_interaction(
        self,
        structure: Structure,
    ) -> None:
        """Set coulomb-interaction and coulomb-truncation tags.

        Bulk calculations are fully periodic; surfaces are truncated along
        the third lattice direction; molecules are fully isolated with the
        truncation embedded at the center of mass.

        BUG FIX: return annotation corrected from JDFTXInfile to None -- this
        method only mutates ``self.settings``.

        NOTE(review): the ``coulomb_truncation`` flag is not consulted here;
        confirm whether it is intended to gate this method.

        Parameters
        ----------
        structure
            A pymatgen structure.

        Returns
        -------
        None
            Updates ``self.settings`` in place.
        """
        # never override an explicit user/preset choice
        if "coulomb-interaction" in self.settings:
            return
        if self.calc_type == "bulk":
            self.settings["coulomb-interaction"] = {
                "truncationType": "Periodic",
            }
            return
        if self.calc_type == "surface":
            self.settings["coulomb-interaction"] = {
                "truncationType": "Slab",
                "dir": "001",
            }
        elif self.calc_type == "molecule":
            self.settings["coulomb-interaction"] = {
                "truncationType": "Isolated",
            }
        com = center_of_mass(structure=structure)
        # convert fractional COM to the coordinate system JDFTx expects
        # (Bohr); pymatgen lattices are in Angstrom
        if self.settings["coords-type"] == "Cartesian":
            com = com @ structure.lattice.matrix * ang_to_bohr
        elif self.settings["coords-type"] == "Lattice":
            com = com * ang_to_bohr
        self.settings["coulomb-truncation-embed"] = {
            "c0": com[0],
            "c1": com[1],
            "c2": com[2],
        }

    def set_nbands(self, structure: Structure) -> None:
        """Set number of bands in DFT calculation.

        Uses the larger of (nelec/2 + 10) and (nelec/2 * bands_multiplier),
        with valence-electron counts taken from the pseudopotential config.
        """
        nelec = sum(
            _PSEUDO_CONFIG[self.pseudopotentials][str(atom)]
            for atom in structure.species
        )
        nbands_add = int(nelec / 2) + 10
        nbands_mult = int(nelec / 2) * self.config_dict["bands_multiplier"]
        self.settings["elec-n-bands"] = max(nbands_add, nbands_mult)

    def set_pseudos(self) -> None:
        """Set ion-species tag corresponding to pseudopotentials."""
        if SETTINGS.JDFTX_PSEUDOS_DIR is not None:
            pseudos_str = str(
                Path(SETTINGS.JDFTX_PSEUDOS_DIR) / Path(self.pseudopotentials)
            )
        else:
            pseudos_str = self.pseudopotentials

        add_tags = [
            pseudos_str + "/$ID" + suffix
            for suffix in _PSEUDO_CONFIG[self.pseudopotentials]["suffixes"]
        ]
        # do not override pseudopotentials in settings
        if "ion-species" not in self.settings:
            self.settings["ion-species"] = add_tags

    def set_mu(self) -> None:
        """Set absolute electron chemical potential (fermi level) for GC-DFT.

        NOTE(review): ``self.settings["pcm-variant"]`` raises KeyError when a
        potential is requested without a solvent model configured -- confirm
        whether a clearer error is wanted here.
        """
        # never override mu in settings
        if "target-mu" in self.settings or self.potential is None:
            return
        solvent_model = self.settings["pcm-variant"]
        ashep = self.config_dict["ASHEP"][solvent_model]
        # calculate absolute potential in Hartree
        mu = -(ashep - self.potential) / eV_to_Ha
        self.settings["target-mu"] = {"mu": mu}

    def set_magnetic_moments(self, structure: Structure) -> None:
        """Set the magnetic moments for each atom in the structure.

        If the user specified magnetic moments as JDFTx tags, they will
        not be prioritized. The user can also set the magnetic moments in
        the site_params dictionary attribute of the structure. If neither
        above options are set, the code will initialize all metal atoms with
        +5 magnetic moments.

        Parameters
        ----------
        structure
            A pymatgen structure

        Returns
        -------
        None
        """
        # check if user set JDFTx magnetic tags and return if true
        if (
            "initial-magnetic-moments" in self.settings
            or "elec-initial-magnetization" in self.settings
        ):
            return
        magmoms = defaultdict(list)
        # if magmoms set on structure, build JDFTx tag
        if "magmom" in structure.site_properties:
            site_magmoms = structure.site_properties["magmom"]
            if len(structure.species) != len(site_magmoms):
                # BUG FIX: report the length mismatch, not the raw list
                raise ValueError(
                    f"length of magmom, {len(site_magmoms)} "
                    "does not match number of species in structure, "
                    f"{len(structure.species)}."
                )
            for magmom, species in zip(
                site_magmoms, structure.species, strict=False
            ):
                magmoms[species].append(magmom)
        # set magmoms to +5 for all metals in structure.
        else:
            for species in structure.species:
                magmoms[str(species)].append(5 if species.is_metal else 0)
        tag_str = ""
        for element, magmom_list in magmoms.items():
            tag_str += f"{element} " + " ".join(map(str, magmom_list)) + " "
        self.settings["initial-magnetic-moments"] = tag_str
def center_of_mass(structure: Structure) -> np.ndarray:
    """Compute the mass-weighted centroid of a structure.

    Parameters
    ----------
    structure: Structure
        A pymatgen structure

    Returns
    -------
    np.ndarray
        The center of mass in fractional coordinates.
    """
    site_masses = [site.species.weight for site in structure]
    fractional = np.asarray(structure.frac_coords)
    return np.average(fractional, axis=0, weights=site_masses)
@dataclass
class BEASTSetGenerator(JdftxInputGenerator):
    """Generate BEAST Database ionic relaxation set.

    Ionic minimization in CANDLE implicit water with 0.5 M Na+/F-
    electrolyte, layered on top of the base JDFTx settings.

    BUG FIX: the ``@dataclass`` decorator was missing. Without it,
    ``field(default_factory=...)`` is left as a bare ``dataclasses.Field``
    class attribute, and the inherited dataclass ``__init__`` keeps using the
    parent's default_settings -- the BEAST-specific settings were silently
    ignored. All sibling generators in this module are dataclasses.
    """

    default_settings: dict = field(
        default_factory=lambda: {
            **_BASE_JDFTX_SET,
            "fluid": {"type": "LinearPCM"},
            "pcm-variant": "CANDLE",
            "fluid-solvent": {"name": "H2O"},
            "fluid-cation": {"name": "Na+", "concentration": 0.5},
            "fluid-anion": {"name": "F-", "concentration": 0.5},
            "ionic-minimize": {"nIterations": 100},
        }
    )
+_JFILES = "init.in" +_REF_PATHS: dict[str, str | Path] = {} +_FAKE_RUN_JDFTX_KWARGS: dict[str, dict] = {} + + +def zpath(path: str | Path) -> Path: + return Path(monty_zpath(str(path))) + + +def parse_inp_file_zipped(path: str | Path) -> JDFTXInfile: + """Parse a possibly gzipped JDFTx input file. + + Note that `JDFTXInfile` does not currently support + gzipped input like other I/O in pymatgen. + """ + with zopen(zpath(path), "rt") as f: + return JDFTXInfile.from_str(f.read()) + + +@pytest.fixture(scope="session") +def jdftx_test_dir(test_dir): + return test_dir / "jdftx" + + +@pytest.fixture(params=["sp_test", "ionicmin_test", "latticemin_test"]) +def task_name(request): + task_table = { + "sp_test": "Single Point", + "ionicmin_test": "Ionic Optimization", + "latticemin_test": "Lattice Optimization", + } + return task_table[request.param] + + +@pytest.fixture +def mock_filenames(monkeypatch): + monkeypatch.setitem(FILE_NAMES, "in", "init.in") + monkeypatch.setitem(FILE_NAMES, "out", "jdftx.out") + + +@pytest.fixture +def mock_jdftx(monkeypatch, jdftx_test_dir: Path): + def mock_run_jdftx(*args, **kwargs): + name = CURRENT_JOB.job.name + ref_path = jdftx_test_dir / _REF_PATHS[name] + logger.info("mock_run called") + fake_run_jdftx(ref_path, **_FAKE_RUN_JDFTX_KWARGS, clear_inputs=False) + + get_input_set_orig = JdftxInputGenerator.get_input_set + + def mock_get_input_set(self, *args, **kwargs): + logger.info("mock_input called") + return get_input_set_orig(self, *args, **kwargs) + + monkeypatch.setattr(atomate2.jdftx.run, "run_jdftx", mock_run_jdftx) + monkeypatch.setattr(atomate2.jdftx.jobs.base, "run_jdftx", mock_run_jdftx) + monkeypatch.setattr(JdftxInputGenerator, "get_input_set", mock_get_input_set) + + def _run(ref_paths, fake_run_jdftx_kwargs=None): + if fake_run_jdftx_kwargs is None: + fake_run_jdftx_kwargs = {} + + _REF_PATHS.update(ref_paths) + _FAKE_RUN_JDFTX_KWARGS.update(fake_run_jdftx_kwargs) + logger.info("_run passed") + + yield _run + + 
monkeypatch.undo() + _REF_PATHS.clear() + _FAKE_RUN_JDFTX_KWARGS.clear() + + +def fake_run_jdftx( + ref_path: str | Path, + input_settings: Sequence[str] = None, + check_inputs: Sequence[Literal["init.in"]] = _JFILES, + clear_inputs: bool = True, +): + logger.info("Running fake JDFTx.") + ref_path = Path(ref_path) + + if "init.in" in check_inputs: + results = check_input(ref_path, input_settings) + for key, (user_val, ref_val) in results.items(): + if isinstance(user_val, dict) and isinstance(ref_val, dict): + compare_dict(user_val, ref_val, key) + else: + assert user_val == ref_val, ( + f"Mismatch for {key}: user_val={user_val}, ref_val={ref_val}" + ) + + logger.info("Verified inputs successfully") + + if clear_inputs: + clear_jdftx_inputs() + + copy_jdftx_outputs(ref_path) + + +def check_input(ref_path, input_settings: Sequence[str] = None): + logger.info("Checking inputs.") + + ref_input = parse_inp_file_zipped(ref_path / "inputs" / "init.in") + user_input = parse_inp_file_zipped("init.in") + + keys_to_check = set(user_input) if input_settings is None else set(input_settings) + + results = {} + for key in keys_to_check: + user_val = user_input.get(key) + ref_val = ref_input.get(key) + results[key] = (user_val, ref_val) + + return results + + +def compare_dict(user_val, ref_val, key, rel_tol=1e-9): + for sub_key, user_sub_val in user_val.items(): + ref_sub_val = ref_val[sub_key] + + if isinstance(user_sub_val, (int | float)) and isinstance( + ref_sub_val, (int | float) + ): + # Compare numerical values with tolerance + assert math.isclose(user_sub_val, ref_sub_val, rel_tol=rel_tol), ( + f"Mismatch for {key}.{sub_key}: " + f"user_val={user_sub_val}, ref_val={ref_sub_val}" + ) + else: + assert user_sub_val == ref_sub_val, ( + f"Mismatch for {key}.{sub_key}: " + f"user_val={user_sub_val}, ref_val={ref_sub_val}" + ) + + +def clear_jdftx_inputs(): + if (file_path := zpath("init.in")).exists(): + file_path.unlink() + logger.info("Cleared jdftx inputs") + + +def 
copy_jdftx_outputs(ref_path: Path, suffix: str = "outputs"): + base_path = Path(os.getcwd()) + output_path = ref_path / suffix + logger.info(f"copied output files to {base_path}") + for output_file in output_path.iterdir(): + if output_file.is_file(): + # First check if file is zipped + if any( + output_file.name.lower().endswith(suffix) + for suffix in (".gz", ".bz2", ".z", ".lzma", ".xz") + ): + with ( + zopen(output_file, "rb") as f_in, + open(output_file.name.rsplit(".", 1)[0], "wb") as f_out, + ): + f_out.writelines(f_in) + else: + shutil.copy(output_file, ".") diff --git a/tests/jdftx/jobs/__init__.py b/tests/jdftx/jobs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/jdftx/jobs/test_core.py b/tests/jdftx/jobs/test_core.py new file mode 100644 index 0000000000..e46a0bec47 --- /dev/null +++ b/tests/jdftx/jobs/test_core.py @@ -0,0 +1,61 @@ +from jobflow import run_locally + +from atomate2.jdftx.jobs.core import IonicMinMaker, LatticeMinMaker, SinglePointMaker +from atomate2.jdftx.schemas.task import TaskDoc +from atomate2.jdftx.sets.core import ( + IonicMinSetGenerator, + LatticeMinSetGenerator, + SinglePointSetGenerator, +) + + +def test_sp_maker(mock_jdftx, si_structure, mock_filenames, clean_dir): + ref_paths = {"single_point": "sp_test"} + + fake_run_jdftx_kwargs = {} + + mock_jdftx(ref_paths, fake_run_jdftx_kwargs) + + maker = SinglePointMaker(input_set_generator=SinglePointSetGenerator()) + maker.input_set_generator.user_settings["coords-type"] = "Lattice" + + job = maker.make(si_structure) + + responses = run_locally(job, create_folders=True, ensure_success=True) + output1 = responses[job.uuid][1].output + assert isinstance(output1, TaskDoc) + + +def test_ionicmin_maker(mock_jdftx, si_structure, mock_filenames, clean_dir): + ref_paths = {"ionic_min": "ionicmin_test"} + + fake_run_jdftx_kwargs = {} + + mock_jdftx(ref_paths, fake_run_jdftx_kwargs) + + maker = IonicMinMaker(input_set_generator=IonicMinSetGenerator()) + 
maker.input_set_generator.user_settings["coords-type"] = "Lattice" + + job = maker.make(si_structure) + + responses = run_locally(job, create_folders=True, ensure_success=True) + output1 = responses[job.uuid][1].output + assert isinstance(output1, TaskDoc) + + +def test_latticemin_maker(mock_jdftx, si_structure, mock_filenames, clean_dir): + ref_paths = {"lattice_min": "latticemin_test"} + + fake_run_jdftx_kwargs = {} + + mock_jdftx(ref_paths, fake_run_jdftx_kwargs) + + maker = LatticeMinMaker(input_set_generator=LatticeMinSetGenerator()) + # Need to be in Lattice coords to compare to test files + maker.input_set_generator.user_settings["coords-type"] = "Lattice" + + job = maker.make(si_structure) + + responses = run_locally(job, create_folders=True, ensure_success=True) + output1 = responses[job.uuid][1].output + assert isinstance(output1, TaskDoc) diff --git a/tests/jdftx/schemas/__init__.py b/tests/jdftx/schemas/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/jdftx/schemas/test_taskdoc.py b/tests/jdftx/schemas/test_taskdoc.py new file mode 100644 index 0000000000..9244fd7e34 --- /dev/null +++ b/tests/jdftx/schemas/test_taskdoc.py @@ -0,0 +1,26 @@ +# test that TaskDoc is loaded with the right attributes +from pathlib import Path + +import pytest +from pymatgen.io.jdftx.outputs import JDFTXOutfile +from pymatgen.io.jdftx.sets import FILE_NAMES + +from atomate2.jdftx.schemas.task import TaskDoc + +from ..conftest import copy_jdftx_outputs # noqa: TID252 + + +@pytest.mark.parametrize("task_name", ["sp_test"], indirect=True) +@pytest.mark.parametrize("task_dir_name", ["sp_test"], indirect=False) +def test_taskdoc(task_name, task_dir_name, mock_filenames, jdftx_test_dir, tmp_dir): + """ + Test the JDFTx TaskDoc to verify that attributes are created properly. 
+ """ + for subdir in ("inputs", "outputs"): + copy_jdftx_outputs(jdftx_test_dir / Path(task_dir_name), suffix=subdir) + taskdoc = TaskDoc.from_directory(dir_name=".") + jdftxoutfile = JDFTXOutfile.from_file(Path(FILE_NAMES["out"])) + # check that the taskdoc attributes correspond to the expected values. + # currently checking task_type and energy + assert taskdoc.task_type == task_name + assert taskdoc.calc_outputs.energy == jdftxoutfile.e diff --git a/tests/jdftx/sets/__init__.py b/tests/jdftx/sets/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/jdftx/sets/test_core.py b/tests/jdftx/sets/test_core.py new file mode 100644 index 0000000000..f439ba6bc1 --- /dev/null +++ b/tests/jdftx/sets/test_core.py @@ -0,0 +1,71 @@ +import numpy as np +import pytest + +from atomate2.jdftx.sets.base import JdftxInputGenerator +from atomate2.jdftx.sets.core import ( + IonicMinSetGenerator, + LatticeMinSetGenerator, + SinglePointSetGenerator, +) + + +@pytest.fixture +def basis_and_potential(): + return { + "fluid-cation": {"name": "Na+", "concentration": 1.0}, + "fluid-anion": {"name": "F-", "concentration": 1.0}, + } + + +def test_singlepoint_generator(si_structure, basis_and_potential): + gen = SinglePointSetGenerator(user_settings=basis_and_potential) + input_set = gen.get_input_set(si_structure) + jdftx_input = input_set.jdftxinput + assert jdftx_input["fluid-cation"]["concentration"] == 1.0 + assert jdftx_input["lattice-minimize"]["nIterations"] == 0 + + +def test_default_generator(si_structure, basis_and_potential): + gen = JdftxInputGenerator(user_settings=basis_and_potential) + input_set = gen.get_input_set(si_structure) + jdftx_input = input_set.jdftxinput + assert jdftx_input["fluid-cation"]["concentration"] == 1.0 + + +def test_ionicmin_generator(si_structure, basis_and_potential): + gen = IonicMinSetGenerator(user_settings=basis_and_potential) + input_set = gen.get_input_set(si_structure) + jdftx_input = input_set.jdftxinput + assert 
jdftx_input["ionic-minimize"]["nIterations"] == 100 + + +def test_latticemin_generator(si_structure, basis_and_potential): + gen = LatticeMinSetGenerator(user_settings=basis_and_potential) + input_set = gen.get_input_set(si_structure) + jdftx_input = input_set.jdftxinput + assert jdftx_input["lattice-minimize"]["nIterations"] == 100 + + +def test_coulomb_truncation(si_structure): + cart_gen = JdftxInputGenerator( + calc_type="surface", user_settings={"coords-type": "Cartesian"} + ) + frac_gen = JdftxInputGenerator( + calc_type="surface", user_settings={"coords-type": "Lattice"} + ) + cart_input_set = cart_gen.get_input_set(si_structure) + frac_input_set = frac_gen.get_input_set(si_structure) + cart_jdftx_input = cart_input_set.jdftxinput + frac_jdftx_input = frac_input_set.jdftxinput + + cart_center_of_mass = np.array( + list(cart_jdftx_input["coulomb-truncation-embed"].values()) + ) + frac_center_of_mass = np.array( + list(frac_jdftx_input["coulomb-truncation-embed"].values()) + ) + assert any(cart_center_of_mass > 1) + assert all(frac_center_of_mass < 1) + assert np.allclose( + cart_center_of_mass, frac_center_of_mass @ si_structure.lattice.matrix + ) diff --git a/tests/test_data/jdftx/ionicmin_test/inputs/init.in.gz b/tests/test_data/jdftx/ionicmin_test/inputs/init.in.gz new file mode 100644 index 0000000000..5f237d198b Binary files /dev/null and b/tests/test_data/jdftx/ionicmin_test/inputs/init.in.gz differ diff --git a/tests/test_data/jdftx/ionicmin_test/outputs/jdftx.out.gz b/tests/test_data/jdftx/ionicmin_test/outputs/jdftx.out.gz new file mode 100644 index 0000000000..e69394f27e Binary files /dev/null and b/tests/test_data/jdftx/ionicmin_test/outputs/jdftx.out.gz differ diff --git a/tests/test_data/jdftx/latticemin_test/inputs/init.in.gz b/tests/test_data/jdftx/latticemin_test/inputs/init.in.gz new file mode 100644 index 0000000000..cf0a5a443e Binary files /dev/null and b/tests/test_data/jdftx/latticemin_test/inputs/init.in.gz differ diff --git 
a/tests/test_data/jdftx/latticemin_test/outputs/jdftx.out.gz b/tests/test_data/jdftx/latticemin_test/outputs/jdftx.out.gz new file mode 100644 index 0000000000..4c8e9d6f8d Binary files /dev/null and b/tests/test_data/jdftx/latticemin_test/outputs/jdftx.out.gz differ diff --git a/tests/test_data/jdftx/sp_test/inputs/init.in.gz b/tests/test_data/jdftx/sp_test/inputs/init.in.gz new file mode 100644 index 0000000000..42fc549402 Binary files /dev/null and b/tests/test_data/jdftx/sp_test/inputs/init.in.gz differ diff --git a/tests/test_data/jdftx/sp_test/outputs/jdftx.out.gz b/tests/test_data/jdftx/sp_test/outputs/jdftx.out.gz new file mode 100644 index 0000000000..595d95eef5 Binary files /dev/null and b/tests/test_data/jdftx/sp_test/outputs/jdftx.out.gz differ