From 143c172a0d939dfc02d5e7996193c7b6eee269ee Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Wed, 17 Apr 2024 13:30:53 +0200 Subject: [PATCH 01/21] First commit --- src/atomate2/abinit/flows/gw.py | 218 +++++++++++++++++++++ src/atomate2/abinit/jobs/core.py | 183 ++++++++++++++++- src/atomate2/abinit/jobs/gw.py | 45 +++++ src/atomate2/abinit/schemas/calculation.py | 146 +++++++++++++- src/atomate2/abinit/schemas/task.py | 77 +++++++- src/atomate2/abinit/sets/gw.py | 130 ++++++++++++ 6 files changed, 792 insertions(+), 7 deletions(-) create mode 100644 src/atomate2/abinit/flows/gw.py create mode 100644 src/atomate2/abinit/jobs/gw.py create mode 100644 src/atomate2/abinit/sets/gw.py diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py new file mode 100644 index 0000000000..8bb4d47be0 --- /dev/null +++ b/src/atomate2/abinit/flows/gw.py @@ -0,0 +1,218 @@ +"""Core abinit flow makers.""" + +from dataclasses import dataclass, field +from pathlib import Path +from typing import List, Optional, Union + +from jobflow import Flow, Maker +from pymatgen.core.structure import Structure + +from atomate2.abinit.jobs.base import BaseAbinitMaker +from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker +from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker +from atomate2.abinit.powerups import update_user_abinit_settings + + +@dataclass +class GWbandsMaker(Maker): + """ + Maker to generate bands for GW caculation. + """ + + name: str = "Bands calculation" + scf_maker: StaticMaker = field(default_factory=StaticMaker) + nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None, + ): + """ + Create a G0W0 flow. + + Parameters + ---------- + structure : Structure + A pymatgen structure object. + restart_from : str or Path or None + One previous directory to restart from. + + Returns + ------- + Flow + A G0W0 flow. 
+ """ + + scf_job = self.scf_maker.make( + structure, + restart_from=restart_from) + nscf_job = self.nscf_maker.make( + prev_outputs=[scf_job.output.dir_name], + mode="uniform", + ) + return Flow([scf_job, nscf_job], output=nscf_job.output, name=self.name) + +@dataclass +class G0W0Maker(Maker): + """ + Maker to generate G0W0 flows. + """ + + name: str = "G0W0 calculation" + nscf_output: str = None + scr_maker: BaseAbinitMaker = field(default_factory=ScreeningMaker) + sigma_maker: BaseAbinitMaker = field(default_factory=SigmaMaker) + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None + ): + """ + Create a G0W0 flow. + + Parameters + ---------- + structure : Structure + A pymatgen structure object. + restart_from : str or Path or None + One previous directory to restart from. + + Returns + ------- + Flow + A G0W0 flow. + """ + + scr_job = self.scr_maker.make( + prev_outputs=[self.nscf_output], + ) + m_scr_job = update_user_abinit_settings( + flow=scr_job, + abinit_updates={"iomode": 3} + ) + sigma_job = self.sigma_maker.make( + prev_outputs=[self.nscf_output, scr_job.output.dir_name], + ) + m_sigma_job = update_user_abinit_settings( + flow=sigma_job, + abinit_updates={"gw_qprange": 0, "iomode": 3} + ) + return Flow([m_scr_job, m_sigma_job], output=m_sigma_job.output, name=self.name) + +@dataclass +class G0W0ConvergenceMaker(Maker): + """ + Maker to generate convergence of G0W0 calculations. + """ + + name: str = "G0W0 calculation" + scf_maker: StaticMaker = field(default_factory=StaticMaker) + nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) + scr_makers: List[ScreeningMaker] = field(default_factory=lambda: [ScreeningMaker()]) + sigma_makers: List[SigmaMaker] = field(default_factory=lambda: [SigmaMaker()]) + + def __post_init__(self): + # TODO: make some checks on the input sets, e.g.: + # - non scf has to be uniform + # - set istwfk ? or check that it is "*1" ? + # - kpoint shifts ? 
+ pass + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None, + ): + """ + Create a convergence G0W0 flow. + + Parameters + ---------- + structure : Structure + A pymatgen structure object. + restart_from : str or Path or None + One previous directory to restart from. + + Returns + ------- + Flow + A G0W0 flow. + """ + + scf_job = self.scf_maker.make(structure, restart_from=restart_from) + nscf_job = self.nscf_maker.make( + prev_outputs=scf_job.output.dir_name, mode="uniform" + ) + jobs = [scf_job, nscf_job] + for scr_maker in self.scr_makers: + scr_job = scr_maker.make(prev_outputs=nscf_job.output.dir_name) + jobs.append(scr_job) + for sigma_maker in self.sigma_makers: + sigma_job = sigma_maker.make( + prev_outputs=[nscf_job.output.dir_name, scr_job.output.dir_name] + ) + jobs.append(sigma_job) + + return Flow(jobs, name=self.name) + + +@dataclass +class PeriodicGWConvergenceMaker(Maker): + """ + A maker to perform a GW workflow with automatic convergence in FHI-aims. + + Parameters + ---------- + name : str + A name for the job + criterion_name: str + A name for the convergence criterion. Must be in the run results + epsilon: float + A difference in criterion value for subsequent runs + convergence_field: str + An input parameter that changes to achieve convergence + convergence_steps: list | tuple + An iterable of the possible values for the convergence field. + If the iterable is depleted and the convergence is not reached, + that the job is failed + """ + + name: str = "GW convergence" + criterion_name: str = "bandgap" + epsilon: float = 0.1 + convergence_field: str = field(default_factory=str) + convergence_steps: list = field(default_factory=list) + + #def __post_init__(self): + # TODO: make some checks on the input sets, e.g.: + # - non scf has to be uniform + # - set istwfk ? or check that it is "*1" ? + # - kpoint shifts ? 
+ # - check nbands in nscf is >= nband in screening and sigma + # pass + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None, + ): + + #scf_job = self.scf_maker.make(structure, restart_from=restart_from) + #nscf_job = self.nscf_maker.make( + # prev_outputs=scf_job.output.dir_name, mode="uniform" + #) + #scr_job = self.scr_maker.make(prev_outputs=["../nscf"],abinit_settings={self.convergence_field: value}) + #static = GWbandsMaker().make(structure) + gw_maker = G0W0Maker(nscf_output='/home/ucl/modl/tbiswas/abinit_run/nscf') + convergence = ConvergenceMaker( + maker=gw_maker, + epsilon=self.epsilon, + criterion_name=self.criterion_name, + convergence_field=self.convergence_field, + convergence_steps=self.convergence_steps, + ) + gw = convergence.make(structure) + return Flow([gw], gw.output, name=self.name) + diff --git a/src/atomate2/abinit/jobs/core.py b/src/atomate2/abinit/jobs/core.py index 2e1e73d9d6..4dd5c07195 100644 --- a/src/atomate2/abinit/jobs/core.py +++ b/src/atomate2/abinit/jobs/core.py @@ -3,6 +3,8 @@ from __future__ import annotations import logging +import json +from pathlib import Path from dataclasses import dataclass, field from typing import TYPE_CHECKING, ClassVar @@ -12,9 +14,11 @@ RelaxConvergenceWarning, ScfConvergenceWarning, ) -from jobflow import Job, job +from jobflow import Job, job, Maker, Response, Flow from atomate2.abinit.jobs.base import BaseAbinitMaker +from atomate2.abinit.powerups import update_user_abinit_settings +from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary from atomate2.abinit.sets.core import ( LineNonSCFSetGenerator, NonSCFSetGenerator, @@ -33,6 +37,10 @@ from atomate2.abinit.utils.history import JobHistory logger = logging.getLogger(__name__) +CONVERGENCE_FILE_NAME = "convergence.json" + +__all__ = ["StaticMaker", "NonSCFMaker", "RelaxMaker", "ConvergenceMaker"] + @dataclass @@ -194,3 +202,176 @@ def full_relaxation(cls, *args, **kwargs) -> Job: return 
cls( input_set_generator=RelaxSetGenerator(*args, relax_cell=True, **kwargs) ) +@dataclass +class ConvergenceMaker(Maker): + """A job that performs convergence run for a given number of steps. Stops either + when all steps are done, or when the convergence criterion is reached, that is when + the absolute difference between the subsequent values of the convergence field is + less than a given epsilon. + + Parameters + ---------- + name : str + A name for the job + maker: .BaseAbinitMaker + A maker for the run + criterion_name: str + A name for the convergence criterion. Must be in the run results + epsilon: float + A difference in criterion value for subsequent runs + convergence_field: str + An input parameter that changes to achieve convergence + convergence_steps: list | tuple + An iterable of the possible values for the convergence field. + If the iterable is depleted and the convergence is not reached, + that the job is failed + """ + + name: str = "Convergence job" + maker: BaseAbinitMaker = field(default_factory=BaseAbinitMaker) + criterion_name: str = "energy_per_atom" + epsilon: float = 0.001 + convergence_field: str = field(default_factory=str) + convergence_steps: list = field(default_factory=list) + def __post_init__(self): + self.last_idx = len(self.convergence_steps) + + def make(self, structure): + """A top-level flow controlling convergence iteration + + Parameters + ---------- + atoms : MSONableAtoms + a structure to run a job + """ + convergence_job = self.convergence_iteration(structure) + return Flow([convergence_job], output=convergence_job.output) + + @job + def convergence_iteration( + self, + structure: Structure, + prev_dir: str | Path = None, + ) -> Response: + """ + Runs several jobs with changing inputs consecutively to investigate + convergence in the results + + Parameters + ---------- + structure : Structure + The structure to run the job for + prev_dir: str | None + An Abinit calculation directory in which previous run contents are 
stored + + Returns + ------- + The output response for the job + """ + + # getting the calculation index + idx = 0 + converged = False + if prev_dir is not None: + prev_dir = prev_dir.split(":")[-1] + convergence_file = Path(prev_dir) / CONVERGENCE_FILE_NAME + idx += 1 + if convergence_file.exists(): + with open(convergence_file) as f: + data = json.load(f) + idx = data["idx"] + 1 + # check for convergence + converged = data["converged"] + + if idx < self.last_idx and not converged: + # finding next jobs + base_job = self.maker.make(structure) + next_base_job = update_user_abinit_settings(flow=base_job, abinit_updates={self.convergence_field: self.convergence_steps[idx]}) + next_base_job.append_name(append_str=f" {idx}") + + update_file_job = self.update_convergence_file( + prev_dir=prev_dir, + job_dir=next_base_job.output.dir_name, + output=next_base_job.output, + ) + + next_job = self.convergence_iteration( + structure, prev_dir=next_base_job.output.dir_name + ) + + replace_flow = Flow( + [next_base_job, update_file_job, next_job], output=next_base_job.output + ) + return Response(detour=replace_flow, output=replace_flow.output) + else: + task_doc = AbinitTaskDoc.from_directory(prev_dir) + summary = ConvergenceSummary.from_abinit_calc_doc(task_doc) + return summary + + @job(name="Writing a convergence file") + def update_convergence_file( + self, prev_dir: str | Path, job_dir: str | Path, output + ): + """Write a convergence file + + Parameters + ---------- + TO DO: fill out + """ + idx = 0 + if prev_dir is not None: + prev_dir = prev_dir.split(":")[-1] + convergence_file = Path(prev_dir) / CONVERGENCE_FILE_NAME + if convergence_file.exists(): + with open(convergence_file) as f: + convergence_data = json.load(f) + idx = convergence_data["idx"] + 1 + else: + idx = 0 + convergence_data = { + "criterion_name": self.criterion_name, + "asked_epsilon": self.epsilon, + "criterion_values": [], + "convergence_field_name": self.convergence_field, + 
"convergence_field_values": [], + "converged": False, + } + convergence_data["convergence_field_values"].append(self.convergence_steps[idx]) + convergence_data["criterion_values"].append( + getattr(output.output, self.criterion_name) + ) + convergence_data["idx"] = idx + + if len(convergence_data["criterion_values"]) > 1: + # checking for convergence + convergence_data["converged"] = ( + abs( + convergence_data["criterion_values"][-1] + - convergence_data["criterion_values"][-2] + ) + < self.epsilon + ) + + job_dir = job_dir.split(":")[-1] + convergence_file = Path(job_dir) / CONVERGENCE_FILE_NAME + with open(convergence_file, "w") as f: + json.dump(convergence_data, f) + + @job(name="Getting the results") + def get_results(self, prev_dir: Path | str) -> Dict[str, Any]: + """Get the results for a calculation from a given directory + + Parameters + ---------- + prev_dir: Path or str + The calculation directory to get the results for + + Results + ------- + The results dictionary loaded from the JSON file + """ + convergence_file = Path(prev_dir) / CONVERGENCE_FILE_NAME + with open(convergence_file) as f: + return json.load(f) + + diff --git a/src/atomate2/abinit/jobs/gw.py b/src/atomate2/abinit/jobs/gw.py new file mode 100644 index 0000000000..adba903f1e --- /dev/null +++ b/src/atomate2/abinit/jobs/gw.py @@ -0,0 +1,45 @@ +"""Core jobs for running ABINIT calculations.""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field + +from atomate2.abinit.jobs.base import BaseAbinitMaker +from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator + +logger = logging.getLogger(__name__) + +__all__ = ["ScreeningMaker", "SigmaMaker"] + + +@dataclass +class ScreeningMaker(BaseAbinitMaker): + """Maker to create ABINIT scf jobs. + + Parameters + ---------- + name : str + The job name. 
+ """ + + calc_type: str = "scr" + name: str = "Screening calculation" + + input_set_generator: ScreeningSetGenerator = field( + default_factory=ScreeningSetGenerator + ) + + # CRITICAL_EVENTS: ClassVar[Sequence[str]] = ("ScfConvergenceWarning",) + + +@dataclass +class SigmaMaker(BaseAbinitMaker): + """Maker to create non SCF calculations.""" + + calc_type: str = "sigma" + name: str = "Sigma calculation" + + input_set_generator: SigmaSetGenerator = field(default_factory=SigmaSetGenerator) + + # CRITICAL_EVENTS: ClassVar[Sequence[str]] = ("ScfConvergenceWarning",) diff --git a/src/atomate2/abinit/schemas/calculation.py b/src/atomate2/abinit/schemas/calculation.py index 64b694ef5b..5b0110d9d8 100644 --- a/src/atomate2/abinit/schemas/calculation.py +++ b/src/atomate2/abinit/schemas/calculation.py @@ -4,6 +4,7 @@ import logging import os +import pandas as pd from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Optional, Union @@ -12,6 +13,8 @@ pass from abipy.electrons.gsr import GsrFile +from abipy.electrons.scr import ScrFile +from abipy.electrons.gw import SigresFile from abipy.flowtk import events from abipy.flowtk.utils import File from emmet.core.math import Matrix3D, Vector3D @@ -105,6 +108,19 @@ class CalculationOutput(BaseModel): description="The valence band maximum, or HOMO for molecules, in eV " "(if system is not metallic)", ) + qp_corr_vbm: Optional[float] = Field( + None, + description="The valence band maximum, or HOMO for molecules, in eV " + "(if system is not metallic)", + ) + qp_corr_cbm: Optional[float] = Field( + None, + description="The valence band maximum, or HOMO for molecules, in eV " + "(if system is not metallic)", + ) + + class Config: + arbitrary_types_allowed = True @classmethod def from_abinit_gsr( @@ -161,6 +177,111 @@ def from_abinit_gsr( forces=forces, stress=stress, ) + @classmethod + def from_abinit_scr( + cls, + output: ScrFile, # Must use auto_load kwarg when passed + ) -> CalculationOutput: + 
""" + Create an Abinit output document from Abinit outputs. + + Parameters + ---------- + output: .AbinitOutput + An AbinitOutput object. + + Returns + ------- + The Abinit calculation output document. + """ + structure = output.structure # final structure by default for GSR + + # In case no conduction bands were included + try: + cbm = output.ebands.get_edge_state("cbm").eig + bandgap = output.ebands.fundamental_gaps[ + 0 + ].energy # [0] for one spin channel only + direct_bandgap = output.ebands.direct_gaps[0].energy + except ValueError: + cbm = None + bandgap = None + direct_bandgap = None + + electronic_output = { + "efermi": float(output.ebands.fermie), + "vbm": output.ebands.get_edge_state("vbm").eig, + "cbm": cbm, + "bandgap": bandgap, + "direct_bandgap": direct_bandgap, + } + + #forces = None + #if output.cart_forces is not None: + # forces = output.cart_forces.tolist() + + #stress = None + #if output.cart_stress_tensor is not None: + # stress = output.cart_stress_tensor.tolist() + return cls( + structure=structure, + energy=0.0, + energy_per_atom=0.0, + **electronic_output, + #forces=forces, + #stress=stress, + ) + @classmethod + def from_abinit_sig( + cls, + output: SigresFile, # Must use auto_load kwarg when passed + ) -> CalculationOutput: + """ + Create an Abinit output document from Abinit outputs. + + Parameters + ---------- + output: .AbinitOutput + An AbinitOutput object. + + Returns + ------- + The Abinit calculation output document. 
+ """ + structure = output.structure # final structure by default for GSR + + # In case no conduction bands were included + ivbm=output.ebands.get_edge_state("vbm") + icbm=output.ebands.get_edge_state("cbm") + vbm=output.get_qpcorr(ivbm.spin, ivbm.kpoint, ivbm.band).re_qpe + cbm=output.get_qpcorr(icbm.spin, icbm.kpoint, icbm.band).re_qpe + bandgap=cbm-vbm + direct_bandgap=None + electronic_output = { + "efermi": float(output.ebands.fermie), + "vbm": vbm, + "cbm": cbm, + "bandgap": bandgap, + "direct_bandgap": direct_bandgap, + } + #forces = None + #if output.cart_forces is not None: + # forces = output.cart_forces.tolist() + + #stress = None + #if output.cart_stress_tensor is not None: + # stress = output.cart_stress_tensor.tolist() + + #qp_data=output.get_dataframe() + return cls( + structure=structure, + energy=0.0, + energy_per_atom=0.0, + **electronic_output, + #qp_data=qp_data, + #forces=forces, + #stress=stress, + ) class Calculation(BaseModel): @@ -208,9 +329,13 @@ def from_abinit_files( cls, dir_name: Path | str, task_name: str, + abinit_gsr_file: Path | str = "out_GSR.nc", + abinit_scr_file: Path | str = "out_SCR.nc", + abinit_sig_file: Path | str = "out_SIGRES.nc", abinit_log_file: Path | str = LOG_FILE_NAME, abinit_abort_file: Path | str = MPIABORTFILE, + ) -> tuple[Calculation, dict[AbinitObject, dict]]: """ Create an Abinit calculation document from a directory and file paths. 
@@ -235,14 +360,27 @@ def from_abinit_files( """ dir_name = Path(dir_name) abinit_gsr_file = dir_name / abinit_gsr_file + abinit_scr_file = dir_name / abinit_scr_file + abinit_sig_file = dir_name / abinit_sig_file abinit_log_file = dir_name / abinit_log_file abinit_abort_file = dir_name / abinit_abort_file - - abinit_gsr = GsrFile.from_file(abinit_gsr_file) + if os.path.isfile(abinit_gsr_file): + abinit_gsr = GsrFile.from_file(abinit_gsr_file) + output_doc = CalculationOutput.from_abinit_gsr(abinit_gsr) + abinit_version = abinit_gsr.abinit_version + elif os.path.isfile(abinit_scr_file): + abinit_scr = ScrFile.from_file(abinit_scr_file) + output_doc = CalculationOutput.from_abinit_scr(abinit_scr) + abinit_version = abinit_scr.abinit_version + elif os.path.isfile(abinit_sig_file): + abinit_sig = SigresFile.from_file(abinit_sig_file) + output_doc = CalculationOutput.from_abinit_sig(abinit_sig) + abinit_version = abinit_sig.abinit_version + else: + print("No ouput file found.") completed_at = str(datetime.fromtimestamp(os.stat(abinit_log_file).st_mtime)) - output_doc = CalculationOutput.from_abinit_gsr(abinit_gsr) report = None has_abinit_completed = TaskState.FAILED @@ -267,7 +405,7 @@ def from_abinit_files( cls( dir_name=str(dir_name), task_name=task_name, - abinit_version=abinit_gsr.abinit_version, + abinit_version=abinit_version, has_abinit_completed=has_abinit_completed, completed_at=completed_at, output=output_doc, diff --git a/src/atomate2/abinit/schemas/task.py b/src/atomate2/abinit/schemas/task.py index 5a24b97c1d..548a95cd00 100644 --- a/src/atomate2/abinit/schemas/task.py +++ b/src/atomate2/abinit/schemas/task.py @@ -2,6 +2,7 @@ from __future__ import annotations +import json import logging from collections.abc import Sequence from pathlib import Path @@ -234,7 +235,6 @@ class AbinitTaskDoc(StructureMetadata): additional_json: Optional[dict[str, Any]] = Field( None, description="Additional json loaded from the calculation directory" ) - @classmethod def 
from_directory( cls: type[_T], @@ -364,7 +364,10 @@ def _get_task_files(files: list[Path], suffix: str = "") -> dict: abinit_files["abinit_log_file"] = Path(file).relative_to(path) elif file.match(f"*{MPIABORTFILE}{suffix}*"): abinit_files["abinit_abort_file"] = Path(file).relative_to(path) - + if file.match(f"*outdata/out_SCR{suffix}*"): + abinit_files["abinit_scr_file"] = Path(file).relative_to(path) + if file.match(f"*outdata/out_SIGRES{suffix}*"): + abinit_files["abinit_sig_file"] = Path(file).relative_to(path) return abinit_files for task_name in task_names: @@ -388,3 +391,73 @@ def _get_task_files(files: list[Path], suffix: str = "") -> dict: task_files["standard"] = standard_files return task_files + +class ConvergenceSummary(BaseModel): + """Summary of the outputs for an Abinit convergence calculation.""" + + structure: Structure = Field(None, description="The output structure object") + converged: bool = Field(None, description="Is convergence achieved?") + + convergence_criterion_name: str = Field( + None, description="The output name of the convergence criterion" + ) + convergence_field_name: str = Field( + None, description="The name of the input setting to study convergence against" + ) + convergence_criterion_value: float = Field( + None, description="The output value of the convergence criterion" + ) + convergence_field_value: Any = Field( + None, + description="The last value of the input setting to study convergence against", + ) + asked_epsilon: float = Field( + None, + description="The difference in the values for the convergence criteria that was asked for", + ) + actual_epsilon: float = Field( + None, description="The actual difference in the convergence criteria values" + ) + + @classmethod + def from_abinit_calc_doc(cls, calc_doc: Calculation) -> "ConvergenceSummary": + """ + Create a summary of Abinit calculation outputs from an Abinit calculation document. + + Parameters + ---------- + calc_doc + An Abinit calculation document. 
+ + Returns + ------- + :ConvergenceSummary + The summary for convergence runs. + """ + + from atomate2.abinit.jobs.core import CONVERGENCE_FILE_NAME + + job_dir = calc_doc.dir_name.split(":")[-1] + + convergence_file = Path(job_dir) / CONVERGENCE_FILE_NAME + if not convergence_file.exists(): + raise ValueError( + f"Did not find the convergence json file {CONVERGENCE_FILE_NAME} in {calc_doc.dir_name}" + ) + + with open(convergence_file) as f: + convergence_data = json.load(f) + + return cls( + structure=calc_doc.output.structure, + converged=convergence_data["converged"], + convergence_criterion_name=convergence_data["criterion_name"], + convergence_field_name=convergence_data["convergence_field_name"], + convergence_criterion_value=convergence_data["criterion_values"][-1], + convergence_field_value=convergence_data["convergence_field_values"][-1], + asked_epsilon=convergence_data["asked_epsilon"], + actual_epsilon=abs( + convergence_data["criterion_values"][-2] + - convergence_data["criterion_values"][-1] + ), + ) diff --git a/src/atomate2/abinit/sets/gw.py b/src/atomate2/abinit/sets/gw.py new file mode 100644 index 0000000000..783d64438c --- /dev/null +++ b/src/atomate2/abinit/sets/gw.py @@ -0,0 +1,130 @@ +"""Module defining Abinit input set generators specific to GW calculations.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Callable + +from abipy.abio.factories import scr_from_nscfinput, sigma_from_inputs, scf_input +from abipy.abio.input_tags import NSCF, SCREENING + +from atomate2.abinit.files import load_abinit_input +from atomate2.abinit.sets.base import AbinitInputGenerator + +if TYPE_CHECKING: + from abipy.abio.inputs import AbinitInput + from pymatgen.core import Structure + from pymatgen.io.abinit import PseudoTable + from pymatgen.io.abinit.abiobjects import KSampling + +__all__ = [ + "ScreeningSetGenerator", + "SigmaSetGenerator", +] + + +@dataclass +class 
ScreeningSetGenerator(AbinitInputGenerator): + """Class to generate Abinit Screening input sets.""" + + + calc_type: str = "scr" + factory: Callable = scr_from_nscfinput + pseudos: str | list[str] | PseudoTable | None = None + prev_outputs_deps: tuple = (f"{NSCF}:WFK",) + factory_kwargs: dict = field(default_factory=dict) + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"nscf_input": (NSCF,)} + ) + + + def get_abinit_input( + self, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, + prev_outputs: list[str] | None = None, + factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, + ) -> AbinitInput: + + + nscf_inp = load_abinit_input(prev_outputs[0]) + factory_kwargs={"ecutwfn": nscf_inp["ecut"]} + return super().get_abinit_input( + structure=structure, + pseudos=pseudos, + prev_outputs=prev_outputs, + factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, + ) + + +@dataclass +class SigmaSetGenerator(AbinitInputGenerator): + """Class to generate Abinit Sigma input sets.""" + + calc_type: str = "sigma" + factory: Callable = sigma_from_inputs + pseudos: str | list[str] | PseudoTable | None = None + prev_outputs_deps: tuple = (f"{NSCF}:WFK", f"{SCREENING}:SCR") + factory_kwargs: dict = field(default_factory=dict) + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"nscf_input": (NSCF,), "scr_input": (SCREENING,)} + ) + + def get_abinit_input( + self, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, + prev_outputs: list[str] | None = None, + abinit_settings: dict | None = None, + factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, + ) -> AbinitInput: + + """Get AbinitInput object for SCR calculation.""" + if prev_outputs is None: + raise RuntimeError("No previous_outputs. 
Cannot perform Sigma calculation.") + if len(prev_outputs) != 2: + raise RuntimeError( + "Should have exactly two previous outputs (one NSCF calculation " + "and one SCREENING calculation)." + ) + ab1 = load_abinit_input(prev_outputs[0]) + ab2 = load_abinit_input(prev_outputs[1]) + if NSCF in ab1.runlevel and SCREENING in ab2.runlevel: + nscf_inp = ab1 + scr_inp = ab2 + elif SCREENING in ab1.runlevel and NSCF in ab2.runlevel: + nscf_inp = ab2 + scr_inp = ab1 + else: + raise RuntimeError("Could not find one NSCF and one SCREENING calculation.") + # TODO: do we need to check that the structures are the same in nscf and + # screening ? + + #previous_structure = get_final_structure(prev_outputs[0]) + # TODO: the structure in the previous abinit input may be slightly different + # from the one in the previous output (if abinit symmetrizes the structure) + # Should we set the structure in the previous_abinit_input ? Or should we + # assume that abinit will make the same symmetrization ? + # Or should we always symmetrize the structure before ? + # Or should we always set tolsym to 1.0e-8 ? + #nscf_inp.set_structure(previous_structure) + #scr_inp.set_structure(previous_structure) + if structure is not None: + if structure != previous_structure: + raise RuntimeError( + "Structure is provided in non-SCF input set generator but " + "is not the same as the one from the previous (SCF) input set." + ) + + # Sigma. 
+ factory_kwargs={"ecutsigx": scr_inp["ecut"]} + return super().get_abinit_input( + structure=structure, + pseudos=pseudos, + prev_outputs=prev_outputs, + factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, + ) From 6e4b9529d08edeba98bfee9566cb427c9765aafa Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Thu, 18 Apr 2024 13:13:45 +0200 Subject: [PATCH 02/21] GW convergence fixed --- src/atomate2/abinit/flows/gw.py | 106 ++++++++++--------------------- src/atomate2/abinit/jobs/core.py | 13 ++-- 2 files changed, 44 insertions(+), 75 deletions(-) diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index 8bb4d47be0..1d1ba19647 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -56,17 +56,40 @@ def make( @dataclass class G0W0Maker(Maker): """ - Maker to generate G0W0 flows. + Maker to perform G0W0 calculation from previous GWbands calculation. + + This is a screening calculation followed by a sigma calculations, + one can perform QP corrections only for bandedges (useful for + convergence calculations) or at all k-points. + + Parameters + ---------- + name : str + Name of the flows produced by this maker. + scr_maker : .BaseAbinitMaker + The maker to use for the screening calculation. + sigma_maker : .BaseAbinitMaker + The maker to use for the sigma calculations. + gw_qprange: int + 0 - Compute the QP corrections only for the fundamental and the direct gap + + +num - Compute the QP corrections for all the k-points in the irreducible zone + , and include num bands above and below the Fermi level. + + -num - Compute the QP corrections for all the k-points in the irreducible zone. + Include all occupied states and num empty states. 
+ """ name: str = "G0W0 calculation" - nscf_output: str = None scr_maker: BaseAbinitMaker = field(default_factory=ScreeningMaker) sigma_maker: BaseAbinitMaker = field(default_factory=SigmaMaker) + gw_qprange: int = 0 def make( self, structure: Structure, + prev_outputs: str = None, restart_from: Optional[Union[str, Path]] = None ): """ @@ -76,6 +99,9 @@ def make( ---------- structure : Structure A pymatgen structure object. + prev_outputs : str + One previous directory where ncsf + calculation were performed. restart_from : str or Path or None One previous directory to restart from. @@ -86,18 +112,18 @@ def make( """ scr_job = self.scr_maker.make( - prev_outputs=[self.nscf_output], + prev_outputs=prev_outputs, ) m_scr_job = update_user_abinit_settings( flow=scr_job, abinit_updates={"iomode": 3} ) sigma_job = self.sigma_maker.make( - prev_outputs=[self.nscf_output, scr_job.output.dir_name], + prev_outputs=[prev_outputs, scr_job.output.dir_name], ) m_sigma_job = update_user_abinit_settings( flow=sigma_job, - abinit_updates={"gw_qprange": 0, "iomode": 3} + abinit_updates={"gw_qprange": self.gw_qprange, "iomode": 3} ) return Flow([m_scr_job, m_sigma_job], output=m_sigma_job.output, name=self.name) @@ -105,63 +131,6 @@ def make( class G0W0ConvergenceMaker(Maker): """ Maker to generate convergence of G0W0 calculations. - """ - - name: str = "G0W0 calculation" - scf_maker: StaticMaker = field(default_factory=StaticMaker) - nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) - scr_makers: List[ScreeningMaker] = field(default_factory=lambda: [ScreeningMaker()]) - sigma_makers: List[SigmaMaker] = field(default_factory=lambda: [SigmaMaker()]) - - def __post_init__(self): - # TODO: make some checks on the input sets, e.g.: - # - non scf has to be uniform - # - set istwfk ? or check that it is "*1" ? - # - kpoint shifts ? - pass - - def make( - self, - structure: Structure, - restart_from: Optional[Union[str, Path]] = None, - ): - """ - Create a convergence G0W0 flow. 
- - Parameters - ---------- - structure : Structure - A pymatgen structure object. - restart_from : str or Path or None - One previous directory to restart from. - - Returns - ------- - Flow - A G0W0 flow. - """ - - scf_job = self.scf_maker.make(structure, restart_from=restart_from) - nscf_job = self.nscf_maker.make( - prev_outputs=scf_job.output.dir_name, mode="uniform" - ) - jobs = [scf_job, nscf_job] - for scr_maker in self.scr_makers: - scr_job = scr_maker.make(prev_outputs=nscf_job.output.dir_name) - jobs.append(scr_job) - for sigma_maker in self.sigma_makers: - sigma_job = sigma_maker.make( - prev_outputs=[nscf_job.output.dir_name, scr_job.output.dir_name] - ) - jobs.append(sigma_job) - - return Flow(jobs, name=self.name) - - -@dataclass -class PeriodicGWConvergenceMaker(Maker): - """ - A maker to perform a GW workflow with automatic convergence in FHI-aims. Parameters ---------- @@ -199,13 +168,8 @@ def make( restart_from: Optional[Union[str, Path]] = None, ): - #scf_job = self.scf_maker.make(structure, restart_from=restart_from) - #nscf_job = self.nscf_maker.make( - # prev_outputs=scf_job.output.dir_name, mode="uniform" - #) - #scr_job = self.scr_maker.make(prev_outputs=["../nscf"],abinit_settings={self.convergence_field: value}) - #static = GWbandsMaker().make(structure) - gw_maker = G0W0Maker(nscf_output='/home/ucl/modl/tbiswas/abinit_run/nscf') + static = GWbandsMaker().make(structure) + gw_maker = G0W0Maker() convergence = ConvergenceMaker( maker=gw_maker, epsilon=self.epsilon, @@ -213,6 +177,6 @@ def make( convergence_field=self.convergence_field, convergence_steps=self.convergence_steps, ) - gw = convergence.make(structure) - return Flow([gw], gw.output, name=self.name) + gw = convergence.make(structure, prev_outputs=static.output.dir_name) + return Flow([static, gw], gw.output, name=self.name) diff --git a/src/atomate2/abinit/jobs/core.py b/src/atomate2/abinit/jobs/core.py index 4dd5c07195..1858d97b87 100644 --- a/src/atomate2/abinit/jobs/core.py +++ 
b/src/atomate2/abinit/jobs/core.py @@ -233,10 +233,14 @@ class ConvergenceMaker(Maker): epsilon: float = 0.001 convergence_field: str = field(default_factory=str) convergence_steps: list = field(default_factory=list) + def __post_init__(self): self.last_idx = len(self.convergence_steps) - def make(self, structure): + def make( + self, + structure: Structure, + prev_outputs: str | Path = None): """A top-level flow controlling convergence iteration Parameters @@ -244,7 +248,7 @@ def make(self, structure): atoms : MSONableAtoms a structure to run a job """ - convergence_job = self.convergence_iteration(structure) + convergence_job = self.convergence_iteration(structure, prev_outputs=prev_outputs) return Flow([convergence_job], output=convergence_job.output) @job @@ -252,6 +256,7 @@ def convergence_iteration( self, structure: Structure, prev_dir: str | Path = None, + prev_outputs: str | Path = None, ) -> Response: """ Runs several jobs with changing inputs consecutively to investigate @@ -285,7 +290,7 @@ def convergence_iteration( if idx < self.last_idx and not converged: # finding next jobs - base_job = self.maker.make(structure) + base_job = self.maker.make(structure, prev_outputs=prev_outputs) next_base_job = update_user_abinit_settings(flow=base_job, abinit_updates={self.convergence_field: self.convergence_steps[idx]}) next_base_job.append_name(append_str=f" {idx}") @@ -296,7 +301,7 @@ def convergence_iteration( ) next_job = self.convergence_iteration( - structure, prev_dir=next_base_job.output.dir_name + structure, prev_dir=next_base_job.output.dir_name, prev_outputs=prev_outputs, ) replace_flow = Flow( From 465d15e45790c33de820a2f1f9eeb83f18453195 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Tue, 23 Apr 2024 18:42:19 +0200 Subject: [PATCH 03/21] First commit --- src/atomate2/abinit/jobs/gw.py | 11 ++- src/atomate2/abinit/sets/bse.py | 24 +++++++ src/atomate2/abinit/sets/core.py | 1 - src/atomate2/abinit/sets/gw.py | 114 +++++++++++++++++++++++++++++-- 4 
files changed, 143 insertions(+), 7 deletions(-) create mode 100644 src/atomate2/abinit/sets/bse.py diff --git a/src/atomate2/abinit/jobs/gw.py b/src/atomate2/abinit/jobs/gw.py index adba903f1e..56310cd689 100644 --- a/src/atomate2/abinit/jobs/gw.py +++ b/src/atomate2/abinit/jobs/gw.py @@ -6,7 +6,7 @@ from dataclasses import dataclass, field from atomate2.abinit.jobs.base import BaseAbinitMaker -from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator +from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator, BSENonSCFSetGenerator logger = logging.getLogger(__name__) @@ -43,3 +43,12 @@ class SigmaMaker(BaseAbinitMaker): input_set_generator: SigmaSetGenerator = field(default_factory=SigmaSetGenerator) # CRITICAL_EVENTS: ClassVar[Sequence[str]] = ("ScfConvergenceWarning",) + +@dataclass +class BSENonSCFMaker(BaseAbinitMaker): + """Maker to create non SCF calculations.""" + + calc_type: str = "bse_nscf" + name: str = "Sigma calculation" + + input_set_generator: BSENonSCFSetGenerator = field(default_factory=BSENonSCFSetGenerator) diff --git a/src/atomate2/abinit/sets/bse.py b/src/atomate2/abinit/sets/bse.py new file mode 100644 index 0000000000..1ee5e10f8d --- /dev/null +++ b/src/atomate2/abinit/sets/bse.py @@ -0,0 +1,24 @@ +# coding: utf-8 +"""Factory functions for Abinit input files """ +from __future__ import annotations + +import numpy as np +import pymatgen.io.abinit.abiobjects as aobj +from abipy.abio.inputs import AbinitInput + +def bse_with_mdf_from_inputs(nscf_input, sigma_input, bs_loband, bs_nband, + mdf_epsinf, mbpt_sciss, exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized", + smearing="fermi_dirac:0.1 eV") -> AbinitInput: + """Return a BSE input.""" + + bse_input = nscf_input.deepcopy() + bse_input.pop_irdvars() + + exc_ham = aobj.ExcHamiltonian(bs_loband, bs_nband, mbpt_sciss, coulomb_mode="model_df", ecuteps=sigma_input["ecuteps"], + spin_mode=spin_mode, mdf_epsinf=
exc_type=exc_type, algo=bs_algo, + bs_freq_mesh=None, with_lf=True, zcut=None) + + bse_input.set_vars(exc_ham.to_abivars()) + # TODO: Cannot use istwfk != 1. + bse_input.set_vars(istwfk="*1") + return bse_input diff --git a/src/atomate2/abinit/sets/core.py b/src/atomate2/abinit/sets/core.py index 184c6cc673..bada1b7e5e 100644 --- a/src/atomate2/abinit/sets/core.py +++ b/src/atomate2/abinit/sets/core.py @@ -98,7 +98,6 @@ def get_abinit_input( """Get AbinitInput object for Non-SCF calculation.""" factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} factory_kwargs["nband"] = self._get_nband(prev_outputs) - return super().get_abinit_input( structure=structure, pseudos=pseudos, diff --git a/src/atomate2/abinit/sets/gw.py b/src/atomate2/abinit/sets/gw.py index 783d64438c..018eabdb73 100644 --- a/src/atomate2/abinit/sets/gw.py +++ b/src/atomate2/abinit/sets/gw.py @@ -5,17 +5,19 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING, Callable -from abipy.abio.factories import scr_from_nscfinput, sigma_from_inputs, scf_input -from abipy.abio.input_tags import NSCF, SCREENING +from abipy.abio.factories import scr_from_nscfinput, sigma_from_inputs, scf_input, nscf_from_gsinput +from atomate2.abinit.sets.bse import bse_with_mdf_from_inputs +from abipy.abio.input_tags import SCF, NSCF, SCREENING, SIGMA from atomate2.abinit.files import load_abinit_input from atomate2.abinit.sets.base import AbinitInputGenerator +from atomate2.abinit.sets.core import NonSCFSetGenerator +from pymatgen.io.abinit.abiobjects import KSampling if TYPE_CHECKING: from abipy.abio.inputs import AbinitInput from pymatgen.core import Structure from pymatgen.io.abinit import PseudoTable - from pymatgen.io.abinit.abiobjects import KSampling __all__ = [ "ScreeningSetGenerator", @@ -49,7 +51,12 @@ def get_abinit_input( nscf_inp = load_abinit_input(prev_outputs[0]) - factory_kwargs={"ecutwfn": nscf_inp["ecut"]} + + if factory_kwargs: + factory_kwargs.update({"ecutwfn": 
nscf_inp["ecut"]}) + else: + factory_kwargs={"ecutwfn": nscf_inp["ecut"]} + return super().get_abinit_input( structure=structure, pseudos=pseudos, @@ -120,7 +127,10 @@ def get_abinit_input( ) # Sigma. - factory_kwargs={"ecutsigx": scr_inp["ecut"]} + if factory_kwargs: + factory_kwargs.update({"ecutsigx": scr_inp["ecut"]}) + else: + factory_kwargs={"ecutsigx": scr_inp["ecut"]} return super().get_abinit_input( structure=structure, pseudos=pseudos, @@ -128,3 +138,97 @@ def get_abinit_input( factory_kwargs=factory_kwargs, kpoints_settings=kpoints_settings, ) + +@dataclass +class BSENonSCFSetGenerator(NonSCFSetGenerator): + """Class to generate Abinit non-SCF input sets.""" + + calc_type: str = "nscf_bse" + factory: Callable = nscf_from_gsinput + pseudos: str | list[str] | PseudoTable | None = None + restart_from_deps: tuple = (f"{NSCF}:WFK",) + prev_outputs_deps: tuple = (f"{SCF}:DEN",) + nbands_factor: float = 1.2 + factory_kwargs: dict = field(default_factory=dict) + + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"gs_input": (SCF,)} + ) + + def get_abinit_input( + self, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, + prev_outputs: list[str] | None = None, + abinit_settings: dict | None = None, + factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, + input_index: int | None = None, + nscf_ngkpt: tuple | None = None, + nscf_shiftk: tuple | None = None, + ) -> AbinitInput: + """Get AbinitInput object for Non-SCF calculation.""" + factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} + factory_kwargs["nband"] = self._get_nband(prev_outputs) + kpoints_settings=KSampling.monkhorst(nscf_ngkpt, shiftk=nscf_shiftk, chksymbreak=0) + return super().get_abinit_input( + structure=structure, + pseudos=pseudos, + prev_outputs=prev_outputs, + abinit_settings=abinit_settings, + factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, + ) + + def _get_nband(self, 
prev_outputs: list[str] | None) -> int: + abinit_inputs = self.resolve_prev_inputs( + prev_outputs, self.factory_prev_inputs_kwargs + ) + if len(abinit_inputs) != 1: + raise RuntimeError( + f"Should have exactly one previous output. Found {len(abinit_inputs)}" + ) + previous_abinit_input = next(iter(abinit_inputs.values())) + n_band = previous_abinit_input.get( + "nband", + previous_abinit_input.structure.num_valence_electrons( + previous_abinit_input.pseudos + ), + ) + +@dataclass +class BSEmdfSetGenerator(AbinitInputGenerator): + """Class to generate Abinit non-SCF input sets.""" + + calc_type: str = "bse_mdf" + factory: Callable = bse_with_mdf_from_inputs + pseudos: str | list[str] | PseudoTable | None = None + prev_outputs_deps: tuple = (f"{NSCF}:WFK",) + factory_kwargs: dict = field(default_factory=dict) + + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"nscf_input": (NSCF,), "sigma_input": (SIGMA,)} + ) + def get_abinit_input( + self, + bs_loband: int | None = 1, + bs_nband: int | None = None, + mdf_epsinf: float | None = None, + mbpt_sciss: float | None = None, + prev_outputs: list[str] | None = None, + abinit_settings: dict | None = None, + factory_kwargs: dict | None = None, + ) -> AbinitInput: + + factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} + factory_kwargs["bs_loband"] = bs_loband + factory_kwargs["bs_nband"] = bs_nband if bs_nband is not None else load_abinit_input(prev_outputs[0])["nband"] + factory_kwargs["mdf_epsinf"] = mdf_epsinf + factory_kwargs["mbpt_sciss"] = mbpt_sciss + + return super().get_abinit_input( + prev_outputs=prev_outputs, + abinit_settings=abinit_settings, + factory_kwargs=factory_kwargs, + ) + From d546062493e91b8f942914bd2bea9018d9da727d Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Mon, 29 Apr 2024 11:39:23 +0200 Subject: [PATCH 04/21] bsemdf added --- src/atomate2/abinit/files.py | 141 --------- src/atomate2/abinit/flows/gw.py | 71 ++++- src/atomate2/abinit/run.py | 65 
---- src/atomate2/abinit/sets/core.py | 2 +- src/atomate2/abinit/sets/factories.py | 25 ++ src/atomate2/abinit/sets/gw.py | 27 +- src/atomate2/abinit/utils/common.py | 432 -------------------------- 7 files changed, 108 insertions(+), 655 deletions(-) delete mode 100644 src/atomate2/abinit/files.py delete mode 100644 src/atomate2/abinit/run.py create mode 100644 src/atomate2/abinit/sets/factories.py delete mode 100644 src/atomate2/abinit/utils/common.py diff --git a/src/atomate2/abinit/files.py b/src/atomate2/abinit/files.py deleted file mode 100644 index af05b9f94d..0000000000 --- a/src/atomate2/abinit/files.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Functions for manipulating Abinit files.""" - -from __future__ import annotations - -import logging -import os -from typing import TYPE_CHECKING - -from abipy.flowtk.utils import abi_extensions -from monty.serialization import loadfn - -from atomate2.abinit.utils.common import INDIR_NAME -from atomate2.utils.file_client import FileClient, auto_fileclient - -if TYPE_CHECKING: - from collections.abc import Iterable - from pathlib import Path - - from abipy.abio.inputs import AbinitInput - from pymatgen.core.structure import Structure - - from atomate2.abinit.sets.base import AbinitInputGenerator - - -logger = logging.getLogger(__name__) - - -ALL_ABIEXTS = abi_extensions() - - -def fname2ext(filepath: Path | str) -> None | str: - """Get the abinit extension of a given filename. - - This will return None if no extension is found. - """ - filename = os.path.basename(filepath) - if "_" not in filename: - return None - ext = filename.split("_")[-1].replace(".nc", "") - if ext not in ALL_ABIEXTS: - return None - return ext - - -@auto_fileclient -def out_to_in( - out_files: Iterable[tuple[str, str]], - src_host: str | None = None, - indir: Path | str = INDIR_NAME, - file_client: FileClient | None = None, - link_files: bool = True, -) -> None: - """ - Copy or link abinit output files to the Abinit input directory. 
- - Parameters - ---------- - out_files : list of tuples - The list of (abinit output filepath, abinit input filename) to be copied - or linked. - src_host : str or None - The source hostname used to specify a remote filesystem. Can be given as - either "username@remote_host" or just "remote_host" in which case the username - will be inferred from the current user. If ``None``, the local filesystem will - be used as the source. - indir : Path or str - The input directory for Abinit input files. - file_client : .FileClient - A file client to use for performing file operations. - link_files : bool - Whether to link the files instead of copying them. - """ - dest_dir = file_client.abspath(indir, host=None) - - for out_filepath, in_file in out_files: - src_file = file_client.abspath(out_filepath, host=src_host) - dest_file = os.path.join(dest_dir, in_file) - if link_files and src_host is None: - file_client.link(src_file, dest_file) - else: - file_client.copy(src_file, dest_file, src_host=src_host) - - -def load_abinit_input( - dirpath: Path | str, fname: str = "abinit_input.json" -) -> AbinitInput: - """Load the AbinitInput object from a given directory. - - Parameters - ---------- - dirpath - Directory to load the AbinitInput from. - fname - Name of the json file containing the AbinitInput. - - Returns - ------- - AbinitInput - The AbinitInput object. - """ - abinit_input_file = os.path.join(dirpath, f"{fname}") - if not os.path.exists(abinit_input_file): - raise NotImplementedError( - f"Cannot load AbinitInput from directory without {fname} file." - ) - - return loadfn(abinit_input_file) - - -def write_abinit_input_set( - structure: Structure, - input_set_generator: AbinitInputGenerator, - prev_outputs: str | Path | list[str] | None = None, - restart_from: str | Path | list[str] | None = None, - directory: str | Path = ".", -) -> None: - """Write the abinit inputs for a given structure using a given generator. 
- - Parameters - ---------- - structure - The structure for which the abinit inputs have to be written. - input_set_generator - The input generator used to write the abinit inputs. - prev_outputs - The list of previous directories needed for the calculation. - restart_from - The previous directory of the same calculation (in case of a restart). - Note that this should be provided as a list of one directory. - directory - Directory in which to write the abinit inputs. - """ - ais = input_set_generator.get_input_set( - structure=structure, - restart_from=restart_from, - prev_outputs=prev_outputs, - ) - if not ais.validate(): - raise RuntimeError("AbinitInputSet is not valid.") - - ais.write_input(directory=directory, make_dir=True, overwrite=False) diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index 1d1ba19647..bfd6b19d17 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -9,8 +9,9 @@ from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker -from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker -from atomate2.abinit.powerups import update_user_abinit_settings +from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker, BSENonSCFMaker, BSEMaker +from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings +from pymatgen.io.abinit.abiobjects import KSampling @dataclass @@ -180,3 +181,69 @@ def make( gw = convergence.make(structure, prev_outputs=static.output.dir_name) return Flow([static, gw], gw.output, name=self.name) +@dataclass +class BSEmdfMaker(Maker): + + bs_nband: int + mdf_epsinf: float + name: str = "BSE mdf calculation" + scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) + nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) + bse_maker: BaseAbinitMaker = field(default_factory=BSEMaker) + bs_loband: int = 1 + mbpt_sciss: 
float = 0.0 + kppa: int = 100 + shiftk: tuple = (0.0, 0.0, 0.0) + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None + ): + + scf_job = self.scf_maker.make( + structure, + restart_from=restart_from + ) + m_scf_job = update_user_kpoints_settings( + flow=scf_job, + kpoints_updates=KSampling.automatic_density( + structure=structure, + kppa=self.kppa, + shifts=self.shiftk) + ) + nscf_job = self.nscf_maker.make( + prev_outputs=[scf_job.output.dir_name], + mode="uniform", + ) + njob=update_user_abinit_settings( + flow=nscf_job, + abinit_updates={ + 'rfelfd': 1, + 'rfdir': (1, 1, 1), + 'nqpt': 1, + 'qpt': (0.0, 0.0, 0.0), + 'kptopt': 2, + 'iscf': -2, + 'tolwfr': 1e-22} + ) + m_nscf_job = update_user_kpoints_settings( + flow=njob, + kpoints_updates=KSampling.automatic_density( + structure=structure, + kppa=self.kppa, + shifts=self.shiftk, + chksymbreak=0) + ) + bse_job = self.bse_maker.make( + prev_outputs=[nscf_job.output.dir_name, + "/home/ucl/modl/tbiswas/scratch/abinit_run/sigma"], + ) + m_bse_job=update_factory_kwargs( + flow=bse_job, factory_updates={ + 'bs_loband': self.bs_loband, + 'bs_nband': self.bs_nband, + 'mdf_epsinf': self.mdf_epsinf, + 'mbpt_sciss': self.mbpt_sciss} + ) + return Flow([m_scf_job, m_nscf_job, m_bse_job], output=m_bse_job.output, name=self.name) diff --git a/src/atomate2/abinit/run.py b/src/atomate2/abinit/run.py deleted file mode 100644 index 4cb5384d98..0000000000 --- a/src/atomate2/abinit/run.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Functions to run ABINIT.""" - -from __future__ import annotations - -import logging -import subprocess -import time - -from abipy.flowtk.qutils import time2slurm - -from atomate2 import SETTINGS -from atomate2.abinit.utils.common import ( - INPUT_FILE_NAME, - LOG_FILE_NAME, - STDERR_FILE_NAME, -) - -__all__ = ["run_abinit"] - - -SLEEP_TIME_STEP = 30 - - -logger = logging.getLogger(__name__) - - -def run_abinit( - abinit_cmd: str = None, - mpirun_cmd: str = None, - 
wall_time: int = None, - start_time: float = None, -) -> None: - """Run ABINIT.""" - abinit_cmd = abinit_cmd or SETTINGS.ABINIT_CMD - mpirun_cmd = mpirun_cmd or SETTINGS.ABINIT_MPIRUN_CMD - start_time = start_time or time.time() - command = [mpirun_cmd, abinit_cmd] if mpirun_cmd is not None else [abinit_cmd] - - max_end_time = 0.0 - if wall_time is not None: - abinit_timelimit = wall_time - if abinit_timelimit > 480: - # TODO: allow tuning this timelimit buffer for abinit, - # e.g. using a config variable or possibly per job - abinit_timelimit -= 240 - command.extend(["--timelimit", time2slurm(abinit_timelimit)]) - max_end_time = start_time + wall_time - - command.append(INPUT_FILE_NAME) - - with open(LOG_FILE_NAME, "w") as stdout, open(STDERR_FILE_NAME, "w") as stderr: - process = subprocess.Popen(command, stdout=stdout, stderr=stderr) # noqa: S603 - - if wall_time is not None: - while True: - time.sleep(SLEEP_TIME_STEP) - if process.poll() is not None: - break - current_time = time.time() - remaining_time = max_end_time - current_time - if remaining_time < 5 * SLEEP_TIME_STEP: - process.terminate() - - process.wait() - return diff --git a/src/atomate2/abinit/sets/core.py b/src/atomate2/abinit/sets/core.py index bada1b7e5e..ceda588802 100644 --- a/src/atomate2/abinit/sets/core.py +++ b/src/atomate2/abinit/sets/core.py @@ -77,7 +77,7 @@ class NonSCFSetGenerator(AbinitInputGenerator): factory: Callable = nscf_from_gsinput pseudos: str | list[str] | PseudoTable | None = None restart_from_deps: tuple = (f"{NSCF}:WFK",) - prev_outputs_deps: tuple = (f"{SCF}:DEN",) + prev_outputs_deps: tuple = (f"{SCF}:DEN", f"{SCF}:WFK",) nbands_factor: float = 1.2 factory_kwargs: dict = field(default_factory=dict) diff --git a/src/atomate2/abinit/sets/factories.py b/src/atomate2/abinit/sets/factories.py new file mode 100644 index 0000000000..1f4947de30 --- /dev/null +++ b/src/atomate2/abinit/sets/factories.py @@ -0,0 +1,25 @@ +# coding: utf-8 +"""Factory functions for Abinit input 
files """ +from __future__ import annotations + +import numpy as np +import pymatgen.io.abinit.abiobjects as aobj +from abipy.abio.inputs import AbinitInput + +def bse_with_mdf_from_inputs(nscf_input, sigma_input, bs_loband, bs_nband, + mdf_epsinf, mbpt_sciss, exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized", + smearing="fermi_dirac:0.1 eV") -> AbinitInput: + """Return a sigma input.""" + + bse_input = nscf_input.deepcopy() + bse_input.pop_irdvars() + + exc_ham = aobj.ExcHamiltonian(bs_loband, bs_nband, mbpt_sciss, coulomb_mode="model_df", ecuteps=sigma_input["ecuteps"], + spin_mode=spin_mode, mdf_epsinf=mdf_epsinf, exc_type=exc_type, algo=bs_algo, + bs_freq_mesh=None, with_lf=True, zcut=None) + + bse_input.set_vars(exc_ham.to_abivars()) + # TODO: Cannot use istwfk != 1. + bse_input.set_vars(istwfk="*1") + bse_input.set_vars(ecutwfn=nscf_input["ecut"]) + return bse_input diff --git a/src/atomate2/abinit/sets/gw.py b/src/atomate2/abinit/sets/gw.py index 018eabdb73..c0b3f05241 100644 --- a/src/atomate2/abinit/sets/gw.py +++ b/src/atomate2/abinit/sets/gw.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Callable from abipy.abio.factories import scr_from_nscfinput, sigma_from_inputs, scf_input, nscf_from_gsinput -from atomate2.abinit.sets.bse import bse_with_mdf_from_inputs +from atomate2.abinit.sets.factories import bse_with_mdf_from_inputs from abipy.abio.input_tags import SCF, NSCF, SCREENING, SIGMA from atomate2.abinit.files import load_abinit_input @@ -140,7 +140,7 @@ def get_abinit_input( ) @dataclass -class BSENonSCFSetGenerator(NonSCFSetGenerator): +class BSENonSCFSetGenerator(AbinitInputGenerator): """Class to generate Abinit non-SCF input sets.""" calc_type: str = "nscf_bse" @@ -163,7 +163,6 @@ def get_abinit_input( abinit_settings: dict | None = None, factory_kwargs: dict | None = None, kpoints_settings: dict | KSampling | None = None, - input_index: int | None = None, nscf_ngkpt: tuple | None = None, nscf_shiftk: tuple | None = 
None, ) -> AbinitInput: @@ -205,30 +204,30 @@ class BSEmdfSetGenerator(AbinitInputGenerator): pseudos: str | list[str] | PseudoTable | None = None prev_outputs_deps: tuple = (f"{NSCF}:WFK",) factory_kwargs: dict = field(default_factory=dict) - factory_prev_inputs_kwargs: dict | None = field( default_factory=lambda: {"nscf_input": (NSCF,), "sigma_input": (SIGMA,)} ) def get_abinit_input( self, - bs_loband: int | None = 1, - bs_nband: int | None = None, - mdf_epsinf: float | None = None, - mbpt_sciss: float | None = None, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, prev_outputs: list[str] | None = None, abinit_settings: dict | None = None, factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, ) -> AbinitInput: - factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} - factory_kwargs["bs_loband"] = bs_loband - factory_kwargs["bs_nband"] = bs_nband if bs_nband is not None else load_abinit_input(prev_outputs[0])["nband"] - factory_kwargs["mdf_epsinf"] = mdf_epsinf - factory_kwargs["mbpt_sciss"] = mbpt_sciss + #factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} + #factory_kwargs["bs_loband"] = bs_loband + #factory_kwargs["bs_nband"] = bs_nband if bs_nband is not None else load_abinit_input(prev_outputs[0])["nband"] + #factory_kwargs["mdf_epsinf"] = mdf_epsinf + #factory_kwargs["mbpt_sciss"] = mbpt_sciss return super().get_abinit_input( + structure=structure, + pseudos=pseudos, prev_outputs=prev_outputs, - abinit_settings=abinit_settings, factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, ) diff --git a/src/atomate2/abinit/utils/common.py b/src/atomate2/abinit/utils/common.py deleted file mode 100644 index 3fd19cb55b..0000000000 --- a/src/atomate2/abinit/utils/common.py +++ /dev/null @@ -1,432 +0,0 @@ -"""Module with common file names and classes used for Abinit flows.""" - -from __future__ import annotations - -import logging -import os -from typing import 
TYPE_CHECKING - -if TYPE_CHECKING: - from pathlib import Path - - from abipy.abio.inputs import AbinitInput - from abipy.core.structure import Structure - from abipy.flowtk.events import EventReport - from jobflow import Flow, Job - - from atomate2.abinit.utils.history import JobHistory - -from abipy.abio.outputs import AbinitOutputFile -from abipy.dfpt.ddb import DdbFile -from abipy.electrons.gsr import GsrFile -from abipy.flowtk import events -from abipy.flowtk.utils import Directory, File -from monty.json import MSONable -from monty.serialization import MontyDecoder - -TMPDIR_NAME = "tmpdata" -OUTDIR_NAME = "outdata" -INDIR_NAME = "indata" -TMPDATAFILE_PREFIX = "tmp" -OUTDATAFILE_PREFIX = "out" -INDATAFILE_PREFIX = "in" -TMPDATA_PREFIX = os.path.join(TMPDIR_NAME, TMPDATAFILE_PREFIX) -OUTDATA_PREFIX = os.path.join(OUTDIR_NAME, OUTDATAFILE_PREFIX) -INDATA_PREFIX = os.path.join(INDIR_NAME, INDATAFILE_PREFIX) -STDERR_FILE_NAME = "run.err" -LOG_FILE_NAME = "run.log" -OUTPUT_FILE_NAME = "run.abo" -OUTNC_FILE_NAME = "out_OUT.nc" -INPUT_FILE_NAME: str = "run.abi" -MPIABORTFILE = "__ABI_MPIABORTFILE__" -DUMMY_FILENAME = "__DUMMY__" -ELPHON_OUTPUT_FILE_NAME = "run.abo_elphon" -DDK_FILES_FILE_NAME = "ddk.files" -HISTORY_JSON = "history.json" - - -logger = logging.getLogger(__name__) - - -class ErrorCode: - """Error code to classify the errors.""" - - ERROR = "Error" - UNRECOVERABLE = "Unrecoverable" - UNCLASSIFIED = "Unclassified" - UNCONVERGED = "Unconverged" - UNCONVERGED_PARAMETERS = "Unconverged_parameters" - INITIALIZATION = "Initialization" - RESTART = "Restart" - POSTPROCESS = "Postprocess" - WALLTIME = "Walltime" - - -class AbiAtomateError(Exception): - """Base class for the abinit errors in atomate.""" - - ERROR_CODE = ErrorCode.ERROR - - def __init__(self, msg: str) -> None: - super().__init__(msg) - self.msg = msg - - def to_dict(self) -> dict: - """Create dictionary representation of the error.""" - return {"error_code": self.ERROR_CODE, "msg": self.msg} - - 
-class AbinitRuntimeError(AbiAtomateError): - """Exception raised for errors during Abinit calculation. - - Contains the information about the errors and warning extracted from - the output files. - Initialized with a job, uses it to prepare a suitable error message. - """ - - ERROR_CODE = ErrorCode.ERROR - - def __init__( - self, - job: Job | Flow | None = None, - msg: str | None = None, - num_errors: int | None = None, - num_warnings: int | None = None, - errors: list | None = None, - warnings: list | None = None, - ) -> None: - """Construct AbinitRuntimeError object. - - If the job has a report all the information will be extracted from it, - otherwise the arguments will be used. - - Parameters - ---------- - job - the atomate2 job - msg - the error message - num_errors - number of errors in the abinit execution. Only used if job doesn't - have a report. - num_warnings - number of warning in the abinit execution. Only used if job doesn't - have a report. - errors - list of errors in the abinit execution. Only used if job doesn't - have a report. - warnings - list of warnings in the abinit execution. Only used if job doesn't - have a report. - """ - # This can handle both the cases of DECODE_MONTY=True and False - # (Since it has a from_dict method). 
- super().__init__(msg) - self.job = job - if ( - self.job is not None - and hasattr(self.job, "report") - and self.job.report is not None - ): - report = self.job.report - self.num_errors = report.num_errors - self.num_warnings = report.num_warnings - self.errors = report.errors - self.warnings = report.warnings - else: - self.num_errors = num_errors - self.num_warnings = num_warnings - self.errors = errors - self.warnings = warnings - self.msg = msg - - def to_dict(self) -> dict: - """Create dictionary representation of the error.""" - d = {"num_errors": self.num_errors, "num_warnings": self.num_warnings} - if self.errors: - errors = [error.as_dict() for error in self.errors] - d["errors"] = errors - if self.warnings: - warnings = [warning.as_dict() for warning in self.warnings] - d["warnings"] = warnings - if self.msg: - d["error_message"] = self.msg - - d["error_code"] = self.ERROR_CODE - d["@module"] = type(self).__module__ - d["@class"] = type(self).__name__ - - return d - - def as_dict(self) -> dict: - """Create dictionary representation of the error.""" - return self.to_dict() - - @classmethod - def from_dict(cls, d: dict) -> AbinitRuntimeError: - """Create instance of the error from its dictionary representation.""" - dec = MontyDecoder() - warnings = ( - [dec.process_decoded(w) for w in d["warnings"]] if "warnings" in d else [] - ) - errors = [dec.process_decoded(w) for w in d["errors"]] if "errors" in d else [] - msg = d.get("error_message") - - return cls( - warnings=warnings, - errors=errors, - num_errors=d["num_errors"], - num_warnings=d["num_warnings"], - msg=msg, - ) - - -class UnconvergedError(AbinitRuntimeError): - """Exception raised when a calculation didn't converge after the max restarts.""" - - ERROR_CODE = ErrorCode.UNCONVERGED - - def __init__( - self, - job: Job | Flow | None = None, - msg: str | None = None, - num_errors: int | None = None, - num_warnings: int | None = None, - errors: list | None = None, - warnings: list | None = None, - 
abinit_input: AbinitInput | None = None, - restart_info: RestartInfo | None = None, - history: JobHistory | None = None, - ) -> None: - """Construct UnconvergedError object. - - If the job has a report all the information will be extracted from it, - otherwise the arguments will be used. - It contains information that can be used to further restart the job. - - Parameters - ---------- - job - the atomate2 job - msg - the error message - num_errors - number of errors in the abinit execution. Only used if job doesn't - have a report. - num_warnings - number of warning in the abinit execution. Only used if job doesn't - have a report. - errors - list of errors in the abinit execution. Only used if job doesn't - have a report. - warnings - list of warnings in the abinit execution. Only used if job doesn't - have a report. - abinit_input - the last AbinitInput used. - restart_info - the RestartInfo required to restart the job. - history - The history of the job. - """ - super().__init__(job, msg, num_errors, num_warnings, errors, warnings) - self.abinit_input = abinit_input - self.restart_info = restart_info - self.history = history - - def to_dict(self) -> dict: - """Create dictionary representation of the error.""" - d = super().to_dict() - d["abinit_input"] = self.abinit_input.as_dict() if self.abinit_input else None - d["restart_info"] = self.restart_info.as_dict() if self.restart_info else None - d["history"] = self.history.as_dict() if self.history else None - d["@module"] = type(self).__module__ - d["@class"] = type(self).__name__ - return d - - @classmethod - def from_dict(cls, d: dict) -> UnconvergedError: - """Create instance of the error from its dictionary representation.""" - dec = MontyDecoder() - warnings = ( - [dec.process_decoded(w) for w in d["warnings"]] if "warnings" in d else [] - ) - errors = [dec.process_decoded(w) for w in d["errors"]] if "errors" in d else [] - if "abinit_input" in d and d["abinit_input"] is not None: - abinit_input = 
dec.process_decoded(d["abinit_input"]) - else: - abinit_input = None - if "restart_info" in d and d["restart_info"] is not None: - restart_info = dec.process_decoded(d["restart_info"]) - else: - restart_info = None - if "history" in d and d["history"] is not None: - history = dec.process_decoded(d["history"]) - else: - history = None - return cls( - warnings=warnings, - errors=errors, - num_errors=d["num_errors"], - num_warnings=d["num_warnings"], - msg=d["error_message"], - abinit_input=abinit_input, - restart_info=restart_info, - history=history, - ) - - -class WalltimeError(AbiAtomateError): - """Exception raised when the calculation didn't complete in time.""" - - ERROR_CODE = ErrorCode.WALLTIME - - -class InitializationError(AbiAtomateError): - """Exception raised if errors are present during the initialization of the job.""" - - ERROR_CODE = ErrorCode.INITIALIZATION - - -class RestartError(InitializationError): - """Exception raised if errors show up during the set up of the restart.""" - - ERROR_CODE = ErrorCode.RESTART - - -class PostProcessError(AbiAtomateError): - """Exception raised if problems are encountered during the post processing.""" - - ERROR_CODE = ErrorCode.POSTPROCESS - - -class RestartInfo(MSONable): - """Object that contains the information about the restart of a job.""" - - def __init__(self, previous_dir: Path | str, num_restarts: int = 0) -> None: - self.previous_dir = previous_dir - # self.reset = reset - self.num_restarts = num_restarts - - def as_dict(self) -> dict: - """Create dictionary representation of the error.""" - return { - "previous_dir": self.previous_dir, - # "reset": self.reset, - "num_restarts": self.num_restarts, - "@module": type(self).__module__, - "@class": type(self).__name__, - } - - @classmethod - def from_dict(cls, d: dict) -> RestartInfo: - """Create instance of the error from its dictionary representation.""" - return cls( - previous_dir=d["previous_dir"], - # reset=d["reset"], - num_restarts=d["num_restarts"], 
- ) - - @property - def prev_outdir(self) -> Directory: - """Get the Directory pointing to the output directory of the previous step.""" - return Directory(os.path.join(self.previous_dir, OUTDIR_NAME)) - - @property - def prev_indir(self) -> Directory: - """Get the Directory pointing to the input directory of the previous step.""" - return Directory(os.path.join(self.previous_dir, INDIR_NAME)) - - -def get_final_structure(dir_name: Path | str) -> Structure: - """Get the final/last structure of a calculation in a given directory. - - This functions tries to get the structure: - 1. from the output file of abinit (run.abo). - 2. from the gsr file of abinit (out_GSR.nc). - """ - gsr_path = Directory(os.path.join(dir_name, OUTDIR_NAME)).has_abiext("GSR") - if gsr_path: - # Open the GSR file. - try: - gsr_file = GsrFile(gsr_path) - except Exception: - logging.exception("Exception occurred") - else: - return gsr_file.structure - - ddb_path = Directory(os.path.join(dir_name, OUTDIR_NAME)).has_abiext("DDB") - if ddb_path: - # Open the GSR file. - try: - ddb_file = DdbFile(ddb_path) - except Exception: - logging.exception("Exception occurred") - else: - return ddb_file.structure - - out_path = File(os.path.join(dir_name, OUTPUT_FILE_NAME)) - if out_path.exists: - try: - ab_out = AbinitOutputFile.from_file(out_path.path) - except Exception: - logging.exception("Exception occurred") - else: - return ab_out.final_structure - - raise RuntimeError("Could not get final structure.") - - -def get_event_report(ofile: File, mpiabort_file: File) -> EventReport | None: - """Get report from abinit calculation. - - This analyzes the main output file for possible Errors or Warnings. - It will check the presence of an MPIABORTFILE if not output file is found. - - Parameters - ---------- - ofile : File - Output file to be parsed. Should be either the standard abinit - output or the log file (stdout). 
- mpiabort_file : File - - Returns - ------- - EventReport - Report of the abinit calculation or None if no output file exists. - """ - parser = events.EventsParser() - - if not ofile.exists: - if not mpiabort_file.exists: - return None - # ABINIT abort file without log! - - return parser.parse(mpiabort_file.path) - - try: - report = parser.parse(ofile.path) - - # Add events found in the ABI_MPIABORTFILE. - if mpiabort_file.exists: - logger.critical("Found ABI_MPIABORTFILE!") - abort_report = parser.parse(mpiabort_file.path) - if len(abort_report) == 0: - logger.warning("ABI_MPIABORTFILE but empty") - else: - if len(abort_report) != 1: - logger.critical("Found more than one event in ABI_MPIABORTFILE") - - # Add it to the initial report only if it differs - # from the last one found in the main log file. - last_abort_event = abort_report[-1] - if report and last_abort_event != report[-1]: - report.append(last_abort_event) - else: - report.append(last_abort_event) - except Exception as exc: - # Return a report with an error entry with info on the exception. 
- logger.critical(f"{ofile}: Exception while parsing ABINIT events:\n {exc!s}") - return parser.report_exception(ofile.path, exc) - else: - return report From 53cf8b54a69242b0df99bf1851a64a5ddb709a19 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Mon, 29 Apr 2024 12:13:31 +0200 Subject: [PATCH 05/21] bse added --- src/atomate2/abinit/files.py | 141 +++++++++ src/atomate2/abinit/flows/gw.py | 41 ++- src/atomate2/abinit/jobs/gw.py | 8 +- src/atomate2/abinit/run.py | 65 +++++ src/atomate2/abinit/sets/bse.py | 24 -- src/atomate2/abinit/utils/common.py | 432 ++++++++++++++++++++++++++++ 6 files changed, 659 insertions(+), 52 deletions(-) create mode 100644 src/atomate2/abinit/files.py create mode 100644 src/atomate2/abinit/run.py delete mode 100644 src/atomate2/abinit/sets/bse.py create mode 100644 src/atomate2/abinit/utils/common.py diff --git a/src/atomate2/abinit/files.py b/src/atomate2/abinit/files.py new file mode 100644 index 0000000000..af05b9f94d --- /dev/null +++ b/src/atomate2/abinit/files.py @@ -0,0 +1,141 @@ +"""Functions for manipulating Abinit files.""" + +from __future__ import annotations + +import logging +import os +from typing import TYPE_CHECKING + +from abipy.flowtk.utils import abi_extensions +from monty.serialization import loadfn + +from atomate2.abinit.utils.common import INDIR_NAME +from atomate2.utils.file_client import FileClient, auto_fileclient + +if TYPE_CHECKING: + from collections.abc import Iterable + from pathlib import Path + + from abipy.abio.inputs import AbinitInput + from pymatgen.core.structure import Structure + + from atomate2.abinit.sets.base import AbinitInputGenerator + + +logger = logging.getLogger(__name__) + + +ALL_ABIEXTS = abi_extensions() + + +def fname2ext(filepath: Path | str) -> None | str: + """Get the abinit extension of a given filename. + + This will return None if no extension is found. 
+ """ + filename = os.path.basename(filepath) + if "_" not in filename: + return None + ext = filename.split("_")[-1].replace(".nc", "") + if ext not in ALL_ABIEXTS: + return None + return ext + + +@auto_fileclient +def out_to_in( + out_files: Iterable[tuple[str, str]], + src_host: str | None = None, + indir: Path | str = INDIR_NAME, + file_client: FileClient | None = None, + link_files: bool = True, +) -> None: + """ + Copy or link abinit output files to the Abinit input directory. + + Parameters + ---------- + out_files : list of tuples + The list of (abinit output filepath, abinit input filename) to be copied + or linked. + src_host : str or None + The source hostname used to specify a remote filesystem. Can be given as + either "username@remote_host" or just "remote_host" in which case the username + will be inferred from the current user. If ``None``, the local filesystem will + be used as the source. + indir : Path or str + The input directory for Abinit input files. + file_client : .FileClient + A file client to use for performing file operations. + link_files : bool + Whether to link the files instead of copying them. + """ + dest_dir = file_client.abspath(indir, host=None) + + for out_filepath, in_file in out_files: + src_file = file_client.abspath(out_filepath, host=src_host) + dest_file = os.path.join(dest_dir, in_file) + if link_files and src_host is None: + file_client.link(src_file, dest_file) + else: + file_client.copy(src_file, dest_file, src_host=src_host) + + +def load_abinit_input( + dirpath: Path | str, fname: str = "abinit_input.json" +) -> AbinitInput: + """Load the AbinitInput object from a given directory. + + Parameters + ---------- + dirpath + Directory to load the AbinitInput from. + fname + Name of the json file containing the AbinitInput. + + Returns + ------- + AbinitInput + The AbinitInput object. 
+ """ + abinit_input_file = os.path.join(dirpath, f"{fname}") + if not os.path.exists(abinit_input_file): + raise NotImplementedError( + f"Cannot load AbinitInput from directory without {fname} file." + ) + + return loadfn(abinit_input_file) + + +def write_abinit_input_set( + structure: Structure, + input_set_generator: AbinitInputGenerator, + prev_outputs: str | Path | list[str] | None = None, + restart_from: str | Path | list[str] | None = None, + directory: str | Path = ".", +) -> None: + """Write the abinit inputs for a given structure using a given generator. + + Parameters + ---------- + structure + The structure for which the abinit inputs have to be written. + input_set_generator + The input generator used to write the abinit inputs. + prev_outputs + The list of previous directories needed for the calculation. + restart_from + The previous directory of the same calculation (in case of a restart). + Note that this should be provided as a list of one directory. + directory + Directory in which to write the abinit inputs. 
+ """ + ais = input_set_generator.get_input_set( + structure=structure, + restart_from=restart_from, + prev_outputs=prev_outputs, + ) + if not ais.validate(): + raise RuntimeError("AbinitInputSet is not valid.") + + ais.write_input(directory=directory, make_dir=True, overwrite=False) diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index bfd6b19d17..484a0bb39e 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -9,7 +9,7 @@ from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker -from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker, BSENonSCFMaker, BSEMaker +from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker, BSEMaker from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings from pymatgen.io.abinit.abiobjects import KSampling @@ -193,7 +193,7 @@ class BSEmdfMaker(Maker): bs_loband: int = 1 mbpt_sciss: float = 0.0 kppa: int = 100 - shiftk: tuple = (0.0, 0.0, 0.0) + shiftk: tuple = (0.11, 0.12, 0.13) def make( self, @@ -205,30 +205,23 @@ def make( structure, restart_from=restart_from ) - m_scf_job = update_user_kpoints_settings( - flow=scf_job, - kpoints_updates=KSampling.automatic_density( - structure=structure, - kppa=self.kppa, - shifts=self.shiftk) - ) nscf_job = self.nscf_maker.make( prev_outputs=[scf_job.output.dir_name], mode="uniform", ) - njob=update_user_abinit_settings( - flow=nscf_job, - abinit_updates={ - 'rfelfd': 1, - 'rfdir': (1, 1, 1), - 'nqpt': 1, - 'qpt': (0.0, 0.0, 0.0), - 'kptopt': 2, - 'iscf': -2, - 'tolwfr': 1e-22} - ) - m_nscf_job = update_user_kpoints_settings( - flow=njob, + #njob=update_user_abinit_settings( + # flow=nscf_job, + # abinit_updates={ + # 'rfelfd': 1, + # 'rfdir': (1, 1, 1), + # 'nqpt': 1, + # 'qpt': (0.0, 0.0, 0.0), + # 'kptopt': 2, + # 'iscf': -2, + # 'tolwfr': 1e-22} + #) + nscf_job = 
update_user_kpoints_settings( + flow=nscf_job, kpoints_updates=KSampling.automatic_density( structure=structure, kppa=self.kppa, @@ -239,11 +232,11 @@ def make( prev_outputs=[nscf_job.output.dir_name, "/home/ucl/modl/tbiswas/scratch/abinit_run/sigma"], ) - m_bse_job=update_factory_kwargs( + bse_job=update_factory_kwargs( flow=bse_job, factory_updates={ 'bs_loband': self.bs_loband, 'bs_nband': self.bs_nband, 'mdf_epsinf': self.mdf_epsinf, 'mbpt_sciss': self.mbpt_sciss} ) - return Flow([m_scf_job, m_nscf_job, m_bse_job], output=m_bse_job.output, name=self.name) + return Flow([scf_job, nscf_job, bse_job], output=nscf_job.output, name=self.name) diff --git a/src/atomate2/abinit/jobs/gw.py b/src/atomate2/abinit/jobs/gw.py index 56310cd689..209371973b 100644 --- a/src/atomate2/abinit/jobs/gw.py +++ b/src/atomate2/abinit/jobs/gw.py @@ -6,7 +6,7 @@ from dataclasses import dataclass, field from atomate2.abinit.jobs.base import BaseAbinitMaker -from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator, BSENonSCFSetGenerator +from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator, BSEmdfSetGenerator logger = logging.getLogger(__name__) @@ -45,10 +45,10 @@ class SigmaMaker(BaseAbinitMaker): # CRITICAL_EVENTS: ClassVar[Sequence[str]] = ("ScfConvergenceWarning",) @dataclass -class BSENonSCFMaker(BaseAbinitMaker): +class BSEMaker(BaseAbinitMaker): """Maker to create non SCF calculations.""" calc_type: str = "bse_nscf" - name: str = "Sigma calculation" + name: str = "BSE calculation" - input_set_generator: BSENonSCFSetGenerator = field(default_factory=BSENonSCFSetGenerator) + input_set_generator: BSEmdfSetGenerator = field(default_factory=BSEmdfSetGenerator) diff --git a/src/atomate2/abinit/run.py b/src/atomate2/abinit/run.py new file mode 100644 index 0000000000..4cb5384d98 --- /dev/null +++ b/src/atomate2/abinit/run.py @@ -0,0 +1,65 @@ +"""Functions to run ABINIT.""" + +from __future__ import annotations + +import logging +import 
subprocess +import time + +from abipy.flowtk.qutils import time2slurm + +from atomate2 import SETTINGS +from atomate2.abinit.utils.common import ( + INPUT_FILE_NAME, + LOG_FILE_NAME, + STDERR_FILE_NAME, +) + +__all__ = ["run_abinit"] + + +SLEEP_TIME_STEP = 30 + + +logger = logging.getLogger(__name__) + + +def run_abinit( + abinit_cmd: str = None, + mpirun_cmd: str = None, + wall_time: int = None, + start_time: float = None, +) -> None: + """Run ABINIT.""" + abinit_cmd = abinit_cmd or SETTINGS.ABINIT_CMD + mpirun_cmd = mpirun_cmd or SETTINGS.ABINIT_MPIRUN_CMD + start_time = start_time or time.time() + command = [mpirun_cmd, abinit_cmd] if mpirun_cmd is not None else [abinit_cmd] + + max_end_time = 0.0 + if wall_time is not None: + abinit_timelimit = wall_time + if abinit_timelimit > 480: + # TODO: allow tuning this timelimit buffer for abinit, + # e.g. using a config variable or possibly per job + abinit_timelimit -= 240 + command.extend(["--timelimit", time2slurm(abinit_timelimit)]) + max_end_time = start_time + wall_time + + command.append(INPUT_FILE_NAME) + + with open(LOG_FILE_NAME, "w") as stdout, open(STDERR_FILE_NAME, "w") as stderr: + process = subprocess.Popen(command, stdout=stdout, stderr=stderr) # noqa: S603 + + if wall_time is not None: + while True: + time.sleep(SLEEP_TIME_STEP) + if process.poll() is not None: + break + current_time = time.time() + remaining_time = max_end_time - current_time + if remaining_time < 5 * SLEEP_TIME_STEP: + process.terminate() + + process.wait() + return diff --git a/src/atomate2/abinit/sets/bse.py b/src/atomate2/abinit/sets/bse.py deleted file mode 100644 index 1ee5e10f8d..0000000000 --- a/src/atomate2/abinit/sets/bse.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding: utf-8 -"""Factory functions for Abinit input files """ -from __future__ import annotations - -import numpy as np -import pymatgen.io.abinit.abiobjects as aobj -from abipy.abio.inputs import AbinitInput - -def bse_with_mdf_from_inputs(nscf_input, sigma_input, 
bs_loband, bs_nband, - mdf_epsinf, mbpt_sciss, exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized", - smearing="fermi_dirac:0.1 eV") -> AbinitInput: - """Return a sigma input.""" - - bse_input = nscf_input.deepcopy() - bse_input.pop_irdvars() - - exc_ham = aobj.ExcHamiltonian(bs_loband, bs_nband, mbpt_sciss, coulomb_mode="model_df", ecuteps=sigma_input["ecuteps"], - spin_mode=spin_mode, mdf_epsinf=mdf_epsinf, exc_type=exc_type, algo=bs_algo, - bs_freq_mesh=None, with_lf=True, zcut=None) - - bse_input.set_vars(exc_ham.to_abivars()) - # TODO: Cannot use istwfk != 1. - bse_input.set_vars(istwfk="*1") - return bse_input diff --git a/src/atomate2/abinit/utils/common.py b/src/atomate2/abinit/utils/common.py new file mode 100644 index 0000000000..3fd19cb55b --- /dev/null +++ b/src/atomate2/abinit/utils/common.py @@ -0,0 +1,432 @@ +"""Module with common file names and classes used for Abinit flows.""" + +from __future__ import annotations + +import logging +import os +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pathlib import Path + + from abipy.abio.inputs import AbinitInput + from abipy.core.structure import Structure + from abipy.flowtk.events import EventReport + from jobflow import Flow, Job + + from atomate2.abinit.utils.history import JobHistory + +from abipy.abio.outputs import AbinitOutputFile +from abipy.dfpt.ddb import DdbFile +from abipy.electrons.gsr import GsrFile +from abipy.flowtk import events +from abipy.flowtk.utils import Directory, File +from monty.json import MSONable +from monty.serialization import MontyDecoder + +TMPDIR_NAME = "tmpdata" +OUTDIR_NAME = "outdata" +INDIR_NAME = "indata" +TMPDATAFILE_PREFIX = "tmp" +OUTDATAFILE_PREFIX = "out" +INDATAFILE_PREFIX = "in" +TMPDATA_PREFIX = os.path.join(TMPDIR_NAME, TMPDATAFILE_PREFIX) +OUTDATA_PREFIX = os.path.join(OUTDIR_NAME, OUTDATAFILE_PREFIX) +INDATA_PREFIX = os.path.join(INDIR_NAME, INDATAFILE_PREFIX) +STDERR_FILE_NAME = "run.err" +LOG_FILE_NAME = "run.log" 
+OUTPUT_FILE_NAME = "run.abo" +OUTNC_FILE_NAME = "out_OUT.nc" +INPUT_FILE_NAME: str = "run.abi" +MPIABORTFILE = "__ABI_MPIABORTFILE__" +DUMMY_FILENAME = "__DUMMY__" +ELPHON_OUTPUT_FILE_NAME = "run.abo_elphon" +DDK_FILES_FILE_NAME = "ddk.files" +HISTORY_JSON = "history.json" + + +logger = logging.getLogger(__name__) + + +class ErrorCode: + """Error code to classify the errors.""" + + ERROR = "Error" + UNRECOVERABLE = "Unrecoverable" + UNCLASSIFIED = "Unclassified" + UNCONVERGED = "Unconverged" + UNCONVERGED_PARAMETERS = "Unconverged_parameters" + INITIALIZATION = "Initialization" + RESTART = "Restart" + POSTPROCESS = "Postprocess" + WALLTIME = "Walltime" + + +class AbiAtomateError(Exception): + """Base class for the abinit errors in atomate.""" + + ERROR_CODE = ErrorCode.ERROR + + def __init__(self, msg: str) -> None: + super().__init__(msg) + self.msg = msg + + def to_dict(self) -> dict: + """Create dictionary representation of the error.""" + return {"error_code": self.ERROR_CODE, "msg": self.msg} + + +class AbinitRuntimeError(AbiAtomateError): + """Exception raised for errors during Abinit calculation. + + Contains the information about the errors and warning extracted from + the output files. + Initialized with a job, uses it to prepare a suitable error message. + """ + + ERROR_CODE = ErrorCode.ERROR + + def __init__( + self, + job: Job | Flow | None = None, + msg: str | None = None, + num_errors: int | None = None, + num_warnings: int | None = None, + errors: list | None = None, + warnings: list | None = None, + ) -> None: + """Construct AbinitRuntimeError object. + + If the job has a report all the information will be extracted from it, + otherwise the arguments will be used. + + Parameters + ---------- + job + the atomate2 job + msg + the error message + num_errors + number of errors in the abinit execution. Only used if job doesn't + have a report. + num_warnings + number of warning in the abinit execution. Only used if job doesn't + have a report. 
+ errors + list of errors in the abinit execution. Only used if job doesn't + have a report. + warnings + list of warnings in the abinit execution. Only used if job doesn't + have a report. + """ + # This can handle both the cases of DECODE_MONTY=True and False + # (Since it has a from_dict method). + super().__init__(msg) + self.job = job + if ( + self.job is not None + and hasattr(self.job, "report") + and self.job.report is not None + ): + report = self.job.report + self.num_errors = report.num_errors + self.num_warnings = report.num_warnings + self.errors = report.errors + self.warnings = report.warnings + else: + self.num_errors = num_errors + self.num_warnings = num_warnings + self.errors = errors + self.warnings = warnings + self.msg = msg + + def to_dict(self) -> dict: + """Create dictionary representation of the error.""" + d = {"num_errors": self.num_errors, "num_warnings": self.num_warnings} + if self.errors: + errors = [error.as_dict() for error in self.errors] + d["errors"] = errors + if self.warnings: + warnings = [warning.as_dict() for warning in self.warnings] + d["warnings"] = warnings + if self.msg: + d["error_message"] = self.msg + + d["error_code"] = self.ERROR_CODE + d["@module"] = type(self).__module__ + d["@class"] = type(self).__name__ + + return d + + def as_dict(self) -> dict: + """Create dictionary representation of the error.""" + return self.to_dict() + + @classmethod + def from_dict(cls, d: dict) -> AbinitRuntimeError: + """Create instance of the error from its dictionary representation.""" + dec = MontyDecoder() + warnings = ( + [dec.process_decoded(w) for w in d["warnings"]] if "warnings" in d else [] + ) + errors = [dec.process_decoded(w) for w in d["errors"]] if "errors" in d else [] + msg = d.get("error_message") + + return cls( + warnings=warnings, + errors=errors, + num_errors=d["num_errors"], + num_warnings=d["num_warnings"], + msg=msg, + ) + + +class UnconvergedError(AbinitRuntimeError): + """Exception raised when a 
calculation didn't converge after the max restarts.""" + + ERROR_CODE = ErrorCode.UNCONVERGED + + def __init__( + self, + job: Job | Flow | None = None, + msg: str | None = None, + num_errors: int | None = None, + num_warnings: int | None = None, + errors: list | None = None, + warnings: list | None = None, + abinit_input: AbinitInput | None = None, + restart_info: RestartInfo | None = None, + history: JobHistory | None = None, + ) -> None: + """Construct UnconvergedError object. + + If the job has a report all the information will be extracted from it, + otherwise the arguments will be used. + It contains information that can be used to further restart the job. + + Parameters + ---------- + job + the atomate2 job + msg + the error message + num_errors + number of errors in the abinit execution. Only used if job doesn't + have a report. + num_warnings + number of warning in the abinit execution. Only used if job doesn't + have a report. + errors + list of errors in the abinit execution. Only used if job doesn't + have a report. + warnings + list of warnings in the abinit execution. Only used if job doesn't + have a report. + abinit_input + the last AbinitInput used. + restart_info + the RestartInfo required to restart the job. + history + The history of the job. 
+ """ + super().__init__(job, msg, num_errors, num_warnings, errors, warnings) + self.abinit_input = abinit_input + self.restart_info = restart_info + self.history = history + + def to_dict(self) -> dict: + """Create dictionary representation of the error.""" + d = super().to_dict() + d["abinit_input"] = self.abinit_input.as_dict() if self.abinit_input else None + d["restart_info"] = self.restart_info.as_dict() if self.restart_info else None + d["history"] = self.history.as_dict() if self.history else None + d["@module"] = type(self).__module__ + d["@class"] = type(self).__name__ + return d + + @classmethod + def from_dict(cls, d: dict) -> UnconvergedError: + """Create instance of the error from its dictionary representation.""" + dec = MontyDecoder() + warnings = ( + [dec.process_decoded(w) for w in d["warnings"]] if "warnings" in d else [] + ) + errors = [dec.process_decoded(w) for w in d["errors"]] if "errors" in d else [] + if "abinit_input" in d and d["abinit_input"] is not None: + abinit_input = dec.process_decoded(d["abinit_input"]) + else: + abinit_input = None + if "restart_info" in d and d["restart_info"] is not None: + restart_info = dec.process_decoded(d["restart_info"]) + else: + restart_info = None + if "history" in d and d["history"] is not None: + history = dec.process_decoded(d["history"]) + else: + history = None + return cls( + warnings=warnings, + errors=errors, + num_errors=d["num_errors"], + num_warnings=d["num_warnings"], + msg=d["error_message"], + abinit_input=abinit_input, + restart_info=restart_info, + history=history, + ) + + +class WalltimeError(AbiAtomateError): + """Exception raised when the calculation didn't complete in time.""" + + ERROR_CODE = ErrorCode.WALLTIME + + +class InitializationError(AbiAtomateError): + """Exception raised if errors are present during the initialization of the job.""" + + ERROR_CODE = ErrorCode.INITIALIZATION + + +class RestartError(InitializationError): + """Exception raised if errors show up during the 
set up of the restart.""" + + ERROR_CODE = ErrorCode.RESTART + + +class PostProcessError(AbiAtomateError): + """Exception raised if problems are encountered during the post processing.""" + + ERROR_CODE = ErrorCode.POSTPROCESS + + +class RestartInfo(MSONable): + """Object that contains the information about the restart of a job.""" + + def __init__(self, previous_dir: Path | str, num_restarts: int = 0) -> None: + self.previous_dir = previous_dir + # self.reset = reset + self.num_restarts = num_restarts + + def as_dict(self) -> dict: + """Create dictionary representation of the error.""" + return { + "previous_dir": self.previous_dir, + # "reset": self.reset, + "num_restarts": self.num_restarts, + "@module": type(self).__module__, + "@class": type(self).__name__, + } + + @classmethod + def from_dict(cls, d: dict) -> RestartInfo: + """Create instance of the error from its dictionary representation.""" + return cls( + previous_dir=d["previous_dir"], + # reset=d["reset"], + num_restarts=d["num_restarts"], + ) + + @property + def prev_outdir(self) -> Directory: + """Get the Directory pointing to the output directory of the previous step.""" + return Directory(os.path.join(self.previous_dir, OUTDIR_NAME)) + + @property + def prev_indir(self) -> Directory: + """Get the Directory pointing to the input directory of the previous step.""" + return Directory(os.path.join(self.previous_dir, INDIR_NAME)) + + +def get_final_structure(dir_name: Path | str) -> Structure: + """Get the final/last structure of a calculation in a given directory. + + This functions tries to get the structure: + 1. from the output file of abinit (run.abo). + 2. from the gsr file of abinit (out_GSR.nc). + """ + gsr_path = Directory(os.path.join(dir_name, OUTDIR_NAME)).has_abiext("GSR") + if gsr_path: + # Open the GSR file. 
+ try: + gsr_file = GsrFile(gsr_path) + except Exception: + logging.exception("Exception occurred") + else: + return gsr_file.structure + + ddb_path = Directory(os.path.join(dir_name, OUTDIR_NAME)).has_abiext("DDB") + if ddb_path: + # Open the GSR file. + try: + ddb_file = DdbFile(ddb_path) + except Exception: + logging.exception("Exception occurred") + else: + return ddb_file.structure + + out_path = File(os.path.join(dir_name, OUTPUT_FILE_NAME)) + if out_path.exists: + try: + ab_out = AbinitOutputFile.from_file(out_path.path) + except Exception: + logging.exception("Exception occurred") + else: + return ab_out.final_structure + + raise RuntimeError("Could not get final structure.") + + +def get_event_report(ofile: File, mpiabort_file: File) -> EventReport | None: + """Get report from abinit calculation. + + This analyzes the main output file for possible Errors or Warnings. + It will check the presence of an MPIABORTFILE if not output file is found. + + Parameters + ---------- + ofile : File + Output file to be parsed. Should be either the standard abinit + output or the log file (stdout). + mpiabort_file : File + + Returns + ------- + EventReport + Report of the abinit calculation or None if no output file exists. + """ + parser = events.EventsParser() + + if not ofile.exists: + if not mpiabort_file.exists: + return None + # ABINIT abort file without log! + + return parser.parse(mpiabort_file.path) + + try: + report = parser.parse(ofile.path) + + # Add events found in the ABI_MPIABORTFILE. + if mpiabort_file.exists: + logger.critical("Found ABI_MPIABORTFILE!") + abort_report = parser.parse(mpiabort_file.path) + if len(abort_report) == 0: + logger.warning("ABI_MPIABORTFILE but empty") + else: + if len(abort_report) != 1: + logger.critical("Found more than one event in ABI_MPIABORTFILE") + + # Add it to the initial report only if it differs + # from the last one found in the main log file. 
+ last_abort_event = abort_report[-1] + if report and last_abort_event != report[-1]: + report.append(last_abort_event) + else: + report.append(last_abort_event) + except Exception as exc: + # Return a report with an error entry with info on the exception. + logger.critical(f"{ofile}: Exception while parsing ABINIT events:\n {exc!s}") + return parser.report_exception(ofile.path, exc) + else: + return report From f73862d3e70f8f51b6a3dd61fba823900ca4f1e3 Mon Sep 17 00:00:00 2001 From: "Tathagata Biswas (tathagata.biswas@uclouvain.be)" Date: Wed, 26 Jun 2024 13:13:22 +0200 Subject: [PATCH 06/21] BSE workflows added --- src/atomate2/abinit/flows/bse.py | 240 +++++++++++++++++ src/atomate2/abinit/flows/gw.py | 247 ++++++++---------- src/atomate2/abinit/jobs/base.py | 1 - src/atomate2/abinit/jobs/bse.py | 119 +++++++++ src/atomate2/abinit/jobs/core.py | 72 +++-- src/atomate2/abinit/jobs/gw.py | 15 +- src/atomate2/abinit/schemas/calculation.py | 119 +++++---- src/atomate2/abinit/schemas/task.py | 47 +++- src/atomate2/abinit/sets/base.py | 1 - src/atomate2/abinit/sets/bse.py | 85 ++++++ src/atomate2/abinit/sets/factories.py | 18 +- src/atomate2/abinit/sets/gw.py | 93 ------- src/atomate2/settings.py | 2 +- .../outputs/transformations.json.gz | Bin 0 -> 601 bytes .../outputs/transformations.json.gz | Bin 0 -> 601 bytes .../outputs/transformations.json.gz | Bin 0 -> 602 bytes .../outputs/transformations.json.gz | Bin 0 -> 609 bytes .../outputs/transformations.json.gz | Bin 0 -> 604 bytes .../outputs/transformations.json.gz | Bin 0 -> 615 bytes 19 files changed, 726 insertions(+), 333 deletions(-) create mode 100644 src/atomate2/abinit/flows/bse.py create mode 100644 src/atomate2/abinit/jobs/bse.py create mode 100644 src/atomate2/abinit/sets/bse.py create mode 100644 tests/test_data/vasp/Si_elastic/elastic_relax_1_6/outputs/transformations.json.gz create mode 100644 tests/test_data/vasp/Si_elastic/elastic_relax_2_6/outputs/transformations.json.gz create mode 100644 
tests/test_data/vasp/Si_elastic/elastic_relax_3_6/outputs/transformations.json.gz create mode 100644 tests/test_data/vasp/Si_elastic/elastic_relax_4_6/outputs/transformations.json.gz create mode 100644 tests/test_data/vasp/Si_elastic/elastic_relax_5_6/outputs/transformations.json.gz create mode 100644 tests/test_data/vasp/Si_elastic/elastic_relax_6_6/outputs/transformations.json.gz diff --git a/src/atomate2/abinit/flows/bse.py b/src/atomate2/abinit/flows/bse.py new file mode 100644 index 0000000000..e4f04b0f41 --- /dev/null +++ b/src/atomate2/abinit/flows/bse.py @@ -0,0 +1,240 @@ +"""Core abinit flow makers.""" + +import numpy as np +from dataclasses import dataclass, field +from pathlib import Path +from typing import List, Optional, Union + +from jobflow import Flow, Maker, Response, job +from pymatgen.core.structure import Structure + +from atomate2.abinit.jobs.base import BaseAbinitMaker +from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker +from atomate2.abinit.jobs.bse import BSEmdfMaker, BSEscrMaker +from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings +from pymatgen.io.abinit.abiobjects import KSampling +from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary + + +@dataclass +class BSEFlowMaker(Maker): + + name: str = "BSE mdf calculation" + nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) + bse_maker: BaseAbinitMaker = field(default_factory=BSEmdfMaker) + kppa: int = 1000 + shifts: tuple = (0.11, 0.22, 0.33) + mbpt_sciss: float = 0.0 + mdf_epsinf: float = None + enwinbse: float = 3.0 + + def make( + self, + structure: Structure, + prev_outputs: Union[str, Path, list[str]], + ): + + nscf_job = self.nscf_maker.make( + prev_outputs=prev_outputs[0], + mode="uniform", + ) + + nscf_job = update_user_kpoints_settings( + flow=nscf_job, + kpoints_updates=KSampling.automatic_density( + structure=structure, + kppa=self.kppa, + 
shifts=self.shifts, + chksymbreak=0) + ) + nscf_job = update_user_abinit_settings( + flow=nscf_job, + abinit_updates={"nstep": 50} + ) + bse_prepjob = self.find_bse_params( + nscf_job.output.output.bandlims, + self.enwinbse, + nscf_job.output.output.direct_gap + ) + + if len(prev_outputs)==2: + prev_outputs=[nscf_job.output.dir_name, prev_outputs[1]] + else: + prev_outputs=[nscf_job.output.dir_name] + + bse_job = self.bse_maker.make( + prev_outputs=prev_outputs, + mbpt_sciss=self.mbpt_sciss, + bs_loband=bse_prepjob.output["bs_loband"], + nband=bse_prepjob.output["nband"], + mdf_epsinf=self.mdf_epsinf, + bs_freq_mesh=bse_prepjob.output["bs_freq_mesh"] + ) + jobs=[nscf_job, bse_prepjob, bse_job] + + return Flow(jobs, output=bse_job.output, name=self.name) + + @job(name="Find BSE parameters") + def find_bse_params(self, bandlims, enwinbse, directgap): + vband=[] + cband=[] + for bandlim in bandlims: + spin=bandlim[0] + iband=bandlim[1]+1 + enemin=bandlim[2] + enemax=bandlim[3] + if enemin>0: + if enemin= nband in screening and sigma + # pass + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None, + ): + + static_job = self.scf_maker.make( + structure, + restart_from=restart_from + ) + convergence = ConvergenceMaker( + maker=self.bse_maker, + epsilon=self.epsilon, + criterion_name=self.criterion_name, + convergence_field=self.convergence_field, + convergence_steps=self.convergence_steps, + ) + + bse = convergence.make(structure, prev_outputs=[static_job.output.dir_name]) + + return Flow([static_job, bse], bse.output, name=self.name) + +@dataclass +class BSEMultiShiftedMaker(Maker): + """ + Maker to generate convergence of G0W0 calculations. + + Parameters + ---------- + name : str + A name for the job + criterion_name: str + A name for the convergence criterion. 
Must be in the run results + epsilon: float + A difference in criterion value for subsequent runs + convergence_field: str + An input parameter that changes to achieve convergence + convergence_steps: list | tuple + An iterable of the possible values for the convergence field. + If the iterable is depleted and the convergence is not reached, + that the job is failed + """ + cards: List[str] = field(default_factory=list) + + def __post_init__(self): + if not self.cards: + self.cards = self.create_cards() + + def create_cards(self): + return ['King', 'Queen'] + + + + name: str = "BSE Mutiple Shifted Grid" + scf_maker: StaticMaker = field(default_factory=StaticMaker) + bse_maker: Maker = field(default_factory=Maker) + shiftks: list = None + + def make( + self, + structure: Structure, + restart_from: Optional[Union[str, Path]] = None, + ): + + jobs=[] + spectra=[] + static_job = self.scf_maker.make( + structure, + restart_from=restart_from + ) + jobs.append(static_job) + for idx, shifts in enumerate(self.shiftks): + bse_job = self.bse_maker.make( + structure=structure, + prev_outputs=[static_job.output.dir_name], + ) + bse_job = update_user_abinit_settings( + flow=bse_job, + abinit_updates={ + "shiftk": shifts} + ) + bse_job.append_name(append_str=f" {idx}") + jobs.append(bse_job) + spectra.append( + bse_job.output.output.emacro, + ) + avg_job=self.calc_average_spectra(spectra) + jobs.append(avg_job) + return Flow(jobs, output=avg_job.output, name=self.name) + + @job(name="Calculate average spectra") + def calc_average_spectra(self, spectra): + for idx, spectrum in enumerate(spectra): + if idx==0: + mesh0=spectrum[0] + teps2=spectrum[1] + else: + mesh=spectrum[0] + int_eps2=np.interp(mesh0, mesh, spectrum[1]) + teps2=np.add(teps2, int_eps2) + teps2=np.array(teps2)*(1./len(spectra)) + conv_res=[mesh0, teps2] + return Response(output=conv_res) + + diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index 484a0bb39e..0d7e1f3e93 100644 --- 
a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -1,58 +1,21 @@ """Core abinit flow makers.""" +import numpy as np from dataclasses import dataclass, field from pathlib import Path from typing import List, Optional, Union -from jobflow import Flow, Maker +from jobflow import Flow, Maker, Response, job from pymatgen.core.structure import Structure from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker -from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker, BSEMaker +from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings from pymatgen.io.abinit.abiobjects import KSampling +from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary -@dataclass -class GWbandsMaker(Maker): - """ - Maker to generate bands for GW caculation. - """ - - name: str = "Bands calculation" - scf_maker: StaticMaker = field(default_factory=StaticMaker) - nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) - - def make( - self, - structure: Structure, - restart_from: Optional[Union[str, Path]] = None, - ): - """ - Create a G0W0 flow. - - Parameters - ---------- - structure : Structure - A pymatgen structure object. - restart_from : str or Path or None - One previous directory to restart from. - - Returns - ------- - Flow - A G0W0 flow. 
- """ - - scf_job = self.scf_maker.make( - structure, - restart_from=restart_from) - nscf_job = self.nscf_maker.make( - prev_outputs=[scf_job.output.dir_name], - mode="uniform", - ) - return Flow([scf_job, nscf_job], output=nscf_job.output, name=self.name) @dataclass class G0W0Maker(Maker): @@ -83,15 +46,18 @@ class G0W0Maker(Maker): """ name: str = "G0W0 calculation" + gw_qprange: int = 0 + joblist: List = field(default_factory=lambda: ["scf", "nscf", "scr", "sigma"]) + scf_maker: StaticMaker = field(default_factory=StaticMaker) + nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) scr_maker: BaseAbinitMaker = field(default_factory=ScreeningMaker) sigma_maker: BaseAbinitMaker = field(default_factory=SigmaMaker) - gw_qprange: int = 0 def make( self, structure: Structure, - prev_outputs: str = None, - restart_from: Optional[Union[str, Path]] = None + prev_outputs: Optional[list] = [], + restart_from: Optional[Union[str, Path]] = None, ): """ Create a G0W0 flow. @@ -101,8 +67,7 @@ def make( structure : Structure A pymatgen structure object. prev_outputs : str - One previous directory where ncsf - calculation were performed. + List of previous directory where scf, ncsf and scr calculations were done. restart_from : str or Path or None One previous directory to restart from. @@ -111,22 +76,63 @@ def make( Flow A G0W0 flow. 
""" - - scr_job = self.scr_maker.make( - prev_outputs=prev_outputs, - ) - m_scr_job = update_user_abinit_settings( - flow=scr_job, - abinit_updates={"iomode": 3} - ) - sigma_job = self.sigma_maker.make( - prev_outputs=[prev_outputs, scr_job.output.dir_name], - ) - m_sigma_job = update_user_abinit_settings( - flow=sigma_job, - abinit_updates={"gw_qprange": self.gw_qprange, "iomode": 3} + joblist=self.joblist + jobs=[] + result={} + #SCF step + scf_job = self.scf_maker.make( + structure, + restart_from=restart_from ) - return Flow([m_scr_job, m_sigma_job], output=m_sigma_job.output, name=self.name) + if "scf" in joblist: + if len(prev_outputs)!=0: + raise RuntimeError("No previous calculation needed in prev_outputs") + jobs.append(scf_job) + prev_outputs=[scf_job.output.dir_name] + result=scf_job.output + + #NSCF step + if "nscf" in joblist: + if len(prev_outputs)!=1: + raise RuntimeError("Previous SCF calculation needed in prev_outputs") + nscf_job = self.nscf_maker.make( + prev_outputs=[prev_outputs[0]], + mode="uniform", + ) + jobs.append(nscf_job) + prev_outputs=[nscf_job.output.dir_name] + result=nscf_job.output + + #SCR step + if "scr" in joblist: + if len(prev_outputs)!=1: + raise RuntimeError("Previous SCF and NSCF calculation needed in prev_outputs") + scr_job = self.scr_maker.make( + prev_outputs=[prev_outputs[0]], + ) + scr_job = update_user_abinit_settings( + flow=scr_job, + abinit_updates={"iomode": 3} + ) + jobs.append(scr_job) + prev_outputs.append(scr_job.output.dir_name) + result=scr_job.output + + #SIGMA step + if "sigma" in joblist: + if len(prev_outputs)!=2: + raise RuntimeError("Previous SCF, NSCF and SCR calculation needed in prev_outputs") + sigma_job = self.sigma_maker.make( + prev_outputs=[prev_outputs[0], prev_outputs[1]], + ) + sigma_job = update_user_abinit_settings( + flow=sigma_job, + abinit_updates={"gw_qprange": self.gw_qprange, "iomode": 3} + ) + jobs.append(sigma_job) + result=sigma_job.output + + return Flow(jobs, output=result, 
name=self.name) @dataclass class G0W0ConvergenceMaker(Maker): @@ -155,88 +161,49 @@ class G0W0ConvergenceMaker(Maker): convergence_field: str = field(default_factory=str) convergence_steps: list = field(default_factory=list) - #def __post_init__(self): - # TODO: make some checks on the input sets, e.g.: - # - non scf has to be uniform - # - set istwfk ? or check that it is "*1" ? - # - kpoint shifts ? - # - check nbands in nscf is >= nband in screening and sigma - # pass - def make( self, structure: Structure, restart_from: Optional[Union[str, Path]] = None, - ): - - static = GWbandsMaker().make(structure) - gw_maker = G0W0Maker() + ): + + NSCF_FIELDS = ["nband","ngkpt"] + SCR_FIELDS = ["ecuteps"] + SIGMA_FIELDS = ["ecutsigx"] + + SUPPORTED_FIELDS = NSCF_FIELDS + SCR_FIELDS + SIGMA_FIELDS + + if self.convergence_field not in SUPPORTED_FIELDS: + raise RuntimeError("convergence_field not supported yet") + + if self.convergence_field in NSCF_FIELDS: + static_job = G0W0Maker(joblist=["scf"]).make(structure) + gw_maker = G0W0Maker(joblist=["nscf","scr","sigma"]) + flow=[static_job] + prev_outputs=[static_job.output.dir_name] + + if self.convergence_field in SCR_FIELDS: + static_job = G0W0Maker(joblist=["scf","nscf"]).make(structure) + gw_maker = G0W0Maker(joblist=["scr","sigma"]) + flow=[static_job] + prev_outputs=[static_job.output.dir_name] + + if self.convergence_field in SIGMA_FIELDS: + pre_static_job = G0W0Maker(joblist=["scf","nscf"]).make(structure) + static_job = G0W0Maker(joblist=["scr"]).make(structure, prev_outputs=[pre_static_job.output.dir_name]) + gw_maker = G0W0Maker(joblist=["sigma"]) + flow=[pre_static_job,static_job] + prev_outputs=[pre_static_job.output.dir_name, static_job.output.dir_name] + convergence = ConvergenceMaker( - maker=gw_maker, - epsilon=self.epsilon, - criterion_name=self.criterion_name, - convergence_field=self.convergence_field, - convergence_steps=self.convergence_steps, - ) - gw = convergence.make(structure, 
prev_outputs=static.output.dir_name) - return Flow([static, gw], gw.output, name=self.name) + maker = gw_maker, + epsilon = self.epsilon, + criterion_name = self.criterion_name, + convergence_field = self.convergence_field, + convergence_steps = self.convergence_steps, + ) + + gw_job = convergence.make(structure, prev_outputs=prev_outputs) + flow.append(gw_job) + return Flow(flow, gw_job.output, name=self.name) -@dataclass -class BSEmdfMaker(Maker): - - bs_nband: int - mdf_epsinf: float - name: str = "BSE mdf calculation" - scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) - nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) - bse_maker: BaseAbinitMaker = field(default_factory=BSEMaker) - bs_loband: int = 1 - mbpt_sciss: float = 0.0 - kppa: int = 100 - shiftk: tuple = (0.11, 0.12, 0.13) - - def make( - self, - structure: Structure, - restart_from: Optional[Union[str, Path]] = None - ): - - scf_job = self.scf_maker.make( - structure, - restart_from=restart_from - ) - nscf_job = self.nscf_maker.make( - prev_outputs=[scf_job.output.dir_name], - mode="uniform", - ) - #njob=update_user_abinit_settings( - # flow=nscf_job, - # abinit_updates={ - # 'rfelfd': 1, - # 'rfdir': (1, 1, 1), - # 'nqpt': 1, - # 'qpt': (0.0, 0.0, 0.0), - # 'kptopt': 2, - # 'iscf': -2, - # 'tolwfr': 1e-22} - #) - nscf_job = update_user_kpoints_settings( - flow=nscf_job, - kpoints_updates=KSampling.automatic_density( - structure=structure, - kppa=self.kppa, - shifts=self.shiftk, - chksymbreak=0) - ) - bse_job = self.bse_maker.make( - prev_outputs=[nscf_job.output.dir_name, - "/home/ucl/modl/tbiswas/scratch/abinit_run/sigma"], - ) - bse_job=update_factory_kwargs( - flow=bse_job, factory_updates={ - 'bs_loband': self.bs_loband, - 'bs_nband': self.bs_nband, - 'mdf_epsinf': self.mdf_epsinf, - 'mbpt_sciss': self.mbpt_sciss} - ) - return Flow([scf_job, nscf_job, bse_job], output=nscf_job.output, name=self.name) diff --git a/src/atomate2/abinit/jobs/base.py 
b/src/atomate2/abinit/jobs/base.py index 43c467a979..a3b2853400 100644 --- a/src/atomate2/abinit/jobs/base.py +++ b/src/atomate2/abinit/jobs/base.py @@ -164,7 +164,6 @@ def make( history=history, wall_time=self.wall_time, ) - # Write abinit input set write_abinit_input_set( structure=structure, diff --git a/src/atomate2/abinit/jobs/bse.py b/src/atomate2/abinit/jobs/bse.py new file mode 100644 index 0000000000..71f26fab7f --- /dev/null +++ b/src/atomate2/abinit/jobs/bse.py @@ -0,0 +1,119 @@ +"""Core jobs for running ABINIT calculations.""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +import numpy as np +from jobflow import Response, job + +from atomate2.abinit.jobs.base import BaseAbinitMaker +from atomate2.abinit.sets.bse import BSEmdfSetGenerator, BSEscrSetGenerator + +logger = logging.getLogger(__name__) + +__all__ = ["BSEmdfMaker", "BSEscrMaker"] + + +@dataclass +class BSEscrMaker(BaseAbinitMaker): + """Maker to create non SCF calculations.""" + + calc_type: str = "bse_scr" + name: str = "BSE scr calculation" + + input_set_generator: BSEscrSetGenerator = field( + default_factory=BSEscrSetGenerator + ) + + @job + def make( + self, + structure: Structure | None = None, + prev_outputs: str | list[str] | None = None, + mdf_epsinf: float | None = None, + mbpt_sciss: float = 0.0, + bs_loband: float = 0.0, + nband: float = 0.0, + bs_freq_mesh: list[float] | None = None, + ) -> Job: + """ + Run a non-scf ABINIT job. + + Parameters + ---------- + structure : .Structure + A pymatgen structure object. + mode : str + Type of band structure calculation. Options are: + - "line": Full band structure along symmetry lines. + - "uniform": Uniform mesh band structure. 
+ """ + + if len(prev_outputs)!=2: + raise RuntimeError("Need previous SCF and SCREENING calculations") + + self.input_set_generator.factory_kwargs = {"mbpt_sciss": mbpt_sciss, + "bs_loband": bs_loband, + "nband": nband, + "mdf_epsinf": mdf_epsinf, + "bs_freq_mesh": bs_freq_mesh + } + + return super().make.original( + self, + structure=structure, + prev_outputs=prev_outputs, + ) + +@dataclass +class BSEmdfMaker(BaseAbinitMaker): + """Maker to create non SCF calculations.""" + + calc_type: str = "bse_mdf" + name: str = "BSE mdf calculation" + + input_set_generator: BSEmdfSetGenerator = field( + default_factory=BSEmdfSetGenerator + ) + + @job + def make( + self, + structure: Structure | None = None, + prev_outputs: str | list[str] | None = None, + mdf_epsinf: float | None = None, + mbpt_sciss: float = 0.0, + bs_loband: float = 0.0, + nband: float = 0.0, + bs_freq_mesh: list[float] | None = None, + ) -> Job: + """ + Run a non-scf ABINIT job. + + Parameters + ---------- + structure : .Structure + A pymatgen structure object. + mode : str + Type of band structure calculation. Options are: + - "line": Full band structure along symmetry lines. + - "uniform": Uniform mesh band structure. 
+ """ + if mdf_epsinf==None: + raise RuntimeError("Need a value of mdf_epsinf") + if len(prev_outputs)!=1: + raise RuntimeError("Need previous SCF calculation") + + self.input_set_generator.factory_kwargs = {"mbpt_sciss": mbpt_sciss, + "bs_loband": bs_loband, + "nband": nband, + "mdf_epsinf": mdf_epsinf, + "bs_freq_mesh": bs_freq_mesh + } + + return super().make.original( + self, + structure=structure, + prev_outputs=prev_outputs, + ) diff --git a/src/atomate2/abinit/jobs/core.py b/src/atomate2/abinit/jobs/core.py index 1858d97b87..41d40868b3 100644 --- a/src/atomate2/abinit/jobs/core.py +++ b/src/atomate2/abinit/jobs/core.py @@ -4,6 +4,8 @@ import logging import json +import numpy as np +from scipy.integrate import simpson from pathlib import Path from dataclasses import dataclass, field from typing import TYPE_CHECKING, ClassVar @@ -17,7 +19,8 @@ from jobflow import Job, job, Maker, Response, Flow from atomate2.abinit.jobs.base import BaseAbinitMaker -from atomate2.abinit.powerups import update_user_abinit_settings +from atomate2.abinit.powerups import update_user_abinit_settings, update_user_kpoints_settings +from pymatgen.io.abinit.abiobjects import KSampling from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary from atomate2.abinit.sets.core import ( LineNonSCFSetGenerator, @@ -240,7 +243,8 @@ def __post_init__(self): def make( self, structure: Structure, - prev_outputs: str | Path = None): + prev_outputs: str | list[str] | Path = None + ): """A top-level flow controlling convergence iteration Parameters @@ -256,7 +260,7 @@ def convergence_iteration( self, structure: Structure, prev_dir: str | Path = None, - prev_outputs: str | Path = None, + prev_outputs: str | list[str] | Path = None, ) -> Response: """ Runs several jobs with changing inputs consecutively to investigate @@ -277,6 +281,7 @@ def convergence_iteration( # getting the calculation index idx = 0 converged = False + num_prev_outputs=len(prev_outputs) if prev_dir is not None: 
prev_dir = prev_dir.split(":")[-1] convergence_file = Path(prev_dir) / CONVERGENCE_FILE_NAME @@ -290,23 +295,32 @@ def convergence_iteration( if idx < self.last_idx and not converged: # finding next jobs - base_job = self.maker.make(structure, prev_outputs=prev_outputs) - next_base_job = update_user_abinit_settings(flow=base_job, abinit_updates={self.convergence_field: self.convergence_steps[idx]}) + if self.convergence_field=="kppa": + next_base_job = self.maker.make( + structure, + prev_outputs=prev_outputs, + kppa=self.convergence_steps[idx]) + print(idx,self.convergence_steps[idx]) + else: + base_job = self.maker.make( + structure, + prev_outputs=prev_outputs) + next_base_job = update_user_abinit_settings( + flow=base_job, + abinit_updates={ + self.convergence_field: self.convergence_steps[idx]}) next_base_job.append_name(append_str=f" {idx}") - update_file_job = self.update_convergence_file( prev_dir=prev_dir, job_dir=next_base_job.output.dir_name, - output=next_base_job.output, - ) - + output=next_base_job.output) + prev_outputs=prev_outputs[:num_prev_outputs] next_job = self.convergence_iteration( - structure, prev_dir=next_base_job.output.dir_name, prev_outputs=prev_outputs, - ) - + structure, + prev_dir=next_base_job.output.dir_name, + prev_outputs=prev_outputs) replace_flow = Flow( - [next_base_job, update_file_job, next_job], output=next_base_job.output - ) + [next_base_job, update_file_job, next_job], output=next_base_job.output) return Response(detour=replace_flow, output=replace_flow.output) else: task_doc = AbinitTaskDoc.from_directory(prev_dir) @@ -343,20 +357,32 @@ def update_convergence_file( } convergence_data["convergence_field_values"].append(self.convergence_steps[idx]) convergence_data["criterion_values"].append( - getattr(output.output, self.criterion_name) + getattr(output.output, self.criterion_name) ) convergence_data["idx"] = idx - if len(convergence_data["criterion_values"]) > 1: # checking for convergence - 
convergence_data["converged"] = ( - abs( - convergence_data["criterion_values"][-1] - - convergence_data["criterion_values"][-2] + if type(convergence_data["criterion_values"][-1]) is list: + old_data=np.array(convergence_data["criterion_values"][-2]) + new_data=np.array(convergence_data["criterion_values"][-1]) + mesh0=old_data[0] + mesh=new_data[0] + values0=old_data[1] + values=new_data[1] + values1=np.interp(mesh0, mesh, values) + delta=abs(values1-values0) + delarea=simpson(delta) + area=simpson(values0) + print(delarea/area) + convergence_data["converged"] = bool(delarea/area < self.epsilon) + if type(convergence_data["criterion_values"][-1]) is float: + convergence_data["converged"] = bool( + abs( + convergence_data["criterion_values"][-1] + - convergence_data["criterion_values"][-2] + ) + < self.epsilon ) - < self.epsilon - ) - job_dir = job_dir.split(":")[-1] convergence_file = Path(job_dir) / CONVERGENCE_FILE_NAME with open(convergence_file, "w") as f: diff --git a/src/atomate2/abinit/jobs/gw.py b/src/atomate2/abinit/jobs/gw.py index 209371973b..c92b2b4cda 100644 --- a/src/atomate2/abinit/jobs/gw.py +++ b/src/atomate2/abinit/jobs/gw.py @@ -4,9 +4,11 @@ import logging from dataclasses import dataclass, field +import numpy as np +from jobflow import Response, job from atomate2.abinit.jobs.base import BaseAbinitMaker -from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator, BSEmdfSetGenerator +from atomate2.abinit.sets.gw import ScreeningSetGenerator, SigmaSetGenerator logger = logging.getLogger(__name__) @@ -40,15 +42,10 @@ class SigmaMaker(BaseAbinitMaker): calc_type: str = "sigma" name: str = "Sigma calculation" - input_set_generator: SigmaSetGenerator = field(default_factory=SigmaSetGenerator) + input_set_generator: SigmaSetGenerator = field( + default_factory=SigmaSetGenerator + ) # CRITICAL_EVENTS: ClassVar[Sequence[str]] = ("ScfConvergenceWarning",) -@dataclass -class BSEMaker(BaseAbinitMaker): - """Maker to create non SCF 
calculations.""" - - calc_type: str = "bse_nscf" - name: str = "BSE calculation" - input_set_generator: BSEmdfSetGenerator = field(default_factory=BSEmdfSetGenerator) diff --git a/src/atomate2/abinit/schemas/calculation.py b/src/atomate2/abinit/schemas/calculation.py index 5b0110d9d8..9a21af8276 100644 --- a/src/atomate2/abinit/schemas/calculation.py +++ b/src/atomate2/abinit/schemas/calculation.py @@ -5,6 +5,7 @@ import logging import os import pandas as pd +import numpy as np from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Optional, Union @@ -15,6 +16,7 @@ from abipy.electrons.gsr import GsrFile from abipy.electrons.scr import ScrFile from abipy.electrons.gw import SigresFile +from abipy.electrons.bse import MdfFile from abipy.flowtk import events from abipy.flowtk.utils import File from emmet.core.math import Matrix3D, Vector3D @@ -70,6 +72,8 @@ class CalculationOutput(BaseModel): The conduction band minimum in eV (if system is not metallic vbm: float The valence band maximum in eV (if system is not metallic) + emacro: abipy Function1D object + Macroscopic dielectric function """ energy: float = Field( @@ -95,7 +99,7 @@ class CalculationOutput(BaseModel): bandgap: Optional[float] = Field( None, description="The band gap from the calculation in eV" ) - direct_bandgap: Optional[float] = Field( + direct_gap: Optional[float] = Field( None, description="The direct band gap from the calculation in eV" ) cbm: Optional[float] = Field( @@ -108,15 +112,17 @@ class CalculationOutput(BaseModel): description="The valence band maximum, or HOMO for molecules, in eV " "(if system is not metallic)", ) - qp_corr_vbm: Optional[float] = Field( + emacro: Optional[list] = Field( None, - description="The valence band maximum, or HOMO for molecules, in eV " - "(if system is not metallic)", + description="Macroscopic dielectric function", ) - qp_corr_cbm: Optional[float] = Field( + mbpt_sciss: Optional[float] = Field( None, - description="The 
valence band maximum, or HOMO for molecules, in eV " - "(if system is not metallic)", + description="Scissor shift (scalar) obtained from GW calculation", + ) + bandlims: Optional[list] = Field( + None, + description="Minimum and Maximum energies of each bands", ) class Config: @@ -143,24 +149,34 @@ def from_abinit_gsr( # In case no conduction bands were included try: + vbm = output.ebands.get_edge_state("vbm").eig cbm = output.ebands.get_edge_state("cbm").eig bandgap = output.ebands.fundamental_gaps[ 0 ].energy # [0] for one spin channel only - direct_bandgap = output.ebands.direct_gaps[0].energy + direct_gap = output.ebands.direct_gaps[0].energy except ValueError: cbm = None bandgap = None - direct_bandgap = None + direct_gap = None + + bandlims=[] + for spin in output.ebands.spins: + for iband in range(output.ebands.nband): + enemin=output.ebands.enemin(spin=spin,band=iband)-vbm + enemax=output.ebands.enemax(spin=spin,band=iband)-vbm + bandlims.append([spin, iband, enemin, enemax]) electronic_output = { "efermi": float(output.ebands.fermie), - "vbm": output.ebands.get_edge_state("vbm").eig, + "vbm": vbm, "cbm": cbm, "bandgap": bandgap, - "direct_bandgap": direct_bandgap, + "direct_gap": direct_gap, + "bandlims": bandlims, } + forces = None if output.cart_forces is not None: forces = output.cart_forces.tolist() @@ -196,40 +212,11 @@ def from_abinit_scr( """ structure = output.structure # final structure by default for GSR - # In case no conduction bands were included - try: - cbm = output.ebands.get_edge_state("cbm").eig - bandgap = output.ebands.fundamental_gaps[ - 0 - ].energy # [0] for one spin channel only - direct_bandgap = output.ebands.direct_gaps[0].energy - except ValueError: - cbm = None - bandgap = None - direct_bandgap = None - - electronic_output = { - "efermi": float(output.ebands.fermie), - "vbm": output.ebands.get_edge_state("vbm").eig, - "cbm": cbm, - "bandgap": bandgap, - "direct_bandgap": direct_bandgap, - } - - #forces = None - #if 
output.cart_forces is not None: - # forces = output.cart_forces.tolist() - - #stress = None - #if output.cart_stress_tensor is not None: - # stress = output.cart_stress_tensor.tolist() return cls( structure=structure, energy=0.0, energy_per_atom=0.0, - **electronic_output, - #forces=forces, - #stress=stress, + efermi=0.0, ) @classmethod def from_abinit_sig( @@ -255,32 +242,50 @@ def from_abinit_sig( icbm=output.ebands.get_edge_state("cbm") vbm=output.get_qpcorr(ivbm.spin, ivbm.kpoint, ivbm.band).re_qpe cbm=output.get_qpcorr(icbm.spin, icbm.kpoint, icbm.band).re_qpe + ks_gap=icbm.eig-ivbm.eig bandgap=cbm-vbm - direct_bandgap=None + mbpt_sciss=bandgap-ks_gap electronic_output = { "efermi": float(output.ebands.fermie), "vbm": vbm, "cbm": cbm, "bandgap": bandgap, - "direct_bandgap": direct_bandgap, + "mbpt_sciss": mbpt_sciss, } - #forces = None - #if output.cart_forces is not None: - # forces = output.cart_forces.tolist() + return cls( + structure=structure, + energy=0.0, + energy_per_atom=0.0, + **electronic_output, + ) + @classmethod + def from_abinit_mdf( + cls, + output: MdfFile, # Must use auto_load kwarg when passed + ) -> CalculationOutput: + """ + Create an Abinit output document from Abinit outputs. - #stress = None - #if output.cart_stress_tensor is not None: - # stress = output.cart_stress_tensor.tolist() + Parameters + ---------- + output: .AbinitOutput + An AbinitOutput object. - #qp_data=output.get_dataframe() + Returns + ------- + The Abinit calculation output document. 
+ """ + structure = output.structure # final structure by default for GSR + gw_eps=output.get_mdf(mdf_type="exc").emacro_avg + emacromesh=gw_eps.mesh + emacrovalues=gw_eps.values.imag + emacro=[emacromesh, emacrovalues] return cls( structure=structure, energy=0.0, energy_per_atom=0.0, - **electronic_output, - #qp_data=qp_data, - #forces=forces, - #stress=stress, + efermi=0.0, + emacro=emacro ) @@ -333,6 +338,7 @@ def from_abinit_files( abinit_gsr_file: Path | str = "out_GSR.nc", abinit_scr_file: Path | str = "out_SCR.nc", abinit_sig_file: Path | str = "out_SIGRES.nc", + abinit_mdf_file: Path | str = "out_MDF.nc", abinit_log_file: Path | str = LOG_FILE_NAME, abinit_abort_file: Path | str = MPIABORTFILE, @@ -362,6 +368,7 @@ def from_abinit_files( abinit_gsr_file = dir_name / abinit_gsr_file abinit_scr_file = dir_name / abinit_scr_file abinit_sig_file = dir_name / abinit_sig_file + abinit_mdf_file = dir_name / abinit_mdf_file abinit_log_file = dir_name / abinit_log_file abinit_abort_file = dir_name / abinit_abort_file if os.path.isfile(abinit_gsr_file): @@ -376,6 +383,10 @@ def from_abinit_files( abinit_sig = SigresFile.from_file(abinit_sig_file) output_doc = CalculationOutput.from_abinit_sig(abinit_sig) abinit_version = abinit_sig.abinit_version + elif os.path.isfile(abinit_mdf_file): + abinit_mdf = MdfFile.from_file(abinit_mdf_file) + output_doc = CalculationOutput.from_abinit_mdf(abinit_mdf) + abinit_version = abinit_mdf.abinit_version else: print("No ouput file found.") diff --git a/src/atomate2/abinit/schemas/task.py b/src/atomate2/abinit/schemas/task.py index 548a95cd00..a8f6b3add5 100644 --- a/src/atomate2/abinit/schemas/task.py +++ b/src/atomate2/abinit/schemas/task.py @@ -4,11 +4,14 @@ import json import logging +import numpy as np +from scipy.integrate import simpson from collections.abc import Sequence from pathlib import Path from typing import Any, Optional, TypeVar, Union from abipy.abio.inputs import AbinitInput +from abipy.core.func1d import Function1D 
from abipy.flowtk import events from emmet.core.math import Matrix3D, Vector3D from emmet.core.structure import StructureMetadata @@ -102,6 +105,9 @@ class OutputDoc(BaseModel): bandgap: Optional[float] = Field( None, description="The DFT bandgap for the last calculation" ) + direct_gap: Optional[float] = Field( + None, description="The direct DFT bandgap for the last calculation" + ) cbm: Optional[float] = Field(None, description="CBM for this calculation") vbm: Optional[float] = Field(None, description="VBM for this calculation") forces: Optional[list[Vector3D]] = Field( @@ -110,6 +116,17 @@ class OutputDoc(BaseModel): stress: Optional[Matrix3D] = Field( None, description="Stress on the unit cell from the last calculation" ) + emacro: Optional[list] = Field( + None, description="Macroscopic dielectric function" + ) + mbpt_sciss: Optional[float] = Field( + None, description="Scissor shift (scalar) from GW calculation" + ) + bandlims: Optional[list] = Field( + None, description="Minimum and Maximum energies of each bands" + ) + class Config: + arbitrary_types_allowed = True @classmethod def from_abinit_calc_doc(cls, calc_doc: Calculation) -> OutputDoc: @@ -130,10 +147,14 @@ def from_abinit_calc_doc(cls, calc_doc: Calculation) -> OutputDoc: energy=calc_doc.output.energy, energy_per_atom=calc_doc.output.energy_per_atom, bandgap=calc_doc.output.bandgap, + direct_gap=calc_doc.output.direct_gap, cbm=calc_doc.output.cbm, vbm=calc_doc.output.vbm, forces=calc_doc.output.forces, stress=calc_doc.output.stress, + emacro=calc_doc.output.emacro, + mbpt_sciss=calc_doc.output.mbpt_sciss, + bandlims=calc_doc.output.bandlims, ) @@ -368,6 +389,8 @@ def _get_task_files(files: list[Path], suffix: str = "") -> dict: abinit_files["abinit_scr_file"] = Path(file).relative_to(path) if file.match(f"*outdata/out_SIGRES{suffix}*"): abinit_files["abinit_sig_file"] = Path(file).relative_to(path) + if file.match(f"*outdata/out_MDF{suffix}*"): + abinit_files["abinit_mdf_file"] = 
Path(file).relative_to(path) return abinit_files for task_name in task_names: @@ -404,7 +427,7 @@ class ConvergenceSummary(BaseModel): convergence_field_name: str = Field( None, description="The name of the input setting to study convergence against" ) - convergence_criterion_value: float = Field( + convergence_criterion_value: Union[float, list] = Field( None, description="The output value of the convergence criterion" ) convergence_field_value: Any = Field( @@ -447,6 +470,23 @@ def from_abinit_calc_doc(cls, calc_doc: Calculation) -> "ConvergenceSummary": with open(convergence_file) as f: convergence_data = json.load(f) + if type(convergence_data["criterion_values"][-1]) is list: + old_data=np.array(convergence_data["criterion_values"][-2]) + new_data=np.array(convergence_data["criterion_values"][-1]) + mesh0=old_data[0] + mesh=new_data[0] + values0=old_data[1] + values=new_data[1] + values1=np.interp(mesh0, mesh, values) + delta=abs(values1-values0) + delarea=simpson(delta) + area=simpson(values0) + actual_epsilon=delarea/area + if type(convergence_data["criterion_values"][-1]) is float: + actual_epsilon=abs( + convergence_data["criterion_values"][-1] + - convergence_data["criterion_values"][-2] + ) return cls( structure=calc_doc.output.structure, @@ -456,8 +496,5 @@ def from_abinit_calc_doc(cls, calc_doc: Calculation) -> "ConvergenceSummary": convergence_criterion_value=convergence_data["criterion_values"][-1], convergence_field_value=convergence_data["convergence_field_values"][-1], asked_epsilon=convergence_data["asked_epsilon"], - actual_epsilon=abs( - convergence_data["criterion_values"][-2] - - convergence_data["criterion_values"][-1] - ), + actual_epsilon=actual_epsilon, ) diff --git a/src/atomate2/abinit/sets/base.py b/src/atomate2/abinit/sets/base.py index a8f80da0e4..71e7bdeda1 100644 --- a/src/atomate2/abinit/sets/base.py +++ b/src/atomate2/abinit/sets/base.py @@ -612,7 +612,6 @@ def get_abinit_input( total_factory_kwargs["pseudos"] = pseudos if 
factory_kwargs: total_factory_kwargs.update(factory_kwargs) - generated_input = self.factory(**total_factory_kwargs) if input_index is not None: diff --git a/src/atomate2/abinit/sets/bse.py b/src/atomate2/abinit/sets/bse.py new file mode 100644 index 0000000000..8ffa7a9912 --- /dev/null +++ b/src/atomate2/abinit/sets/bse.py @@ -0,0 +1,85 @@ +"""Module defining Abinit input set generators specific to GW calculations.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Callable + +from atomate2.abinit.sets.factories import bse_with_mdf_from_inputs +from abipy.abio.input_tags import SCF, NSCF, SCREENING, SIGMA + +from atomate2.abinit.files import load_abinit_input +from atomate2.abinit.sets.base import AbinitInputGenerator +from atomate2.abinit.sets.core import NonSCFSetGenerator +from pymatgen.io.abinit.abiobjects import KSampling + +if TYPE_CHECKING: + from abipy.abio.inputs import AbinitInput + from pymatgen.core import Structure + from pymatgen.io.abinit import PseudoTable + +__all__ = [ + "BSEmdfSetGenerator", + "BSEscrSetGenerator", +] + + +@dataclass +class BSEmdfSetGenerator(AbinitInputGenerator): + """Class to generate Abinit non-SCF input sets.""" + + calc_type: str = "bse_mdf" + factory: Callable = bse_with_mdf_from_inputs + pseudos: str | list[str] | PseudoTable | None = None + prev_outputs_deps: tuple = (f"{NSCF}:WFK",) + factory_kwargs: dict = field(default_factory=dict) + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"nscf_input": (NSCF,),} + ) + def get_abinit_input( + self, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, + prev_outputs: list[str] | None = None, + abinit_settings: dict | None = None, + factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, + ) -> AbinitInput: + + return super().get_abinit_input( + structure=structure, + pseudos=pseudos, + prev_outputs=prev_outputs, + 
factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, + ) + +@dataclass +class BSEscrSetGenerator(AbinitInputGenerator): + """Class to generate Abinit non-SCF input sets.""" + + calc_type: str = "bse_full" + factory: Callable = bse_with_mdf_from_inputs + pseudos: str | list[str] | PseudoTable | None = None + prev_outputs_deps: tuple = (f"{NSCF}:WFK", f"{SCREENING}:SCR",) + factory_kwargs: dict = field(default_factory=dict) + factory_prev_inputs_kwargs: dict | None = field( + default_factory=lambda: {"nscf_input": (NSCF,), "scr_input": (SCREENING,),} + ) + def get_abinit_input( + self, + structure: Structure | None = None, + pseudos: PseudoTable | None = None, + prev_outputs: list[str] | None = None, + abinit_settings: dict | None = None, + factory_kwargs: dict | None = None, + kpoints_settings: dict | KSampling | None = None, + ) -> AbinitInput: + + return super().get_abinit_input( + structure=structure, + pseudos=pseudos, + prev_outputs=prev_outputs, + factory_kwargs=factory_kwargs, + kpoints_settings=kpoints_settings, + ) diff --git a/src/atomate2/abinit/sets/factories.py b/src/atomate2/abinit/sets/factories.py index 1f4947de30..3d519d9700 100644 --- a/src/atomate2/abinit/sets/factories.py +++ b/src/atomate2/abinit/sets/factories.py @@ -5,21 +5,27 @@ import numpy as np import pymatgen.io.abinit.abiobjects as aobj from abipy.abio.inputs import AbinitInput +import abipy.core.abinit_units as abu -def bse_with_mdf_from_inputs(nscf_input, sigma_input, bs_loband, bs_nband, - mdf_epsinf, mbpt_sciss, exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized", - smearing="fermi_dirac:0.1 eV") -> AbinitInput: +def bse_with_mdf_from_inputs(nscf_input, bs_loband, nband, + mbpt_sciss=0.0, mdf_epsinf=0.0, scr_input=None, exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized", + zcut="0.1 eV", ecuteps=3.0, coulomb_mode="model_df", bs_freq_mesh=[0.0, 10, 0.01]) -> AbinitInput: """Return a sigma input.""" bse_input = 
nscf_input.deepcopy() bse_input.pop_irdvars() - - exc_ham = aobj.ExcHamiltonian(bs_loband, bs_nband, mbpt_sciss, coulomb_mode="model_df", ecuteps=sigma_input["ecuteps"], + exc_ham = aobj.ExcHamiltonian(bs_loband=bs_loband, nband=nband, mbpt_sciss=mbpt_sciss*abu.eV_Ha, coulomb_mode=coulomb_mode, ecuteps=ecuteps, spin_mode=spin_mode, mdf_epsinf=mdf_epsinf, exc_type=exc_type, algo=bs_algo, - bs_freq_mesh=None, with_lf=True, zcut=None) + bs_freq_mesh=np.array(bs_freq_mesh)*abu.eV_Ha, with_lf=True, zcut=zcut) bse_input.set_vars(exc_ham.to_abivars()) # TODO: Cannot use istwfk != 1. + if scr_input: + bse_input.set_vars(ecuteps=scr_input["ecuteps"]) + bse_input.set_vars(bs_coulomb_term=11) + bse_input.pop_vars(["mdf_epsinf"]) bse_input.set_vars(istwfk="*1") bse_input.set_vars(ecutwfn=nscf_input["ecut"]) + bse_input.set_vars(bs_haydock_niter=200) + bse_input.set_vars(nband=nband) return bse_input diff --git a/src/atomate2/abinit/sets/gw.py b/src/atomate2/abinit/sets/gw.py index c0b3f05241..7431eee909 100644 --- a/src/atomate2/abinit/sets/gw.py +++ b/src/atomate2/abinit/sets/gw.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Callable from abipy.abio.factories import scr_from_nscfinput, sigma_from_inputs, scf_input, nscf_from_gsinput -from atomate2.abinit.sets.factories import bse_with_mdf_from_inputs from abipy.abio.input_tags import SCF, NSCF, SCREENING, SIGMA from atomate2.abinit.files import load_abinit_input @@ -139,95 +138,3 @@ def get_abinit_input( kpoints_settings=kpoints_settings, ) -@dataclass -class BSENonSCFSetGenerator(AbinitInputGenerator): - """Class to generate Abinit non-SCF input sets.""" - - calc_type: str = "nscf_bse" - factory: Callable = nscf_from_gsinput - pseudos: str | list[str] | PseudoTable | None = None - restart_from_deps: tuple = (f"{NSCF}:WFK",) - prev_outputs_deps: tuple = (f"{SCF}:DEN",) - nbands_factor: float = 1.2 - factory_kwargs: dict = field(default_factory=dict) - - factory_prev_inputs_kwargs: dict | None = field( - 
default_factory=lambda: {"gs_input": (SCF,)} - ) - - def get_abinit_input( - self, - structure: Structure | None = None, - pseudos: PseudoTable | None = None, - prev_outputs: list[str] | None = None, - abinit_settings: dict | None = None, - factory_kwargs: dict | None = None, - kpoints_settings: dict | KSampling | None = None, - nscf_ngkpt: tuple | None = None, - nscf_shiftk: tuple | None = None, - ) -> AbinitInput: - """Get AbinitInput object for Non-SCF calculation.""" - factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} - factory_kwargs["nband"] = self._get_nband(prev_outputs) - kpoints_settings=KSampling.monkhorst(nscf_ngkpt, shiftk=nscf_shiftk, chksymbreak=0) - return super().get_abinit_input( - structure=structure, - pseudos=pseudos, - prev_outputs=prev_outputs, - abinit_settings=abinit_settings, - factory_kwargs=factory_kwargs, - kpoints_settings=kpoints_settings, - ) - - def _get_nband(self, prev_outputs: list[str] | None) -> int: - abinit_inputs = self.resolve_prev_inputs( - prev_outputs, self.factory_prev_inputs_kwargs - ) - if len(abinit_inputs) != 1: - raise RuntimeError( - f"Should have exactly one previous output. 
Found {len(abinit_inputs)}" - ) - previous_abinit_input = next(iter(abinit_inputs.values())) - n_band = previous_abinit_input.get( - "nband", - previous_abinit_input.structure.num_valence_electrons( - previous_abinit_input.pseudos - ), - ) - -@dataclass -class BSEmdfSetGenerator(AbinitInputGenerator): - """Class to generate Abinit non-SCF input sets.""" - - calc_type: str = "bse_mdf" - factory: Callable = bse_with_mdf_from_inputs - pseudos: str | list[str] | PseudoTable | None = None - prev_outputs_deps: tuple = (f"{NSCF}:WFK",) - factory_kwargs: dict = field(default_factory=dict) - factory_prev_inputs_kwargs: dict | None = field( - default_factory=lambda: {"nscf_input": (NSCF,), "sigma_input": (SIGMA,)} - ) - def get_abinit_input( - self, - structure: Structure | None = None, - pseudos: PseudoTable | None = None, - prev_outputs: list[str] | None = None, - abinit_settings: dict | None = None, - factory_kwargs: dict | None = None, - kpoints_settings: dict | KSampling | None = None, - ) -> AbinitInput: - - #factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} - #factory_kwargs["bs_loband"] = bs_loband - #factory_kwargs["bs_nband"] = bs_nband if bs_nband is not None else load_abinit_input(prev_outputs[0])["nband"] - #factory_kwargs["mdf_epsinf"] = mdf_epsinf - #factory_kwargs["mbpt_sciss"] = mbpt_sciss - - return super().get_abinit_input( - structure=structure, - pseudos=pseudos, - prev_outputs=prev_outputs, - factory_kwargs=factory_kwargs, - kpoints_settings=kpoints_settings, - ) - diff --git a/src/atomate2/settings.py b/src/atomate2/settings.py index 4615d54b52..7bcfeba220 100644 --- a/src/atomate2/settings.py +++ b/src/atomate2/settings.py @@ -209,7 +209,7 @@ class Atomate2Settings(BaseSettings): description="Config file for task manager of abipy.", ) ABINIT_MAX_RESTARTS: int = Field( - 5, description="Maximum number of restarts of a job." + 0, description="Maximum number of restarts of a job." 
) model_config = SettingsConfigDict(env_prefix=_ENV_PREFIX) diff --git a/tests/test_data/vasp/Si_elastic/elastic_relax_1_6/outputs/transformations.json.gz b/tests/test_data/vasp/Si_elastic/elastic_relax_1_6/outputs/transformations.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..5b648c809498cf58a6da357d22ee87e433c870f3 GIT binary patch literal 601 zcmV-f0;c^RiwFooUWs7<|8#O;ZgXaDa&2LBX>V?GE^2dcZUD_y$%^AJ5d9V7b7Hhu zTTXqLKj^-N5EMt97`!3NO=m*>eX8t9>}&(w%)mHcN_x+wr>c777{8SDwkuq`h~nl@ z+OTs~VvB6=%0mK6=W|=M@hpm8vck44^4`3y+V|R*Zu=U1mxazdWMq5mcbKW_iUQV! z4I$5P;}}CKpMOHQT#|&$B99nV^(Vqc~-h2sVqZ=+8)FI-swB6ZE{MYG3rk|dmSp^TA=NkbWCa{(b?63sb? zAyjfHQv7UZ%kmlaQ5Ri_l}TyB;W&^^B$Wc56TodA9F}@{jN8U#`FY_ASGp=-6JB#H zd7Wh)tmvt~@&>N}^;H7vaDF57dH9J?nh>p|6wC-CH5E{gG@;Dk$&+x^H*e#L@u0@n zjq_npg6-UubqV1d>U{#q-xJy^99|QZe{S$(6uu@BKiKDOsQqCO`DpaRdA4oH>#9xK zV5_b5+w~1_?T8-^k&7ALuDCt*!}%8Di#fJ#Bn+-F+L#o6TuMr%l)pzG2YMC_uA#RZ z=iPqf^Q!5>y1iHuYw+lfS?yiYPMtg6*l=&vS+}=ykz>&(Y%kKvw0t3 zQ}Zfnw8oBcVbXu>-Fu_{SDwA==J%b7b*#I9`+J1F^S6oaO?mnSG->Qa4F>=K#CV?GE^2dcZUD_y%Z}nO5d4)WXRvI? zPfl~#KbW~8giP>2q`YK1qhS&MzU^co;kD9?cBSPIwOvK0+^&LS`cgI9uC(bQN!vqZ z!p_#nl={NGU3G32? 
zP~>srm_jR8d_uWgVuWW2MUqn~JX{mTd2^W~pk}L9K%Ge6XE8wIC-jgAv zGfWs4k}D-CQ2=ZPKxI^Dl`*Zfkc3C+&j3@l``AukM1;VaU_lfnO2=+CM2KsoiO`%e zp|RE&grqxDRnM@GrtGRX6-E#zSPD!i7MN!01gI~9jZ`{=%r{$`@xQ#8xH?^*5 ztqX&cc$K@Vsi2(0yicI{drEtW!($@zPuDxK!Pg|lFYF5+8h02ZJ~DkcyV``JsXg++ z)LY}W>l@%Y3qNc`E-`q!;`Y=J>wK&)vDn&?FyujNaGYeB1a}Gg+w{@Qvt&pMx!pML z_I5AowhL>2iDI01qT2CN^LBD literal 0 HcmV?d00001 diff --git a/tests/test_data/vasp/Si_elastic/elastic_relax_3_6/outputs/transformations.json.gz b/tests/test_data/vasp/Si_elastic/elastic_relax_3_6/outputs/transformations.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..41f0001fc96eda843896b7f4bfecafb2bec88a42 GIT binary patch literal 602 zcmV-g0;T;QiwFo1UWs7<|8#O;ZgXaDa&2LBX>V?GE^2dcZUD`d%Z}nO5Ji6_${M2h zZL{jd{6Y5)A!HIaM9NE!(`p9s?^6{92mwaw){HbPqO$L$QZCnlL;O-Td0)DC5ykDk zvSI7$#Fp94Rr>^%&KI`q;#m~GWTovo^u2jockhj_T>ct-pM~B#bYwg0x0tE!%M#Y5 z4WY)ht>*eu2lf@XVhitL zi8d@HTu3FfR+MNEHiKYNsu(v?OCcp=SpAcITedq)CQPE4ppeE;N`xSo%LSAqyi`*r zBoj=7DnQ!Ws(PkWSpD%Yb245=@zpyL1(D?nS$VX!z&U0-;(bQei z1zYFV=j(}ZOT!O`$_0eC8}DHKaK6L%0!Hqx1|)(cY08aOLQ+C#s?G1I$6205Ct%2Q zILH<6=S0tZFU*`6KkUbO5V?GE^2dcZUD`d&2HN;498!^@HrJk zS)V!eus3LLK~boVni%jGWVvWoDFT%@@1Cy zyT+$YXqm6eZD@83k0DmR9`Z$&-28z3nz8#*s*o5_6ies6^7$~CL`naJ??O4}% zu6;^XNshN1Q^o2P&DDxCzQ_bKN*H5i>tT_t(6$`3m5wXJosy`ZgagT4RK!WFe8JIRh?+Tr1-M_NWe33e@V=TR!xmtd56JhbFX%hHzI= z%e%51@dW$n%1?9$?5|POhvSo^*WFK&Bm=_*cB)WTX<3LxhA!biD|KQ%Ov8H$_54MB z?D`PXp%NPBrs*0qZ?NBcEcttdP8A*=CsqD=z$dHlwIb<-Z8fAW?hZwMw)*L~+dfrY zJFp@7cJ1T(@q=(q#E(GAI}G<{u~X=K5D6`x literal 0 HcmV?d00001 diff --git a/tests/test_data/vasp/Si_elastic/elastic_relax_5_6/outputs/transformations.json.gz b/tests/test_data/vasp/Si_elastic/elastic_relax_5_6/outputs/transformations.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..cfd5812ba5d4b8174523c2656da1acda460ae193 GIT binary patch literal 604 zcmV-i0;ByOiwFqSU5Q};|8#O;ZgXaDa&2LBX>V?GE^2dcZUD_y$!_965d9U)XRvx- zIOQ;ZklYYLjokrD-qhWW!XW;Ay3A@IN-~i~gK(>=UUj*)2fOsCZi~LM=`2aRZEeEZ 
zHkqmNjjgv?RIMvb<u0%7jqn zVA!QGlq=t(IG>{oO%jp`EhWKBfGjy7L{OzUrHl!(m?ZPTDy5tx(|CjP_~q<#=Zh3? zEh3hh_*R-JPL+}>>Z^mUDZ-2k$)lNI0P>FjQ*|4NB2Xr{W>_)8v=l@L2xr3)qZDh* zl*CvQtpHn_y8aM+YpcG7?l7E1Gvvn+5`?RLM12`7bbG!_eP{FXq}a;Vwh1tY=MuW! z=6N4AydP>^K@qXVl0ySdCqVDp-vF@;6O9Fr2+I%?ToJ56#zax7xndL{EEmK4R<^pl zsB>*+T{vU{6J6JB9Yq+=KBoM4i4GYaj;k!M=lRK^d@oJ7uql0L-S&{?qx}!31vjB= z8=v`Ln!>nZc>|1T@yh~oX2i?LcW`~P&VzhrNMSDyKn)DkK*t02dXNVzq)*PKd z$A`yfSogB&`mpq8VnP!h6`8lrX8xMLmr{N}>sua<**S9hB608e_#+0R>kl3g_zt?_ z?;SR#uU3i*foc4C;Qx%i_u_v<+XsvAO{zj4+djZLcgC68#($&1^FZUctYH}!yL>He q0YM2Oc#829sRYXz5i?9P%n=cCs3UBwyA5>D%KabSai(Vz2LJ%@`6TxM literal 0 HcmV?d00001 diff --git a/tests/test_data/vasp/Si_elastic/elastic_relax_6_6/outputs/transformations.json.gz b/tests/test_data/vasp/Si_elastic/elastic_relax_6_6/outputs/transformations.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..b4608bde1497501d6da86e2868f50e19fe3d0e94 GIT binary patch literal 615 zcmV-t0+{_DiwFp^U5Q};|8#O;ZgXaDa&2LBX>V?GE^2dcZUD_y+is&U5d9UzbAo)q zw>|Db(~s>lI1LEd|}Hjo<;FPR@$yZzb9|&?xpdS%O8X9v(S5o9@*CV4MwW_vV?VM zLnt!b*vAmc7jIxLmn0#xh$V^%mKw$fTQJED7fKPWg_O^;XgTaErDo9_PO$UhGv})X zr=9_eExeT{T5&;$(26mxxlRFC9|6M&O@&lK14uO{@)2Omc8lR8NjN7!UrHv7q8QAD z6GA1NkyKO3g)|bdjjgH=!PlnjD=bn<6GfFW01Lvnp$9;_A~-DiavyiC%Zl^Dm9BDi z!0UJ{u=q`u^{}FcA>}ou0F_n(>u{bCdfWX(C`~9cKn9RBAzDFsbP5H6L2-*RZG<5l z=yIlqlcim|^7_KoH?8yG$OtcZQ#BQscv}ac{&$22rpM`q?dW=c-#1^Qi3het7aG4i z(tJ-2;oR>w6iwYFU9fd-eZIN_#wz*Y5V;8CY2-V&zB=EbeGyFVt`4Xaib^4;3EO Date: Thu, 3 Oct 2024 01:53:43 +0200 Subject: [PATCH 07/21] checked --- src/atomate2/abinit/flows/bse.py | 101 +++++++++++++++++++++++----- src/atomate2/abinit/flows/gw.py | 9 +-- src/atomate2/abinit/jobs/bse.py | 6 +- src/atomate2/abinit/jobs/core.py | 27 ++------ src/atomate2/abinit/schemas/task.py | 22 +----- src/atomate2/abinit/sets/bse.py | 37 +++++++++- src/atomate2/abinit/utils/common.py | 20 ++++++ 7 files changed, 152 insertions(+), 70 deletions(-) diff --git a/src/atomate2/abinit/flows/bse.py 
b/src/atomate2/abinit/flows/bse.py index e4f04b0f41..33aa0c2531 100644 --- a/src/atomate2/abinit/flows/bse.py +++ b/src/atomate2/abinit/flows/bse.py @@ -11,13 +11,40 @@ from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker from atomate2.abinit.jobs.bse import BSEmdfMaker, BSEscrMaker -from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings +from atomate2.abinit.flows.gw import G0W0Maker +from atomate2.abinit.powerups import update_user_abinit_settings, update_user_kpoints_settings from pymatgen.io.abinit.abiobjects import KSampling -from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary @dataclass class BSEFlowMaker(Maker): + """ + Maker to generate workflow for BSE calculations. + + Parameters + ---------- + name : str + A name for the job + nscf_maker : .BaseAbinitMaker + The maker to use for the non-scf calculation. + bse_maker : .BaseAbinitMaker + The maker to use for the bse calculations. + kppa: integer + Grid density for k-mesh + shifts : tuple + Shift from gamma centered k-grid + mbpt_sciss : float + Scissor shift added to the conductions states in eV, + Default value 0.0 eV + mdf_epsinf : float + The value of the macroscopic dielectric function + used to model the screening function. + enwinbse : float + Energy window from band-edges in which all conduction + and valence bands are included in BSE calculations in eV. + Default value 3.0 eV + + """ name: str = "BSE mdf calculation" nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) @@ -95,6 +122,54 @@ def find_bse_params(self, bandlims, enwinbse, directgap): return Response(output=output) +@dataclass +class GWBSEMaker(Maker): + """ + Maker to generate workflow for BSE calculations. + + Parameters + ---------- + name : str + A name for the job + nscf_maker : .BaseAbinitMaker + The maker to use for the non-scf calculation. 
+ bse_maker : .BaseAbinitMaker + The maker to use for the bse calculations. + kppa: integer + Grid density for k-mesh + shifts : tuple + Shift from gamma centered k-grid + mbpt_sciss : float + Scissor shift added to the conductions states in eV, + Default value 0.0 eV + mdf_epsinf : float + The value of the macroscopic dielectric function + used to model the screening function. + enwinbse : float + Energy window from band-edges in which all conduction + and valence bands are included in BSE calculations in eV. + Default value 3.0 eV + + """ + + name: str = "GW-BSE full calculation" + gwflow_maker: BaseAbinitMaker = field(default_factory=G0W0Maker) + bseflow_maker: BaseAbinitMaker = field(default_factory=BSEFlowMaker) + + def __post_init__(self): + self.bseflow_maker.bse_maker=BSEscrMaker() + + def make( + self, + structure: Structure, + ): + gw_job = self.gwflow_maker.make(structure=structure) + self.bseflow_maker.mbpt_sciss=gw_job.output.output.mbpt_sciss + bse_job=self.bseflow_maker.make(structure=structure, prev_outputs=[gw_job.jobs[0].output.dir_name, gw_job.jobs[2].output.dir_name]) + + return Flow([gw_job, bse_job], bse_job.output, name=self.name) + + @dataclass class BSEConvergenceMaker(Maker): @@ -105,6 +180,10 @@ class BSEConvergenceMaker(Maker): ---------- name : str A name for the job + scf_maker : .BaseAbinitMaker + The maker to use for the scf calculation. + bse_maker : .BaseAbinitMaker + The maker to use for the bse calculations. criterion_name: str A name for the convergence criterion. 
Must be in the run results epsilon: float @@ -118,8 +197,8 @@ class BSEConvergenceMaker(Maker): """ name: str = "BSE convergence" - scf_maker: StaticMaker = field(default_factory=StaticMaker) - bse_maker: Maker = field(default_factory=Maker) + scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) + bse_maker: BaseAbinitMaker = field(default_factory=BSEFlowMaker) criterion_name: str = "emacro" epsilon: float = 0.1 convergence_field: str = field(default_factory=str) @@ -175,20 +254,10 @@ class BSEMultiShiftedMaker(Maker): If the iterable is depleted and the convergence is not reached, that the job is failed """ - cards: List[str] = field(default_factory=list) - - def __post_init__(self): - if not self.cards: - self.cards = self.create_cards() - - def create_cards(self): - return ['King', 'Queen'] - - name: str = "BSE Mutiple Shifted Grid" - scf_maker: StaticMaker = field(default_factory=StaticMaker) - bse_maker: Maker = field(default_factory=Maker) + scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) + bse_maker: BaseAbinitMaker = field(default_factory=BSEFlowMaker) shiftks: list = None def make( diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index 0d7e1f3e93..f8c42e1433 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -11,10 +11,7 @@ from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.jobs.core import NonSCFMaker, StaticMaker, ConvergenceMaker from atomate2.abinit.jobs.gw import ScreeningMaker, SigmaMaker -from atomate2.abinit.powerups import update_user_abinit_settings, update_factory_kwargs, update_user_kpoints_settings -from pymatgen.io.abinit.abiobjects import KSampling -from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary - +from atomate2.abinit.powerups import update_user_abinit_settings @dataclass @@ -48,8 +45,8 @@ class G0W0Maker(Maker): name: str = "G0W0 calculation" gw_qprange: int = 0 joblist: List = 
field(default_factory=lambda: ["scf", "nscf", "scr", "sigma"]) - scf_maker: StaticMaker = field(default_factory=StaticMaker) - nscf_maker: NonSCFMaker = field(default_factory=NonSCFMaker) + scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) + nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) scr_maker: BaseAbinitMaker = field(default_factory=ScreeningMaker) sigma_maker: BaseAbinitMaker = field(default_factory=SigmaMaker) diff --git a/src/atomate2/abinit/jobs/bse.py b/src/atomate2/abinit/jobs/bse.py index 71f26fab7f..94121e1449 100644 --- a/src/atomate2/abinit/jobs/bse.py +++ b/src/atomate2/abinit/jobs/bse.py @@ -17,7 +17,7 @@ @dataclass class BSEscrMaker(BaseAbinitMaker): - """Maker to create non SCF calculations.""" + """Maker to create BSE with full dielectric function calculations.""" calc_type: str = "bse_scr" name: str = "BSE scr calculation" @@ -68,7 +68,7 @@ def make( @dataclass class BSEmdfMaker(BaseAbinitMaker): - """Maker to create non SCF calculations.""" + """Maker to create BSE with model dielectric function calculations.""" calc_type: str = "bse_mdf" name: str = "BSE mdf calculation" @@ -103,7 +103,7 @@ def make( if mdf_epsinf==None: raise RuntimeError("Need a value of mdf_epsinf") if len(prev_outputs)!=1: - raise RuntimeError("Need previous SCF calculation") + raise RuntimeError("Need only one previous SCF calculation") self.input_set_generator.factory_kwargs = {"mbpt_sciss": mbpt_sciss, "bs_loband": bs_loband, diff --git a/src/atomate2/abinit/jobs/core.py b/src/atomate2/abinit/jobs/core.py index 41d40868b3..ffe1a1b72b 100644 --- a/src/atomate2/abinit/jobs/core.py +++ b/src/atomate2/abinit/jobs/core.py @@ -4,8 +4,6 @@ import logging import json -import numpy as np -from scipy.integrate import simpson from pathlib import Path from dataclasses import dataclass, field from typing import TYPE_CHECKING, ClassVar @@ -20,6 +18,7 @@ from atomate2.abinit.jobs.base import BaseAbinitMaker from atomate2.abinit.powerups import 
update_user_abinit_settings, update_user_kpoints_settings +from atomate2.abinit.utils.common import check_convergence from pymatgen.io.abinit.abiobjects import KSampling from atomate2.abinit.schemas.task import AbinitTaskDoc, ConvergenceSummary from atomate2.abinit.sets.core import ( @@ -362,27 +361,9 @@ def update_convergence_file( convergence_data["idx"] = idx if len(convergence_data["criterion_values"]) > 1: # checking for convergence - if type(convergence_data["criterion_values"][-1]) is list: - old_data=np.array(convergence_data["criterion_values"][-2]) - new_data=np.array(convergence_data["criterion_values"][-1]) - mesh0=old_data[0] - mesh=new_data[0] - values0=old_data[1] - values=new_data[1] - values1=np.interp(mesh0, mesh, values) - delta=abs(values1-values0) - delarea=simpson(delta) - area=simpson(values0) - print(delarea/area) - convergence_data["converged"] = bool(delarea/area < self.epsilon) - if type(convergence_data["criterion_values"][-1]) is float: - convergence_data["converged"] = bool( - abs( - convergence_data["criterion_values"][-1] - - convergence_data["criterion_values"][-2] - ) - < self.epsilon - ) + conv = check_convergence(convergence_data["criterion_values"][-1], convergence_data["criterion_values"][-2]) + convergence_data["converged"] = bool(conv < self.epsilon) + job_dir = job_dir.split(":")[-1] convergence_file = Path(job_dir) / CONVERGENCE_FILE_NAME with open(convergence_file, "w") as f: diff --git a/src/atomate2/abinit/schemas/task.py b/src/atomate2/abinit/schemas/task.py index a8f6b3add5..90de4dbea1 100644 --- a/src/atomate2/abinit/schemas/task.py +++ b/src/atomate2/abinit/schemas/task.py @@ -4,8 +4,6 @@ import json import logging -import numpy as np -from scipy.integrate import simpson from collections.abc import Sequence from pathlib import Path from typing import Any, Optional, TypeVar, Union @@ -20,7 +18,7 @@ from atomate2.abinit.files import load_abinit_input from atomate2.abinit.schemas.calculation import AbinitObject, 
Calculation, TaskState -from atomate2.abinit.utils.common import LOG_FILE_NAME, MPIABORTFILE +from atomate2.abinit.utils.common import LOG_FILE_NAME, MPIABORTFILE, check_convergence from atomate2.utils.datetime import datetime_str from atomate2.utils.path import get_uri, strip_hostname @@ -470,23 +468,7 @@ def from_abinit_calc_doc(cls, calc_doc: Calculation) -> "ConvergenceSummary": with open(convergence_file) as f: convergence_data = json.load(f) - if type(convergence_data["criterion_values"][-1]) is list: - old_data=np.array(convergence_data["criterion_values"][-2]) - new_data=np.array(convergence_data["criterion_values"][-1]) - mesh0=old_data[0] - mesh=new_data[0] - values0=old_data[1] - values=new_data[1] - values1=np.interp(mesh0, mesh, values) - delta=abs(values1-values0) - delarea=simpson(delta) - area=simpson(values0) - actual_epsilon=delarea/area - if type(convergence_data["criterion_values"][-1]) is float: - actual_epsilon=abs( - convergence_data["criterion_values"][-1] - - convergence_data["criterion_values"][-2] - ) + actual_epsilon=check_convergence(convergence_data["criterion_values"][-1], convergence_data["criterion_values"][-2]) return cls( structure=calc_doc.output.structure, diff --git a/src/atomate2/abinit/sets/bse.py b/src/atomate2/abinit/sets/bse.py index 8ffa7a9912..faf99aa37b 100644 --- a/src/atomate2/abinit/sets/bse.py +++ b/src/atomate2/abinit/sets/bse.py @@ -26,7 +26,7 @@ @dataclass class BSEmdfSetGenerator(AbinitInputGenerator): - """Class to generate Abinit non-SCF input sets.""" + """Class to generate Abinit BSE with model dielectric function input sets.""" calc_type: str = "bse_mdf" factory: Callable = bse_with_mdf_from_inputs @@ -46,6 +46,17 @@ def get_abinit_input( kpoints_settings: dict | KSampling | None = None, ) -> AbinitInput: + """Get AbinitInput object for BSE calculation.""" + if prev_outputs is None: + raise RuntimeError("No previous_outputs. 
Cannot perform BSE calculation.") + if len(prev_outputs) != 1: + raise RuntimeError( + "Should have exactly one previous outputs for mdf-BSE (one NSCF calculation)." + ) + ab1 = load_abinit_input(prev_outputs[0]) + if NSCF not in ab1.runlevel: + raise RuntimeError("Could not find one NSCF calculation.") + return super().get_abinit_input( structure=structure, pseudos=pseudos, @@ -56,7 +67,7 @@ def get_abinit_input( @dataclass class BSEscrSetGenerator(AbinitInputGenerator): - """Class to generate Abinit non-SCF input sets.""" + """Class to generate Abinit BSE with full dielectric function input sets.""" calc_type: str = "bse_full" factory: Callable = bse_with_mdf_from_inputs @@ -76,6 +87,28 @@ def get_abinit_input( kpoints_settings: dict | KSampling | None = None, ) -> AbinitInput: + """Get AbinitInput object for BSE calculation.""" + if prev_outputs is None: + raise RuntimeError("No previous_outputs. Cannot perform BSE calculation.") + if len(prev_outputs) != 2: + raise RuntimeError( + "Should have exactly two previous outputs (one NSCF calculation " + "and one SCREENING calculation)." 
+ ) + ab1 = load_abinit_input(prev_outputs[0]) + ab2 = load_abinit_input(prev_outputs[1]) + if NSCF in ab1.runlevel and SCREENING in ab2.runlevel: + nscf_inp = ab1 + scr_inp = ab2 + elif SCREENING in ab1.runlevel and NSCF in ab2.runlevel: + nscf_inp = ab2 + scr_inp = ab1 + else: + raise RuntimeError("Could not find one NSCF and one SCREENING calculation.") + + if nscf_inp.vars["ngkpt"]!=scr_inp.vars["ngkpt"]: + raise RuntimeError("Screening calculation k-grid is not compatible") + return super().get_abinit_input( structure=structure, pseudos=pseudos, diff --git a/src/atomate2/abinit/utils/common.py b/src/atomate2/abinit/utils/common.py index 3fd19cb55b..4c3cda47e9 100644 --- a/src/atomate2/abinit/utils/common.py +++ b/src/atomate2/abinit/utils/common.py @@ -5,6 +5,8 @@ import logging import os from typing import TYPE_CHECKING +import numpy as np +from scipy.integrate import simpson if TYPE_CHECKING: from pathlib import Path @@ -430,3 +432,21 @@ def get_event_report(ofile: File, mpiabort_file: File) -> EventReport | None: return parser.report_exception(ofile.path, exc) else: return report + + +def check_convergence(old, new)-> float: + if type(old) is list: + mesh_old = old[0] + mesh_new = new[0] + values_old = np.array(old[1]) + values_new = np.array(new[1]) + values_int = np.interp(mesh_old, mesh_new, values_new) + delta = abs(values_int-values_old) + delarea = simpson(delta) + area = simpson(values_old) + conv = delarea/area + elif type(old) is float: + conv = abs(new-old) + else: + raise RuntimeError("Output not supported for convergence check") + return conv From 3530aac2aa524146f6732a3f260bcf1f2d6fc49f Mon Sep 17 00:00:00 2001 From: "Tathagata Biswas (tathagata.biswas@uclouvain.be)" Date: Thu, 3 Oct 2024 02:07:44 +0200 Subject: [PATCH 08/21] GW workflow --- src/atomate2/abinit/flows/gw.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index 
f8c42e1433..c8e2ca8182 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -27,10 +27,6 @@ class G0W0Maker(Maker): ---------- name : str Name of the flows produced by this maker. - scr_maker : .BaseAbinitMaker - The maker to use for the screening calculation. - sigma_maker : .BaseAbinitMaker - The maker to use for the sigma calculations. gw_qprange: int 0 - Compute the QP corrections only for the fundamental and the direct gap @@ -40,6 +36,19 @@ -num - Compute the QP corrections for all the k-points in the irreducible zone. Include all occupied states and num empty states. + joblist : list[str] + Steps of GW calculations to be included. + Default is ["scf", "nscf", "scr", "sigma"], + which creates a workflow to perform the + entire GW calculations. + scf_maker : .BaseAbinitMaker + The maker to use for the scf calculation. + nscf_maker : .BaseAbinitMaker + The maker to use for the nscf calculations. + scr_maker : .BaseAbinitMaker + The maker to use for the screening calculation. + sigma_maker : .BaseAbinitMaker + The maker to use for the sigma calculations. 
""" name: str = "G0W0 calculation" From 501709c6cc16c6165a60040e9ad037cf466d3dfe Mon Sep 17 00:00:00 2001 From: "Tathagata Biswas (tathagata.biswas@uclouvain.be)" Date: Thu, 3 Oct 2024 02:17:54 +0200 Subject: [PATCH 09/21] GW workflow --- src/atomate2/abinit/flows/gw.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/atomate2/abinit/flows/gw.py b/src/atomate2/abinit/flows/gw.py index c8e2ca8182..5473a94c9c 100644 --- a/src/atomate2/abinit/flows/gw.py +++ b/src/atomate2/abinit/flows/gw.py @@ -112,7 +112,7 @@ def make( #SCR step if "scr" in joblist: if len(prev_outputs)!=1: - raise RuntimeError("Previous SCF and NSCF calculation needed in prev_outputs") + raise RuntimeError("Previous NSCF calculation needed in prev_outputs") scr_job = self.scr_maker.make( prev_outputs=[prev_outputs[0]], ) @@ -127,7 +127,7 @@ def make( #SIGMA step if "sigma" in joblist: if len(prev_outputs)!=2: - raise RuntimeError("Previous SCF, NSCF and SCR calculation needed in prev_outputs") + raise RuntimeError("Previous NSCF and SCR calculation needed in prev_outputs") sigma_job = self.sigma_maker.make( prev_outputs=[prev_outputs[0], prev_outputs[1]], ) From 987f7321f03f89414d9a7073021a4058b3630757 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Thu, 3 Oct 2024 06:00:42 +0530 Subject: [PATCH 10/21] Update bse.py --- src/atomate2/abinit/flows/bse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/atomate2/abinit/flows/bse.py b/src/atomate2/abinit/flows/bse.py index 33aa0c2531..34bbbc3d05 100644 --- a/src/atomate2/abinit/flows/bse.py +++ b/src/atomate2/abinit/flows/bse.py @@ -46,7 +46,7 @@ class BSEFlowMaker(Maker): """ - name: str = "BSE mdf calculation" + name: str = "BSE calculation" nscf_maker: BaseAbinitMaker = field(default_factory=NonSCFMaker) bse_maker: BaseAbinitMaker = field(default_factory=BSEmdfMaker) kppa: int = 1000 From 88c478c59ca51a3953bd38afeaeee0d6314c5419 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Thu, 3 Oct 2024 
06:07:15 +0530 Subject: [PATCH 11/21] Update bse.py --- src/atomate2/abinit/flows/bse.py | 23 ++++------------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/src/atomate2/abinit/flows/bse.py b/src/atomate2/abinit/flows/bse.py index 34bbbc3d05..9b4a5884c7 100644 --- a/src/atomate2/abinit/flows/bse.py +++ b/src/atomate2/abinit/flows/bse.py @@ -131,25 +131,10 @@ class GWBSEMaker(Maker): ---------- name : str A name for the job - nscf_maker : .BaseAbinitMaker - The maker to use for the non-scf calculation. - bse_maker : .BaseAbinitMaker - The maker to use for the bse calculations. - kppa: integer - Grid density for k-mesh - shifts : tuple - Shift from gamma centered k-grid - mbpt_sciss : float - Scissor shift added to the conductions states in eV, - Default value 0.0 eV - mdf_epsinf : float - The value of the macroscopic dielectric function - used to model the screening function. - enwinbse : float - Energy window from band-edges in which all conduction - and valence bands are included in BSE calculations in eV. - Default value 3.0 eV - + gwflow_maker : .BaseAbinitMaker + The maker to use for the GW workflow calculations. + bseflow_maker : .BaseAbinitMaker + The maker to use for the BSE workflow calculations. 
""" name: str = "GW-BSE full calculation" From 06f48c9f881567437543aaa5dbfab1b18d273082 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Thu, 3 Oct 2024 06:08:58 +0530 Subject: [PATCH 12/21] Update bse.py --- src/atomate2/abinit/flows/bse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/atomate2/abinit/flows/bse.py b/src/atomate2/abinit/flows/bse.py index 9b4a5884c7..f1df8fc917 100644 --- a/src/atomate2/abinit/flows/bse.py +++ b/src/atomate2/abinit/flows/bse.py @@ -184,7 +184,7 @@ class BSEConvergenceMaker(Maker): name: str = "BSE convergence" scf_maker: BaseAbinitMaker = field(default_factory=StaticMaker) bse_maker: BaseAbinitMaker = field(default_factory=BSEFlowMaker) - criterion_name: str = "emacro" + criterion_name: str = "kppa" epsilon: float = 0.1 convergence_field: str = field(default_factory=str) convergence_steps: list = field(default_factory=list) From f7311bf22dd7349c1ba53dec0ed780da4bde0c7e Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Thu, 3 Oct 2024 06:26:41 +0530 Subject: [PATCH 13/21] Update bse.py --- src/atomate2/abinit/flows/bse.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/atomate2/abinit/flows/bse.py b/src/atomate2/abinit/flows/bse.py index f1df8fc917..cb52d20019 100644 --- a/src/atomate2/abinit/flows/bse.py +++ b/src/atomate2/abinit/flows/bse.py @@ -228,16 +228,13 @@ class BSEMultiShiftedMaker(Maker): ---------- name : str A name for the job - criterion_name: str - A name for the convergence criterion. Must be in the run results - epsilon: float - A difference in criterion value for subsequent runs - convergence_field: str - An input parameter that changes to achieve convergence - convergence_steps: list | tuple - An iterable of the possible values for the convergence field. - If the iterable is depleted and the convergence is not reached, - that the job is failed + scf_maker : .BaseAbinitMaker + The maker to use for the scf calculation. 
+ bse_maker : .BaseAbinitMaker + The maker to use for the bse calculations. + shiftks : list[tuple] + k-grid shifts to be used for multiple BSE calculations. + The resulting absorption spectra will be averaged. """ name: str = "BSE Mutiple Shifted Grid" From 4627b91b34e67de666a10488a5df0ef4992ad409 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 09:45:02 +0530 Subject: [PATCH 14/21] Update factories.py --- src/atomate2/abinit/sets/factories.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/atomate2/abinit/sets/factories.py b/src/atomate2/abinit/sets/factories.py index 3d519d9700..6a6cfa0e7f 100644 --- a/src/atomate2/abinit/sets/factories.py +++ b/src/atomate2/abinit/sets/factories.py @@ -1,5 +1,6 @@ # coding: utf-8 """Factory functions for Abinit input files """ +"""This code is planned to be moved into abipy/abio/factories.py""" from __future__ import annotations import numpy as np From 1989964cdbe7d1858ce08ddedef62791f5f7fe8f Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:01:03 +0530 Subject: [PATCH 15/21] Update settings.py --- src/atomate2/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/atomate2/settings.py b/src/atomate2/settings.py index 7bcfeba220..4615d54b52 100644 --- a/src/atomate2/settings.py +++ b/src/atomate2/settings.py @@ -209,7 +209,7 @@ class Atomate2Settings(BaseSettings): description="Config file for task manager of abipy.", ) ABINIT_MAX_RESTARTS: int = Field( - 0, description="Maximum number of restarts of a job." + 5, description="Maximum number of restarts of a job." 
) model_config = SettingsConfigDict(env_prefix=_ENV_PREFIX) From feb4b4fec7c53d173a5afce73af19662eb6ce484 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:08:38 +0530 Subject: [PATCH 16/21] Update core.py --- src/atomate2/abinit/sets/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/atomate2/abinit/sets/core.py b/src/atomate2/abinit/sets/core.py index ceda588802..392bfdbce9 100644 --- a/src/atomate2/abinit/sets/core.py +++ b/src/atomate2/abinit/sets/core.py @@ -98,6 +98,7 @@ def get_abinit_input( """Get AbinitInput object for Non-SCF calculation.""" factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} factory_kwargs["nband"] = self._get_nband(prev_outputs) + return super().get_abinit_input( structure=structure, pseudos=pseudos, From 60e87beeb5f1aa93af1695ce77cc1cc71483d05f Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:10:50 +0530 Subject: [PATCH 17/21] Update core.py --- src/atomate2/abinit/sets/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/atomate2/abinit/sets/core.py b/src/atomate2/abinit/sets/core.py index 392bfdbce9..94b23079ce 100644 --- a/src/atomate2/abinit/sets/core.py +++ b/src/atomate2/abinit/sets/core.py @@ -98,7 +98,7 @@ def get_abinit_input( """Get AbinitInput object for Non-SCF calculation.""" factory_kwargs = dict(factory_kwargs) if factory_kwargs else {} factory_kwargs["nband"] = self._get_nband(prev_outputs) - + return super().get_abinit_input( structure=structure, pseudos=pseudos, From d260284e9bd478a31ea63b1990082ee8edffe0d9 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:13:04 +0530 Subject: [PATCH 18/21] Update base.py --- src/atomate2/abinit/sets/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/atomate2/abinit/sets/base.py b/src/atomate2/abinit/sets/base.py index 71e7bdeda1..6077f4b9e1 100644 --- a/src/atomate2/abinit/sets/base.py +++ b/src/atomate2/abinit/sets/base.py @@ -613,7 +613,6 @@ def 
get_abinit_input( if factory_kwargs: total_factory_kwargs.update(factory_kwargs) generated_input = self.factory(**total_factory_kwargs) - if input_index is not None: generated_input = generated_input[input_index] From 6cdbbbace4b81af209796201cff8aabffdeed255 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:14:07 +0530 Subject: [PATCH 19/21] Update base.py --- src/atomate2/abinit/sets/base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/atomate2/abinit/sets/base.py b/src/atomate2/abinit/sets/base.py index 6077f4b9e1..1d2c8ae21e 100644 --- a/src/atomate2/abinit/sets/base.py +++ b/src/atomate2/abinit/sets/base.py @@ -612,7 +612,9 @@ def get_abinit_input( total_factory_kwargs["pseudos"] = pseudos if factory_kwargs: total_factory_kwargs.update(factory_kwargs) + generated_input = self.factory(**total_factory_kwargs) + if input_index is not None: generated_input = generated_input[input_index] From e9ef7696a715090982eddab4d9c866c5e180451b Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:15:03 +0530 Subject: [PATCH 20/21] Update base.py --- src/atomate2/abinit/sets/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/atomate2/abinit/sets/base.py b/src/atomate2/abinit/sets/base.py index 1d2c8ae21e..a8f80da0e4 100644 --- a/src/atomate2/abinit/sets/base.py +++ b/src/atomate2/abinit/sets/base.py @@ -612,9 +612,9 @@ def get_abinit_input( total_factory_kwargs["pseudos"] = pseudos if factory_kwargs: total_factory_kwargs.update(factory_kwargs) - + generated_input = self.factory(**total_factory_kwargs) - + if input_index is not None: generated_input = generated_input[input_index] From b06dbe27f8fb424032dc6c679428ba18c81794b3 Mon Sep 17 00:00:00 2001 From: Tathagata Biswas Date: Sat, 12 Oct 2024 10:16:34 +0530 Subject: [PATCH 21/21] Update base.py --- src/atomate2/abinit/jobs/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/atomate2/abinit/jobs/base.py 
b/src/atomate2/abinit/jobs/base.py index a3b2853400..43c467a979 100644 --- a/src/atomate2/abinit/jobs/base.py +++ b/src/atomate2/abinit/jobs/base.py @@ -164,6 +164,7 @@ def make( history=history, wall_time=self.wall_time, ) + # Write abinit input set write_abinit_input_set( structure=structure,