Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
93 changes: 78 additions & 15 deletions src/nomad_simulation_parsers/parsers/abinit/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from nomad.parsing.file_parser import ArchiveWriter, DataTextParser
from nomad.parsing.file_parser.mapping_parser import MetainfoParser, TextParser
from nomad.parsing.parser import MatchingParser
from nomad.units import ureg
from nomad.utils import get_logger
from nomad_simulations.schema_packages.general import Program, Simulation
from nomad_simulations.schema_packages.workflow import (
Expand All @@ -24,6 +25,7 @@
GeometryOptimization,
GeometryOptimizationMethod,
)
from nomad_simulations.schema_packages.workflow.single_point import SinglePointMethod
from structlog.stdlib import BoundLogger

from nomad_simulation_parsers.schema_packages import abinit
Expand Down Expand Up @@ -460,7 +462,9 @@ class MainfileParser(TextParser):
def logger(self):
return LOGGER

def __init__(self):
    """Set up the parser with a per-instance text parser.

    The text parser is created here (instance attribute) rather than as a
    class attribute, so parser state is not shared between instances.
    """
    super().__init__()
    self.text_parser = AbinitOutParser()

def get_workflow_method(self) -> str:
ionmov = self.get_input_var('ionmov', 1, 0, scalar=True)
Expand Down Expand Up @@ -520,9 +524,18 @@ def get_energy_contributions(self, source: dict[str, Any]) -> list[dict[str, Any
def get_outputs(self) -> list[dict[str, Any]]:
    """Collect output dicts for every dataset.

    For each dataset the main results dict is emitted first, followed by
    one dict per relaxation step. Each emitted dict is a shallow copy of
    the parsed data, augmented with a 'scf_steps' entry when SCF
    iteration data is present.
    """
    outputs: list[dict[str, Any]] = []
    for dataset in self.data_object.get('dataset', []):
        # copy so the parsed data object is not mutated
        results = dict(dataset.get('results') or {})
        # SCF data for the main results lives on the dataset itself
        scf_steps = self.get_scf_steps(dataset)
        if scf_steps:
            results['scf_steps'] = scf_steps
        outputs.append(results)
        # relaxation steps carry their own SCF data
        for step in dataset.get('relaxation', []):
            step_output = dict(step)
            scf_steps = self.get_scf_steps(step)
            if scf_steps:
                step_output['scf_steps'] = scf_steps
            outputs.append(step_output)
    return outputs

def get_atoms(self) -> list[dict[str, Any]]:
Expand Down Expand Up @@ -601,14 +614,55 @@ def get_geometry_convergence(self):
),
]

def get_single_point_convergence(self) -> list[EnergyConvergenceTarget]:
    """Build convergence targets for a single-point run.

    Reads the 'toldfe' input variable of the first dataset; returns an
    empty list when it is not set.
    """
    tolerance = self.get_input_var('toldfe', n_dataset=1, default=None, scalar=True)
    if tolerance is None:
        return []
    target = EnergyConvergenceTarget(
        threshold=tolerance,
        threshold_type='absolute',
    )
    return [target]

def get_scf_steps(self, source: dict[str, Any]) -> dict[str, Any]:
    """Extract SCF iteration data from a parsed section.

    The first column of each iteration row is treated as the total energy
    (hartree), the second (when present) as the energy change; any further
    columns are collected under 1-based 'column_<i>' labels as
    code-specific quantities. Returns an empty dict when no iteration
    data exists.
    """
    iterations = source.get('self_consistent', {}).get(
        'energy_total_scf_iteration', []
    )
    if not iterations:
        return {}

    delta_column_min = 2  # at least two columns needed for an energy delta
    totals: list = []
    deltas: list = []
    extras: dict[str, list[float]] = {}

    for entry in iterations:
        values = np.asarray(entry, dtype=float).ravel()
        if values.size == 0:
            continue
        totals.append(values[0] * ureg.hartree)
        if values.size >= delta_column_min:
            deltas.append(abs(values[1]) * ureg.hartree)
        # extra columns beyond the delta, labelled column_3, column_4, ...
        for index, value in enumerate(values[2:], start=3):
            extras.setdefault(f'column_{index}', []).append(float(value))

    result: dict[str, Any] = {'energies_total': totals}
    if deltas:
        result['delta_energies_total'] = deltas
    if extras:
        result['code_specific_quantities'] = extras
    return result


class DosParser(TextParser):
# TODO temporary fix for structlog unable to propagate logger
@property
def logger(self):
    # Returns the shared LOGGER defined elsewhere in this module —
    # workaround noted above for structlog logger propagation.
    return LOGGER

def __init__(self):
    """Set up the DOS parser with a per-instance data text parser.

    Instance attribute (rather than a class attribute) so parser state is
    not shared between DosParser instances.
    """
    super().__init__()
    self.text_parser = DataTextParser()

def get_dos(self, source: np.ndarray) -> list[dict[str, Any]]:
nsp = self.data.get('nspinpol')
Expand All @@ -622,23 +676,33 @@ def get_dos(self, source: np.ndarray) -> list[dict[str, Any]]:


class AbinitArchiveWriter(ArchiveWriter):
# Writer identity used by the parsing framework.
code_name = 'ABINIT'
annotation_key = abinit.OUT_KEY

def __init__(self):
    """Create the writer with fresh per-instance sub-parsers.

    Instance attributes (rather than class attributes) keep parser state
    isolated between archive-writing runs.
    """
    super().__init__()
    self.mainfile_parser = MainfileParser()
    self.metainfo_parser = AbinitMetainfoParser()
    self.dos_parser = DosParser()

def parse_workflow(self):
    """Build and attach the workflow section from abinit input variables.

    'ionmov' selects the ionic-motion algorithm; 'vis' (viscosity)
    distinguishes relaxation (vis > 0) from dynamics when ionmov == 1.
    Convergence targets found in the input are attached to the workflow
    method before conversion.
    """
    ionmov = self.mainfile_parser.get_input_var('ionmov', 1, [0])[0]
    vis = self.mainfile_parser.get_input_var('vis', 1, [100.0])[0]
    if ionmov in [2, 3, 4, 5, 7, 10, 11, 20] or (ionmov == 1 and vis > 0.0):
        workflow = GeometryOptimization()
        workflow.method = GeometryOptimizationMethod()
        convergence = self.mainfile_parser.get_geometry_convergence()
        if convergence:
            workflow.method.convergence_targets = convergence
    elif ionmov in [6, 8, 9, 12, 13, 14, 23] or (ionmov == 1 and vis == 0.0):
        workflow = MolecularDynamics()
    else:
        workflow = SinglePoint()
        workflow.method = SinglePointMethod()
        convergence = self.mainfile_parser.get_single_point_convergence()
        if convergence:
            workflow.method.convergence_targets = convergence
    self.archive.workflow2 = workflow
    self.metainfo_parser.annotation_key = self.annotation_key
    self.metainfo_parser.data_object = self.archive.workflow2
    self.mainfile_parser.convert(self.metainfo_parser)
Expand Down Expand Up @@ -691,8 +755,6 @@ class AbinitParser(MatchingParser):
Main parser interface to NOMAD.
"""

archive_writer = AbinitArchiveWriter()

def is_mainfile(
self,
filename: str,
Expand Down Expand Up @@ -726,4 +788,5 @@ def parse(
logger: BoundLogger = None,
child_archives: dict[str, EntryArchive] = {},
):
self.archive_writer.write(mainfile, archive, logger, child_archives)
archive_writer = AbinitArchiveWriter()
archive_writer.write(mainfile, archive, logger, child_archives)
100 changes: 100 additions & 0 deletions src/nomad_simulation_parsers/parsers/ams/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,24 @@
from nomad.parsing.file_parser import ArchiveWriter
from nomad.parsing.file_parser.mapping_parser import MetainfoParser, TextParser
from nomad.parsing.parser import MatchingParser
from nomad.units import ureg
from nomad.utils import get_logger
from nomad_simulations.schema_packages.general import Program, Simulation
from nomad_simulations.schema_packages.workflow.general import (
EnergyConvergenceTarget,
ForceConvergenceTarget,
)
from nomad_simulations.schema_packages.workflow.geometry_optimization import (
GeometryOptimization,
GeometryOptimizationMethod,
)
from nomad_simulations.schema_packages.workflow.molecular_dynamics import (
MolecularDynamics,
)
from nomad_simulations.schema_packages.workflow.single_point import (
SinglePoint,
SinglePointMethod,
)
from structlog.stdlib import BoundLogger

from nomad_simulation_parsers.parsers.utils.general import search_files
Expand Down Expand Up @@ -56,6 +72,89 @@ def get_eigenvalues(
eigenvalues[n]['occupations'] = occupations
return [eig for eig in eigenvalues if eig]

def get_scf_steps(self, source: dict[str, Any]) -> dict[str, Any]:
    """Extract SCF convergence data from a parsed AMS section.

    Returns a dict with 'delta_energies_total' (absolute per-iteration
    energy changes) and, when SCF options are available, a
    'code_specific_quantities' entry; an empty dict when no SCF data
    exists.
    """
    energy_change = source.get('self_consistency', {}).get('energy_change')
    if energy_change is None:
        return {}

    steps: dict[str, Any] = {
        'delta_energies_total': [abs(change) for change in energy_change]
    }

    options = source.get('scf_options')
    # duck-typed: options may be a dict-like section or absent entirely
    if hasattr(options, 'get'):
        specifics: dict[str, Any] = {}
        max_cycles = options.get('x_ams_ncyclx')
        if max_cycles is not None:
            specifics['n_scf_steps_max'] = int(max_cycles)
        tolerance = options.get('x_ams_convrg')
        if tolerance is not None:
            specifics['convrg'] = float(tolerance)
        if specifics:
            steps['code_specific_quantities'] = specifics

    return steps

def _get_scf_energy_threshold(self, source: dict[str, Any]):
scf_options = source.get('scf_options')
if not hasattr(scf_options, 'get'):
return None
convrg = scf_options.get('x_ams_convrg')
if convrg is None:
return None
return float(convrg) * ureg.hartree

def build_workflow(self, source: dict[str, Any]):
    """Construct the workflow section matching the parsed AMS task.

    Geometry optimization and molecular dynamics are detected from the
    presence of their sections; anything else is treated as a single
    point. Convergence thresholds found in the parsed data are attached
    to the workflow method.
    """

    def _energy_target(threshold):
        # absolute energy convergence target shared by all branches
        return EnergyConvergenceTarget(
            threshold=threshold,
            threshold_type='absolute',
        )

    geometry = source.get('geometry_optimization')
    if geometry is not None:
        workflow = GeometryOptimization()
        workflow.method = GeometryOptimizationMethod()
        targets = []
        force_threshold = geometry.get('convergence_tolerance_force_maximum')
        if force_threshold is not None:
            targets.append(
                ForceConvergenceTarget(
                    threshold=force_threshold,
                    threshold_type='maximum',
                )
            )
        energy_threshold = geometry.get('convergence_tolerance_energy_difference')
        if energy_threshold is not None:
            targets.append(_energy_target(energy_threshold))
        if targets:
            workflow.method.convergence_targets = targets

        scf_threshold = self._get_scf_energy_threshold(geometry)
        if scf_threshold is not None:
            workflow.method.single_point_convergence_targets = [
                _energy_target(scf_threshold)
            ]
        return workflow

    if source.get('molecular_dynamics') is not None:
        return MolecularDynamics()

    workflow = SinglePoint()
    workflow.method = SinglePointMethod()
    # fall back to the whole source when no dedicated single-point section
    single_point = source.get('single_point', source)
    scf_threshold = self._get_scf_energy_threshold(single_point)
    if scf_threshold is not None:
        workflow.method.convergence_targets = [_energy_target(scf_threshold)]
    return workflow


class RKFParser(MainfileParser):
# TODO temporary fix for structlog unable to propagate logger
Expand Down Expand Up @@ -92,6 +191,7 @@ def write_to_archive(self):
self.parser.data_object.parse()

self.parser.convert(self.metainfo_parser)
self.archive.workflow2 = self.parser.build_workflow(self.parser.data)


class AMSParser(MatchingParser):
Expand Down
Loading
Loading