Coverage for /builds/ase/ase/ase/io/aims.py: 93.06%
807 statements
1# fmt: off
3"""Defines class/functions to write input and parse output for FHI-aims."""
4import os
5import re
6import time
7import warnings
8from functools import cached_property
9from pathlib import Path
10from typing import Any, Dict, List, Union
12import numpy as np
14from ase import Atom, Atoms
15from ase.calculators.calculator import kpts2mp
16from ase.calculators.singlepoint import SinglePointDFTCalculator
17from ase.constraints import FixAtoms, FixCartesian
18from ase.data import atomic_numbers
19from ase.io import ParseError
20from ase.units import Ang, fs
21from ase.utils import deprecated, reader, writer
23v_unit = Ang / (1000.0 * fs)
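# Editorial note: geometry.in velocities are given in Angstrom/ps (inferred
# from the Ang / (1000 * fs) definition above), so multiplying by v_unit
# converts them to ASE's internal velocity unit, and dividing converts back
# when writing.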
25LINE_NOT_FOUND = object()
28class AimsParseError(Exception):
29 """Exception raised if an error occurs when parsing an Aims output file"""
31 def __init__(self, message):
32 self.message = message
33 super().__init__(self.message)
36# Read aims geometry files
37@reader
38def read_aims(fd, apply_constraints=True):
39 """Import FHI-aims geometry type files.
41 Reads unit cell, atom positions and constraints from
42 a geometry.in file.
44 If geometric constraints (symmetry parameters) are present in the file,
45 that information is included in atoms.info["symmetry_block"]
46 """
48 lines = fd.readlines()
49 return parse_geometry_lines(lines, apply_constraints=apply_constraints)
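# A minimal usage sketch (the file name "geometry.in" is illustrative); the
# same parser is also reachable through the generic ase.io interface:
#
#     from ase.io.aims import read_aims
#     atoms = read_aims("geometry.in")
#
#     from ase.io import read
#     atoms = read("geometry.in", format="aims")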
52def parse_geometry_lines(lines, apply_constraints=True):
54 from ase import Atoms
55 from ase.constraints import (
56 FixAtoms,
57 FixCartesian,
58 FixCartesianParametricRelations,
59 FixScaledParametricRelations,
60 )
62 atoms = Atoms()
64 positions = []
65 cell = []
66 symbols = []
67 velocities = []
68 magmoms = []
69 symmetry_block = []
70 charges = []
71 fix = []
72 fix_cart = []
73 xyz = np.array([0, 0, 0])
74 i = -1
75 n_periodic = -1
76 periodic = np.array([False, False, False])
77 cart_positions, scaled_positions = False, False
78 for line in lines:
79 inp = line.split()
80 if inp == []:
81 continue
82 if inp[0] in ["atom", "atom_frac"]:
84 if inp[0] == "atom":
85 cart_positions = True
86 else:
87 scaled_positions = True
89 if xyz.all():
90 fix.append(i)
91 elif xyz.any():
92 fix_cart.append(FixCartesian(i, xyz))
93 floatvect = float(inp[1]), float(inp[2]), float(inp[3])
94 positions.append(floatvect)
95 symbols.append(inp[4])
96 magmoms.append(0.0)
97 charges.append(0.0)
98 xyz = np.array([0, 0, 0])
99 i += 1
101 elif inp[0] == "lattice_vector":
102 floatvect = float(inp[1]), float(inp[2]), float(inp[3])
103 cell.append(floatvect)
104 n_periodic = n_periodic + 1
105 periodic[n_periodic] = True
107 elif inp[0] == "initial_moment":
108 magmoms[-1] = float(inp[1])
110 elif inp[0] == "initial_charge":
111 charges[-1] = float(inp[1])
113 elif inp[0] == "constrain_relaxation":
114 if inp[1] == ".true.":
115 fix.append(i)
116 elif inp[1] == "x":
117 xyz[0] = 1
118 elif inp[1] == "y":
119 xyz[1] = 1
120 elif inp[1] == "z":
121 xyz[2] = 1
123 elif inp[0] == "velocity":
124 floatvect = [v_unit * float(line) for line in inp[1:4]]
125 velocities.append(floatvect)
127 elif inp[0] in [
128 "symmetry_n_params",
129 "symmetry_params",
130 "symmetry_lv",
131 "symmetry_frac",
132 ]:
133 symmetry_block.append(" ".join(inp))
135 if xyz.all():
136 fix.append(i)
137 elif xyz.any():
138 fix_cart.append(FixCartesian(i, xyz))
140 if cart_positions and scaled_positions:
141 raise Exception(
142 "Can't specify atom positions with mixture of "
143 "Cartesian and fractional coordinates"
144 )
145 elif scaled_positions and periodic.any():
146 atoms = Atoms(
147 symbols,
148 scaled_positions=positions,
149 cell=cell,
150 pbc=periodic)
151 else:
152 atoms = Atoms(symbols, positions)
154 if len(velocities) > 0:
155 if len(velocities) != len(positions):
156 raise Exception(
157 "Number of positions and velocities have to coincide.")
158 atoms.set_velocities(velocities)
160 fix_params = []
162 if len(symmetry_block) > 5:
163 params = symmetry_block[1].split()[1:]
165 lattice_expressions = []
166 lattice_params = []
168 atomic_expressions = []
169 atomic_params = []
171 n_lat_param = int(symmetry_block[0].split(" ")[2])
173 lattice_params = params[:n_lat_param]
174 atomic_params = params[n_lat_param:]
176 for ll, line in enumerate(symmetry_block[2:]):
177 expression = " ".join(line.split(" ")[1:])
178 if ll < 3:
179 lattice_expressions += expression.split(",")
180 else:
181 atomic_expressions += expression.split(",")
183 fix_params.append(
184 FixCartesianParametricRelations.from_expressions(
185 list(range(3)),
186 lattice_params,
187 lattice_expressions,
188 use_cell=True,
189 )
190 )
192 fix_params.append(
193 FixScaledParametricRelations.from_expressions(
194 list(range(len(atoms))), atomic_params, atomic_expressions
195 )
196 )
198 if any(magmoms):
199 atoms.set_initial_magnetic_moments(magmoms)
200 if any(charges):
201 atoms.set_initial_charges(charges)
203 if periodic.any():
204 atoms.set_cell(cell)
205 atoms.set_pbc(periodic)
206 if len(fix):
207 atoms.set_constraint([FixAtoms(indices=fix)] + fix_cart + fix_params)
208 else:
209 atoms.set_constraint(fix_cart + fix_params)
211 if fix_params and apply_constraints:
212 atoms.set_positions(atoms.get_positions())
213 return atoms
216def get_aims_header():
217 """Returns the header for aims input files"""
218 lines = ["#" + "=" * 79]
219 for line in [
220 "Created using the Atomic Simulation Environment (ASE)",
221 time.asctime(),
222 ]:
223 lines.append("# " + line + "\n")
224 return lines
227def _write_velocities_alias(args: List, kwargs: Dict[str, Any]) -> bool:
228 arg_position = 5
229 if len(args) > arg_position and args[arg_position]:
230 args[arg_position - 1] = True
231 elif kwargs.get("velocities", False):
232 if len(args) < arg_position:
233 kwargs["write_velocities"] = True
234 else:
235 args[arg_position - 1] = True
236 else:
237 return False
238 return True
241# Write aims geometry files
242@deprecated(
243 "Use of `velocities` is deprecated, please use `write_velocities`",
244 category=FutureWarning,
245 callback=_write_velocities_alias,
246)
247@writer
248def write_aims(
249 fd,
250 atoms,
251 scaled=False,
252 geo_constrain=False,
253 write_velocities=False,
254 velocities=False,
255 ghosts=None,
256 info_str=None,
257 wrap=False,
258):
259 """Method to write FHI-aims geometry files.
261 Writes the atomic positions and constraints (FixAtoms and
262 FixCartesian are supported).
264 Args:
265 fd: file object
266 File to output structure to
267 atoms: ase.atoms.Atoms
268 structure to output to the file
269 scaled: bool
270 If True use fractional coordinates instead of Cartesian coordinates
271 geo_constrain: bool
272 If True write the parametric (symmetry) constraints as defined in:
273 :arxiv:`1908.01610`
274 write_velocities: bool
275 If True add the atomic velocity vectors to the file
276 velocities: bool
277 NOT AN ARRAY OF VELOCITIES, but the legacy version of
278 `write_velocities`
279 ghosts: list of Atoms
280 A list of ghost atoms for the system
281 info_str: str
282 A string to be added to the header of the file
283 wrap: bool
284 Wrap atom positions to cell before writing
286 .. deprecated:: 3.23.0
287 Use of ``velocities`` is deprecated, please use ``write_velocities``.
288 """
290 if scaled and not np.all(atoms.pbc):
291 raise ValueError(
292 "Requesting scaled for a calculation where scaled=True, but "
293 "the system is not periodic")
295 if geo_constrain:
296 if not scaled and np.all(atoms.pbc):
297 warnings.warn(
298 "Setting scaled to True because a symmetry_block is detected."
299 )
300 scaled = True
301 elif not np.all(atoms.pbc):
302 warnings.warn(
303 "Parameteric constraints can only be used in periodic systems."
304 )
305 geo_constrain = False
307 for line in get_aims_header():
308 fd.write(line + "\n")
310 # If writing additional information is requested via info_str:
311 if info_str is not None:
312 fd.write("\n# Additional information:\n")
313 if isinstance(info_str, list):
314 fd.write("\n".join([f"# {s}" for s in info_str]))
315 else:
316 fd.write(f"# {info_str}")
317 fd.write("\n")
319 fd.write("#=======================================================\n")
321 i = 0
322 if atoms.get_pbc().any():
323 for n, vector in enumerate(atoms.get_cell()):
324 fd.write("lattice_vector ")
325 for i in range(3):
326 fd.write(f"{vector[i]:16.16f} ")
327 fd.write("\n")
329 fix_cart = np.zeros((len(atoms), 3), dtype=bool)
330 for constr in atoms.constraints:
331 if isinstance(constr, FixAtoms):
332 fix_cart[constr.index] = (True, True, True)
333 elif isinstance(constr, FixCartesian):
334 fix_cart[constr.index] = constr.mask
336 if ghosts is None:
337 ghosts = np.zeros(len(atoms))
338 else:
339 assert len(ghosts) == len(atoms)
341 wrap = wrap and not geo_constrain
342 scaled_positions = atoms.get_scaled_positions(wrap=wrap)
344 for i, atom in enumerate(atoms):
345 if ghosts[i] == 1:
346 atomstring = "empty "
347 elif scaled:
348 atomstring = "atom_frac "
349 else:
350 atomstring = "atom "
351 fd.write(atomstring)
352 if scaled:
353 for pos in scaled_positions[i]:
354 fd.write(f"{pos:16.16f} ")
355 else:
356 for pos in atom.position:
357 fd.write(f"{pos:16.16f} ")
358 fd.write(atom.symbol)
359 fd.write("\n")
360 # (1) all coords are constrained:
361 if fix_cart[i].all():
362 fd.write(" constrain_relaxation .true.\n")
363 # (2) some coords are constrained:
364 elif fix_cart[i].any():
365 xyz = fix_cart[i]
366 for n in range(3):
367 if xyz[n]:
368 fd.write(f" constrain_relaxation {'xyz'[n]}\n")
369 if atom.charge:
370 fd.write(f" initial_charge {atom.charge:16.6f}\n")
371 if atom.magmom:
372 fd.write(f" initial_moment {atom.magmom:16.6f}\n")
374 if write_velocities and atoms.get_velocities() is not None:
375 v = atoms.get_velocities()[i] / v_unit
376 fd.write(f" velocity {v[0]:.16f} {v[1]:.16f} {v[2]:.16f}\n")
378 if geo_constrain:
379 for line in get_sym_block(atoms):
380 fd.write(line)
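# A minimal usage sketch for write_aims (file name illustrative); the @writer
# decorator lets fd be either a file name or an open file handle:
#
#     from ase.build import bulk
#     from ase.io.aims import write_aims
#
#     atoms = bulk("Si")
#     write_aims("geometry.in", atoms, scaled=True)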
383def get_sym_block(atoms):
384 """Get symmetry block for Parametric constraints in atoms.constraints"""
385 from ase.constraints import (
386 FixCartesianParametricRelations,
387 FixScaledParametricRelations,
388 )
390 # Initialize param/expressions lists
391 atomic_sym_params = []
392 lv_sym_params = []
393 atomic_param_constr = np.zeros((len(atoms),), dtype="<U100")
394 lv_param_constr = np.zeros((3,), dtype="<U100")
396 # Populate param/expressions list
397 for constr in atoms.constraints:
398 if isinstance(constr, FixScaledParametricRelations):
399 atomic_sym_params += constr.params
401 if np.any(atomic_param_constr[constr.indices] != ""):
402 warnings.warn(
403 "multiple parametric constraints defined for the same "
404 "atom, using the last one defined"
405 )
407 atomic_param_constr[constr.indices] = [
408 ", ".join(expression) for expression in constr.expressions
409 ]
410 elif isinstance(constr, FixCartesianParametricRelations):
411 lv_sym_params += constr.params
413 if np.any(lv_param_constr[constr.indices] != ""):
414 warnings.warn(
415 "multiple parametric constraints defined for the same "
416 "lattice vector, using the last one defined"
417 )
419 lv_param_constr[constr.indices] = [
420 ", ".join(expression) for expression in constr.expressions
421 ]
423 if np.all(atomic_param_constr == "") and np.all(lv_param_constr == ""):
424 return []
426 # Check Constraint Parameters
427 if len(atomic_sym_params) != len(np.unique(atomic_sym_params)):
428 warnings.warn(
429 "Some parameters were used across constraints, they will be "
430 "combined in the aims calculations"
431 )
432 atomic_sym_params = np.unique(atomic_sym_params)
434 if len(lv_sym_params) != len(np.unique(lv_sym_params)):
435 warnings.warn(
436 "Some parameters were used across constraints, they will be "
437 "combined in the aims calculations"
438 )
439 lv_sym_params = np.unique(lv_sym_params)
441 if np.any(atomic_param_constr == ""):
442 raise OSError(
443 "FHI-aims input files require all atoms have defined parametric "
444 "constraints"
445 )
447 cell_inds = np.where(lv_param_constr == "")[0]
448 for ind in cell_inds:
449 lv_param_constr[ind] = "{:.16f}, {:.16f}, {:.16f}".format(
450 *atoms.cell[ind])
452 n_atomic_params = len(atomic_sym_params)
453 n_lv_params = len(lv_sym_params)
454 n_total_params = n_atomic_params + n_lv_params
456 sym_block = []
457 if n_total_params > 0:
458 sym_block.append("#" + "=" * 55 + "\n")
459 sym_block.append("# Parametric constraints\n")
460 sym_block.append("#" + "=" * 55 + "\n")
461 sym_block.append(
462 "symmetry_n_params {:d} {:d} {:d}\n".format(
463 n_total_params, n_lv_params, n_atomic_params
464 )
465 )
466 sym_block.append(
467 "symmetry_params %s\n" % " ".join(lv_sym_params + atomic_sym_params)
468 )
470 for constr in lv_param_constr:
471 sym_block.append(f"symmetry_lv {constr:s}\n")
473 for constr in atomic_param_constr:
474 sym_block.append(f"symmetry_frac {constr:s}\n")
475 return sym_block
478def format_aims_control_parameter(key, value, format="%s"):
479 """Format a line for the aims control.in
481 Parameters
482 ----------
483 key: str
484 Name of the parameter to format
485 value: Object
486 The value to pass to the parameter
487 format: str
488 The format string used to render the value
490 Returns
491 -------
492 str
493 The properly formatted line for the aims control.in
494 """
495 return f"{key:35s}" + (format % value) + "\n"
498# Write aims control.in files
499@writer
500def write_control(fd, atoms, parameters, verbose_header=False):
501 """Write the control.in file for FHI-aims
502 Parameters
503 ----------
504 fd: file object
505 The file object to write to
506 atoms: atoms.Atoms
507 The Atoms object for the requested calculation
508 parameters: dict
509 The dictionary of all parameters for the calculation
510 verbose_header: bool
511 If True then explicitly list the parameters used to generate the
512 control.in file inside the header
513 """
515 parameters = dict(parameters)
516 lim = "#" + "=" * 79
518 if parameters["xc"] == "LDA":
519 parameters["xc"] = "pw-lda"
521 cubes = parameters.pop("cubes", None)
523 for line in get_aims_header():
524 fd.write(line + "\n")
526 if verbose_header:
527 fd.write("# \n# List of parameters used to initialize the calculator:")
528 for p, v in parameters.items():
529 s = f"# {p}:{v}\n"
530 fd.write(s)
531 fd.write(lim + "\n")
533 assert "kpts" not in parameters or "k_grid" not in parameters
534 assert "smearing" not in parameters or "occupation_type" not in parameters
536 for key, value in parameters.items():
537 if key == "kpts":
538 mp = kpts2mp(atoms, parameters["kpts"])
539 dk = 0.5 - 0.5 / np.array(mp)
540 fd.write(
541 format_aims_control_parameter(
542 "k_grid",
543 tuple(mp),
544 "%d %d %d"))
545 fd.write(
546 format_aims_control_parameter(
547 "k_offset",
548 tuple(dk),
549 "%f %f %f"))
550 elif key in ("species_dir", "tier"):
551 continue
552 elif key == "aims_command":
553 continue
554 elif key == "plus_u":
555 continue
556 elif key == "smearing":
557 name = parameters["smearing"][0].lower()
558 if name == "fermi-dirac":
559 name = "fermi"
560 width = parameters["smearing"][1]
561 if name == "methfessel-paxton":
562 order = parameters["smearing"][2]
563 order = " %d" % order
564 else:
565 order = ""
567 fd.write(
568 format_aims_control_parameter(
569 "occupation_type", (name, width, order), "%s %f%s"
570 )
571 )
572 elif key == "output":
573 for output_type in value:
574 fd.write(format_aims_control_parameter(key, output_type, "%s"))
575 elif key == "vdw_correction_hirshfeld" and value:
576 fd.write(format_aims_control_parameter(key, "", "%s"))
577 elif isinstance(value, bool):
578 fd.write(
579 format_aims_control_parameter(
580 key, str(value).lower(), ".%s."))
581 elif isinstance(value, (tuple, list)):
582 fd.write(
583 format_aims_control_parameter(
584 key, " ".join([str(x) for x in value]), "%s"
585 )
586 )
587 elif isinstance(value, str):
588 fd.write(format_aims_control_parameter(key, value, "%s"))
589 else:
590 fd.write(format_aims_control_parameter(key, value, "%r"))
592 if cubes:
593 cubes.write(fd)
595 fd.write(lim + "\n\n")
599 # dicts are ordered as of python 3.7
600 species_array = np.array(list(dict.fromkeys(atoms.symbols)))
601 # Grab the tier specification from the parameters. This may either
602 # be None, meaning the default should be used for all species, or a
603 # list of integers/None values giving a specific basis set size
604 # for each species in the calculation.
605 tier = parameters.pop("tier", None)
606 tier_array = np.full(len(species_array), tier)
607 # Path to species files for FHI-aims. These files contain specifications
608 # for the basis set sizes depending on which basis set tier is used.
609 species_dir = get_species_directory(parameters.get("species_dir"))
610 # Parse the species files for each species present in the calculation
611 # according to the tier of each species.
612 species_basis_dict = parse_species_path(
613 species_array=species_array, tier_array=tier_array,
614 species_dir=species_dir)
615 # Write the basis functions to be included for each species in the
616 # calculation into the control.in file (fd).
617 write_species(fd, species_basis_dict, parameters)
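# A minimal usage sketch (the species_dir path is hypothetical and must point
# to a real FHI-aims species_defaults folder containing the Si species file):
#
#     from ase.build import bulk
#     from ase.io.aims import write_control
#
#     atoms = bulk("Si")
#     parameters = {
#         "xc": "pbe",
#         "kpts": (4, 4, 4),
#         "species_dir": "/path/to/species_defaults/defaults_2020/light",
#     }
#     write_control("control.in", atoms, parameters)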
620def get_species_directory(species_dir=None):
621 """Get the directory where the basis set information is stored
623 If the requested directory does not exist then raise an Error
625 Parameters
626 ----------
627 species_dir: str
628 Requested directory to find the basis set info from. E.g.
629 `~/aims2022/FHIaims/species_defaults/defaults_2020/light`.
631 Returns
632 -------
633 Path
634 The Path to the requested or default species directory.
636 Raises
637 ------
638 RuntimeError
639 If neither the requested directory nor the default one is defined,
640 or the directory does not exist.
641 """
642 if species_dir is None:
643 species_dir = os.environ.get("AIMS_SPECIES_DIR")
645 if species_dir is None:
646 raise RuntimeError(
647 "Missing species directory! Use species_dir "
648 + "parameter or set $AIMS_SPECIES_DIR environment variable."
649 )
651 species_path = Path(species_dir)
652 if not species_path.exists():
653 raise RuntimeError(
654 f"The requested species_dir {species_dir} does not exist")
656 return species_path
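# Usage sketch (paths are placeholders): either pass the directory explicitly
# or rely on the AIMS_SPECIES_DIR environment variable:
#
#     species_path = get_species_directory(
#         "/opt/FHIaims/species_defaults/defaults_2020/light")
#
#     import os
#     os.environ["AIMS_SPECIES_DIR"] = (
#         "/opt/FHIaims/species_defaults/defaults_2020/light")
#     species_path = get_species_directory()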
659def write_species(control_file_descriptor, species_basis_dict, parameters):
660 """Write species for the calculation depending on basis set size.
662 The calculation should include certain basis functions depending
663 on the numerical settings (light, tight, really tight) and the basis set
664 size (minimal, tier1, tier2, tier3, tier4). If the basis set size is not
665 given then a 'standard' basis set size is used for each numerical setting.
666 The species files are defined according to these standard basis set sizes
667 for the numerical settings in the FHI-aims repository.
669 Note, for FHI-aims in ASE, we don't explicitly give the numerical setting.
670 Instead we include the numerical setting in the species path: e.g.
671 `~/aims2022/FHIaims/species_defaults/defaults_2020/light` this path has
672 `light`, the numerical setting, as the last folder in the path.
674 Example - a basis function might be commented out in the standard basis
675 set size, such as "# hydro 4 f 7.4", and this basis function should be
676 uncommented for another basis set size such as tier4.
678 Args:
679 control_file_descriptor: File descriptor for the control.in file into
680 which we need to write relevant basis functions to be included for
681 the calculation.
682 species_basis_dict: Dictionary where keys are the species symbols and
683 each value is a single string containing all the basis functions
684 to be included in the calculation.
685 parameters: Calculation parameters as a dict.
686 """
687 # Now for every species (key) in the species_basis_dict, save the
688 # relevant basis functions (values) from the species_basis_dict, by
689 # writing to the file handle (species_file_descriptor) given to this
690 # function.
691 for species_symbol, basis_set_text in species_basis_dict.items():
692 control_file_descriptor.write(basis_set_text)
693 if parameters.get("plus_u") is not None:
694 if species_symbol in parameters["plus_u"]:
695 control_file_descriptor.write(
696 f"plus_u {parameters['plus_u'][species_symbol]} \n")
699def parse_species_path(species_array, tier_array, species_dir):
700 """Parse the species files for each species according to the tier given.
702 Args:
703 species_array: An array of species/element symbols present in the unit
704 cell (e.g. ['C', 'H'].)
705 tier_array: An array of None/integer values which define which basis
706 set size to use for each species/element in the calculation.
707 species_dir: Directory containing FHI-aims species files.
709 Returns:
710 Dictionary containing species as keys and the basis set specification
711 for each species as text as the value for the key.
712 """
713 if len(species_array) != len(tier_array):
714 raise ValueError(
715 f"The species array length: {len(species_array)}, "
716 f"is not the same as the tier_array length: {len(tier_array)}")
718 species_basis_dict = {}
720 for symbol, tier in zip(species_array, tier_array):
721 path = species_dir / f"{atomic_numbers[symbol]:02}_{symbol}_default"
722 # Open the species file:
723 with open(path, encoding="utf8") as species_file_handle:
724 # Read the species file into a string.
725 species_file_str = species_file_handle.read()
726 species_basis_dict[symbol] = manipulate_tiers(
727 species_file_str, tier)
728 return species_basis_dict
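# Usage sketch (species_dir is a placeholder; it must contain the files
# 01_H_default and 06_C_default for this example):
#
#     basis = parse_species_path(
#         species_array=np.array(["C", "H"]),
#         tier_array=np.full(2, 1),
#         species_dir=Path("/path/to/species_defaults/defaults_2020/light"),
#     )
#     # basis["C"] and basis["H"] now hold the tier-1 basis definitions as text.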
731def manipulate_tiers(species_string: str, tier: Union[None, int] = 1):
732 """Adds basis set functions based on the tier value.
734 This function takes in the species file as a string, it then searches
735 for relevant basis functions based on the tier value to include in a new
736 string that is returned.
738 Args:
739 species_string: species file (default) for a given numerical setting
740 (light, tight, really tight) given as a string.
741 tier: The basis set size. This will dictate which basis set functions
742 are included in the returned string.
744 Returns:
745 Basis set functions defined by the tier as a string.
746 """
747 if tier is None: # Then we use the default species file untouched.
748 return species_string
749 tier_pattern = r"(# \".* tier\" .*|# +Further.*)"
750 top, *tiers = re.split(tier_pattern, species_string)
751 tier_comments = tiers[::2]
752 tier_basis = tiers[1::2]
753 assert len(
754 tier_comments) == len(tier_basis), "Something wrong with splitting"
755 n_tiers = len(tier_comments)
756 assert tier <= n_tiers, f"Only {n_tiers} tiers available, you chose {tier}"
757 string_new = top
758 for i, (c, b) in enumerate(zip(tier_comments, tier_basis)):
759 b = re.sub(r"\n( *for_aux| *hydro| *ionic| *confined)", r"\n#\g<1>", b)
760 if i < tier:
761 b = re.sub(
762 r"\n#( *for_aux| *hydro| *ionic| *confined)", r"\n\g<1>", b)
763 string_new += c + b
764 return string_new
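# Illustrative sketch of the tier handling (the species text is a toy
# fragment, not a real FHI-aims species file):
#
#     toy = (
#         "species        C\n"
#         '# "First tier" - improvements\n'
#         "#     hydro 2 p 1.7\n"
#         '# "Second tier" - improvements\n'
#         "#     hydro 4 f 9.8\n"
#     )
#     print(manipulate_tiers(toy, tier=1))
#     # The first-tier hydro function is uncommented; the second-tier function
#     # remains commented out.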
767# Read aims.out files
768scalar_property_to_line_key = {
769 "free_energy": ["| Electronic free energy"],
770 "number_of_iterations": ["| Number of self-consistency cycles"],
771 "magnetic_moment": ["N_up - N_down"],
772 "n_atoms": ["| Number of atoms"],
773 "n_bands": [
774 "Number of Kohn-Sham states",
775 "Reducing total number of Kohn-Sham states",
776 "Reducing total number of Kohn-Sham states",
777 ],
778 "n_electrons": ["The structure contains"],
779 "n_kpts": ["| Number of k-points"],
780 "n_spins": ["| Number of spin channels"],
781 "electronic_temp": ["Occupation type:"],
782 "fermi_energy": ["| Chemical potential (Fermi level)"],
783}
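# This mapping drives AimsOutChunk.parse_scalar() below: for a given property
# name the parser finds the last output line containing one of the key strings
# and reads the first number after the final ":".  Sketch, with a hand-written
# example line and an AimsOutChunk instance called chunk:
#
#     # "| Number of atoms                   :        2"
#     chunk.parse_scalar("n_atoms")   # -> 2.0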
786class AimsOutChunk:
787 """Base class for AimsOutChunks"""
789 def __init__(self, lines):
790 """Constructor
792 Parameters
793 ----------
794 lines: list of str
795 The set of lines from the output file that encompasses either
796 a single structure within a trajectory or
797 general information about the calculation (header)
798 """
799 self.lines = lines
801 def reverse_search_for(self, keys, line_start=0):
802 """Find the last time one of the keys appears in self.lines
804 Parameters
805 ----------
806 keys: list of str
807 The key strings to search for in self.lines
808 line_start: int
809 The lowest index to search for in self.lines
811 Returns
812 -------
813 int
814 The index of the last line where one of the keys appears
815 """
816 for ll, line in enumerate(self.lines[line_start:][::-1]):
817 if any(key in line for key in keys):
818 return len(self.lines) - ll - 1
820 return LINE_NOT_FOUND
822 def search_for_all(self, key, line_start=0, line_end=-1):
823 """Find the all times the key appears in self.lines
825 Parameters
826 ----------
827 key: str
828 The key string to search for in self.lines
829 line_start: int
830 The first line to start the search from
831 line_end: int
832 The last line to end the search at
834 Returns
835 -------
836 list of ints
837 Indices of all lines in which the key appears
838 """
839 line_index = []
840 for ll, line in enumerate(self.lines[line_start:line_end]):
841 if key in line:
842 line_index.append(ll + line_start)
843 return line_index
845 def parse_scalar(self, property):
846 """Parse a scalar property from the chunk
848 Parameters
849 ----------
850 property: str
851 The property key to parse
853 Returns
854 -------
855 float
856 The scalar value of the property
857 """
858 line_start = self.reverse_search_for(
859 scalar_property_to_line_key[property])
861 if line_start == LINE_NOT_FOUND:
862 return None
864 line = self.lines[line_start]
865 return float(line.split(":")[-1].strip().split()[0])
868class AimsOutHeaderChunk(AimsOutChunk):
869 """The header of the aims.out file containint general information"""
871 def __init__(self, lines):
872 """Constructor
874 Parameters
875 ----------
876 lines: list of str
877 The lines inside the aims.out header
878 """
879 super().__init__(lines)
881 @cached_property
882 def constraints(self):
883 """Parse the constraints from the aims.out file
885 Constraints for the lattice vectors are not supported.
886 """
888 line_inds = self.search_for_all("Found relaxation constraint for atom")
889 if len(line_inds) == 0:
890 return []
892 fix = []
893 fix_cart = []
894 for ll in line_inds:
895 line = self.lines[ll]
896 xyz = [0, 0, 0]
897 ind = int(line.split()[5][:-1]) - 1
898 if "All coordinates fixed" in line:
899 if ind not in fix:
900 fix.append(ind)
901 if "coordinate fixed" in line:
902 coord = line.split()[6]
903 if coord == "x":
904 xyz[0] = 1
905 elif coord == "y":
906 xyz[1] = 1
907 elif coord == "z":
908 xyz[2] = 1
909 keep = True
910 for n, c in enumerate(fix_cart):
911 if ind == c.index:
912 keep = False
913 break
914 if keep:
915 fix_cart.append(FixCartesian(ind, xyz))
916 else:
917 fix_cart[n].mask[xyz.index(1)] = 1
918 if len(fix) > 0:
919 fix_cart.append(FixAtoms(indices=fix))
921 return fix_cart
923 @cached_property
924 def initial_cell(self):
925 """Parse the initial cell from the aims.out file"""
926 line_start = self.reverse_search_for(["| Unit cell:"])
927 if line_start == LINE_NOT_FOUND:
928 return None
930 return [
931 [float(inp) for inp in line.split()[-3:]]
932 for line in self.lines[line_start + 1:line_start + 4]
933 ]
935 @cached_property
936 def initial_atoms(self):
937 """Create an atoms object for the initial geometry.in structure
938 from the aims.out file"""
939 line_start = self.reverse_search_for(["Atomic structure:"])
940 if line_start == LINE_NOT_FOUND:
941 raise AimsParseError(
942 "No information about the structure in the chunk.")
944 line_start += 2
946 cell = self.initial_cell
947 positions = np.zeros((self.n_atoms, 3))
948 symbols = [""] * self.n_atoms
949 for ll, line in enumerate(
950 self.lines[line_start:line_start + self.n_atoms]):
951 inp = line.split()
952 positions[ll, :] = [float(pos) for pos in inp[4:7]]
953 symbols[ll] = inp[3]
955 atoms = Atoms(symbols=symbols, positions=positions)
957 if cell:
958 atoms.set_cell(cell)
959 atoms.set_pbc([True, True, True])
960 atoms.set_constraint(self.constraints)
962 return atoms
964 @cached_property
965 def is_md(self):
966 """Determine if calculation is a molecular dynamics calculation"""
967 return LINE_NOT_FOUND != self.reverse_search_for(
968 ["Complete information for previous time-step:"]
969 )
971 @cached_property
972 def is_relaxation(self):
973 """Determine if the calculation is a geometry optimization or not"""
974 return LINE_NOT_FOUND != self.reverse_search_for(
975 ["Geometry relaxation:"])
977 @cached_property
978 def _k_points(self):
979 """Get the list of k-points used in the calculation"""
980 n_kpts = self.parse_scalar("n_kpts")
981 if n_kpts is None:
982 return {
983 "k_points": None,
984 "k_point_weights": None,
985 }
986 n_kpts = int(n_kpts)
988 line_start = self.reverse_search_for(["| K-points in task"])
989 line_end = self.reverse_search_for(["| k-point:"])
990 if (
991 (line_start == LINE_NOT_FOUND)
992 or (line_end == LINE_NOT_FOUND)
993 or (line_end - line_start != n_kpts)
994 ):
995 return {
996 "k_points": None,
997 "k_point_weights": None,
998 }
1000 k_points = np.zeros((n_kpts, 3))
1001 k_point_weights = np.zeros(n_kpts)
1002 for kk, line in enumerate(self.lines[line_start + 1:line_end + 1]):
1003 k_points[kk] = [float(inp) for inp in line.split()[4:7]]
1004 k_point_weights[kk] = float(line.split()[-1])
1006 return {
1007 "k_points": k_points,
1008 "k_point_weights": k_point_weights,
1009 }
1011 @cached_property
1012 def n_atoms(self):
1013 """The number of atoms for the material"""
1014 n_atoms = self.parse_scalar("n_atoms")
1015 if n_atoms is None:
1016 raise AimsParseError(
1017 "No information about the number of atoms in the header."
1018 )
1019 return int(n_atoms)
1021 @cached_property
1022 def n_bands(self):
1023 """The number of Kohn-Sham states for the chunk"""
1024 line_start = self.reverse_search_for(
1025 scalar_property_to_line_key["n_bands"])
1027 if line_start == LINE_NOT_FOUND:
1028 raise AimsParseError(
1029 "No information about the number of Kohn-Sham states "
1030 "in the header.")
1032 line = self.lines[line_start]
1033 if "| Number of Kohn-Sham states" in line:
1034 return int(line.split(":")[-1].strip().split()[0])
1036 return int(line.split()[-1].strip()[:-1])
1038 @cached_property
1039 def n_electrons(self):
1040 """The number of electrons for the chunk"""
1041 line_start = self.reverse_search_for(
1042 scalar_property_to_line_key["n_electrons"])
1044 if line_start == LINE_NOT_FOUND:
1045 raise AimsParseError(
1046 "No information about the number of electrons in the header."
1047 )
1049 line = self.lines[line_start]
1050 return int(float(line.split()[-2]))
1052 @cached_property
1053 def n_k_points(self):
1054 """The number of k_ppoints for the calculation"""
1055 n_kpts = self.parse_scalar("n_kpts")
1056 if n_kpts is None:
1057 return None
1059 return int(n_kpts)
1061 @cached_property
1062 def n_spins(self):
1063 """The number of spin channels for the chunk"""
1064 n_spins = self.parse_scalar("n_spins")
1065 if n_spins is None:
1066 raise AimsParseError(
1067 "No information about the number of spin "
1068 "channels in the header.")
1069 return int(n_spins)
1071 @cached_property
1072 def electronic_temperature(self):
1073 """The electronic temperature for the chunk"""
1074 line_start = self.reverse_search_for(
1075 scalar_property_to_line_key["electronic_temp"]
1076 )
1077 if line_start == LINE_NOT_FOUND:
1078 return 0.10
1080 line = self.lines[line_start]
1081 return float(line.split("=")[-1].strip().split()[0])
1083 @property
1084 def k_points(self):
1085 """All k-points listed in the calculation"""
1086 return self._k_points["k_points"]
1088 @property
1089 def k_point_weights(self):
1090 """The k-point weights for the calculation"""
1091 return self._k_points["k_point_weights"]
1093 @cached_property
1094 def header_summary(self):
1095 """Dictionary summarizing the information inside the header"""
1096 return {
1097 "initial_atoms": self.initial_atoms,
1098 "initial_cell": self.initial_cell,
1099 "constraints": self.constraints,
1100 "is_relaxation": self.is_relaxation,
1101 "is_md": self.is_md,
1102 "n_atoms": self.n_atoms,
1103 "n_bands": self.n_bands,
1104 "n_electrons": self.n_electrons,
1105 "n_spins": self.n_spins,
1106 "electronic_temperature": self.electronic_temperature,
1107 "n_k_points": self.n_k_points,
1108 "k_points": self.k_points,
1109 "k_point_weights": self.k_point_weights,
1110 }
1113class AimsOutCalcChunk(AimsOutChunk):
1114 """A part of the aims.out file correponding to a single structure"""
1116 def __init__(self, lines, header):
1117 """Constructor
1119 Parameters
1120 ----------
1121 lines: list of str
1122 The lines used for the structure
1123 header: dict
1124 A summary of the relevant information from the aims.out header
1125 """
1126 super().__init__(lines)
1127 self._header = header.header_summary
1129 @cached_property
1130 def _atoms(self):
1131 """Create an atoms object for the subsequent structures
1132 calculated in the aims.out file"""
1133 start_keys = [
1134 "Atomic structure (and velocities) as used in the preceding "
1135 "time step",
1136 "Updated atomic structure",
1137 "Atomic structure that was used in the preceding time step of "
1138 "the wrapper",
1139 ]
1140 line_start = self.reverse_search_for(start_keys)
1141 if line_start == LINE_NOT_FOUND:
1142 return self.initial_atoms
1144 line_start += 1
1146 line_end = self.reverse_search_for(
1147 [
1148 'Next atomic structure:',
1149 'Writing the current geometry to file "geometry.in.next_step"'
1150 ],
1151 line_start
1152 )
1153 if line_end == LINE_NOT_FOUND:
1154 line_end = len(self.lines)
1156 cell = []
1157 velocities = []
1158 atoms = Atoms()
1159 for line in self.lines[line_start:line_end]:
1160 if "lattice_vector " in line:
1161 cell.append([float(inp) for inp in line.split()[1:]])
1162 elif "atom " in line:
1163 line_split = line.split()
1164 atoms.append(Atom(line_split[4], tuple(
1165 float(inp) for inp in line_split[1:4])))
1166 elif "velocity " in line:
1167 velocities.append([float(inp) for inp in line.split()[1:]])
1169 assert len(atoms) == self.n_atoms
1170 assert (len(velocities) == self.n_atoms) or (len(velocities) == 0)
1171 if len(cell) == 3:
1172 atoms.set_cell(np.array(cell))
1173 atoms.set_pbc([True, True, True])
1174 elif len(cell) != 0:
1175 raise AimsParseError(
1176 "Parsed geometry has incorrect number of lattice vectors."
1177 )
1179 if len(velocities) > 0:
1180 atoms.set_velocities(np.array(velocities))
1181 atoms.set_constraint(self.constraints)
1183 return atoms
1185 @cached_property
1186 def forces(self):
1187 """Parse the forces from the aims.out file"""
1188 line_start = self.reverse_search_for(["Total atomic forces"])
1189 if line_start == LINE_NOT_FOUND:
1190 return None
1192 line_start += 1
1194 return np.array(
1195 [
1196 [float(inp) for inp in line.split()[-3:]]
1197 for line in self.lines[line_start:line_start + self.n_atoms]
1198 ]
1199 )
1201 @cached_property
1202 def stresses(self):
1203 """Parse the stresses from the aims.out file"""
1204 line_start = self.reverse_search_for(
1205 ["Per atom stress (eV) used for heat flux calculation"]
1206 )
1207 if line_start == LINE_NOT_FOUND:
1208 return None
1209 line_start += 3
1210 stresses = []
1211 for line in self.lines[line_start:line_start + self.n_atoms]:
1212 xx, yy, zz, xy, xz, yz = (float(d) for d in line.split()[2:8])
1213 stresses.append([xx, yy, zz, yz, xz, xy])
1215 return np.array(stresses)
1217 @cached_property
1218 def stress(self):
1219 """Parse the stress from the aims.out file"""
1220 from ase.stress import full_3x3_to_voigt_6_stress
1222 line_start = self.reverse_search_for(
1223 [
1224 "Analytical stress tensor - Symmetrized",
1225 "Numerical stress tensor",
1226 ]
1228 ) # Offset to relevant lines
1229 if line_start == LINE_NOT_FOUND:
1230 return None
1232 stress = [
1233 [float(inp) for inp in line.split()[2:5]]
1234 for line in self.lines[line_start + 5:line_start + 8]
1235 ]
1236 return full_3x3_to_voigt_6_stress(stress)
1238 @cached_property
1239 def is_metallic(self):
1240 """Checks the outputfile to see if the chunk corresponds
1241 to a metallic system"""
1242 line_start = self.reverse_search_for(
1243 ["material is metallic within the approximate finite "
1244 "broadening function (occupation_type)"])
1245 return line_start != LINE_NOT_FOUND
1247 @cached_property
1248 def total_energy(self):
1249 """Parse the energy from the aims.out file"""
1250 if np.all(self._atoms.pbc) and self.is_metallic:
1251 line_ind = self.reverse_search_for(["Total energy corrected"])
1252 else:
1253 line_ind = self.reverse_search_for(["Total energy uncorrected"])
1254 if line_ind == LINE_NOT_FOUND:
1255 raise AimsParseError("No energy is associated with the structure.")
1257 return float(self.lines[line_ind].split()[5])
1259 @cached_property
1260 def dipole(self):
1261 """Parse the electric dipole moment from the aims.out file."""
1262 line_start = self.reverse_search_for(["Total dipole moment [eAng]"])
1263 if line_start == LINE_NOT_FOUND:
1264 return None
1266 line = self.lines[line_start]
1267 return np.array([float(inp) for inp in line.split()[6:9]])
1269 @cached_property
1270 def dielectric_tensor(self):
1271 """Parse the dielectric tensor from the aims.out file"""
1272 line_start = self.reverse_search_for(
1273 ["DFPT for dielectric_constant:--->",
1274 "PARSE DFPT_dielectric_tensor"],
1275 )
1276 if line_start == LINE_NOT_FOUND:
1277 return None
1279 # we should find the tensor in the next three lines:
1280 lines = self.lines[line_start + 1:line_start + 4]
1282 # make ndarray and return
1283 return np.array([np.fromstring(line, sep=' ') for line in lines])
1285 @cached_property
1286 def polarization(self):
1287 """ Parse the polarization vector from the aims.out file"""
1288 line_start = self.reverse_search_for(["| Cartesian Polarization"])
1289 if line_start == LINE_NOT_FOUND:
1290 return None
1291 line = self.lines[line_start]
1292 return np.array([float(s) for s in line.split()[-3:]])
1294 @cached_property
1295 def _hirshfeld(self):
1296 """Parse the Hirshfled charges volumes, and dipole moments from the
1297 ouput"""
1298 line_start = self.reverse_search_for(
1299 ["Performing Hirshfeld analysis of fragment charges and moments."]
1300 )
1301 if line_start == LINE_NOT_FOUND:
1302 return {
1303 "charges": None,
1304 "volumes": None,
1305 "atomic_dipoles": None,
1306 "dipole": None,
1307 }
1309 line_inds = self.search_for_all("Hirshfeld charge", line_start, -1)
1310 hirshfeld_charges = np.array(
1311 [float(self.lines[ind].split(":")[1]) for ind in line_inds]
1312 )
1314 line_inds = self.search_for_all("Hirshfeld volume", line_start, -1)
1315 hirshfeld_volumes = np.array(
1316 [float(self.lines[ind].split(":")[1]) for ind in line_inds]
1317 )
1319 line_inds = self.search_for_all(
1320 "Hirshfeld dipole vector", line_start, -1)
1321 hirshfeld_atomic_dipoles = np.array(
1322 [
1323 [float(inp) for inp in self.lines[ind].split(":")[1].split()]
1324 for ind in line_inds
1325 ]
1326 )
1328 if not np.any(self._atoms.pbc):
1329 positions = self._atoms.get_positions()
1330 hirshfeld_dipole = np.sum(
1331 hirshfeld_charges.reshape((-1, 1)) * positions,
1332 axis=1,
1333 )
1334 else:
1335 hirshfeld_dipole = None
1336 return {
1337 "charges": hirshfeld_charges,
1338 "volumes": hirshfeld_volumes,
1339 "atomic_dipoles": hirshfeld_atomic_dipoles,
1340 "dipole": hirshfeld_dipole,
1341 }
1343 @cached_property
1344 def _eigenvalues(self):
1345 """Parse the eigenvalues and occupancies of the system. If eigenvalue
1346 for a particular k-point is not present in the output file
1347 then set it to np.nan
1348 """
1350 line_start = self.reverse_search_for(["Writing Kohn-Sham eigenvalues."])
1351 if line_start == LINE_NOT_FOUND:
1352 return {"eigenvalues": None, "occupancies": None}
1354 line_end_1 = self.reverse_search_for(
1355 ["Self-consistency cycle converged."], line_start
1356 )
1357 line_end_2 = self.reverse_search_for(
1358 [
1359 "What follows are estimated values for band gap, "
1360 "HOMO, LUMO, etc.",
1361 "Current spin moment of the entire structure :",
1362 "Highest occupied state (VBM)"
1363 ],
1364 line_start,
1365 )
1366 if line_end_1 == LINE_NOT_FOUND:
1367 line_end = line_end_2
1368 elif line_end_2 == LINE_NOT_FOUND:
1369 line_end = line_end_1
1370 else:
1371 line_end = min(line_end_1, line_end_2)
1373 n_kpts = self.n_k_points if np.all(self._atoms.pbc) else 1
1374 if n_kpts is None:
1375 return {"eigenvalues": None, "occupancies": None}
1377 eigenvalues = np.full((n_kpts, self.n_bands, self.n_spins), np.nan)
1378 occupancies = np.full((n_kpts, self.n_bands, self.n_spins), np.nan)
1380 occupation_block_start = self.search_for_all(
1381 "State Occupation Eigenvalue [Ha] Eigenvalue [eV]",
1382 line_start,
1383 line_end,
1384 )
1385 kpt_def = self.search_for_all("K-point: ", line_start, line_end)
1387 if len(kpt_def) > 0:
1388 kpt_inds = [int(self.lines[ll].split()[1]) - 1 for ll in kpt_def]
1389 elif (self.n_k_points is None) or (self.n_k_points == 1):
1390 kpt_inds = [0]
1391 else:
1392 raise ParseError("Cannot find k-point definitions")
1394 assert len(kpt_inds) == len(occupation_block_start)
1395 spins = [0] * len(occupation_block_start)
1397 if self.n_spins == 2:
1398 spin_def = self.search_for_all("Spin-", line_start, line_end)
1399 assert len(spin_def) == len(occupation_block_start)
1401 spins = [int("Spin-down eigenvalues:" in self.lines[ll])
1402 for ll in spin_def]
1404 for occ_start, kpt_ind, spin in zip(
1405 occupation_block_start, kpt_inds, spins):
1406 for ll, line in enumerate(
1407 self.lines[occ_start + 1:occ_start + self.n_bands + 1]
1408 ):
1409 if "***" in line:
1410 warn_msg = f"The {ll + 1}th eigenvalue for the "
1411 "{kpt_ind+1}th k-point and {spin}th channels could "
1412 "not be read (likely too large to be printed "
1413 "in the output file)"
1414 warnings.warn(warn_msg)
1415 continue
1416 split_line = line.split()
1417 eigenvalues[kpt_ind, ll, spin] = float(split_line[3])
1418 occupancies[kpt_ind, ll, spin] = float(split_line[1])
1419 return {"eigenvalues": eigenvalues, "occupancies": occupancies}
1421 @cached_property
1422 def atoms(self):
1423 """Convert AimsOutChunk to Atoms object and add all non-standard
1424 outputs to atoms.info"""
1425 atoms = self._atoms
1427 atoms.calc = SinglePointDFTCalculator(
1428 atoms,
1429 energy=self.free_energy,
1430 free_energy=self.free_energy,
1431 forces=self.forces,
1432 stress=self.stress,
1433 stresses=self.stresses,
1434 magmom=self.magmom,
1435 dipole=self.dipole,
1436 dielectric_tensor=self.dielectric_tensor,
1437 polarization=self.polarization,
1438 )
1439 return atoms
1441 @property
1442 def results(self):
1443 """Convert an AimsOutChunk to a Results Dictionary"""
1444 results = {
1445 "energy": self.free_energy,
1446 "free_energy": self.free_energy,
1447 "total_energy": self.total_energy,
1448 "forces": self.forces,
1449 "stress": self.stress,
1450 "stresses": self.stresses,
1451 "magmom": self.magmom,
1452 "dipole": self.dipole,
1453 "fermi_energy": self.E_f,
1454 "n_iter": self.n_iter,
1455 "hirshfeld_charges": self.hirshfeld_charges,
1456 "hirshfeld_dipole": self.hirshfeld_dipole,
1457 "hirshfeld_volumes": self.hirshfeld_volumes,
1458 "hirshfeld_atomic_dipoles": self.hirshfeld_atomic_dipoles,
1459 "eigenvalues": self.eigenvalues,
1460 "occupancies": self.occupancies,
1461 "dielectric_tensor": self.dielectric_tensor,
1462 "polarization": self.polarization,
1463 }
1465 return {
1466 key: value for key,
1467 value in results.items() if value is not None}
1469 @property
1470 def initial_atoms(self):
1471 """The initial structure defined in the geoemtry.in file"""
1472 return self._header["initial_atoms"]
1474 @property
1475 def initial_cell(self):
1476 """The initial lattice vectors defined in the geoemtry.in file"""
1477 return self._header["initial_cell"]
1479 @property
1480 def constraints(self):
1481 """The relaxation constraints for the calculation"""
1482 return self._header["constraints"]
1484 @property
1485 def n_atoms(self):
1486 """The number of atoms for the material"""
1487 return self._header["n_atoms"]
1489 @property
1490 def n_bands(self):
1491 """The number of Kohn-Sham states for the chunk"""
1492 return self._header["n_bands"]
1494 @property
1495 def n_electrons(self):
1496 """The number of electrons for the chunk"""
1497 return self._header["n_electrons"]
1499 @property
1500 def n_spins(self):
1501 """The number of spin channels for the chunk"""
1502 return self._header["n_spins"]
1504 @property
1505 def electronic_temperature(self):
1506 """The electronic temperature for the chunk"""
1507 return self._header["electronic_temperature"]
1509 @property
1510 def n_k_points(self):
1511 """The number of electrons for the chunk"""
1512 return self._header["n_k_points"]
1514 @property
1515 def k_points(self):
1516 """The number of spin channels for the chunk"""
1517 return self._header["k_points"]
1519 @property
1520 def k_point_weights(self):
1521 """k_point_weights electronic temperature for the chunk"""
1522 return self._header["k_point_weights"]
1524 @cached_property
1525 def free_energy(self):
1526 """The free energy for the chunk"""
1527 return self.parse_scalar("free_energy")
1529 @cached_property
1530 def n_iter(self):
1531 """The number of SCF iterations needed to converge the SCF cycle for
1532 the chunk"""
1533 return self.parse_scalar("number_of_iterations")
1535 @cached_property
1536 def magmom(self):
1537 """The magnetic moment for the chunk"""
1538 return self.parse_scalar("magnetic_moment")
1540 @cached_property
1541 def E_f(self):
1542 """The Fermi energy for the chunk"""
1543 return self.parse_scalar("fermi_energy")
1545 @cached_property
1546 def converged(self):
1547 """True if the chunk is a fully converged final structure"""
1548 return (len(self.lines) > 0) and ("Have a nice day." in self.lines[-5:])
1550 @property
1551 def hirshfeld_charges(self):
1552 """The Hirshfeld charges for the chunk"""
1553 return self._hirshfeld["charges"]
1555 @property
1556 def hirshfeld_atomic_dipoles(self):
1557 """The Hirshfeld atomic dipole moments for the chunk"""
1558 return self._hirshfeld["atomic_dipoles"]
1560 @property
1561 def hirshfeld_volumes(self):
1562 """The Hirshfeld volume for the chunk"""
1563 return self._hirshfeld["volumes"]
1565 @property
1566 def hirshfeld_dipole(self):
1567 """The Hirshfeld systematic dipole moment for the chunk"""
1568 if not np.any(self._atoms.pbc):
1569 return self._hirshfeld["dipole"]
1571 return None
1573 @property
1574 def eigenvalues(self):
1575 """All outputted eigenvalues for the system"""
1576 return self._eigenvalues["eigenvalues"]
1578 @property
1579 def occupancies(self):
1580 """All outputted occupancies for the system"""
1581 return self._eigenvalues["occupancies"]
1584def get_header_chunk(fd):
1585 """Returns the header information from the aims.out file"""
1586 header = []
1587 line = ""
1589 # Stop the header once the first SCF cycle begins
1590 while (
1591 "Convergence: q app. | density | eigen (eV) | Etot (eV)"
1592 not in line
1593 and "Convergence: q app. | density, spin | eigen (eV) |"
1594 not in line
1595 and "Begin self-consistency iteration #" not in line
1596 ):
1597 try:
1598 line = next(fd).strip() # Raises StopIteration on empty file
1599 except StopIteration:
1600 raise ParseError(
1601 "No SCF steps present, calculation failed at setup."
1602 )
1604 header.append(line)
1605 return AimsOutHeaderChunk(header)
1608def get_aims_out_chunks(fd, header_chunk):
1609 """Yield unprocessed chunks (header, lines) for each AimsOutChunk image."""
1610 try:
1611 line = next(fd).strip() # Raises StopIteration on empty file
1612 except StopIteration:
1613 return
1615 # If the calculation is a relaxation, the updated structural information
1616 # appears before the re-initialization
1617 if header_chunk.is_relaxation:
1618 chunk_end_line = (
1619 "Geometry optimization: Attempting to predict improved coordinates."
1620 )
1621 else:
1622 chunk_end_line = "Begin self-consistency loop: Re-initialization"
1624 # If SCF is not converged then do not treat the next chunk_end_line as a
1625 # new chunk until after the SCF is re-initialized
1626 ignore_chunk_end_line = False
1627 while True:
1628 try:
1629 line = next(fd).strip() # Raises StopIteration on empty file
1630 except StopIteration:
1631 break
1633 lines = []
1634 while chunk_end_line not in line or ignore_chunk_end_line:
1635 lines.append(line)
1636 # If SCF cycle not converged or numerical stresses are requested,
1637 # don't end chunk on next Re-initialization
1638 patterns = [
1639 (
1640 "Self-consistency cycle not yet converged -"
1641 " restarting mixer to attempt better convergence."
1642 ),
1643 (
1644 "Components of the stress tensor (for mathematical "
1645 "background see comments in numerical_stress.f90)."
1646 ),
1647 "Calculation of numerical stress completed",
1648 ]
1649 if any(pattern in line for pattern in patterns):
1650 ignore_chunk_end_line = True
1651 elif "Begin self-consistency loop: Re-initialization" in line:
1652 ignore_chunk_end_line = False
1654 try:
1655 line = next(fd).strip()
1656 except StopIteration:
1657 break
1659 yield AimsOutCalcChunk(lines, header_chunk)
1662def check_convergence(chunks, non_convergence_ok=False):
1663 """Check if the aims output file is for a converged calculation
1665 Parameters
1666 ----------
1667 chunks: list of AimsOutChunks
1668 The list of chunks for the aims calculations
1669 non_convergence_ok: bool
1670 True if it is okay for the calculation to not be converged
1672 Returns
1673 -------
1674 bool
1675 True if the calculation is converged
1676 """
1677 if not non_convergence_ok and not chunks[-1].converged:
1678 raise ParseError("The calculation did not complete successfully")
1679 return True
1682@reader
1683def read_aims_output(fd, index=-1, non_convergence_ok=False):
1684 """Import FHI-aims output files with all data available, i.e.
1685 relaxations, MD information, force information, etc."""
1686 header_chunk = get_header_chunk(fd)
1687 chunks = list(get_aims_out_chunks(fd, header_chunk))
1688 check_convergence(chunks, non_convergence_ok)
1690 # Relaxations have an additional footer chunk due to how it is split
1691 if header_chunk.is_relaxation:
1692 images = [chunk.atoms for chunk in chunks[:-1]]
1693 else:
1694 images = [chunk.atoms for chunk in chunks]
1695 return images[index]
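# A minimal usage sketch (file name illustrative); the reader is registered
# with ase.io under the "aims-output" format name:
#
#     from ase.io import read
#     final = read("aims.out", format="aims-output")              # last image
#     images = read("aims.out", format="aims-output", index=":")  # all images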
1698@reader
1699def read_aims_results(fd, index=-1, non_convergence_ok=False):
1700 """Import FHI-aims output files and summarize all relevant information
1701 into a dictionary"""
1702 header_chunk = get_header_chunk(fd)
1703 chunks = list(get_aims_out_chunks(fd, header_chunk))
1704 check_convergence(chunks, non_convergence_ok)
1706 # Relaxations have an additional footer chunk due to how it is split
1707 if header_chunk.is_relaxation and (index == -1):
1708 return chunks[-2].results
1710 return chunks[index].results
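# Usage sketch (file name illustrative): returns a plain dict of the parsed
# quantities, e.g. results["energy"], results["forces"], results["stress"],
# with None-valued entries dropped:
#
#     from ase.io.aims import read_aims_results
#     results = read_aims_results("aims.out")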