Coverage for /builds/ase/ase/ase/utils/abc.py: 100.00% (23 statements)
from abc import ABC, abstractmethod

import numpy as np

# Due to the high prevalence of cyclic imports surrounding ase.optimize,
# we define the Optimizable ABC here in utils.
# Can we find a better way?


class Optimizable(ABC):
    @abstractmethod
    def ndofs(self) -> int:
        """Return number of degrees of freedom."""

    @abstractmethod
    def get_x(self) -> np.ndarray:
        """Return current coordinates as a flat ndarray."""

    @abstractmethod
    def set_x(self, x: np.ndarray) -> None:
        """Set flat ndarray as current coordinates."""

    @abstractmethod
    def get_gradient(self) -> np.ndarray:
        """Return gradient at current coordinates as flat ndarray.

        NOTE: Currently this is still the (flat) "forces", i.e.
        the negative gradient.  This must be fixed before the optimizable
        API is done."""
        # Callers who want Nx3 will do ".get_gradient().reshape(-1, 3)".
        # We can probably weed out most such reshapings.
        # Grep for the above expression in order to find places that should
        # be updated.

    @abstractmethod
    def get_value(self) -> float:
        """Return function value at current coordinates."""

    @abstractmethod
    def iterimages(self):
        """Yield domain objects that can be saved as a trajectory.

        For example, this can yield Atoms objects if the optimizer
        has a trajectory that can write Atoms objects."""

    def converged(self, gradient: np.ndarray, fmax: float) -> bool:
        """Standard implementation of convergence criterion.

        This assumes that the flat gradient reshapes to the actual
        (Nx3) forces.  We can hopefully change this."""
        assert gradient.ndim == 1
        return self.gradient_norm(gradient) < fmax

    def gradient_norm(self, gradient):
        forces = gradient.reshape(-1, 3)  # XXX Cartesian
        return np.linalg.norm(forces, axis=1).max()
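
    # Worked example (illustrative numbers, not part of the original
    # source): a flat gradient of length 6 is viewed as two Cartesian
    # force vectors, and the criterion is the largest per-atom norm:
    #     gradient_norm(np.array([3.0, 4.0, 0.0, 0.0, 0.0, 1.0]))
    #     == max(norm([3, 4, 0]), norm([0, 0, 1])) == max(5.0, 1.0) == 5.0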

    def __ase_optimizable__(self) -> 'Optimizable':
        """Return self, being already an Optimizable."""
        return self
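

# ---------------------------------------------------------------------------
# Illustrative sketch, not part of ase.utils.abc: a minimal concrete
# subclass showing how the Optimizable contract fits together.  The class
# name QuadraticOptimizable, the toy objective f(x) = 0.5 * k * |x|^2, and
# the steepest-descent loop below are assumptions for demonstration only.
# Per the NOTE in get_gradient, the value returned there is currently the
# negative gradient (the "forces"), and this sketch matches that behavior.


class QuadraticOptimizable(Optimizable):
    def __init__(self, x0, k: float = 1.0):
        self._x = np.asarray(x0, dtype=float).ravel()
        self.k = k

    def ndofs(self) -> int:
        return self._x.size

    def get_x(self) -> np.ndarray:
        return self._x.copy()

    def set_x(self, x: np.ndarray) -> None:
        self._x = np.asarray(x, dtype=float).ravel()

    def get_gradient(self) -> np.ndarray:
        # Flat "forces" = -grad f = -k * x, as the ABC's NOTE describes.
        return -self.k * self._x

    def get_value(self) -> float:
        return 0.5 * self.k * float(np.dot(self._x, self._x))

    def iterimages(self):
        # Nothing trajectory-writable in this toy problem.
        return iter(())


if __name__ == '__main__':
    # Bare-bones steepest descent driving the interface.  gradient_norm
    # reshapes the flat array to (N, 3), so use a multiple of 3 dofs.
    opt = QuadraticOptimizable([1.0, -2.0, 0.5, 0.3, 0.0, -0.1])
    while not opt.converged(opt.get_gradient(), fmax=1e-6):
        # Step along the forces (negative gradient), i.e. downhill.
        opt.set_x(opt.get_x() + 0.1 * opt.get_gradient())
    print(opt.ndofs(), opt.get_value())  # 6, ~0.0 at the minimum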