Coverage for eminus/config.py: 70.54%
112 statements
« prev ^ index » next coverage.py v7.6.10, created at 2025-01-16 10:16 +0000
1# SPDX-FileCopyrightText: 2021 The eminus developers
2# SPDX-License-Identifier: Apache-2.0
3"""Consolidated configuration module."""
5import numbers
6import os
7import pathlib
8import sys
10from .logger import log
class ConfigClass:
    """Configuration class holding user specifiable variables.

    An instance of this class will be set as the same name as this module. This will effectively
    make this module a singleton data class.
    """

    def __init__(self):
        """Initialize the ConfigClass object."""
        self.backend = "torch"  # Prefer the faster Torch FFTs over SciPy when available
        self.use_gpu = False  # Keep the GPU disabled by default since it benchmarked slower
        self.use_pylibxc = True  # Prefer Libxc over PySCF when available since it is faster
        self.threads = None  # Determine the thread count from environment variables by default
        self.verbose = "INFO"  # Default logger verbosity level

    # ### Class properties ###

    @property
    def backend(self):
        """Whether to use SciPy or a different backend if installed."""
        # The backend is resolved lazily in the getter rather than on initialization because
        # importing Torch is rather slow
        if self._backend == "jax" and self._init_jax():
            return "jax"
        if self._backend == "torch" and self._init_torch():
            return "torch"
        return "scipy"

    @backend.setter
    def backend(self, value):
        lowered = value.lower()
        self._backend = lowered

    @staticmethod
    def _init_jax():
        """Import and configure Jax once, returning whether it is usable."""
        if "jax" in sys.modules:
            return True
        try:
            import jax
            import jax.numpy
        except ImportError:
            return False
        # Enable double precision calculations
        jax.config.update("jax_enable_x64", val=True)
        return True

    @staticmethod
    def _init_torch():
        """Import and configure Torch once, returning whether it is usable."""
        if "torch" in sys.modules:
            return True
        try:
            import torch
        except ImportError:
            return False
        # Enable double precision calculations
        torch.set_default_dtype(torch.double)
        return True

    @property
    def use_gpu(self):
        """Whether to use the GPU if available."""
        # GPUs are only supported through the Jax and Torch backends
        # Resolve the backend first so its lazy import side effects always happen
        active = self.backend
        if self._use_gpu:
            if active == "jax":
                import jax

                return jax.default_backend() != "cpu"
            if active == "torch":
                import torch

                return torch.cuda.is_available()
        return False

    @use_gpu.setter
    def use_gpu(self, value):
        self._use_gpu = value

    @property
    def use_pylibxc(self):
        """Whether to use pylibxc or PySCF for functionals if both are installed."""
        if not self._use_pylibxc:
            return False
        try:
            import pylibxc  # noqa: F401
        except ImportError:
            return False
        return True

    @use_pylibxc.setter
    def use_pylibxc(self, value):
        self._use_pylibxc = value

    @property
    def threads(self):
        """Number of threads used in FFT calculations."""
        if self._threads is not None:
            return int(self._threads)
        if self.backend == "torch":
            import torch

            return torch.get_num_threads()
        # Read the OMP threads for the default operators
        try:
            return int(os.environ["OMP_NUM_THREADS"])
        except KeyError:
            return None

    @threads.setter
    def threads(self, value):
        self._threads = value
        if not isinstance(value, numbers.Integral):
            return None
        if self.backend == "torch":
            import torch

            return torch.set_num_threads(value)
        os.environ["OMP_NUM_THREADS"] = str(value)
        return None

    @property
    def verbose(self):
        """Logger verbosity level."""
        return log.verbose

    @verbose.setter
    def verbose(self, value):
        # The logic lives in the setter so it also runs on initialization
        log.verbose = value

    # ### Class methods ###

    def info(self):
        """Print configuration and performance information."""
        write = sys.stdout.write
        write("--- Configuration infos ---\n")
        write(f"Global verbosity : {self.verbose}\n")
        # Only print a Libxc backend line if pylibxc or PySCF is actually installed
        if self.use_pylibxc:
            write("Libxc backend : pylibxc\n")
        else:
            try:
                import pyscf  # noqa: F401
            except ImportError:
                pass
            else:
                write("Libxc backend : PySCF\n")
        write(
            "\n--- Performance infos ---\n"
            f"FFT backend : {self.backend}\n"
            f"FFT device : {'GPU' if self.use_gpu else 'CPU'}\n"
        )
        # Threading information is not meaningful when running on the GPU
        if self.use_gpu:
            return
        # Check FFT threads
        thread_count = self.threads
        if thread_count is None:
            write(
                "FFT threads : 1\n"
                "INFO: No OMP_NUM_THREADS environment variable was found.\nTo improve "
                'performance, add "export OMP_NUM_THREADS=n" to your ".bashrc".\nMake sure to '
                'replace "n", typically with the number of cores your CPU.\nTemporarily, you can '
                'set them in your Python environment with "eminus.config.threads=n".\n'
            )
        else:
            write(f"FFT threads : {thread_count}\n")
# Skip the singleton initialization when Sphinx or stubtest is running
# Since the class instance replaces the module, Sphinx would otherwise only document
# the main docstring of the class without the properties
if any(tool in pathlib.Path(sys.argv[0]).name for tool in ("sphinx-build", "stubtest")):
    # Provide mock variables for all properties and methods of the ConfigClass at module level
    # This allows IDEs to see that the module has said attribute
    # This also allows for stubtesting and documentation of these variables and functions
    backend = ""  #: Whether to use SciPy or a different backend if installed.
    use_gpu = False  #: Whether to use the GPU if available.
    use_pylibxc = False  #: Whether to use pylibxc or PySCF for functionals if both are installed.
    threads = 0  #: Number of threads used in FFT calculations.
    verbose = ""  #: Logger verbosity level.

    def info():
        """Print configuration and performance information."""
        return
else:
    # Replace the module with a class instance to make it a singleton data class
    sys.modules[__name__] = ConfigClass()