Coverage for eminus/config.py: 80.21%

96 statements  

coverage.py v7.11.0, created at 2025-10-21 12:19 +0000

# SPDX-FileCopyrightText: 2023 The eminus developers
# SPDX-License-Identifier: Apache-2.0
"""Consolidated configuration module."""

import numbers
import os
import pathlib
import sys

from .logger import log


class ConfigClass:
    """Configuration class holding user-specifiable variables.

    An instance of this class will be set under the same name as this module. This effectively
    makes this module a singleton data class.
    """

    def __init__(self):
        """Initialize the ConfigClass object."""
        self.backend = "torch"  # Use Torch as backend if available, default to NumPy otherwise
        self.use_gpu = False  # Disable GPU by default, since one may be restricted by a small VRAM
        self.use_pylibxc = True  # Use pylibxc over PySCF if available since it is faster
        self.threads = None  # Read threads from environment variables by default
        self.verbose = "INFO"  # Display info messages (and worse) by default

    # ### Class properties ###

    @property
    def backend(self):
        """Whether to use NumPy or a different backend if installed."""
        return self._backend

    @backend.setter
    def backend(self, value):
        self._backend = value.lower()
        if self._backend == "torch":
            try:
                from array_api_compat import torch
            except ImportError:
                self._backend = "numpy"
            else:
                torch.set_default_dtype(torch.float64)
        else:
            self._backend = "numpy"

    @property
    def use_gpu(self):
        """Whether to use the GPU if available."""
        return self._use_gpu

    @use_gpu.setter
    def use_gpu(self, value):
        if self.backend == "torch":
            import torch

            # When using set_default_device the whole runtime will use a device context manager
            if torch.cuda.is_available():
                if value:
                    torch.set_default_device("cuda")
                    self._use_gpu = True
                else:
                    torch.set_default_device("cpu")
                    self._use_gpu = False
            else:
                self._use_gpu = False
        else:
            self._use_gpu = False

    @property
    def use_pylibxc(self):
        """Whether to use pylibxc or PySCF for functionals if both are installed."""
        if self._use_pylibxc:
            try:
                import pylibxc  # noqa: F401
            except ImportError:
                pass
            else:
                return True
        return False

    @use_pylibxc.setter
    def use_pylibxc(self, value):
        self._use_pylibxc = value

    @property
    def threads(self):
        """Number of threads used in array calculations."""
        if self._threads is None:
            try:
                if self.backend == "torch":
                    from array_api_compat import torch

                    return torch.get_num_threads()
                # Read the OMP threads for the default operators
                return int(os.environ["OMP_NUM_THREADS"])
            except KeyError:
                return None
        return int(self._threads)

    @threads.setter
    def threads(self, value):
        self._threads = value
        if isinstance(value, numbers.Integral):
            if self.backend == "torch":
                from array_api_compat import torch

                return torch.set_num_threads(value)
            os.environ["OMP_NUM_THREADS"] = str(value)
        return None

    @property
    def verbose(self):
        """Logger verbosity level."""
        return log.verbose

    @verbose.setter
    def verbose(self, value):
        # Logic in setter to run it on initialization
        log.verbose = value

    # ### Class methods ###

    def info(self):
        """Print configuration and performance information."""
        sys.stdout.write("--- Configuration infos ---\n")
        sys.stdout.write(f"Global verbosity : {self.verbose}\n")
        # Only print if PySCF or pylibxc is installed
        if not self.use_pylibxc:
            try:
                import pyscf  # noqa: F401

                sys.stdout.write("Libxc backend : PySCF\n")
            except ImportError:
                pass
        else:
            sys.stdout.write("Libxc backend : pylibxc\n")

        sys.stdout.write(
            "\n--- Performance infos ---\n"
            f"Array backend : {self.backend}\n"
            f"Array device : {'GPU' if self.use_gpu else 'CPU'}\n"
        )
        # Do not print threading information when using GPU
        if self.use_gpu:
            return
        # Check threads
        if self.threads is None:
            sys.stdout.write(
                "Array threads : 1\n"
                "INFO: No OMP_NUM_THREADS environment variable was found.\nTo improve "
                'performance, add "export OMP_NUM_THREADS=n" to your ".bashrc".\nMake sure to '
                'replace "n", typically with the number of cores of your CPU.\nTemporarily, you '
                'can set them in your Python environment with "eminus.config.threads=n".\n'
            )
        else:
            sys.stdout.write(f"Array threads : {self.threads}\n")


if (
    "sphinx-build" not in pathlib.Path(sys.argv[0]).name
    and "stubtest" not in pathlib.Path(sys.argv[0]).name
):
    # Do not initialize the class when Sphinx or stubtest is running
    # Since we set the class instance to the module name, Sphinx would only document
    # the main docstring of the class without the properties
    sys.modules[__name__] = ConfigClass()
else:  # pragma: no cover
    # Add mock variables for all properties and methods of the ConfigClass to the module
    # This allows IDEs to see that the module has said attributes
    # This also allows for stubtesting and documentation of these variables and functions
    backend = ""  #: Whether to use NumPy or a different backend if installed.
    use_gpu = False  #: Whether to use the GPU if available.
    use_pylibxc = False  #: Whether to use pylibxc or PySCF for functionals if both are installed.
    threads = 0  #: Number of threads used in array calculations.
    verbose = ""  #: Logger verbosity level.

    def info():
        """Print configuration and performance information."""
        return
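
Because the module replaces itself in sys.modules with a ConfigClass instance, configuration is done through plain attribute assignments on eminus.config, as hinted at by the message printed in info(). A minimal usage sketch; the values chosen here are purely illustrative:

    import eminus

    eminus.config.verbose = "DEBUG"  # forwarded to the logger through the verbose setter
    eminus.config.threads = 4        # sets the Torch thread count or OMP_NUM_THREADS
    eminus.config.use_gpu = True     # only takes effect with the Torch backend and an available CUDA device
    eminus.config.info()             # print the configuration and performance summary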