emmet-builders 0.84.7rc3__py3-none-any.whl → 0.84.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry notes that this version of emmet-builders might be problematic (see the registry page for details).

Files changed (41)
  1. emmet/builders/abinit/phonon.py +11 -11
  2. emmet/builders/abinit/sound_velocity.py +10 -10
  3. emmet/builders/feff/xas.py +1 -2
  4. emmet/builders/materials/absorption_spectrum.py +4 -4
  5. emmet/builders/materials/alloys.py +2 -3
  6. emmet/builders/materials/chemenv.py +2 -3
  7. emmet/builders/materials/corrected_entries.py +8 -8
  8. emmet/builders/materials/dielectric.py +3 -4
  9. emmet/builders/materials/elasticity.py +25 -25
  10. emmet/builders/materials/electrodes.py +18 -18
  11. emmet/builders/materials/electronic_structure.py +16 -16
  12. emmet/builders/materials/magnetism.py +3 -3
  13. emmet/builders/materials/ml.py +9 -11
  14. emmet/builders/materials/optimade.py +3 -3
  15. emmet/builders/materials/piezoelectric.py +1 -2
  16. emmet/builders/materials/provenance.py +7 -7
  17. emmet/builders/materials/robocrys.py +2 -3
  18. emmet/builders/materials/substrates.py +8 -7
  19. emmet/builders/materials/thermo.py +10 -10
  20. emmet/builders/matscholar/missing_compositions.py +8 -8
  21. emmet/builders/mobility/migration_graph.py +5 -5
  22. emmet/builders/molecules/atomic.py +22 -23
  23. emmet/builders/molecules/bonds.py +12 -13
  24. emmet/builders/molecules/electric.py +11 -12
  25. emmet/builders/molecules/metal_binding.py +15 -17
  26. emmet/builders/molecules/orbitals.py +11 -12
  27. emmet/builders/molecules/redox.py +21 -22
  28. emmet/builders/molecules/summary.py +13 -13
  29. emmet/builders/molecules/thermo.py +14 -16
  30. emmet/builders/molecules/trajectory.py +18 -19
  31. emmet/builders/molecules/vibration.py +11 -12
  32. emmet/builders/qchem/molecules.py +29 -31
  33. emmet/builders/settings.py +7 -8
  34. emmet/builders/utils.py +5 -5
  35. emmet/builders/vasp/materials.py +11 -11
  36. emmet/builders/vasp/task_validator.py +3 -5
  37. {emmet_builders-0.84.7rc3.dist-info → emmet_builders-0.84.8.dist-info}/METADATA +1 -1
  38. emmet_builders-0.84.8.dist-info/RECORD +54 -0
  39. emmet_builders-0.84.7rc3.dist-info/RECORD +0 -54
  40. {emmet_builders-0.84.7rc3.dist-info → emmet_builders-0.84.8.dist-info}/WHEEL +0 -0
  41. {emmet_builders-0.84.7rc3.dist-info → emmet_builders-0.84.8.dist-info}/top_level.txt +0 -0
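
Nearly all of the changes below are a type-annotation cleanup: imports of `Dict`, `List`, `Optional`, `Tuple`, and `Union` from `typing` are dropped in favor of the built-in generics of PEP 585 (`dict`, `list`, `tuple`) and the `X | Y` union syntax of PEP 604, while `Iterator`, `Iterable`, and `Generator` are still imported from `typing`. A minimal before/after sketch of the pattern (illustrative only, not code from emmet-builders; the function name and body are placeholders):

    from typing import Iterator  # Dict, List, Optional no longer need to be imported

    # 0.84.7rc3 style:
    #     def get_items(self, query: Optional[Dict] = None) -> Iterator[List[Dict]]:
    # 0.84.8 style:
    def get_items(query: dict | None = None) -> Iterator[list[dict]]:
        """Same runtime behaviour; only the annotations changed."""
        # `dict | None` needs Python 3.10+ at import time unless annotations
        # are postponed (see the compatibility note after the alloys.py hunks).
        yield [query or {}]

The same substitution repeats through every file in the hunks shown here; apart from annotations, only import ordering, a few blank lines, and some formatter re-wrapping change. The per-file hunks follow, each headed by its path.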
emmet/builders/abinit/phonon.py

@@ -1,7 +1,7 @@
  import os
  import tempfile
  from math import ceil
- from typing import Dict, Iterator, List, Optional, Tuple
+ from typing import Iterator

  import numpy as np
  from abipy.abio.inputs import AnaddbInput
@@ -50,8 +50,8 @@ class PhononBuilder(Builder):
  ddb_files: Store,
  th_disp: Store,
  phonon_website: Store,
- query: Optional[Dict] = None,
- manager: Optional[TaskManager] = None,
+ query: dict | None = None,
+ manager: TaskManager | None = None,
  symprec: float = SETTINGS.SYMPREC,
  angle_tolerance: float = SETTINGS.ANGLE_TOL,
  chunk_size=100,
@@ -134,7 +134,7 @@ class PhononBuilder(Builder):
  for mpid_chunk in grouper(mats, N):
  yield {"query": {self.phonon_materials.key: {"$in": list(mpid_chunk)}}}

- def get_items(self) -> Iterator[Dict]:
+ def get_items(self) -> Iterator[dict]:
  """
  Gets all materials that need phonons

@@ -188,7 +188,7 @@ class PhononBuilder(Builder):

  yield item

- def process_item(self, item: Dict) -> Optional[Dict]:
+ def process_item(self, item: dict) -> dict | None:
  """
  Generates the full phonon document from an item

@@ -293,7 +293,7 @@ class PhononBuilder(Builder):
  )
  return None

- def get_phonon_properties(self, item: Dict) -> Dict:
+ def get_phonon_properties(self, item: dict) -> dict:
  """
  Extracts the phonon properties from the item
  """
@@ -511,7 +511,7 @@ class PhononBuilder(Builder):
  dos: str = "tetra",
  lo_to_splitting: bool = True,
  use_dieflag: bool = True,
- ) -> Tuple[AnaddbInput, Optional[List]]:
+ ) -> tuple[AnaddbInput, list | None]:
  """
  creates the AnaddbInput object to calculate the phonon properties.
  It also returns the list of qpoints labels for generating the PhononBandStructureSymmLine.
@@ -637,7 +637,7 @@ class PhononBuilder(Builder):

  @staticmethod
  def get_pmg_bs(
- phbands: PhononBands, labels_list: List
+ phbands: PhononBands, labels_list: list
  ) -> PhononBandStructureSymmLine:
  """
  Generates a PhononBandStructureSymmLine starting from a abipy PhononBands object
@@ -726,7 +726,7 @@ class PhononBuilder(Builder):

  return data

- def update_targets(self, items: List[Dict]):
+ def update_targets(self, items: list[dict]):
  """
  Inserts the new task_types into the task_types collection

@@ -769,7 +769,7 @@ class PhononBuilder(Builder):

  def get_warnings(
  asr_break: float, cnsr_break: float, ph_bs: PhononBandStructureSymmLine
- ) -> List[PhononWarnings]:
+ ) -> list[PhononWarnings]:
  """

  Args:
@@ -810,7 +810,7 @@ def get_warnings(

  def get_thermodynamic_properties(
  ph_dos: CompletePhononDos,
- ) -> Tuple[ThermodynamicProperties, VibrationalEnergy]:
+ ) -> tuple[ThermodynamicProperties, VibrationalEnergy]:
  """
  Calculates the thermodynamic properties from a phonon DOS

emmet/builders/abinit/sound_velocity.py

@@ -1,14 +1,14 @@
  import tempfile
  import traceback
  from math import ceil
- from maggma.utils import grouper
- from typing import Optional, Dict, List, Iterator
+ from typing import Iterator

- from abipy.dfpt.vsound import SoundVelocity as AbiSoundVelocity
  from abipy.dfpt.ddb import DdbFile
+ from abipy.dfpt.vsound import SoundVelocity as AbiSoundVelocity
+ from abipy.flowtk.tasks import TaskManager
  from maggma.builders import Builder
  from maggma.core import Store
- from abipy.flowtk.tasks import TaskManager
+ from maggma.utils import grouper

  from emmet.core.phonon import SoundVelocity
  from emmet.core.utils import jsanitize
@@ -20,8 +20,8 @@ class SoundVelocityBuilder(Builder):
  phonon_materials: Store,
  ddb_source: Store,
  sound_vel: Store,
- query: Optional[dict] = None,
- manager: Optional[TaskManager] = None,
+ query: dict | None = None,
+ manager: TaskManager | None = None,
  **kwargs
  ):
  """
@@ -72,7 +72,7 @@ class SoundVelocityBuilder(Builder):
  for mpid_chunk in grouper(mats, N):
  yield {"query": {self.phonon_materials.key: {"$in": list(mpid_chunk)}}}

- def get_items(self) -> Iterator[Dict]:
+ def get_items(self) -> Iterator[dict]:
  """
  Gets all materials that need sound velocity.

@@ -117,7 +117,7 @@ class SoundVelocityBuilder(Builder):

  yield item

- def process_item(self, item: Dict) -> Optional[Dict]:
+ def process_item(self, item: dict) -> dict | None:
  """
  Generates the sound velocity document from an item

@@ -153,7 +153,7 @@ class SoundVelocityBuilder(Builder):
  return None

  @staticmethod
- def get_sound_vel(item: Dict) -> Dict:
+ def get_sound_vel(item: dict) -> dict:
  """
  Runs anaddb and return the extracted data for the speed of sound.

@@ -187,7 +187,7 @@ class SoundVelocityBuilder(Builder):

  return sv_data

- def update_targets(self, items: List[Dict]):
+ def update_targets(self, items: list[dict]):
  """
  Inserts the new task_types into the task_types collection

emmet/builders/feff/xas.py

@@ -1,7 +1,6 @@
  import traceback
  from datetime import datetime
  from itertools import chain
- from typing import Dict, List

  from maggma.builders import GroupBuilder
  from maggma.core import Store
@@ -27,7 +26,7 @@ class XASBuilder(GroupBuilder):
  super().__init__(source=tasks, target=xas, grouping_keys=["mp_id"])
  self._target_keys_field = "xas_ids"

- def process_item(self, spectra: List[Dict]) -> Dict:
+ def process_item(self, spectra: list[dict]) -> dict:
  # TODO: Change this to do structure matching against materials collection
  mpid = spectra[0]["mp_id"]

emmet/builders/materials/absorption_spectrum.py

@@ -1,5 +1,5 @@
  from math import ceil
- from typing import Dict, Iterator, List, Optional
+ from typing import Iterator

  import numpy as np
  from maggma.builders import Builder
@@ -17,7 +17,7 @@ class AbsorptionBuilder(Builder):
  materials: Store,
  tasks: Store,
  absorption: Store,
- query: Optional[Dict] = None,
+ query: dict | None = None,
  **kwargs,
  ):
  self.materials = materials
@@ -32,7 +32,7 @@ class AbsorptionBuilder(Builder):

  super().__init__(sources=[materials, tasks], targets=[absorption], **kwargs)

- def prechunk(self, number_splits: int) -> Iterator[Dict]: # pragma: no cover
+ def prechunk(self, number_splits: int) -> Iterator[dict]: # pragma: no cover
  """
  Prechunk method to perform chunking by the key field
  """
@@ -44,7 +44,7 @@ class AbsorptionBuilder(Builder):
  for split in grouper(keys, N):
  yield {"query": {self.materials.key: {"$in": list(split)}}}

- def get_items(self) -> Iterator[List[Dict]]:
+ def get_items(self) -> Iterator[list[dict]]:
  """
  Gets all items to process

emmet/builders/materials/alloys.py

@@ -1,5 +1,4 @@
  from itertools import chain, combinations
- from typing import Dict, List, Tuple, Union

  from maggma.builders import Builder
  from matminer.datasets import load_dataset
@@ -40,7 +39,7 @@ class AlloyPairBuilder(Builder):
  provenance,
  oxi_states,
  alloy_pairs,
- thermo_type: Union[ThermoType, str] = ThermoType.GGA_GGA_U_R2SCAN,
+ thermo_type: ThermoType | str = ThermoType.GGA_GGA_U_R2SCAN,
  ):
  self.materials = materials
  self.thermo = thermo
@@ -274,7 +273,7 @@ class AlloyPairMemberBuilder(Builder):
  if structures:
  yield (pairs, structures)

- def process_item(self, item: Tuple[List[AlloyPair], Dict[str, Structure]]):
+ def process_item(self, item: tuple[list[AlloyPair], dict[str, Structure]]):
  pairs, structures = item

  all_pair_members = []
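
One practical consideration with the `dict | None` and `ThermoType | str` annotations introduced above: the `X | Y` syntax in a signature is evaluated when the module is imported, so it requires Python 3.10+ unless postponed evaluation of annotations is enabled. This diff does not show whether emmet-builders 0.84.8 raises its minimum Python version or relies on the future import; the usual way to keep the new style working on 3.8/3.9 is:

    # PEP 563: annotations are stored as strings and never evaluated at runtime,
    # so "dict | None" in a signature also parses on Python 3.8/3.9.
    from __future__ import annotations


    def process_item(item: dict) -> dict | None:
        return item or None
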
emmet/builders/materials/chemenv.py

@@ -1,8 +1,7 @@
- from typing import Dict, Optional
  from maggma.builders.map_builder import MapBuilder
  from maggma.core import Store
-
  from pymatgen.core.structure import Structure
+
  from emmet.core.chemenv import ChemEnvDoc
  from emmet.core.utils import jsanitize

@@ -12,7 +11,7 @@ class ChemEnvBuilder(MapBuilder):
  self,
  oxidation_states: Store,
  chemenv: Store,
- query: Optional[Dict] = None,
+ query: dict | None = None,
  **kwargs
  ):
  self.oxidation_states = oxidation_states
emmet/builders/materials/corrected_entries.py

@@ -4,7 +4,7 @@ from collections import defaultdict
  from datetime import datetime
  from itertools import chain
  from math import ceil
- from typing import Dict, Iterable, Iterator, List, Optional, Union
+ from typing import Iterable, Iterator

  from maggma.core import Builder, Store
  from maggma.utils import grouper
@@ -22,9 +22,9 @@ class CorrectedEntriesBuilder(Builder):
  self,
  materials: Store,
  corrected_entries: Store,
- oxidation_states: Optional[Store] = None,
- query: Optional[Dict] = None,
- compatibility: Optional[Union[List[Compatibility], List[None]]] = [None],
+ oxidation_states: Store | None = None,
+ query: dict | None = None,
+ compatibility: list[Compatibility] | list[None] | None = [None],
  chunk_size: int = 1000,
  **kwargs,
  ):
@@ -48,7 +48,7 @@ class CorrectedEntriesBuilder(Builder):
  self.compatibility = compatibility
  self.oxidation_states = oxidation_states
  self.chunk_size = chunk_size
- self._entries_cache: Dict[str, List[Dict]] = defaultdict(list)
+ self._entries_cache: dict[str, list[dict]] = defaultdict(list)

  if self.corrected_entries.key != "chemsys":
  warnings.warn(
@@ -93,7 +93,7 @@ class CorrectedEntriesBuilder(Builder):
  # Search index for corrected_entries
  self.corrected_entries.ensure_index("chemsys")

- def prechunk(self, number_splits: int) -> Iterable[Dict]: # pragma: no cover
+ def prechunk(self, number_splits: int) -> Iterable[dict]: # pragma: no cover
  to_process_chemsys = self._get_chemsys_to_process()

  N = ceil(len(to_process_chemsys) / number_splits)
@@ -101,7 +101,7 @@ class CorrectedEntriesBuilder(Builder):
  for chemsys_chunk in grouper(to_process_chemsys, N):
  yield {"query": {"chemsys": {"$in": list(chemsys_chunk)}}}

- def get_items(self) -> Iterator[List[Dict]]:
+ def get_items(self) -> Iterator[list[dict]]:
  """
  Gets whole chemical systems of entries to process
  """
@@ -210,7 +210,7 @@ class CorrectedEntriesBuilder(Builder):
  else:
  self.logger.info("No corrected entry items to update")

- def get_entries(self, chemsys: str) -> List[Dict]:
+ def get_entries(self, chemsys: str) -> list[dict]:
  """
  Gets entries from the materials collection for the corresponding chemical systems
  Args:
emmet/builders/materials/dielectric.py

@@ -1,6 +1,5 @@
  from math import ceil
- from typing import Dict, Optional, Iterator
-
+ from typing import Iterator

  import numpy as np
  from maggma.builders import Builder
@@ -18,7 +17,7 @@ class DielectricBuilder(Builder):
  materials: Store,
  tasks: Store,
  dielectric: Store,
- query: Optional[Dict] = None,
+ query: dict | None = None,
  **kwargs,
  ):
  self.materials = materials
@@ -33,7 +32,7 @@ class DielectricBuilder(Builder):

  super().__init__(sources=[materials, tasks], targets=[dielectric], **kwargs)

- def prechunk(self, number_splits: int) -> Iterator[Dict]: # pragma: no cover
+ def prechunk(self, number_splits: int) -> Iterator[dict]: # pragma: no cover
  """
  Prechunk method to perform chunking by the key field
  """
emmet/builders/materials/elasticity.py

@@ -18,7 +18,7 @@ The build proceeds in the below steps:
  """

  from datetime import datetime
- from typing import Any, Dict, Generator, List, Optional, Tuple, Union
+ from typing import Any, Generator

  import numpy as np
  from maggma.core import Builder, Store
@@ -40,7 +40,7 @@ class ElasticityBuilder(Builder):
  tasks: Store,
  materials: Store,
  elasticity: Store,
- query: Optional[Dict] = None,
+ query: dict | None = None,
  fitting_method: str = "finite_difference",
  **kwargs,
  ):
@@ -78,7 +78,7 @@ class ElasticityBuilder(Builder):

  def get_items(
  self,
- ) -> Generator[Tuple[str, Dict[str, str], List[Dict]], None, None]:
+ ) -> Generator[tuple[str, dict[str, str], list[dict]], None, None]:
  """
  Gets all items to process into elasticity docs.

@@ -129,8 +129,8 @@ class ElasticityBuilder(Builder):
  yield material_id, calc_types, tasks

  def process_item(
- self, item: Tuple[MPID, Dict[str, str], List[Dict]]
- ) -> Union[Dict, None]:
+ self, item: tuple[MPID, dict[str, str], list[dict]]
+ ) -> dict | None:
  """
  Process all tasks belong to the same material into an elasticity doc.

@@ -220,7 +220,7 @@ class ElasticityBuilder(Builder):

  return elasticity_doc

- def update_targets(self, items: List[Dict]):
+ def update_targets(self, items: list[dict]):
  """
  Insert the new elasticity docs into the elasticity collection.

@@ -233,10 +233,10 @@ class ElasticityBuilder(Builder):


  def filter_opt_tasks(
- tasks: List[Dict],
- calc_types: Dict[str, str],
+ tasks: list[dict],
+ calc_types: dict[str, str],
  target_calc_type: str = CalcType.GGA_Structure_Optimization,
- ) -> List[Dict]:
+ ) -> list[dict]:
  """
  Filter optimization tasks, by
  - calculation type
@@ -247,10 +247,10 @@ def filter_opt_tasks(


  def filter_deform_tasks(
- tasks: List[Dict],
- calc_types: Dict[str, str],
+ tasks: list[dict],
+ calc_types: dict[str, str],
  target_calc_type: str = CalcType.GGA_Deformation,
- ) -> List[Dict]:
+ ) -> list[dict]:
  """
  Filter deformation tasks, by
  - calculation type
@@ -271,8 +271,8 @@ def filter_deform_tasks(


  def filter_by_incar_settings(
- tasks: List[Dict], incar_settings: Optional[Dict[str, Any]] = None
- ) -> List[Dict]:
+ tasks: list[dict], incar_settings: dict[str, Any] | None = None
+ ) -> list[dict]:
  """
  Filter tasks by incar parameters.
  """
@@ -315,7 +315,7 @@ def filter_by_incar_settings(
  return selected


- def filter_opt_tasks_by_time(tasks: List[Dict], logger) -> Dict:
+ def filter_opt_tasks_by_time(tasks: list[dict], logger) -> dict:
  """
  Filter a set of tasks to select the latest completed one.

@@ -330,8 +330,8 @@ def filter_opt_tasks_by_time(tasks: List[Dict], logger) -> Dict:


  def filter_deform_tasks_by_time(
- tasks: List[Dict], deform_comp_tol: float = 1e-5, logger=None
- ) -> List[Dict]:
+ tasks: list[dict], deform_comp_tol: float = 1e-5, logger=None
+ ) -> list[dict]:
  """
  For deformation tasks with the same deformation, select the latest completed one.

@@ -364,7 +364,7 @@ def filter_deform_tasks_by_time(
  return selected


- def _filter_tasks_by_time(tasks: List[Dict], mode: str, logger) -> Dict:
+ def _filter_tasks_by_time(tasks: list[dict], mode: str, logger) -> dict:
  """
  Helper function to filter a set of tasks to select the latest completed one.
  """
@@ -388,11 +388,11 @@ def _filter_tasks_by_time(tasks: List[Dict], mode: str, logger) -> Dict:


  def select_final_opt_deform_tasks(
- opt_tasks: List[Tuple[np.ndarray, Dict]],
- deform_tasks: List[Tuple[np.ndarray, List[Dict]]],
+ opt_tasks: list[tuple[np.ndarray, dict]],
+ deform_tasks: list[tuple[np.ndarray, list[dict]]],
  logger,
  lattice_comp_tol: float = 1e-5,
- ) -> Tuple[Union[Dict, None], Union[List[Dict], None]]:
+ ) -> tuple[dict | None, list[dict] | None]:
  """
  Select the final opt task and deform tasks for fitting.

@@ -445,8 +445,8 @@ def select_final_opt_deform_tasks(


  def group_by_parent_lattice(
- tasks: List[Dict], mode: str, lattice_comp_tol: float = 1e-5
- ) -> List[Tuple[np.ndarray, List[Dict]]]:
+ tasks: list[dict], mode: str, lattice_comp_tol: float = 1e-5
+ ) -> list[tuple[np.ndarray, list[dict]]]:
  """
  Groups a set of task docs by parent lattice equivalence.

@@ -459,11 +459,11 @@ def group_by_parent_lattice(
  lattice_comp_tol: tolerance for comparing lattice equivalence.

  Returns:
- [(lattice, List[tasks])]: each tuple gives the common parent lattice of a
+ [(lattice, list[tasks])]: each tuple gives the common parent lattice of a
  list of the structures before deformation (if any), and the list tasks
  from which the structures are taken.
  """
- docs_by_lattice: List[Tuple[np.ndarray, List[Dict]]] = []
+ docs_by_lattice: list[tuple[np.ndarray, list[dict]]] = []

  for doc in tasks:
  sim_lattice = get(doc, "output.structure.lattice.matrix")
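
The elasticity hunks only touch signatures, so the shape of `group_by_parent_lattice`'s return value, `list[tuple[np.ndarray, list[dict]]]`, is easiest to see in a simplified illustration. The sketch below groups task documents whose lattice matrices agree within `lattice_comp_tol`; it is an illustrative reimplementation of the docstring's description, not emmet's actual algorithm (which also handles the `mode` argument and deformation parents):

    import numpy as np


    def group_by_lattice(
        tasks: list[dict], lattice_comp_tol: float = 1e-5
    ) -> list[tuple[np.ndarray, list[dict]]]:
        # Each entry pairs a reference 3x3 lattice with the task docs that share it.
        groups: list[tuple[np.ndarray, list[dict]]] = []
        for doc in tasks:
            lattice = np.asarray(doc["output"]["structure"]["lattice"]["matrix"])
            for ref_lattice, members in groups:
                if np.allclose(lattice, ref_lattice, atol=lattice_comp_tol):
                    members.append(doc)
                    break
            else:
                groups.append((lattice, [doc]))
        return groups
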
emmet/builders/materials/electrodes.py

@@ -4,7 +4,7 @@ from datetime import datetime
  from functools import lru_cache
  from itertools import chain
  from math import ceil
- from typing import Any, Dict, Iterator, List, Optional
+ from typing import Any, Iterator

  from maggma.builders import Builder
  from maggma.stores import MongoStore
@@ -85,7 +85,7 @@ class StructureGroupBuilder(Builder):
  materials: MongoStore,
  sgroups: MongoStore,
  working_ion: str,
- query: Optional[dict] = None,
+ query: dict | None = None,
  ltol: float = default_build_settings.LTOL,
  stol: float = default_build_settings.STOL,
  angle_tol: float = default_build_settings.ANGLE_TOL,
@@ -115,15 +115,15 @@ class StructureGroupBuilder(Builder):
  self.check_newer = check_newer
  self.chunk_size = chunk_size

- self.query[
- "deprecated"
- ] = False # Ensure only non-deprecated materials are chosen
+ self.query["deprecated"] = (
+ False # Ensure only non-deprecated materials are chosen
+ )

  super().__init__(
  sources=[materials], targets=[sgroups], chunk_size=chunk_size, **kwargs
  )

- def prechunk(self, number_splits: int) -> Iterator[Dict]: # pragma: no cover
+ def prechunk(self, number_splits: int) -> Iterator[dict]: # pragma: no cover
  """
  Prechunk method to perform chunking by the key field
  """
@@ -268,7 +268,7 @@ class StructureGroupBuilder(Builder):
  else:
  yield {"chemsys": chemsys, "materials": all_mats_in_chemsys}

- def update_targets(self, items: List):
+ def update_targets(self, items: list):
  items = list(filter(None, chain.from_iterable(items)))
  if len(items) > 0:
  self.logger.info("Updating {} sgroups documents".format(len(items)))
@@ -319,7 +319,7 @@ class InsertionElectrodeBuilder(Builder):
  grouped_materials: MongoStore,
  thermo: MongoStore,
  insertion_electrode: MongoStore,
- query: Optional[Dict] = None,
+ query: dict | None = None,
  strip_structures: bool = False,
  **kwargs,
  ):
@@ -335,7 +335,7 @@ class InsertionElectrodeBuilder(Builder):
  **kwargs,
  )

- def prechunk(self, number_splits: int) -> Iterator[Dict]:
+ def prechunk(self, number_splits: int) -> Iterator[dict]:
  """
  Prechunk method to perform chunking by the key field
  """
@@ -418,7 +418,7 @@ class InsertionElectrodeBuilder(Builder):
  else:
  yield None

- def process_item(self, item) -> Dict:
+ def process_item(self, item) -> dict:
  """
  - Add volume information to each entry to create the insertion electrode document
  - Add the host structure
@@ -460,14 +460,14 @@ class InsertionElectrodeBuilder(Builder):
  # {"failed_reason": "unable to create InsertionElectrode document"}
  return jsanitize(ie.model_dump())

- def update_targets(self, items: List):
+ def update_targets(self, items: list):
  items = list(filter(None, items))
  if len(items) > 0:
  self.logger.info("Updating {} battery documents".format(len(items)))
  for struct_group_dict in items:
- struct_group_dict[
- self.grouped_materials.last_updated_field
- ] = datetime.utcnow()
+ struct_group_dict[self.grouped_materials.last_updated_field] = (
+ datetime.utcnow()
+ )
  self.insertion_electrode.update(docs=items, key=["battery_id"])
  else:
  self.logger.info("No items to update")
@@ -480,7 +480,7 @@ class ConversionElectrodeBuilder(Builder):
  conversion_electrode_store: MongoStore,
  working_ion: str,
  thermo_type: str,
- query: Optional[dict] = None,
+ query: dict | None = None,
  **kwargs,
  ):
  self.phase_diagram_store = phase_diagram_store
@@ -499,7 +499,7 @@ class ConversionElectrodeBuilder(Builder):
  **kwargs,
  )

- def prechunk(self, number_splits: int) -> Iterator[Dict]:
+ def prechunk(self, number_splits: int) -> Iterator[dict]:
  """
  Prechunk method to perform chunking by the key field
  """
@@ -532,7 +532,7 @@ class ConversionElectrodeBuilder(Builder):
  for phase_diagram_doc in self.phase_diagram_store.query(criteria=q):
  yield phase_diagram_doc

- def process_item(self, item) -> Dict:
+ def process_item(self, item) -> dict:
  """
  - For each phase diagram doc, find all possible conversion electrodes and create conversion electrode docs
  """
@@ -611,7 +611,7 @@ class ConversionElectrodeBuilder(Builder):

  return new_docs # type: ignore

- def update_targets(self, items: List):
+ def update_targets(self, items: list):
  combined_items = []
  for _items in items:
  _items = list(filter(None, _items))
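
The re-wrapped assignments in the electrodes hunks (`self.query["deprecated"] = (...)` and the `last_updated_field` stamp) are formatting-only changes produced by a newer code formatter, likely black; behaviour is unchanged. For reference, the recurring `update_targets` pattern those hunks belong to looks roughly like the sketch below (the store call and `battery_id` key appear in the diff, but the function itself is a simplified stand-in, not the builder's actual method):

    from datetime import datetime


    def update_targets(store, items: list[dict | None], last_updated_field: str) -> None:
        # Drop empty results, stamp an update time, and write to the target store.
        docs = [doc for doc in items if doc]
        if not docs:
            print("No items to update")
            return
        for doc in docs:
            doc[last_updated_field] = datetime.utcnow()
        store.update(docs=docs, key=["battery_id"])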