jinns 1.3.0__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. jinns/__init__.py +17 -7
  2. jinns/data/_AbstractDataGenerator.py +19 -0
  3. jinns/data/_Batchs.py +31 -12
  4. jinns/data/_CubicMeshPDENonStatio.py +431 -0
  5. jinns/data/_CubicMeshPDEStatio.py +464 -0
  6. jinns/data/_DataGeneratorODE.py +187 -0
  7. jinns/data/_DataGeneratorObservations.py +189 -0
  8. jinns/data/_DataGeneratorParameter.py +206 -0
  9. jinns/data/__init__.py +19 -9
  10. jinns/data/_utils.py +149 -0
  11. jinns/experimental/__init__.py +9 -0
  12. jinns/loss/_DynamicLoss.py +114 -187
  13. jinns/loss/_DynamicLossAbstract.py +74 -69
  14. jinns/loss/_LossODE.py +132 -348
  15. jinns/loss/_LossPDE.py +262 -549
  16. jinns/loss/__init__.py +32 -6
  17. jinns/loss/_abstract_loss.py +128 -0
  18. jinns/loss/_boundary_conditions.py +20 -19
  19. jinns/loss/_loss_components.py +43 -0
  20. jinns/loss/_loss_utils.py +85 -179
  21. jinns/loss/_loss_weight_updates.py +202 -0
  22. jinns/loss/_loss_weights.py +64 -40
  23. jinns/loss/_operators.py +84 -74
  24. jinns/nn/__init__.py +15 -0
  25. jinns/nn/_abstract_pinn.py +22 -0
  26. jinns/nn/_hyperpinn.py +94 -57
  27. jinns/nn/_mlp.py +50 -25
  28. jinns/nn/_pinn.py +33 -19
  29. jinns/nn/_ppinn.py +70 -34
  30. jinns/nn/_save_load.py +21 -51
  31. jinns/nn/_spinn.py +33 -16
  32. jinns/nn/_spinn_mlp.py +28 -22
  33. jinns/nn/_utils.py +38 -0
  34. jinns/parameters/__init__.py +8 -1
  35. jinns/parameters/_derivative_keys.py +116 -177
  36. jinns/parameters/_params.py +18 -46
  37. jinns/plot/__init__.py +2 -0
  38. jinns/plot/_plot.py +35 -34
  39. jinns/solver/_rar.py +80 -63
  40. jinns/solver/_solve.py +207 -92
  41. jinns/solver/_utils.py +4 -6
  42. jinns/utils/__init__.py +2 -0
  43. jinns/utils/_containers.py +16 -10
  44. jinns/utils/_types.py +20 -54
  45. jinns/utils/_utils.py +4 -11
  46. jinns/validation/__init__.py +2 -0
  47. jinns/validation/_validation.py +20 -19
  48. {jinns-1.3.0.dist-info → jinns-1.5.0.dist-info}/METADATA +8 -4
  49. jinns-1.5.0.dist-info/RECORD +55 -0
  50. {jinns-1.3.0.dist-info → jinns-1.5.0.dist-info}/WHEEL +1 -1
  51. jinns/data/_DataGenerators.py +0 -1634
  52. jinns-1.3.0.dist-info/RECORD +0 -44
  53. {jinns-1.3.0.dist-info → jinns-1.5.0.dist-info/licenses}/AUTHORS +0 -0
  54. {jinns-1.3.0.dist-info → jinns-1.5.0.dist-info/licenses}/LICENSE +0 -0
  55. {jinns-1.3.0.dist-info → jinns-1.5.0.dist-info}/top_level.txt +0 -0
jinns/utils/_types.py CHANGED
@@ -1,65 +1,31 @@
- # pragma: exclude file
  from __future__ import (
      annotations,
  )  # https://docs.python.org/3/library/typing.html#constant

- from typing import TypeAlias, TYPE_CHECKING, NewType
- from jaxtyping import Int
+ from typing import TypeAlias, TYPE_CHECKING, Callable
+ from jaxtyping import Float, Array

  if TYPE_CHECKING:
-     from jinns.loss._LossPDE import (
-         LossPDEStatio,
-         LossPDENonStatio,
-         SystemLossPDE,
+     from jinns.data._Batchs import ODEBatch, PDEStatioBatch, PDENonStatioBatch
+     from jinns.loss._LossODE import LossODE
+     from jinns.loss._LossPDE import LossPDEStatio, LossPDENonStatio
+     from jinns.loss._loss_components import (
+         ODEComponents,
+         PDEStatioComponents,
+         PDENonStatioComponents,
      )

-     from jinns.loss._LossODE import LossODE, SystemLossODE
-     from jinns.parameters._params import Params, ParamsDict
-     from jinns.data._DataGenerators import (
-         DataGeneratorODE,
-         CubicMeshPDEStatio,
-         CubicMeshPDENonStatio,
-         DataGeneratorObservations,
-         DataGeneratorParameter,
-         DataGeneratorObservationsMultiPINNs,
-     )
-
-     from jinns.loss import DynamicLoss
-     from jinns.data._Batchs import *
-     from jinns.nn._pinn import PINN
-     from jinns.nn._hyperpinn import HyperPINN
-     from jinns.nn._spinn_mlp import SPINN
-     from jinns.utils._containers import *
-     from jinns.validation._validation import AbstractValidationModule
-
-     AnyLoss: TypeAlias = (
-         LossPDEStatio | LossPDENonStatio | SystemLossPDE | LossODE | SystemLossODE
-     )
-
-     AnyParams: TypeAlias = Params | ParamsDict
+     # Here we define types available for the whole package
+     BoundaryConditionFun: TypeAlias = Callable[
+         [Float[Array, " dim"] | Float[Array, " dim + 1"]], Float[Array, " dim_solution"]
+     ]

-     AnyDataGenerator: TypeAlias = (
-         DataGeneratorODE | CubicMeshPDEStatio | CubicMeshPDENonStatio
-     )
-
-     AnyPINN: TypeAlias = PINN | HyperPINN | SPINN
-
-     AnyBatch: TypeAlias = ODEBatch | PDEStatioBatch | PDENonStatioBatch
-     rar_operands = NewType(
-         "rar_operands", tuple[AnyLoss, AnyParams, AnyDataGenerator, Int]
-     )
+     AnyBatch: TypeAlias = ODEBatch | PDENonStatioBatch | PDEStatioBatch
+     AnyLoss: TypeAlias = LossODE | LossPDEStatio | LossPDENonStatio

-     main_carry = NewType(
-         "main_carry",
-         tuple[
-             Int,
-             AnyLoss,
-             OptimizationContainer,
-             OptimizationExtraContainer,
-             DataGeneratorContainer,
-             AbstractValidationModule,
-             LossContainer,
-             StoredObjectContainer,
-             Float[Array, "n_iter"],
-         ],
+     # here we would like a type from 3.12
+     # (https://typing.python.org/en/latest/spec/aliases.html#type-statement) so
+     # that we could have a generic AnyLossComponents
+     AnyLossComponents: TypeAlias = (
+         ODEComponents | PDEStatioComponents | PDENonStatioComponents
      )
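For orientation, here is a minimal sketch of a callable matching the new `BoundaryConditionFun` alias above, assuming a homogeneous Dirichlet condition for a scalar solution; the function name is hypothetical and not part of the package.

```python
# Hypothetical example (not shipped with jinns): a boundary-condition callable
# matching BoundaryConditionFun, mapping a point of shape (dim,) -- or
# (dim + 1,) when time is prepended -- to a boundary value of shape
# (dim_solution,).
import jax.numpy as jnp
from jaxtyping import Array, Float


def dirichlet_zero(x: Float[Array, " dim"]) -> Float[Array, " dim_solution"]:
    # Homogeneous Dirichlet condition for a scalar solution (dim_solution = 1)
    return jnp.zeros((1,))
```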
jinns/utils/_utils.py CHANGED
@@ -2,20 +2,13 @@
  Implements various utility functions
  """

- from math import prod
  import warnings
  import jax
  import jax.numpy as jnp
- from jaxtyping import PyTree, Array
+ from jaxtyping import PyTree, Array, Bool

- from jinns.data._DataGenerators import (
-     DataGeneratorODE,
-     CubicMeshPDEStatio,
-     CubicMeshPDENonStatio,
- )

-
- def _check_nan_in_pytree(pytree: PyTree) -> bool:
+ def _check_nan_in_pytree(pytree: PyTree) -> Bool[Array, " "]:
      """
      Check if there is a NaN value anywhere is the pytree

@@ -55,7 +48,7 @@ def get_grid(in_array: Array) -> Array:


  def _check_shape_and_type(
-     r: Array | int, expected_shape: tuple, cause: str = "", binop: str = ""
+     r: Array | int | float, expected_shape: tuple, cause: str = "", binop: str = ""
  ) -> Array | float:
      """
      Ensures float type and correct shapes for broadcasting when performing a
@@ -90,7 +83,7 @@ def _check_shape_and_type(


  def _subtract_with_check(
-     a: Array | int, b: Array | int, cause: str = ""
+     a: Array | int | float, b: Array, cause: str = ""
  ) -> Array | float:
      a = _check_shape_and_type(a, b.shape, cause=cause, binop="-")
      return a - b
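The new `Bool[Array, " "]` return type above reflects that the check is a scalar JAX boolean rather than a Python `bool`. As a generic illustration of how such a PyTree-wide NaN check can be written with `jax.tree_util` (a sketch, not necessarily the jinns implementation):

```python
# Generic sketch of a jit-friendly PyTree-wide NaN check returning a scalar
# boolean Array, in the spirit of the Bool[Array, " "] signature above.
import jax
import jax.numpy as jnp
from jaxtyping import Array, Bool, PyTree


def has_nan(pytree: PyTree) -> Bool[Array, " "]:
    # One scalar boolean per leaf: does this leaf contain any NaN?
    leaves_have_nan = jax.tree_util.tree_map(
        lambda leaf: jnp.any(jnp.isnan(leaf)), pytree
    )
    # Reduce over all leaves with logical OR
    return jax.tree_util.tree_reduce(jnp.logical_or, leaves_have_nan, jnp.array(False))
```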
jinns/validation/__init__.py CHANGED
@@ -1 +1,3 @@
  from ._validation import AbstractValidationModule, ValidationLoss
+
+ __all__ = ["AbstractValidationModule", "ValidationLoss"]
jinns/validation/_validation.py CHANGED
@@ -7,19 +7,23 @@ from __future__ import (
  )  # https://docs.python.org/3/library/typing.html#constant

  import abc
- from typing import TYPE_CHECKING, Union
+ from typing import TYPE_CHECKING
  import equinox as eqx
  import jax
  import jax.numpy as jnp
- from jaxtyping import Array
+ from jaxtyping import Array, Float

- from jinns.data._DataGenerators import (
+ from jinns.data._utils import (
      append_obs_batch,
      append_param_batch,
  )

  if TYPE_CHECKING:
-     from jinns.utils._types import *
+     from jinns.data._DataGeneratorParameter import DataGeneratorParameter
+     from jinns.data._DataGeneratorObservations import DataGeneratorObservations
+     from jinns.data._AbstractDataGenerator import AbstractDataGenerator
+     from jinns.parameters._params import Params
+     from jinns.loss._abstract_loss import AbstractLoss

  # Using eqx Module for the DataClass + Pytree inheritance
  # Abstract class and abstract/final pattern is used
@@ -40,8 +44,8 @@ class AbstractValidationModule(eqx.Module):

      @abc.abstractmethod
      def __call__(
-         self, params: Params | ParamsDict
-     ) -> tuple["AbstractValidationModule", bool, Array, bool]:
+         self, params: Params[Array]
+     ) -> tuple[AbstractValidationModule, bool, Array, Params[Array]]:
          raise NotImplementedError


@@ -52,24 +56,20 @@ class ValidationLoss(AbstractValidationModule):
      for more complicated validation strategy.
      """

-     loss: AnyLoss = eqx.field(kw_only=True)  # NOTE that
-     # there used to be a deepcopy here which has been suppressed. 1) No need
-     # because loss are now eqx.Module (immutable) so no risk of in-place
-     # modification. 2) deepcopy is buggy with equinox, InitVar etc. (see issue
-     # #857 on equinox github)
-     validation_data: Union[AnyDataGenerator] = eqx.field(kw_only=True)
-     validation_param_data: Union[DataGeneratorParameter, None] = eqx.field(
+     loss: AbstractLoss = eqx.field(kw_only=True)
+     validation_data: AbstractDataGenerator = eqx.field(kw_only=True)
+     validation_param_data: DataGeneratorParameter = eqx.field(
+         kw_only=True, default=None
+     )
+     validation_obs_data: DataGeneratorObservations | None = eqx.field(
          kw_only=True, default=None
      )
-     validation_obs_data: Union[
-         DataGeneratorObservations, DataGeneratorObservationsMultiPINNs, None
-     ] = eqx.field(kw_only=True, default=None)
      call_every: int = eqx.field(kw_only=True, default=250)  # concrete typing
      early_stopping: bool = eqx.field(
          kw_only=True, default=True
      )  # globally control if early stopping happens

-     patience: Union[int] = eqx.field(kw_only=True, default=10)
+     patience: int = eqx.field(kw_only=True, default=10)
      best_val_loss: Array = eqx.field(
          converter=jnp.asarray, default_factory=lambda: jnp.array(jnp.inf), kw_only=True
      )
@@ -79,10 +79,11 @@ class ValidationLoss(AbstractValidationModule):
      )

      def __call__(
-         self, params: AnyParams
-     ) -> tuple["ValidationLoss", bool, float, AnyParams]:
+         self, params: Params[Array]
+     ) -> tuple[ValidationLoss, bool, Float[Array, " "], Params[Array]]:
          # do in-place mutation

+         # pylint / pyright complains below when using the self attributes see: https://github.com/patrick-kidger/equinox/issues/1013
          validation_data, val_batch = self.validation_data.get_batch()
          if self.validation_param_data is not None:
              validation_param_data, param_batch = self.validation_param_data.get_batch()
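The removed comment above notes that losses are now immutable `eqx.Module`s, and the `# do in-place mutation` comment refers to the functional update style this implies. As background (a generic equinox pattern, not jinns code; the `Tracker` class is illustrative), "updating" a field of an immutable module is done with `eqx.tree_at`:

```python
# Generic equinox pattern: modules are immutable pytrees, so a field such as a
# running best validation loss is "updated" functionally, returning a new module.
import equinox as eqx
import jax.numpy as jnp


class Tracker(eqx.Module):
    best_val_loss: jnp.ndarray = eqx.field(default_factory=lambda: jnp.array(jnp.inf))


tracker = Tracker()
# Returns a new Tracker with the replaced field; `tracker` itself is unchanged.
new_tracker = eqx.tree_at(lambda t: t.best_val_loss, tracker, jnp.array(0.42))
```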
{jinns-1.3.0.dist-info → jinns-1.5.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: jinns
- Version: 1.3.0
+ Version: 1.5.0
  Summary: Physics Informed Neural Network with JAX
  Author-email: Hugo Gangloff <hugo.gangloff@inrae.fr>, Nicolas Jouvin <nicolas.jouvin@inrae.fr>
  Maintainer-email: Hugo Gangloff <hugo.gangloff@inrae.fr>, Nicolas Jouvin <nicolas.jouvin@inrae.fr>
@@ -25,6 +25,7 @@ Requires-Dist: matplotlib
  Provides-Extra: notebook
  Requires-Dist: jupyter; extra == "notebook"
  Requires-Dist: seaborn; extra == "notebook"
+ Dynamic: license-file

  jinns
  =====
@@ -53,6 +54,9 @@ It can also be used for forward problems and hybrid-modeling.

  - [Hyper PINNs](https://arxiv.org/pdf/2111.01008.pdf): useful for meta-modeling

+ - Other
+   - Adaptive loss weights are now implemented: SoftAdapt, LRAnnealing and ReLoBRaLo are available, and users can implement their own strategy. See the [tutorial](https://mia_jinns.gitlab.io/jinns/Notebooks/Tutorials/implementing_your_own_PDE_problem/)
+

  - **Get started**: check out our various notebooks on the [documentation](https://mia_jinns.gitlab.io/jinns/index.html).

@@ -99,7 +103,7 @@ Here are the contributors guidelines:
  pip install -e .
  ```

- 3. Install pre-commit and run it.
+ 3. Install pre-commit and run it. Our pre-commit hooks consist of `ruff format` and `ruff check`; you can install `ruff` with `pip install ruff`. We also highly recommend checking type hints with `pyright`, even though the pipeline currently enforces no type-checking rule.

  ```bash
  pip install pre-commit
@@ -112,7 +116,7 @@ pre-commit install

  Don't hesitate to contribute and get your name on the list here !

- **List of contributors:** Hugo Gangloff, Nicolas Jouvin, Lucia Clarotto, Inass Soukarieh
+ **List of contributors:** Hugo Gangloff, Nicolas Jouvin, Lucia Clarotto, Inass Soukarieh, Mohamed Badi

  # Cite us

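The README addition above mentions adaptive loss-weight strategies (SoftAdapt, LRAnnealing, ReLoBRaLo). As a rough illustration of the SoftAdapt idea only (a generic sketch, not the jinns API; the function name and `beta` temperature are illustrative), loss-term weights can be computed as a softmax over the recent rate of change of each loss component:

```python
import jax
import jax.numpy as jnp


def softadapt_weights(prev_losses, curr_losses, beta: float = 0.1):
    # Rate of change of each loss component between two optimisation steps;
    # components that decrease slowly (or increase) receive larger weights.
    rates = jnp.asarray(curr_losses) - jnp.asarray(prev_losses)
    # Softmax turns the rates into positive weights that sum to one.
    return jax.nn.softmax(beta * rates)
```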
jinns-1.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,55 @@
+ jinns/__init__.py,sha256=hyh3QKO2cQGK5cmvFYP0MrXb-tK_DM2T9CwLwO-sEX8,500
+ jinns/data/_AbstractDataGenerator.py,sha256=O61TBOyeOFKwf1xqKzFD4KwCWRDnm2XgyJ-kKY9fmB4,557
+ jinns/data/_Batchs.py,sha256=-DlD6Qag3zs5QbKtKAOvOzV7JOpNOqAm_P8cwo1dIZg,1574
+ jinns/data/_CubicMeshPDENonStatio.py,sha256=c_8czJpxSoEvgZ8LDpL2sqtF9dcW4ELNO4juEFMOxog,16400
+ jinns/data/_CubicMeshPDEStatio.py,sha256=stZ0Kbb7_VwFmWUSPs0P6a6qRj2Tu67p7sxEfb1Ajks,17865
+ jinns/data/_DataGeneratorODE.py,sha256=5RzUbQFEsooAZsocDw4wRgA_w5lJmDMuY4M6u79K-1c,7260
+ jinns/data/_DataGeneratorObservations.py,sha256=jknepLsJatSJHFq5lLMD-fFHkPGj5q286LEjE-vH24k,7738
+ jinns/data/_DataGeneratorParameter.py,sha256=IedX3jcOj7ZDW_18IAcRR75KVzQzo85z9SICIKDBJl4,8539
+ jinns/data/__init__.py,sha256=4b4eVsoGHV89m2kGDiAOHsrGialZQ6j5ja575qWwQHs,677
+ jinns/data/_utils.py,sha256=XxaLIg_HIgcB7ACBIhTpHbCT1HXKcDaY1NABncAYX1c,5223
+ jinns/experimental/__init__.py,sha256=DT9e57zbjfzPeRnXemGUqnGd--MhV77FspChT0z4YrE,410
+ jinns/experimental/_diffrax_solver.py,sha256=upMr3kTTNrxEiSUO_oLvCXcjS9lPxSjvbB81h3qlhaU,6813
+ jinns/loss/_DynamicLoss.py,sha256=4mb7OCP-cGZ_mG2MQ-AniddDcuBT78p4bQI7rZpwte4,22722
+ jinns/loss/_DynamicLossAbstract.py,sha256=QhHRgvtcT-ifHlOxTyXbjDtHk9UfPN2Si8s3v9nEQm4,12672
+ jinns/loss/_LossODE.py,sha256=DeejnU2ytgrOxUnwuVkQDWWRKJAgNQyjacTx-jT0xPA,13796
+ jinns/loss/_LossPDE.py,sha256=ycjWJ99SuXe9DV5nROSWyq--xcp2JJ2PGWxsdWyZZog,36942
+ jinns/loss/__init__.py,sha256=z5xYgBipNFf66__5BqQc6R_8r4F6A3TXL60YjsM8Osk,1287
+ jinns/loss/_abstract_loss.py,sha256=DMxn0SQe9PW-pq3p5Oqvb0YK3_ulLDOnoIXzK219GV4,4576
+ jinns/loss/_boundary_conditions.py,sha256=9HGw1cGLfmEilP4V4B2T0zl0YP1kNtrtXVLQNiBmWgc,12464
+ jinns/loss/_loss_components.py,sha256=MMzaGlaRqESPjRzT0j0WU9HAqWQSbIXpGAqM1xQCZHw,1106
+ jinns/loss/_loss_utils.py,sha256=R6PffBAtg6z9M8x1DFXmmqZpC095b9gZ_DB1phQxSuY,11168
+ jinns/loss/_loss_weight_updates.py,sha256=9Bwouh7shLyc_wrdzN6CYL0ZuQH81uEs-L6wCeiYFx8,6817
+ jinns/loss/_loss_weights.py,sha256=kII5WddORgeommFTudT3CSvhICpo6nSe47LclUgu_78,2429
+ jinns/loss/_operators.py,sha256=Ds5yRH7hu-jaGRp7PYbt821BgYuEvgWHufWhYgdMjw0,22909
+ jinns/nn/__init__.py,sha256=gwE48oqB_FsSIE-hUvCLz0jPaqX350LBxzH6ueFWYk4,456
+ jinns/nn/_abstract_pinn.py,sha256=JUFjlV_nyheZw-max_tAUgFh6SspIbD5we_4bn70V6k,671
+ jinns/nn/_hyperpinn.py,sha256=hF7HRLMMVBPT9CTQC2DjpDRcQDJCrT9cAj8wfApT_WE,19412
+ jinns/nn/_mlp.py,sha256=Xmi-mG6uakN67R2S2UsBazdXIJVaGsD2B6TeJM1QjGY,8881
+ jinns/nn/_pinn.py,sha256=4pvgUPQdQaO3cPBuEU7W4UaLV7lodqcR3pVR1sC0ni4,8774
+ jinns/nn/_ppinn.py,sha256=LtjGQaLozdA4Kwutn8Pyerbu9yOc0t3_b701yfMb1ac,10392
+ jinns/nn/_save_load.py,sha256=UqVy2oBzvIeBy6XB9tb61x3-x8i4dNCXJHC5_-bko-I,7477
+ jinns/nn/_spinn.py,sha256=u5YG2FXcrg8p_uS2QFGmWoeFXYLxXnyV2e6BUHpo4xk,4774
+ jinns/nn/_spinn_mlp.py,sha256=uCL454sF0Tfj7KT-fdXPnvKJYRQOuq60N0r2b2VAB8Q,7606
+ jinns/nn/_utils.py,sha256=9UXz73iHKHVQYPBPIEitrHYJzJ14dspRwPfLA8avx0c,1120
+ jinns/parameters/__init__.py,sha256=O0n7y6R1LRmFzzugCxMFCMS2pgsuWSh-XHjfFViN_eg,265
+ jinns/parameters/_derivative_keys.py,sha256=YlLDX49PfYhr2Tj--t3praiD8JOUTZU6PTmjbNZsbMc,19173
+ jinns/parameters/_params.py,sha256=qn4IGMJhD9lDBqOWmGEMy4gXt5a6KHfirkYZwHO7Vwk,2633
+ jinns/plot/__init__.py,sha256=KPHX0Um4FbciZO1yD8kjZbkaT8tT964Y6SE2xCQ4eDU,135
+ jinns/plot/_plot.py,sha256=-A5auNeElaz2_8UzVQJQE4143ZFg0zgMjStU7kwttEY,11565
+ jinns/solver/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ jinns/solver/_rar.py,sha256=vSVTnCGCusI1vTZCvIkP2_G8we44G_42yZHx2sOK9DE,10291
+ jinns/solver/_solve.py,sha256=oVHnuc7Z0V2-ZYgZtCx7xdFd7TpB9w-6AwafX-kgBE4,28379
+ jinns/solver/_utils.py,sha256=sM2UbVzYyjw24l4QSIR3IlynJTPGD_S08r8v0lXMxA8,5876
+ jinns/utils/__init__.py,sha256=OEYWLCw8pKE7xoQREbd6SHvCjuw2QZHuVA6YwDcsBE8,53
+ jinns/utils/_containers.py,sha256=YShcrPKfj5_I9mn3NMAS4Ea9MhhyL7fjv0e3MRbITHg,1837
+ jinns/utils/_types.py,sha256=jl_91HtcrtE6UHbdTrRI8iUmr2kBUL0oP0UNIKhAXYw,1170
+ jinns/utils/_utils.py,sha256=M7NXX9ok-BkH5o_xo74PB1_Cc8XiDipSl51rq82dTH4,2821
+ jinns/validation/__init__.py,sha256=FTyUO-v1b8Tv-FDSQsntrH7zl9E0ENexqKMT_dFRkYo,124
+ jinns/validation/_validation.py,sha256=8p6sMKiBAvA6JNm65hjkMj0997LJ0BkyCREEh0AnPVE,4803
+ jinns-1.5.0.dist-info/licenses/AUTHORS,sha256=7NwCj9nU-HNG1asvy4qhQ2w7oZHrn-Lk5_wK_Ve7a3M,80
+ jinns-1.5.0.dist-info/licenses/LICENSE,sha256=BIAkGtXB59Q_BG8f6_OqtQ1BHPv60ggE9mpXJYz2dRM,11337
+ jinns-1.5.0.dist-info/METADATA,sha256=jEp__DP39B1HiTYVhtVcWKPmzS22kSUD6jNVSmHFh8g,5314
+ jinns-1.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ jinns-1.5.0.dist-info/top_level.txt,sha256=RXbkr2hzy8WBE8aiRyrJYFqn3JeMJIhMdybLjjLTB9c,6
+ jinns-1.5.0.dist-info/RECORD,,
{jinns-1.3.0.dist-info → jinns-1.5.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.2)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any