flipcosmo 1.0.0__py3-none-any.whl → 1.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. docs/conf.py +154 -0
  2. flip/__init__.py +4 -11
  3. flip/covariance/__init__.py +7 -8
  4. flip/covariance/analytical/__init__.py +11 -0
  5. flip/covariance/{adamsblake17plane → analytical/adamsblake17}/coefficients.py +1 -1
  6. flip/covariance/{adamsblake17plane → analytical/adamsblake17}/fisher_terms.py +1 -1
  7. flip/covariance/{adamsblake17 → analytical/adamsblake17}/flip_terms.py +0 -1
  8. flip/covariance/{adamsblake17 → analytical/adamsblake17plane}/coefficients.py +1 -1
  9. flip/covariance/{adamsblake17 → analytical/adamsblake17plane}/fisher_terms.py +1 -1
  10. flip/covariance/{adamsblake17plane → analytical/adamsblake17plane}/flip_terms.py +0 -1
  11. flip/covariance/{adamsblake17plane → analytical/adamsblake17plane}/generator.py +103 -19
  12. flip/covariance/{adamsblake20 → analytical/adamsblake20}/coefficients.py +1 -1
  13. flip/covariance/{adamsblake20 → analytical/adamsblake20}/fisher_terms.py +1 -1
  14. flip/covariance/{adamsblake20 → analytical/adamsblake20}/flip_terms.py +0 -1
  15. flip/covariance/{carreres23 → analytical/carreres23}/coefficients.py +1 -4
  16. flip/covariance/{ravouxnoanchor25 → analytical/carreres23}/fisher_terms.py +1 -1
  17. flip/covariance/{carreres23 → analytical/carreres23}/flip_terms.py +0 -1
  18. flip/covariance/analytical/carreres23/generator.py +198 -0
  19. flip/covariance/analytical/genericzdep/__init__.py +5 -0
  20. flip/covariance/analytical/genericzdep/coefficients.py +53 -0
  21. flip/covariance/analytical/genericzdep/flip_terms.py +99 -0
  22. flip/covariance/{lai22 → analytical/lai22}/coefficients.py +2 -3
  23. flip/covariance/{lai22 → analytical/lai22}/fisher_terms.py +1 -1
  24. flip/covariance/{lai22 → analytical/lai22}/flip_terms.py +0 -1
  25. flip/covariance/{lai22 → analytical/lai22}/generator.py +263 -58
  26. flip/covariance/{lai22 → analytical/lai22}/symbolic.py +55 -19
  27. flip/covariance/{ravouxcarreres → analytical/ravouxcarreres}/coefficients.py +1 -1
  28. flip/covariance/{ravouxcarreres → analytical/ravouxcarreres}/fisher_terms.py +1 -1
  29. flip/covariance/{ravouxcarreres → analytical/ravouxcarreres}/flip_terms.py +0 -1
  30. flip/covariance/{ravouxnoanchor25 → analytical/ravouxnoanchor25}/coefficients.py +3 -2
  31. flip/covariance/{carreres23 → analytical/ravouxnoanchor25}/fisher_terms.py +1 -1
  32. flip/covariance/{ravouxnoanchor25 → analytical/ravouxnoanchor25}/flip_terms.py +0 -9
  33. flip/covariance/{rcrk24 → analytical/rcrk24}/coefficients.py +6 -6
  34. flip/covariance/{rcrk24 → analytical/rcrk24}/fisher_terms.py +7 -9
  35. flip/covariance/{rcrk24 → analytical/rcrk24}/flip_terms.py +0 -8
  36. flip/covariance/contraction.py +82 -40
  37. flip/covariance/cov_utils.py +89 -81
  38. flip/covariance/covariance.py +172 -141
  39. flip/covariance/emulators/__init__.py +1 -1
  40. flip/covariance/emulators/generator.py +73 -3
  41. flip/covariance/emulators/gpmatrix.py +40 -1
  42. flip/covariance/emulators/nnmatrix.py +57 -1
  43. flip/covariance/emulators/skgpmatrix.py +125 -0
  44. flip/covariance/fisher.py +307 -0
  45. flip/{fit_utils.py → covariance/fit_utils.py} +185 -10
  46. flip/{fitter.py → covariance/fitter.py} +151 -125
  47. flip/covariance/generator.py +82 -106
  48. flip/{likelihood.py → covariance/likelihood.py} +286 -64
  49. flip/{plot_utils.py → covariance/plot_utils.py} +79 -4
  50. flip/covariance/symbolic.py +89 -44
  51. flip/data/__init__.py +1 -1
  52. flip/data/data_density.parquet +0 -0
  53. flip/data/data_velocity.parquet +0 -0
  54. flip/data/{grid_window_m.parquet → data_window_density.parquet} +0 -0
  55. flip/{gridding.py → data/gridding.py} +125 -130
  56. flip/data/load_data_test.py +102 -0
  57. flip/data/power_spectrum_mm.txt +2 -2
  58. flip/data/power_spectrum_mt.txt +2 -2
  59. flip/data/power_spectrum_tt.txt +2 -2
  60. flip/data/test_covariance_reference_values.json +145 -0
  61. flip/data/test_e2e_reference_values.json +14 -0
  62. flip/data_vector/basic.py +118 -101
  63. flip/data_vector/cosmo_utils.py +18 -0
  64. flip/data_vector/galaxypv_vectors.py +58 -94
  65. flip/data_vector/snia_vectors.py +60 -3
  66. flip/data_vector/vector_utils.py +47 -1
  67. flip/power_spectra/class_engine.py +36 -1
  68. flip/power_spectra/cosmoprimo_engine.py +37 -2
  69. flip/power_spectra/generator.py +47 -25
  70. flip/power_spectra/models.py +30 -31
  71. flip/power_spectra/pyccl_engine.py +36 -1
  72. flip/simulation/__init__.py +0 -0
  73. flip/utils.py +62 -91
  74. flipcosmo-1.2.1.dist-info/METADATA +78 -0
  75. flipcosmo-1.2.1.dist-info/RECORD +109 -0
  76. {flipcosmo-1.0.0.dist-info → flipcosmo-1.2.1.dist-info}/WHEEL +1 -1
  77. flipcosmo-1.2.1.dist-info/top_level.txt +7 -0
  78. scripts/flip_compute_correlation_model.py +70 -0
  79. scripts/flip_compute_power_spectra.py +50 -0
  80. scripts/flip_fisher_forecast_velocity.py +70 -0
  81. scripts/flip_fisher_rcrk24.py +164 -0
  82. scripts/flip_launch_minuit_density_fit.py +91 -0
  83. scripts/flip_launch_minuit_full_fit.py +117 -0
  84. scripts/flip_launch_minuit_velocity_fit.py +78 -0
  85. scripts/flip_launch_minuit_velocity_fit_full.py +107 -0
  86. scripts/flip_launch_minuit_velocity_fit_interpolation.py +93 -0
  87. test/refresh_reference_values.py +43 -0
  88. test/test_covariance_assembly.py +102 -0
  89. test/test_covariance_reference_values.py +125 -0
  90. test/test_covariance_utils.py +34 -0
  91. test/test_e2e_density.py +50 -0
  92. test/test_e2e_joint.py +65 -0
  93. test/test_e2e_velocity.py +53 -0
  94. test/test_likelihood_inversions.py +31 -0
  95. flip/covariance/carreres23/generator.py +0 -132
  96. flip/data/density_data.parquet +0 -0
  97. flip/data/velocity_data.parquet +0 -0
  98. flip/fisher.py +0 -190
  99. flipcosmo-1.0.0.dist-info/METADATA +0 -32
  100. flipcosmo-1.0.0.dist-info/RECORD +0 -82
  101. flipcosmo-1.0.0.dist-info/top_level.txt +0 -1
  102. /flip/{config.py → _config.py} +0 -0
  103. /flip/covariance/{adamsblake17 → analytical/adamsblake17}/__init__.py +0 -0
  104. /flip/covariance/{adamsblake17plane → analytical/adamsblake17plane}/__init__.py +0 -0
  105. /flip/covariance/{adamsblake20 → analytical/adamsblake20}/__init__.py +0 -0
  106. /flip/covariance/{carreres23 → analytical/carreres23}/__init__.py +0 -0
  107. /flip/covariance/{lai22 → analytical/lai22}/__init__.py +0 -0
  108. /flip/covariance/{lai22 → analytical/lai22}/h_terms.py +0 -0
  109. /flip/covariance/{ravouxcarreres → analytical/ravouxcarreres}/__init__.py +0 -0
  110. /flip/covariance/{ravouxcarreres → analytical/ravouxcarreres}/flip_terms_lmax.py +0 -0
  111. /flip/covariance/{ravouxnoanchor25 → analytical/ravouxnoanchor25}/__init__.py +0 -0
  112. /flip/covariance/{rcrk24 → analytical/rcrk24}/__init__.py +0 -0
  113. {flipcosmo-1.0.0.dist-info → flipcosmo-1.2.1.dist-info}/licenses/LICENSE +0 -0
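
The moves in the list above reorganize the package: the fitting machinery (fitter.py, likelihood.py, fit_utils.py, plot_utils.py, plus the new fisher.py) now lives under flip/covariance/, gridding.py moves under flip/data/, and the analytical covariance models are grouped under flip/covariance/analytical/. A minimal sketch of the import layout this implies (module paths taken from the file list; whether flip re-exports these names at package level is an assumption):

# Import layout implied by the 1.2.1 file moves (sketch, not exhaustive)
from flip import data_vector, utils                   # unchanged top-level modules
from flip.covariance import covariance, fitter        # CovMatrix and FitMinuit entry points
from flip.covariance import likelihood, fisher        # moved from flip/ and new in 1.2.1
from flip.covariance.analytical import adamsblake20   # model packages now under analytical/
from flip.covariance.analytical import carreres23
from flip.data import gridding                        # gridding.py moved under flip/data/

The new scripts and tests shown below use exactly this layout (from flip.covariance import covariance, fitter).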
scripts/flip_launch_minuit_full_fit.py (new file)
@@ -0,0 +1,117 @@
+ import os
+
+ import numpy as np
+ import pandas as pd
+ from flip.covariance import covariance, fitter
+ from pkg_resources import resource_filename
+
+ from flip import data_vector, utils
+
+ flip_base = resource_filename("flip", ".")
+ data_path = os.path.join(flip_base, "data")
+
+ ### Load data
+ grid = pd.read_parquet(os.path.join(data_path, "data_density.parquet"))
+ grid_window = pd.read_parquet(os.path.join(data_path, "data_window_density.parquet"))
+ coordinates_density = np.array([grid["ra"], grid["dec"], grid["rcom_zobs"]])
+ data_density = {
+     "density": np.array(grid["density"]),
+     "density_error": np.array(grid["density_error"]),
+ }
+
+
+ sn_data = pd.read_parquet(os.path.join(data_path, "data_velocity.parquet"))
+
+ coordinates_velocity = np.array([sn_data["ra"], sn_data["dec"], sn_data["como_dist"]])
+ data_velocity = sn_data.to_dict("list")
+ for key in data_velocity.keys():
+     data_velocity[key] = np.array(data_velocity[key])
+ data_velocity["velocity"] = data_velocity.pop("vpec")
+ data_velocity["velocity_error"] = np.zeros_like(data_velocity["velocity"])
+
+ data_velocity_object = data_vector.DirectVel(data_velocity)
+ data_density_object = data_vector.Dens(data_density)
+
+ data_density_velocity_object = data_vector.DensVel(
+     data_density_object, data_velocity_object
+ )
+
+ sigmau_fiducial = 15.0
+ sigmag_fiducial = 3.0
+
+ ktt, ptt = np.loadtxt(os.path.join(data_path, "power_spectrum_tt.txt"))
+ kmt, pmt = np.loadtxt(os.path.join(data_path, "power_spectrum_mt.txt"))
+ kmm, pmm = np.loadtxt(os.path.join(data_path, "power_spectrum_mm.txt"))
+ power_spectrum_dict_bias = {
+     "gg": [[kmm, pmm * np.array(grid_window["window_mm"]) ** 2]]
+ }
+ power_spectrum_dict = {
+     "gg": [
+         [kmm, pmm * np.array(grid_window["window_mm"]) ** 2],
+         [kmt, pmt * np.array(grid_window["window_mt"])],
+         [ktt, ptt],
+     ],
+     "gv": [
+         [
+             kmt,
+             pmt * np.array(grid_window["window_mt"]) * utils.Du(kmt, sigmau_fiducial),
+         ],
+         [ktt, ptt * utils.Du(kmt, sigmau_fiducial)],
+     ],
+     "vv": [[ktt, ptt * utils.Du(ktt, sigmau_fiducial) ** 2]],
+ }
+
+ ### Compute covariance
+ size_batch = 500_000
+ number_worker = 8
+
+ covariance_fit = covariance.CovMatrix.init_from_flip(
+     "adamsblake20",
+     "full",
+     power_spectrum_dict,
+     coordinates_density=coordinates_density,
+     coordinates_velocity=coordinates_velocity,
+     size_batch=size_batch,
+     number_worker=number_worker,
+     additional_parameters_values=(sigmag_fiducial,),
+     variant="nobeta",
+ )
+
+ ### Load fitter
+ likelihood_type = "multivariate_gaussian"
+ likelihood_properties = {"inversion_method": "cholesky_inverse"}
+
+
+ parameter_dict = {
+     "bs8": {
+         "value": 1.0,
+         "limit_low": 0.0,
+         "limit_up": 3.0,
+         "fixed": False,
+     },
+     "fs8": {
+         "value": 0.4,
+         "limit_low": 0.0,
+         "limit_up": 1.0,
+         "fixed": False,
+     },
+     "sigv": {
+         "value": 100,
+         "limit_low": None,
+         "limit_up": None,
+         "fixed": False,
+     },
+ }
+
+
+ minuit_fitter = fitter.FitMinuit.init_from_covariance(
+     covariance_fit,
+     data_density_velocity_object,
+     parameter_dict,
+     likelihood_type=likelihood_type,
+     likelihood_properties=likelihood_properties,
+ )
+
+
+ ### Fit
+ minuit_fitter.run()
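
In the power_spectrum_dict above, utils.Du(k, sigmau_fiducial) damps the velocity-related ("gv" and "vv") spectra on small scales, with sigma_u setting the damping scale. The diff does not show flip's implementation of Du; the conventional choice in peculiar-velocity covariance models (e.g. Koda et al. 2014) is a sinc kernel, sketched here only to illustrate the role of sigma_u (hypothetical stand-in, not flip's code):

import numpy as np

def Du_sinc(k, sigma_u):
    """Sinc velocity-damping kernel, D_u(k) = sin(k sigma_u) / (k sigma_u).

    Illustrative stand-in for flip.utils.Du; the packaged implementation may differ.
    """
    k = np.asarray(k, dtype=float)
    return np.sinc(k * sigma_u / np.pi)  # np.sinc(x) = sin(pi x) / (pi x)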
scripts/flip_launch_minuit_velocity_fit.py (new file)
@@ -0,0 +1,78 @@
+ import os
+
+ import numpy as np
+ import pandas as pd
+ from flip.covariance import covariance, fitter
+ from pkg_resources import resource_filename
+
+ from flip import data_vector, utils
+
+ flip_base = resource_filename("flip", ".")
+ data_path = os.path.join(flip_base, "data")
+
+ ### Load data
+ sn_data = pd.read_parquet(os.path.join(data_path, "data_velocity.parquet"))
+
+
+ coordinates_velocity = np.array([sn_data["ra"], sn_data["dec"], sn_data["rcom_zobs"]])
+
+ data_velocity = sn_data.to_dict("list")
+ for key in data_velocity.keys():
+     data_velocity[key] = np.array(data_velocity[key])
+ data_velocity["velocity"] = data_velocity.pop("vpec")
+ data_velocity["velocity_error"] = np.zeros_like(data_velocity["velocity"])
+
+ data_velocity_object = data_vector.DirectVel(data_velocity)
+
+ ktt, ptt = np.loadtxt(os.path.join(data_path, "power_spectrum_tt.txt"))
+ kmt, pmt = np.loadtxt(os.path.join(data_path, "power_spectrum_mt.txt"))
+ kmm, pmm = np.loadtxt(os.path.join(data_path, "power_spectrum_mm.txt"))
+
+ sigmau_fiducial = 15
+
+ power_spectrum_dict = {"vv": [[ktt, ptt * utils.Du(ktt, sigmau_fiducial) ** 2]]}
+
+ ### Compute covariance
+ size_batch = 10_000
+ number_worker = 16
+
+ covariance_fit = covariance.CovMatrix.init_from_flip(
+     "carreres23",
+     "velocity",
+     power_spectrum_dict,
+     coordinates_velocity=coordinates_velocity,
+     size_batch=size_batch,
+     number_worker=number_worker,
+ )
+
+
+ ### Load fitter
+ likelihood_type = "multivariate_gaussian"
+ likelihood_properties = {"inversion_method": "cholesky_inverse"}
+
+
+ parameter_dict = {
+     "fs8": {
+         "value": 0.4,
+         "limit_low": 0.0,
+         "fixed": False,
+     },
+     "sigv": {
+         "value": 200,
+         "limit_low": 0.0,
+         "fixed": False,
+     },
+ }
+
+
+ minuit_fitter = fitter.FitMinuit.init_from_covariance(
+     covariance_fit,
+     data_velocity_object,
+     parameter_dict,
+     likelihood_type=likelihood_type,
+     likelihood_properties=likelihood_properties,
+ )
+
+
+ ### Fit
+ minuit_fitter.run()
scripts/flip_launch_minuit_velocity_fit_full.py (new file)
@@ -0,0 +1,107 @@
+ import os
+
+ import numpy as np
+ import pandas as pd
+ from flip.covariance import covariance, fitter
+ from pkg_resources import resource_filename
+
+ from flip import data_vector, utils
+
+ flip_base = resource_filename("flip", ".")
+ data_path = os.path.join(flip_base, "data")
+
+ ### Load data
+ sn_data = pd.read_parquet(os.path.join(data_path, "data_velocity.parquet"))
+
+ coordinates_velocity = np.array([sn_data["ra"], sn_data["dec"], sn_data["rcom_zobs"]])
+
+ data_velocity = sn_data.to_dict("list")
+ for key in data_velocity.keys():
+     data_velocity[key] = np.array(data_velocity[key])
+ data_velocity["velocity"] = data_velocity.pop("vpec")
+ data_velocity["velocity_error"] = np.zeros_like(data_velocity["velocity"])
+
+ data_velocity_object = data_vector.snia_vectors.VelFromSALTfit(
+     data_velocity, velocity_estimator="full", h=0.7
+ )
+
+
+ ktt, ptt = np.loadtxt(os.path.join(data_path, "power_spectrum_tt.txt"))
+ kmt, pmt = np.loadtxt(os.path.join(data_path, "power_spectrum_mt.txt"))
+ kmm, pmm = np.loadtxt(os.path.join(data_path, "power_spectrum_mm.txt"))
+
+ sigmau_fiducial = 15
+
+ power_spectrum_dict = {"vv": [[ktt, ptt * utils.Du(ktt, sigmau_fiducial) ** 2]]}
+
+ ### Compute covariance
+ size_batch = 10_000
+ number_worker = 16
+
+ covariance_fit = covariance.CovMatrix.init_from_flip(
+     "carreres23",
+     "velocity",
+     power_spectrum_dict,
+     coordinates_velocity=coordinates_velocity,
+     size_batch=size_batch,
+     number_worker=number_worker,
+ )
+
+
+ ### Load fitter
+ likelihood_type = "multivariate_gaussian"
+ likelihood_properties = {
+     "inversion_method": "cholesky_inverse",
+ }
+
+
+ parameter_dict = {
+     "fs8": {
+         "value": 0.4,
+         "limit_low": 0.0,
+         "limit_up": 1.0,
+         "fixed": False,
+     },
+     "sigv": {
+         "value": 200,
+         "limit_low": 0.0,
+         "limit_up": 300,
+         "fixed": False,
+     },
+     "alpha": {
+         "value": 0.1,
+         "limit_low": 0.05,
+         "limit_up": 0.15,
+         "fixed": False,
+     },
+     "beta": {
+         "value": 3.0,
+         "limit_low": 1.5,
+         "limit_up": 4.5,
+         "fixed": False,
+     },
+     "M_0": {
+         "value": -19,
+         "limit_low": -21,
+         "limit_up": -18,
+         "fixed": False,
+     },
+     "sigma_M": {
+         "value": 0.1,
+         "limit_low": 0.0,
+         "limit_up": 1.0,
+         "fixed": False,
+     },
+ }
+
+ minuit_fitter = fitter.FitMinuit.init_from_covariance(
+     covariance_fit,
+     data_velocity_object,
+     parameter_dict,
+     likelihood_type=likelihood_type,
+     likelihood_properties=likelihood_properties,
+ )
+
+
+ ### Fit
+ minuit_fitter.run()
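
Unlike the previous script, this one builds the velocity data vector with data_vector.snia_vectors.VelFromSALTfit, so the fit also floats the SN Ia standardization nuisance parameters alpha, beta, M_0 and the intrinsic scatter sigma_M. The estimator itself lives in flip/data_vector/snia_vectors.py and is not shown in this diff; the standard SALT2/Tripp standardization it presumably builds on is

    \mu = m_B + \alpha\, x_1 - \beta\, c - M_0 ,
    \qquad
    \sigma_\mu^2 = \sigma_{\mathrm{fit}}^2 + \sigma_M^2 ,

with peculiar velocities then estimated from the residuals of mu relative to the fiducial Hubble diagram at z_obs (here with h = 0.7).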
scripts/flip_launch_minuit_velocity_fit_interpolation.py (new file)
@@ -0,0 +1,93 @@
+ import os
+
+ import numpy as np
+ import pandas as pd
+ from flip.covariance import covariance, fitter
+ from pkg_resources import resource_filename
+
+ from flip import data_vector, utils
+
+ flip_base = resource_filename("flip", ".")
+ data_path = os.path.join(flip_base, "data")
+
+ ### Load data
+ sn_data = pd.read_parquet(os.path.join(data_path, "data_velocity.parquet"))
+
+ coordinates_velocity = np.array([sn_data["ra"], sn_data["dec"], sn_data["rcom_zobs"]])
+
+ data_velocity = sn_data.to_dict("list")
+ for key in data_velocity.keys():
+     data_velocity[key] = np.array(data_velocity[key])
+ data_velocity["velocity"] = data_velocity.pop("vpec")
+ data_velocity["velocity_error"] = np.zeros_like(data_velocity["velocity"])
+
+ data_velocity_object = data_vector.DirectVel(data_velocity)
+
+
+ ktt, ptt = np.loadtxt(os.path.join(data_path, "power_spectrum_tt.txt"))
+ kmt, pmt = np.loadtxt(os.path.join(data_path, "power_spectrum_mt.txt"))
+ kmm, pmm = np.loadtxt(os.path.join(data_path, "power_spectrum_mm.txt"))
+
+ sigmau_fiducial = 15
+
+ power_spectrum_dict = {"vv": [[ktt, ptt * utils.Du(ktt, sigmau_fiducial) ** 2]]}
+
+ ### Compute covariance
+ sigmau_list = np.linspace(10.0, 20.0, 10)
+ covariance_list = []
+ size_batch = 10_000
+ number_worker = 16
+
+
+ for sigu in sigmau_list:
+     power_spectrum_dict = {"vv": [[ktt, ptt * utils.Du(ktt, sigu) ** 2]]}
+
+     covariance_list.append(
+         covariance.CovMatrix.init_from_flip(
+             "carreres23",
+             "velocity",
+             power_spectrum_dict,
+             coordinates_velocity=coordinates_velocity,
+             size_batch=size_batch,
+             number_worker=number_worker,
+         )
+     )
+
+
+ ### Load fitter
+ likelihood_type = "multivariate_gaussian_interp1d"
+ likelihood_properties = {"inversion_method": "cholesky_inverse"}
+
+ parameter_dict = {
+     "fs8": {
+         "value": 0.4,
+         "limit_low": 0.0,
+         "fixed": False,
+     },
+     "sigv": {
+         "value": 200,
+         "limit_low": 0.0,
+         "fixed": False,
+     },
+     "sigu": {
+         "value": 15.0,
+         "limit_low": 13.0,
+         "limit_up": 17.0,
+         "fixed": False,
+     },
+ }
+
+
+ minuit_fitter = fitter.FitMinuit.init_from_covariance(
+     covariance_list,
+     data_velocity_object,
+     parameter_dict,
+     likelihood_type=likelihood_type,
+     likelihood_properties=likelihood_properties,
+     interpolation_value_name="sigu",
+     interpolation_value_range=sigmau_list,
+ )
+
+
+ ### Fit
+ minuit_fitter.run()
test/refresh_reference_values.py (new file)
@@ -0,0 +1,43 @@
+ import json
+
+ import test_covariance_reference_values
+ import test_e2e_density
+ import test_e2e_joint
+ import test_e2e_velocity
+
+
+ def refresh_covariance_reference_values():
+     ref: dict = {}
+     for m in test_covariance_reference_values.model_to_test:
+         model, kind = m[0], m[1]
+         key = f"{model}:{kind}"
+         ref[key] = test_covariance_reference_values.test_covariance_reference_metrics(
+             model,
+             kind,
+             debug_return=True,
+         )
+
+     out_main = "test_covariance_reference_values.json"
+     with open(out_main, "w") as f:
+         json.dump(ref, f, indent=2)
+
+     return ref
+
+
+ def refresh_e2e_reference_values():
+     ref = {
+         "e2e_density": test_e2e_density.test_e2e_density(debug_return=True),
+         "e2e_velocity": test_e2e_velocity.test_e2e_velocity(debug_return=True),
+         "e2e_joint": test_e2e_joint.test_e2e_joint(debug_return=True),
+     }
+
+     out_main = "test_e2e_reference_values.json"
+     with open(out_main, "w") as f:
+         json.dump(ref, f, indent=2)
+
+     return ref
+
+
+ if __name__ == "__main__":
+     refresh_covariance_reference_values()
+     refresh_e2e_reference_values()
test/test_covariance_assembly.py (new file)
@@ -0,0 +1,102 @@
+ import numpy as np
+ from flip.data import load_data_test
+
+ from flip import covariance, data_vector
+
+
+ def test_covariance_assembly_density_velocity():
+
+     coordinates_density, density_data = load_data_test.load_density_data(subsample=50)
+     density_data_vector = data_vector.Dens(density_data)
+
+     coordinates_velocity, velocity_data = load_data_test.load_velocity_data(
+         subsample=50
+     )
+     velocity_data_vector = data_vector.DirectVel(velocity_data)
+
+     density_velocity_data_vector = data_vector.DensVel(
+         density_data_vector, velocity_data_vector
+     )
+
+     power_spectrum_dict = load_data_test.load_power_spectrum_dict()
+
+     model_name = "ravouxcarreres"
+     model_type = "density_velocity"
+     sigmag_fiducial = 5.0
+
+     covariance_object = covariance.CovMatrix.init_from_flip(
+         model_name,
+         model_type,
+         power_spectrum_dict,
+         coordinates_density=coordinates_density,
+         coordinates_velocity=coordinates_velocity,
+         size_batch=50_000,
+         number_worker=1,
+         additional_parameters_values=(sigmag_fiducial,),
+         variant="nobeta",
+     )
+
+     # Assemble total covariance with simple coefficients
+     coefficients = {"bs8": 1.0, "fs8": 1.0, "sigv": 0.0}
+     _, data_variance = density_velocity_data_vector.give_data_and_variance()
+     covariance_matrix = covariance_object.compute_covariance_sum(
+         coefficients, data_variance
+     )
+
+     # Check shapes and that cross-block is exactly zero (no gv provided)
+     n_density = len(density_data_vector.data["density"])
+     n_velocity = len(velocity_data_vector.data["velocity"])
+     covariance_density_density = covariance_matrix[:n_density, :n_density]
+     covariance_velocity_velocity = covariance_matrix[n_density:, n_density:]
+     covariance_density_velocity = covariance_matrix[:n_density, n_density:]
+
+     np.testing.assert_allclose(covariance_density_velocity, 0.0, atol=0.0)
+     assert covariance_matrix.shape == (n_density + n_velocity, n_density + n_velocity)
+     assert np.all(np.diag(covariance_density_density) > 0)
+     assert np.all(np.diag(covariance_velocity_velocity) > 0)
+
+
+ def test_covariance_assembly_full():
+
+     coordinates_density, density_data = load_data_test.load_density_data(subsample=50)
+     density_data_vector = data_vector.Dens(density_data)
+
+     coordinates_velocity, velocity_data = load_data_test.load_velocity_data(
+         subsample=50
+     )
+     velocity_data_vector = data_vector.DirectVel(velocity_data)
+
+     density_velocity_data_vector = data_vector.DensVel(
+         density_data_vector, velocity_data_vector
+     )
+
+     power_spectrum_dict = load_data_test.load_power_spectrum_dict()
+
+     model_name = "ravouxcarreres"
+     model_type = "full"
+     sigmag_fiducial = 5.0
+
+     covariance_object = covariance.CovMatrix.init_from_flip(
+         model_name,
+         model_type,
+         power_spectrum_dict,
+         coordinates_density=coordinates_density,
+         coordinates_velocity=coordinates_velocity,
+         size_batch=50_000,
+         number_worker=1,
+         additional_parameters_values=(sigmag_fiducial,),
+         variant="nobeta",
+     )
+
+     coefficients = {"bs8": 1.0, "fs8": 1.0, "sigv": 0.0}
+     _, data_variance = density_velocity_data_vector.give_data_and_variance()
+     covariance_matrix = covariance_object.compute_covariance_sum(
+         coefficients, data_variance
+     )
+
+     n_density = len(density_data_vector.data["density"])
+     covariance_density_velocity = covariance_matrix[:n_density, n_density:]
+
+     print(np.min(np.abs(covariance_density_velocity)))
+
+     assert np.any(np.abs(covariance_density_velocity) > 0)
test/test_covariance_reference_values.py (new file)
@@ -0,0 +1,125 @@
+ import numpy as np
+ import pytest
+ from flip.data import load_data_test
+
+ from flip import covariance, data_vector
+
+ model_to_test = [
+     ("carreres23", "velocity"),
+     ("adamsblake17plane", "density"),
+     ("adamsblake17plane", "velocity"),
+     ("adamsblake17plane", "full"),
+     ("adamsblake20", "density"),
+     ("adamsblake20", "velocity"),
+     ("adamsblake20", "full"),
+     ("ravouxcarreres", "velocity"),
+     ("ravouxcarreres", "density"),
+     ("ravouxcarreres", "full"),
+     ("lai22", "density"),
+     ("lai22", "velocity"),
+     ("lai22", "full"),
+ ]
+
+
+ def compute_covariance_metrics(
+     model,
+     model_type,
+ ):
+
+     if model_type == "density":
+         coordinates_density, density_data = load_data_test.load_density_data(
+             subsample=50
+         )
+         data_vector_obj = data_vector.Dens(density_data)
+         coordinates_velocity = None
+
+     elif model_type == "velocity":
+         coordinates_velocity, velocity_data = load_data_test.load_velocity_data(
+             subsample=50
+         )
+         data_vector_obj = data_vector.DirectVel(velocity_data)
+         coordinates_density = None
+     else:
+         coordinates_velocity, velocity_data = load_data_test.load_velocity_data(
+             subsample=50
+         )
+         coordinates_density, density_data = load_data_test.load_density_data(
+             subsample=50
+         )
+         data_vector_obj = data_vector.DensVel(
+             data_vector.Dens(density_data),
+             data_vector.DirectVel(velocity_data),
+         )
+
+     power_spectrum_dict = load_data_test.load_power_spectrum_dict()
+
+     additional_parameters = ()
+     variant = None
+     parameters = {"bs8": 1.0, "fs8": 0.5, "sigv": 0.0}
+     if model in {"adamsblake20", "ravouxcarreres"}:
+         parameters["beta_f"] = parameters["fs8"] / max(parameters["bs8"], 1e-6)
+         additional_parameters = (15.0,)
+         variant = "nobeta"
+     if model == "lai22":
+         parameters["sigg"] = 1.0
+         parameters["beta_f"] = parameters["fs8"] / max(parameters["bs8"], 1e-6)
+         variant = "nobeta"
+
+     covariance_object = covariance.CovMatrix.init_from_flip(
+         model,
+         model_type,
+         power_spectrum_dict,
+         coordinates_density=coordinates_density,
+         coordinates_velocity=coordinates_velocity,
+         size_batch=50_000,
+         number_worker=1,
+         variant=variant,
+         additional_parameters_values=additional_parameters,
+     )
+
+     _, data_variance = data_vector_obj.give_data_and_variance(
+         parameters if "M_0" in data_vector_obj.free_par else {}
+     )
+     covariance_matrix = covariance_object.compute_covariance_sum(
+         parameters, data_variance
+     )
+     return {
+         "shape": [int(covariance_matrix.shape[0]), int(covariance_matrix.shape[1])],
+         "trace": float(np.trace(covariance_matrix)),
+         "diag_mean": float(np.mean(np.diag(covariance_matrix))),
+         "entry_0_0": float(covariance_matrix[0, 0]),
+         "entry_0_-1": float(covariance_matrix[0, -1]),
+         "entry_mid_mid": float(
+             covariance_matrix[
+                 covariance_matrix.shape[0] // 2, covariance_matrix.shape[1] // 2
+             ]
+         ),
+     }
+
+
+ @pytest.mark.parametrize(
+     "model,kind",
+     model_to_test,
+ )
+ def test_covariance_reference_metrics(model, kind, debug_return=False):
+
+     got = compute_covariance_metrics(model, kind)
+
+     if debug_return:
+         return got
+
+     reference_values = load_data_test.load_covariance_test_reference_values()
+
+     key = f"{model}:{kind}"
+     assert key in reference_values, f"Missing reference metrics for {key}"
+     ref = reference_values[key]
+     assert got["shape"] == ref["shape"]
+     np.testing.assert_allclose(got["trace"], ref["trace"], rtol=1e-6, atol=1e-8)
+     np.testing.assert_allclose(got["diag_mean"], ref["diag_mean"], rtol=1e-6, atol=1e-8)
+     np.testing.assert_allclose(got["entry_0_0"], ref["entry_0_0"], rtol=1e-6, atol=1e-8)
+     np.testing.assert_allclose(
+         got["entry_0_-1"], ref["entry_0_-1"], rtol=1e-6, atol=1e-8
+     )
+     np.testing.assert_allclose(
+         got["entry_mid_mid"], ref["entry_mid_mid"], rtol=1e-6, atol=1e-8
+     )
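
Going by the assertions above, load_covariance_test_reference_values() reads the new flip/data/test_covariance_reference_values.json and each (model, kind) pair maps to six scalar metrics keyed as f"{model}:{kind}". A sketch of that structure as a Python dict, with placeholder numbers rather than the shipped reference values (test/refresh_reference_values.py regenerates the real ones via the debug_return=True path):

# Assumed structure of test_covariance_reference_values.json (values are placeholders)
reference_values = {
    "carreres23:velocity": {
        "shape": [50, 50],      # placeholder; matrix size for the subsampled test data
        "trace": 0.0,           # placeholder
        "diag_mean": 0.0,       # placeholder
        "entry_0_0": 0.0,       # placeholder
        "entry_0_-1": 0.0,      # placeholder
        "entry_mid_mid": 0.0,   # placeholder
    },
    # ... one entry per (model, kind) pair in model_to_test
}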
test/test_covariance_utils.py (new file)
@@ -0,0 +1,34 @@
+ import numpy as np
+ from flip.covariance import cov_utils
+
+
+ def test_flat_matrix_roundtrip_small():
+     # Covariance flattening stores a single variance value (shared diagonal)
+     # followed by upper-triangular off-diagonals. Build a matrix consistent with this contract.
+     rng = np.random.default_rng(0)
+     n = 5
+     off = rng.standard_normal((n, n))
+     off = np.triu(off, k=1)
+     off = off + off.T
+     var = 2.3
+     cov = off.copy()
+     np.fill_diagonal(cov, var)
+
+     flat = cov_utils.return_flat_covariance(cov)
+     cov_back = cov_utils.return_matrix_covariance(flat)
+
+     assert cov_back.shape == cov.shape
+     np.testing.assert_allclose(cov_back, cov, rtol=0, atol=1e-12)
+
+
+ def test_flat_cross_roundtrip_shape():
+     # Cross-covariance flattening/reshaping
+     g, v = 3, 4
+     rng = np.random.default_rng(1)
+     cross = rng.standard_normal((g, v))
+
+     flat = cov_utils.return_flat_cross_cov(cross)
+     back = cov_utils.return_matrix_covariance_cross(flat, g, v)
+
+     assert back.shape == (g, v)
+     np.testing.assert_allclose(back, cross)
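
The round trip above relies on a specific flat layout: one shared variance value followed by the upper-triangular off-diagonals. A self-contained numpy sketch of that layout, independent of cov_utils, to make the assumed contract explicit (the helper names here are illustrative, not flip API):

import numpy as np

def flatten_shared_variance(cov):
    """Pack a symmetric matrix with a constant diagonal as
    [variance, upper-triangular off-diagonals] (assumed cov_utils contract)."""
    n = cov.shape[0]
    iu = np.triu_indices(n, k=1)
    return np.concatenate(([cov[0, 0]], cov[iu]))

def unflatten_shared_variance(flat):
    """Invert flatten_shared_variance: rebuild the symmetric matrix."""
    m = flat.size - 1                      # number of off-diagonal entries
    n = int((1 + np.sqrt(1 + 8 * m)) / 2)  # solve n (n - 1) / 2 = m
    cov = np.zeros((n, n))
    iu = np.triu_indices(n, k=1)
    cov[iu] = flat[1:]
    cov = cov + cov.T
    np.fill_diagonal(cov, flat[0])
    return cov

# Round trip, mirroring test_flat_matrix_roundtrip_small above
rng = np.random.default_rng(0)
off = np.triu(rng.standard_normal((4, 4)), k=1)
cov = off + off.T
np.fill_diagonal(cov, 2.3)
assert np.allclose(unflatten_shared_variance(flatten_shared_variance(cov)), cov)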