territories-dashboard-lib 0.1.0-py3-none-any.whl → 0.1.1-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release.
This version of territories-dashboard-lib might be problematic.
- territories_dashboard_lib/commons/__init__.py +0 -0
- territories_dashboard_lib/commons/decorators.py +36 -0
- territories_dashboard_lib/commons/models.py +9 -0
- territories_dashboard_lib/geo_lib/__init__.py +0 -0
- territories_dashboard_lib/geo_lib/admin.py +64 -0
- territories_dashboard_lib/geo_lib/enums.py +7 -0
- territories_dashboard_lib/geo_lib/migrations/0001_initial.py +51 -0
- territories_dashboard_lib/geo_lib/migrations/__init__.py +0 -0
- territories_dashboard_lib/geo_lib/models.py +58 -0
- territories_dashboard_lib/geo_lib/urls.py +27 -0
- territories_dashboard_lib/geo_lib/views.py +239 -0
- territories_dashboard_lib/indicators_lib/__init__.py +0 -0
- territories_dashboard_lib/indicators_lib/admin.py +140 -0
- territories_dashboard_lib/indicators_lib/enums.py +59 -0
- territories_dashboard_lib/indicators_lib/export.py +29 -0
- territories_dashboard_lib/indicators_lib/format.py +34 -0
- territories_dashboard_lib/indicators_lib/methodo_pdf.py +99 -0
- territories_dashboard_lib/indicators_lib/migrations/0001_initial.py +138 -0
- territories_dashboard_lib/indicators_lib/migrations/__init__.py +0 -0
- territories_dashboard_lib/indicators_lib/models.py +230 -0
- territories_dashboard_lib/indicators_lib/payloads.py +54 -0
- territories_dashboard_lib/indicators_lib/query/commons.py +223 -0
- territories_dashboard_lib/indicators_lib/query/comparison.py +70 -0
- territories_dashboard_lib/indicators_lib/query/details.py +64 -0
- territories_dashboard_lib/indicators_lib/query/histogram.py +82 -0
- territories_dashboard_lib/indicators_lib/query/indicator_card.py +102 -0
- territories_dashboard_lib/indicators_lib/query/top_10.py +100 -0
- territories_dashboard_lib/indicators_lib/query/utils.py +20 -0
- territories_dashboard_lib/indicators_lib/refresh_filters.py +17 -0
- territories_dashboard_lib/indicators_lib/table.py +154 -0
- territories_dashboard_lib/indicators_lib/urls.py +97 -0
- territories_dashboard_lib/indicators_lib/views.py +490 -0
- territories_dashboard_lib/superset_lib/__init__.py +0 -0
- territories_dashboard_lib/superset_lib/admin.py +22 -0
- territories_dashboard_lib/superset_lib/guest_token.py +64 -0
- territories_dashboard_lib/superset_lib/logic.py +67 -0
- territories_dashboard_lib/superset_lib/migrations/0001_initial.py +45 -0
- territories_dashboard_lib/superset_lib/migrations/__init__.py +0 -0
- territories_dashboard_lib/superset_lib/models.py +52 -0
- territories_dashboard_lib/superset_lib/serializers.py +10 -0
- territories_dashboard_lib/superset_lib/urls.py +10 -0
- territories_dashboard_lib/superset_lib/views.py +19 -0
- territories_dashboard_lib/tracking_lib/__init__.py +0 -0
- territories_dashboard_lib/tracking_lib/enums.py +7 -0
- territories_dashboard_lib/tracking_lib/logic.py +78 -0
- territories_dashboard_lib/tracking_lib/migrations/0001_initial.py +45 -0
- territories_dashboard_lib/tracking_lib/migrations/__init__.py +0 -0
- territories_dashboard_lib/tracking_lib/models.py +79 -0
- territories_dashboard_lib/website_lib/__init__.py +0 -0
- territories_dashboard_lib/website_lib/admin.py +40 -0
- territories_dashboard_lib/website_lib/context_processors.py +27 -0
- territories_dashboard_lib/website_lib/forms.py +47 -0
- territories_dashboard_lib/website_lib/migrations/0001_initial.py +91 -0
- territories_dashboard_lib/website_lib/migrations/__init__.py +0 -0
- territories_dashboard_lib/website_lib/models.py +148 -0
- territories_dashboard_lib/website_lib/navigation.py +124 -0
- territories_dashboard_lib/website_lib/params.py +268 -0
- territories_dashboard_lib/website_lib/serializers.py +105 -0
- territories_dashboard_lib/website_lib/static_content.py +20 -0
- territories_dashboard_lib/website_lib/templatetags/htmlparams.py +75 -0
- territories_dashboard_lib/website_lib/templatetags/other_filters.py +30 -0
- territories_dashboard_lib/website_lib/views.py +212 -0
- {territories_dashboard_lib-0.1.0.dist-info → territories_dashboard_lib-0.1.1.dist-info}/METADATA +1 -1
- territories_dashboard_lib-0.1.1.dist-info/RECORD +67 -0
- territories_dashboard_lib-0.1.0.dist-info/RECORD +0 -5
- {territories_dashboard_lib-0.1.0.dist-info → territories_dashboard_lib-0.1.1.dist-info}/WHEEL +0 -0
- {territories_dashboard_lib-0.1.0.dist-info → territories_dashboard_lib-0.1.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,223 @@
from ..enums import DEFAULT_MESH, FRANCE_DB_VALUES, GeoLevel, MeshLevel
from ..models import AggregationFunctions, Indicator
from .utils import get_breakdown_dimension, run_custom_query


def get_last_year(indicator, mesh):
    query = f""" SELECT MAX(annee) as last_year FROM "{indicator.db_table_prefix}_{mesh}" """
    return run_custom_query(query)[0]["last_year"]


def generate_aggregate_query(indicator, territory, submesh, filters, slicer):
    query = f"""
    WITH annee_max AS
    (SELECT MAX(annee) FROM "{indicator.db_table_prefix}_{submesh}"),
    aggregat AS
    (
        SELECT
        {calculate_aggregate_values(indicator)}, {slicer}
        FROM
        {get_table_data_for_geography(indicator, territory, submesh)}
        {add_optional_filters(indicator, filters)}
        AND annee = (SELECT * FROM annee_max)
        GROUP BY (indic.{slicer})
    )
    """
    return query


def generate_aggregate_query_for_location(indicator, territory, submesh, filters):
    return generate_aggregate_query(
        indicator, territory, submesh, filters, f"code_{submesh}"
    )


def add_optional_filters(indicator: Indicator, filters):
    condition = ""
    all_dimensions = [dimension.db_name for dimension in indicator.dimensions.all()]
    for dimension in all_dimensions:
        if filters and filters.get(dimension):
            filters_str = ", ".join(
                [f"'{value.replace("'", "''")}'" for value in filters.get(dimension)]
            )
            condition += f' AND indic."{dimension}" in ({filters_str}) '
    return condition


def get_place_names_join(include_place_name, mapped_submesh, submesh, flows=False):
    join_clause = ""
    if include_place_name:
        if flows:
            join_clause = f"""
            JOIN
            (SELECT distinct("NOM_{mapped_submesh}") as territory_1, "{mapped_submesh}" as territory_1_id, "{mapped_submesh}" from arborescence_geo) arbo1
            on arbo1."{mapped_submesh}" = indic.code_{submesh.lower()}_1
            JOIN
            (SELECT distinct("NOM_{mapped_submesh}") as territory_2, "{mapped_submesh}" as territory_2_id, "{mapped_submesh}" from arborescence_geo) arbo2
            on arbo2."{mapped_submesh}" = indic.code_{submesh.lower()}_2
            """
        else:
            join_clause = f"""
            JOIN
            (SELECT distinct("NOM_{mapped_submesh}") as lieu, "{mapped_submesh}" as territoryid, "{mapped_submesh}" from arborescence_geo) arbo
            on arbo."{mapped_submesh}" = indic.code_{submesh.lower()}
            """
    return join_clause


def get_table_data_for_geography(
    indicator,
    territory,
    submesh=MeshLevel.Region,
    include_place_names=None,
    flows=False,
):
    mapped_mesh = "DEPCOM" if territory.mesh == "com" else territory.mesh.upper()
    mapped_submesh = "DEPCOM" if submesh == "com" else submesh.upper()

    geo_id_values = "', '".join([id.strip() for id in territory.id.split(",")])

    table_prefix = (
        indicator.flows_db_table_prefix if flows else indicator.db_table_prefix
    )

    arbo_sub_query = f"""
    SELECT DISTINCT("{mapped_submesh}")
    FROM arborescence_geo arbo
    WHERE arbo."{mapped_mesh}" in('{geo_id_values}')
    """

    where = (
        f"""
        WHERE (indic.code_{submesh.lower()}_1 in ({arbo_sub_query})
        OR indic.code_{submesh.lower()}_2 in ({arbo_sub_query}))
        """
        if flows
        else f"""
        WHERE indic.code_{submesh.lower()} in ({arbo_sub_query})
        """
    )

    return f"""
    "{table_prefix}_{submesh.lower()}" indic
    {get_place_names_join(include_place_names, mapped_submesh, submesh, flows)}
    {where}
    """


def calculate_aggregate_values(indicator, with_alternative=True):
    if not indicator.is_composite:
        return "SUM(valeur) as valeur"

    if indicator.aggregation_function == AggregationFunctions.DISCRETE_COMPONENT_2:
        sql = f"SUM(composante_1) / COALESCE(NULLIF(SUM(composante_2), 0), 1) * {indicator.aggregation_constant} as valeur"
        if with_alternative:
            sql += ", SUM(composante_1) as valeur_alternative"
        return sql
    breakdown_dimension = get_breakdown_dimension(indicator)
    breakdown_count = (
        f" * COUNT(DISTINCT({breakdown_dimension.db_name})) "
        if breakdown_dimension
        else ""
    )
    sql = f"SUM(composante_1) / COALESCE(NULLIF(SUM(composante_2), 0), 1) {breakdown_count} * {indicator.aggregation_constant} as valeur"
    if with_alternative:
        sql += ", SUM(composante_1) as valeur_alternative"
    return sql


def order_filters(filters, bo_ordered_filters):
    # Filters that exist in the predefined order
    ordered_filters = [filter for filter in bo_ordered_filters if filter in filters]

    # Filters that are not in the predefined order, sorted alphabetically
    not_in_bo = sorted(
        [item for item in filters if item not in bo_ordered_filters],
        key=lambda x: x.lower(),  # Case-insensitive alphabetical sort
    )

    # Combine both lists
    final_array = ordered_filters + not_in_bo
    return final_array


def get_mesh_level_for_geo_level(mesh, submesh):
    if mesh == GeoLevel.Region:
        return (
            MeshLevel.Department
            if submesh is None or submesh in [MeshLevel.National, MeshLevel.Region]
            else submesh
        )
    elif mesh == GeoLevel.Department:
        return (
            MeshLevel.Epci
            if submesh is None
            or submesh in [MeshLevel.National, MeshLevel.Region, MeshLevel.Department]
            else submesh
        )
    elif mesh in [GeoLevel.Epci, GeoLevel.Town]:
        return MeshLevel.Town
    else:
        return submesh or DEFAULT_MESH


def get_territories_ids(main_territory_codes, territory_mesh, submesh):
    mapped_territory_mesh = (
        "DEPCOM" if territory_mesh == "com" else territory_mesh.upper()
    )
    mapped_submesh = "DEPCOM" if submesh == "com" else submesh.upper()

    query = f"""
    SELECT DISTINCT "{mapped_submesh}" as code
    FROM arborescence_geo
    WHERE "{mapped_territory_mesh}" IN ('{"', '".join(main_territory_codes)}')
    """

    territories_ids = [r["code"] for r in run_custom_query(query)]
    return territories_ids


def get_sub_territories(territory, submesh):
    territory_codes = territory.id.split(",")
    mapped_territory_mesh = (
        "DEPCOM" if territory.mesh == "com" else territory.mesh.upper()
    )
    mapped_submesh = "DEPCOM" if submesh == "com" else submesh.upper()

    query = f"""
    SELECT DISTINCT "{mapped_submesh}" as code, "NOM_{mapped_submesh}" as name
    FROM arborescence_geo
    WHERE "{mapped_territory_mesh}" IN ('{"', '".join(territory_codes)}')
    """

    return run_custom_query(query)


def get_where_territory(territory):
    territory_id = (
        FRANCE_DB_VALUES[territory.id]
        if territory.mesh == MeshLevel.National
        else territory.id
    )
    return f""" "code_{territory.mesh}" = '{territory_id}' """


def get_values_for_territory(indicator, territory, filters=None):
    value = calculate_aggregate_values(indicator)
    where_territory = get_where_territory(territory)
    query = f"""
    SELECT {value}, annee
    FROM "{indicator.db_table_prefix}_{territory.mesh}" as indic
    WHERE {where_territory}
    {add_optional_filters(indicator, filters)}
    GROUP BY annee
    ORDER BY annee DESC
    """
    return query


def get_territory_name(territory):
    territory_mesh = "DEPCOM" if territory.mesh == "com" else territory.mesh.upper()
    query = f"""SELECT "NOM_{territory_mesh}" as nom FROM arborescence_geo WHERE "{territory_mesh}" = '{territory.id}' LIMIT 1;"""
    results = run_custom_query(query)
    return results[0]["nom"] if results else ""
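
The 223-line hunk above is the largest new module; by its imports and the +223 entry in the file list it appears to be territories_dashboard_lib/indicators_lib/query/commons.py. Its SQL is assembled from f-strings rather than bound parameters, so the two pure helpers are easiest to follow in isolation. A minimal standalone sketch, not part of the diff and with illustrative function names, of the quoting rule used in add_optional_filters and the ordering rule used in order_filters:

# Standalone sketch: mirrors the quoting and ordering behaviour of the hunk
# above without importing the package. Names here are illustrative only.

def quote_filter_values(values):
    # Single quotes are doubled, each value is wrapped in quotes, and the
    # results are comma-joined, as in add_optional_filters.
    return ", ".join("'" + v.replace("'", "''") + "'" for v in values)


def order_filters_like(filters, bo_ordered_filters):
    # Back-office order first, then the remaining filters alphabetically,
    # case-insensitively, as in order_filters.
    ordered = [f for f in bo_ordered_filters if f in filters]
    rest = sorted((f for f in filters if f not in bo_ordered_filters), key=str.lower)
    return ordered + rest


if __name__ == "__main__":
    print(quote_filter_values(["Côte-d'Or", "Var"]))
    # -> 'Côte-d''Or', 'Var'
    print(order_filters_like(["zebra", "Alpha", "beta"], ["beta"]))
    # -> ['beta', 'Alpha', 'zebra']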
@@ -0,0 +1,70 @@
from collections import defaultdict

from .commons import add_optional_filters, calculate_aggregate_values
from .utils import run_custom_query


def get_comparison_values_and_buckets(
    indicator, submesh, territories, cmp_territories, filters
):
    territories_ids = [t["code"] for t in territories + cmp_territories]
    territories_sql = f"('{"', '".join(territories_ids)}')"
    table_name = f"{indicator.db_table_prefix}_{submesh}"
    filters = add_optional_filters(indicator, filters)
    value = calculate_aggregate_values(indicator, with_alternative=False)
    number_of_buckets = min(10, len(territories) + len(cmp_territories))
    sub_query = f"""
    SELECT
        {value},
        code_{submesh} as geo_code
    FROM {table_name} indic
    WHERE code_{submesh} IN {territories_sql}
        AND annee = (
            SELECT MAX(annee)
            FROM {table_name}
        )
        AND valeur IS NOT NULL
        {filters}
    GROUP BY geo_code
    """
    query = f"""
    WITH aggregated_data AS ({sub_query}),
    range_bounds AS (
        SELECT
            MIN(valeur) - 1e-10 AS min_value,
            MAX(valeur) + 1e-10 AS max_value
        FROM aggregated_data
    )
    SELECT
        aggregated_data.valeur,
        aggregated_data.geo_code,
        WIDTH_BUCKET(aggregated_data.valeur, range_bounds.min_value, range_bounds.max_value, {number_of_buckets}) AS bucket
    FROM aggregated_data
    CROSS JOIN
        range_bounds
    """
    results = run_custom_query(query)
    raw_values = [r["valeur"] for r in results]
    min_value = min(raw_values)
    max_value = max(raw_values)
    bucket_width = (max_value - min_value) / number_of_buckets
    buckets = []
    bucket_min = min_value
    for i in range(1, number_of_buckets + 1):
        bucket_max = (
            max_value if i == number_of_buckets else min_value + i * bucket_width
        )
        buckets.append([bucket_min, bucket_max])
        bucket_min = bucket_max
    values = defaultdict(list)
    cmp_values = defaultdict(list)
    territories_ids_set = {t["code"] for t in territories}
    cmp_territories_ids_set = {t["code"] for t in cmp_territories}
    territories_dict = {t["code"]: t["name"] for t in territories + cmp_territories}
    for r in results:
        geo_code = r["geo_code"]
        if geo_code in territories_ids_set:
            values[r["bucket"]].append(territories_dict[geo_code])
        if geo_code in cmp_territories_ids_set:
            cmp_values[r["bucket"]].append(territories_dict[geo_code])
    return values, cmp_values, buckets
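
This 70-line hunk matches the +70 entry for territories_dashboard_lib/indicators_lib/query/comparison.py. get_comparison_values_and_buckets computes bucket boundaries twice: WIDTH_BUCKET assigns each row a bucket index in SQL, and the Python loop rebuilds the same equal-width edges to label them. A standalone sketch of the Python side, with illustrative names and no call into the package:

# Standalone sketch: equal-width buckets between the min and max values,
# as in the loop above. Illustrative only.

def make_buckets(values, max_buckets=10):
    number_of_buckets = min(max_buckets, len(values))
    min_value, max_value = min(values), max(values)
    width = (max_value - min_value) / number_of_buckets
    buckets, lower = [], min_value
    for i in range(1, number_of_buckets + 1):
        # The last bucket is pinned to the true maximum to avoid float drift.
        upper = max_value if i == number_of_buckets else min_value + i * width
        buckets.append([lower, upper])
        lower = upper
    return buckets


if __name__ == "__main__":
    print(make_buckets([2.0, 5.0, 9.0]))
    # -> [[2.0, 4.33...], [4.33..., 6.66...], [6.66..., 9.0]]

With the same min, max and bucket count, the SQL bucket indices line up with these edges, apart from the 1e-10 padding applied inside the query.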
@@ -0,0 +1,64 @@
from ..enums import MeshLevel
from ..models import Indicator
from ..payloads import Territory
from .commons import (
    add_optional_filters,
    calculate_aggregate_values,
    get_last_year,
    get_place_names_join,
    get_where_territory,
)
from .utils import get_breakdown_dimension, run_custom_query


def get_proportions_chart(indicator, territory, filters):
    where_territory = get_where_territory(territory)
    last_year = get_last_year(indicator, territory.mesh)
    breakdown_dimension = get_breakdown_dimension(indicator).db_name

    query = f"""
    SELECT {calculate_aggregate_values(indicator, with_alternative=False)}, "{breakdown_dimension}" as dimension
    FROM "{indicator.db_table_prefix}_{territory.mesh}" as indic
    WHERE {where_territory} AND annee = {last_year}
    {add_optional_filters(indicator, filters)}
    GROUP BY "{breakdown_dimension}"
    """

    data = run_custom_query(query)
    data_dict = {
        d["dimension"]: {"label": d["dimension"], "data": [d["valeur"]]} for d in data
    }
    breakdown_filters = filters[breakdown_dimension]
    sorted_data = (
        [data_dict[filter] for filter in breakdown_filters if filter in data_dict]
        if filters
        else list(data_dict.values())
    )
    return sorted_data


def get_values_for_submesh_territories(
    indicator: Indicator, submesh: MeshLevel, territory: Territory, filters
):
    submesh = submesh.lower()
    territory_mesh = "DEPCOM" if territory.mesh == "com" else territory.mesh.upper()
    mapped_submesh = "DEPCOM" if submesh == "com" else submesh.upper()

    geo_ids = "', '".join([id.strip() for id in territory.id.split(",")])
    query = f"""
    SELECT {calculate_aggregate_values(indicator)}, code_{submesh} as geocode, arbo.lieu as geoname
    FROM "{indicator.db_table_prefix}_{submesh}" indic
    {get_place_names_join(True, mapped_submesh, submesh)}
    WHERE code_{submesh} IN (
        SELECT DISTINCT("{mapped_submesh}")
        FROM arborescence_geo arbo
        WHERE arbo."{territory_mesh}" IN ('{geo_ids}')
    )
    {add_optional_filters(indicator, filters)}
    AND annee = (
        SELECT MAX(annee)
        FROM "{indicator.db_table_prefix}_{submesh}"
    )
    GROUP BY geocode, geoname
    """
    return run_custom_query(query)
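
The 64-line hunk above appears to be territories_dashboard_lib/indicators_lib/query/details.py. The final step of get_proportions_chart reorders the per-dimension datasets to follow the order of the selected filters. A standalone sketch with literal rows standing in for the database query; the values are made up for illustration:

# Standalone sketch of the reordering step at the end of get_proportions_chart.
rows = [
    {"dimension": "Femmes", "valeur": 42.0},
    {"dimension": "Hommes", "valeur": 58.0},
]
data_dict = {
    r["dimension"]: {"label": r["dimension"], "data": [r["valeur"]]} for r in rows
}

breakdown_filters = ["Hommes", "Femmes"]  # order chosen upstream (illustrative)
sorted_data = [data_dict[f] for f in breakdown_filters if f in data_dict]
print(sorted_data)
# -> [{'label': 'Hommes', 'data': [58.0]}, {'label': 'Femmes', 'data': [42.0]}]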
@@ -0,0 +1,82 @@
from .details import get_values_for_submesh_territories


def get_territories_histogram_data(territory_values, unite):
    if all([t["valeur"] is None for t in territory_values]):
        return ({}, [])
    values_for_min_max = [
        data["valeur"] for data in territory_values if data["valeur"] is not None
    ]
    min_value_last_year = min(values_for_min_max) if values_for_min_max else 0
    max_value_last_year = max(values_for_min_max) if values_for_min_max else 0

    # if we are between 99.5 and 100.5, spread histogram for better visualization
    if 99.5 < min_value_last_year < 100.5 and 99.5 < max_value_last_year < 100.5:
        min_value_last_year = 90

    histogram_values_dict = {}
    for territory_data in territory_values:
        territory_id = territory_data["geocode"]
        if territory_id not in histogram_values_dict:
            histogram_values_dict[territory_id] = {
                "valeur": 0,
                "geoname": territory_data["geoname"],
            }
        histogram_values_dict[territory_id]["valeur"] += (
            territory_data["valeur"] if territory_data["valeur"] is not None else 0
        )
    histogram_values = list(histogram_values_dict.values())

    deciles = [
        min_value_last_year + (max_value_last_year - min_value_last_year) * (i / 10)
        for i in range(10 if min_value_last_year != max_value_last_year else 1)
    ]

    data_by_decile = [
        {
            "decile": decile,
            "count": len(
                [
                    data
                    for data in histogram_values
                    if decile
                    <= data["valeur"]
                    <= (deciles[i + 1] if i < len(deciles) - 1 else max_value_last_year)
                ]
            ),
            "text": "\n".join(
                [
                    data["geoname"]
                    for data in histogram_values
                    if decile
                    <= data["valeur"]
                    <= (deciles[i + 1] if i < len(deciles) - 1 else max_value_last_year)
                ]
            ),
        }
        for i, decile in enumerate(deciles)
    ]

    datasets_histogram_bar_chart = {
        "label": unite,
        "data": [{"x": data["decile"], "y": data["count"]} for data in data_by_decile],
        "comments": [data["text"] for data in data_by_decile],
        "backgroundColor": "#6a6af4",
    }

    return (datasets_histogram_bar_chart, deciles)


def get_indicator_histogram_data(indicator, territory, submesh, filters):
    indicator_details = {}
    territory_values = get_values_for_submesh_territories(
        indicator, submesh, territory, filters
    )
    datasets_histogram_bar_chart, deciles = get_territories_histogram_data(
        territory_values, indicator.unite
    )

    indicator_details["datasetsHistogramBarChart"] = datasets_histogram_bar_chart
    indicator_details["deciles"] = deciles

    return indicator_details
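
This 82-line hunk matches the +82 entry for query/histogram.py. get_territories_histogram_data builds ten equal-width "decile" edges between the minimum and maximum values and counts territories per bin with an inclusive test on both edges. A standalone sketch of that counting step, using literal values for illustration:

# Standalone sketch of the decile edges and per-bin counts used above.
values = [10.0, 12.0, 15.0, 19.0, 20.0]
lo, hi = min(values), max(values)
deciles = [lo + (hi - lo) * (i / 10) for i in range(10)]
counts = [
    len([
        v for v in values
        if d <= v <= (deciles[i + 1] if i < len(deciles) - 1 else hi)
    ])
    for i, d in enumerate(deciles)
]
print(list(zip(deciles, counts)))

Because both comparisons are inclusive, a value that lands exactly on an internal edge is counted in the two adjacent bins, as in the original.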
@@ -0,0 +1,102 @@
from .commons import generate_aggregate_query_for_location
from .utils import run_custom_query


def generate_min_max_queries(indicator):
    result = """
    , min_value AS (SELECT MIN(valeur) AS min FROM aggregat),
    max_value AS (SELECT MAX(valeur) AS max FROM aggregat)
    """
    if indicator.is_composite:
        result += """
        , min_alternative_value AS (SELECT MIN(valeur_alternative) AS min_alternative FROM aggregat),
        max_alternative_value AS (SELECT MAX(valeur_alternative) AS max_alternative FROM aggregat)
        """
    return result


def get_min_max_statistics(indicator, submesh):
    result = f"""
    {get_values_for("min", submesh)}
    {get_values_for("max", submesh)}
    """
    if indicator.is_composite:
        result += f"""
        {get_values_for("min_alternative", submesh, True)}
        {get_values_for("max_alternative", submesh, True)}
        """
    return result


def get_values_for(extremum, submesh, is_alternative=False):
    return f"""
    {extremum}_value,
    (
        SELECT code_{submesh} AS code_{extremum}
        FROM aggregat
        WHERE valeur{"_alternative" if is_alternative else ""} = (SELECT * FROM {extremum}_value) LIMIT 1
    ) AS code_{extremum},
    (
        SELECT count(code_{submesh}) AS count_{extremum}
        FROM aggregat
        WHERE valeur{"_alternative" if is_alternative else ""} = (SELECT * FROM {extremum}_value)
    ) AS count_{extremum},
    """


def get_med_values(indicator):
    return f"""
    (SELECT
        PERCENTILE_DISC(0.5) WITHIN GROUP (order by valeur) AS med
        {", PERCENTILE_DISC(0.5) WITHIN GROUP (order by valeur_alternative) AS med_alternative" if indicator.is_composite else ""}
    FROM aggregat) AS meds
    """


def get_geography_statistics_values_for_indicator(
    indicator, territory, submesh, filters
):
    query = f"""
    {generate_aggregate_query_for_location(indicator, territory, submesh, filters)}
    {generate_min_max_queries(indicator)}

    SELECT * FROM
    {get_min_max_statistics(indicator, submesh)}
    {get_med_values(indicator)}
    """

    return query


def get_indicator_filters(indicator):
    if not indicator.db_table_prefix or not indicator.dimension:
        return None

    # Get the possible indicator's dimension's values
    query = f"""
    SELECT DISTINCT({indicator.dimension}) as filter FROM "{indicator.db_table_prefix}_reg"
    """
    return query


def get_names_from_codes(dict_result, submesh):
    codes_to_fetch = []
    for key, value in dict_result.items():
        if key.startswith("code_"):
            codes_to_fetch.append(value)
    mapped_submesh = "DEPCOM" if submesh == "com" else submesh.upper()
    query = f"""
    SELECT DISTINCT "{mapped_submesh}" as code, "NOM_{mapped_submesh}" as name
    FROM arborescence_geo
    WHERE "{mapped_submesh}" IN ({", ".join(["'" + c + "'" for c in codes_to_fetch])});
    """
    rows = run_custom_query(query)
    names_dict = {}
    for row in rows:
        names_dict[row["code"]] = row["name"]
    updated_dict = {}
    for key, value in dict_result.items():
        updated_dict[key] = value
        if key.startswith("code_"):
            updated_dict[key + "_name"] = names_dict[value]
    return updated_dict
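
The 102-line hunk above appears to be query/indicator_card.py. Its get_names_from_codes enriches a statistics row by adding a *_name sibling for every code_* key. A standalone sketch with the arborescence_geo lookup replaced by a literal mapping; the codes and names are illustrative only:

# Standalone sketch of the enrichment step in get_names_from_codes.
dict_result = {"min_value": 3.2, "code_min": "21", "max_value": 9.7, "code_max": "75"}
names_dict = {"21": "Côte-d'Or", "75": "Paris"}  # stands in for the arborescence_geo lookup

updated = {}
for key, value in dict_result.items():
    updated[key] = value
    if key.startswith("code_"):
        # Every code_* entry gains a code_*_name sibling.
        updated[key + "_name"] = names_dict[value]
print(updated)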
@@ -0,0 +1,100 @@
from collections import defaultdict

from .commons import (
    add_optional_filters,
    calculate_aggregate_values,
    get_last_year,
    get_table_data_for_geography,
)
from .utils import get_breakdown_dimension, run_custom_query


def breakdown_territories(indicator, submesh, year, filters, top_10_territories):
    territories_code = ", ".join(
        [f"'{territory['code_geo']}'" for territory in top_10_territories]
    )
    breakdown_dimension = get_breakdown_dimension(indicator).db_name

    query = f"""
    SELECT {calculate_aggregate_values(indicator, with_alternative=False)},
        "{breakdown_dimension}" as dimension,
        "code_{submesh}" as code_geo
    FROM "{indicator.db_table_prefix}_{submesh}" as indic
    WHERE code_{submesh} in ({territories_code}) AND annee = {year}
    {add_optional_filters(indicator, filters)}
    GROUP BY "code_{submesh}", "{breakdown_dimension}"
    """
    results = run_custom_query(query)
    return results


def get_top_10_territories(indicator, territory, submesh, year, filters):
    query = f"""
    SELECT {calculate_aggregate_values(indicator, with_alternative=False)},
        indic.code_{submesh} as code_geo, arbo.lieu as lieu
    FROM {get_table_data_for_geography(indicator, territory, submesh, include_place_names=True)}
    {add_optional_filters(indicator, filters)}
    AND annee = {year}
    GROUP BY code_{submesh}, arbo.lieu
    ORDER BY valeur DESC
    LIMIT 10
    """
    results = run_custom_query(query)
    return results


def get_indicator_top_10_data(indicator, territory, submesh, filters):
    indicator_details = {}

    breakdown_dimension = get_breakdown_dimension(indicator)
    breakdown_dimension = breakdown_dimension.db_name if breakdown_dimension else None
    breakdown_filters = filters.get(breakdown_dimension, [])

    last_year = get_last_year(indicator, submesh)

    territories = get_top_10_territories(
        indicator, territory, submesh, last_year, filters
    )

    if breakdown_dimension:
        breakdown = breakdown_territories(
            indicator, submesh, last_year, filters, territories
        )
        breakdown_by_geocode = defaultdict(dict)
        for row in breakdown:
            breakdown_by_geocode[row["code_geo"]][row["dimension"]] = row["valeur"]
        datasets_top_bar_chart = [
            {
                "label": f,
                "data": [
                    breakdown_by_geocode[territory["code_geo"]][f]
                    for territory in territories
                ],
            }
            for f in breakdown_filters
        ]
    else:
        datasets_top_bar_chart = [
            {
                "label": indicator.unite,
                "data": [territory["valeur"] for territory in territories],
            }
        ]

    labels_top_bar_chart = [territory["lieu"] for territory in territories]

    csv_data = []
    for territory in territories:
        csv_row = {}
        csv_row["Territoire"] = territory["lieu"]
        csv_row["Code Géographique"] = territory["code_geo"]
        csv_row[f"Valeur {indicator.unite}"] = territory["valeur"]
        if breakdown_dimension:
            for filter in breakdown_filters:
                csv_row[filter] = breakdown_by_geocode[territory["code_geo"]][filter]
        csv_data.append(csv_row)

    indicator_details["labelsTopBarChart"] = labels_top_bar_chart
    indicator_details["datasetsTopBarChart"] = datasets_top_bar_chart

    return indicator_details, csv_data
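
This 100-line hunk matches the +100 entry for query/top_10.py. When a breakdown dimension exists, get_indicator_top_10_data pivots the per-dimension rows into one label/data dataset per filter value, aligned with the top-10 territory order. A standalone sketch with literal rows; every value is made up for illustration:

# Standalone sketch of the pivot used in get_indicator_top_10_data.
from collections import defaultdict

territories = [
    {"code_geo": "21", "lieu": "Côte-d'Or", "valeur": 7.0},
    {"code_geo": "75", "lieu": "Paris", "valeur": 5.0},
]
breakdown = [
    {"code_geo": "21", "dimension": "Femmes", "valeur": 4.0},
    {"code_geo": "21", "dimension": "Hommes", "valeur": 3.0},
    {"code_geo": "75", "dimension": "Femmes", "valeur": 2.0},
    {"code_geo": "75", "dimension": "Hommes", "valeur": 3.0},
]
breakdown_filters = ["Femmes", "Hommes"]

by_geocode = defaultdict(dict)
for row in breakdown:
    by_geocode[row["code_geo"]][row["dimension"]] = row["valeur"]

# One dataset per filter value, one data point per top-10 territory.
datasets = [
    {"label": f, "data": [by_geocode[t["code_geo"]][f] for t in territories]}
    for f in breakdown_filters
]
labels = [t["lieu"] for t in territories]
print(labels, datasets)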
@@ -0,0 +1,20 @@
from django.conf import settings
from django.db import connections


def get_breakdown_dimension(indicator):
    breakdown_dimension = (
        indicator.dimensions.filter(is_breakdown=True).first()
        if indicator.dimensions.count() > 1
        else indicator.dimensions.first()
    )
    return breakdown_dimension


def run_custom_query(query, params=None):
    with connections[settings.INDICATORS_DATABASE].cursor() as cursor:
        cursor.execute(query, params)
        columns = [col[0] for col in cursor.description]
        rows = cursor.fetchall()
        results = [dict(zip(columns, row)) for row in rows]
        return results
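
The 20-line hunk above appears to be query/utils.py, the only place the indicators database is touched directly. A hedged usage sketch, assuming a configured Django project whose settings define the INDICATORS_DATABASE alias and whose indicators database contains the arborescence_geo table referenced throughout these modules; the column names follow the queries above:

# Usage sketch (assumed setup: Django configured, INDICATORS_DATABASE alias,
# arborescence_geo table with "REG" / "NOM_REG" columns as used above).
from territories_dashboard_lib.indicators_lib.query.utils import run_custom_query

rows = run_custom_query(
    'SELECT DISTINCT "REG" AS code, "NOM_REG" AS name FROM arborescence_geo LIMIT %s',
    params=[5],
)
# Rows come back as a list of dicts keyed by the column aliases.
for row in rows:
    print(row["code"], row["name"])

Note that run_custom_query accepts a params argument for server-side binding, although the query builders above interpolate values into the SQL text directly.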
@@ -0,0 +1,17 @@
from django.apps import apps

from territories_dashboard_lib.indicators_lib.query.utils import run_custom_query


def refresh_filters(dimension):
    if dimension:
        query = f'SELECT DISTINCT({dimension.db_name}) as filter FROM "{dimension.indicator.db_table_prefix}_reg"'
        results = run_custom_query(query)
        Filter = apps.get_model("tdbmd_indicators", "Filter")
        Filter.objects.filter(dimension=dimension).delete()
        Filter.objects.bulk_create(
            [
                Filter(dimension=dimension, db_name=value["filter"], order=index)
                for index, value in enumerate(results)
            ]
        )
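
The final 17-line hunk matches territories_dashboard_lib/indicators_lib/refresh_filters.py. A hedged usage sketch, assuming a configured Django project; the Dimension model name is an assumption, since the hunk only shows that a dimension has db_name and indicator attributes and that the Filter model lives in the tdbmd_indicators app:

# Usage sketch. "Dimension" is an assumed model name; only the
# "tdbmd_indicators" app label and the Filter model appear in the diff.
from django.apps import apps

from territories_dashboard_lib.indicators_lib.refresh_filters import refresh_filters

Dimension = apps.get_model("tdbmd_indicators", "Dimension")  # assumed
for dimension in Dimension.objects.all():
    # Replaces the dimension's Filter rows with the DISTINCT values
    # found in the indicator's *_reg table.
    refresh_filters(dimension)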