setta 0.0.9.dev0__py3-none-any.whl → 0.0.9.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of setta might be problematic.
- setta/__init__.py +1 -1
- setta/code_gen/python/generate_code.py +2 -0
- setta/database/db/codeInfo/copy.py +1 -4
- setta/database/db/projects/saveAs.py +0 -3
- setta/database/db/projects/utils.py +2 -2
- setta/database/db/sectionVariants/copy.py +12 -3
- setta/database/db/sections/copy.py +4 -1
- setta/database/db/sections/jsonSource.py +109 -42
- setta/database/db/sections/load.py +130 -73
- setta/database/settings_file.py +1 -1
- setta/lsp/specific_file_watcher.py +33 -4
- setta/routers/projects.py +1 -1
- setta/routers/sections.py +15 -5
- setta/static/constants/constants.json +1 -1
- setta/static/constants/defaultValues.json +3 -2
- setta/static/constants/settingsProject.json +200 -29
- setta/static/frontend/assets/{index-c59176d8.css → index-cf887608.css} +1 -1
- setta/static/frontend/assets/{index-0134e43f.js → index-e049efee.js} +151 -151
- setta/static/frontend/index.html +2 -2
- setta/utils/constants.py +1 -4
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/METADATA +1 -1
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/RECORD +26 -26
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/WHEEL +1 -1
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/LICENSE +0 -0
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/entry_points.txt +0 -0
- {setta-0.0.9.dev0.dist-info → setta-0.0.9.dev1.dist-info}/top_level.txt +0 -0
setta/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.0.9.dev0"
+__version__ = "0.0.9.dev1"
setta/code_gen/python/generate_code.py
CHANGED
@@ -258,6 +258,8 @@ def convert_var_names_to_readable_form(
         # only create nice_var_name if we haven't already created it for this var_name
         if var_name not in var_name_to_nice_var_name_mapping:
             nice_name = exporter_obj.output[var_name]["name"]
+            # TODO: do the same thing on the frontend to find naming conflicts
+            nice_name = nice_name.replace(" ", "_")
             nice_var_name = create_nice_var_name(top_level_symbols, nice_name)
             top_level_symbols.add(nice_var_name)
             var_name_to_nice_var_name_mapping[var_name] = nice_var_name
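The two added lines sanitize display names that contain spaces before conflict resolution. A minimal sketch of the intended effect, using a hypothetical `make_unique` stand-in for `create_nice_var_name` (its suffixing behavior is an assumption, not taken from the diff):

```python
# Hypothetical stand-in for create_nice_var_name: append a numeric suffix on collision.
def make_unique(existing_symbols, name):
    candidate, i = name, 1
    while candidate in existing_symbols:
        candidate = f"{name}_{i}"
        i += 1
    return candidate

top_level_symbols = {"learning_rate"}
nice_name = "learning rate".replace(" ", "_")  # the new step: spaces -> underscores
print(make_unique(top_level_symbols, nice_name))  # learning_rate_1
```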
setta/database/db/codeInfo/copy.py
CHANGED
@@ -1,11 +1,8 @@
 from setta.database.utils import remap_ids, rename_keys
-from setta.utils.constants import is_from_json_source


 def copy_code_info(code_info):
-
-    new_code_info, code_info_id_map = remap_ids(code_info, ids_to_rename)
-    code_info_id_map.update({k: k for k in code_info.keys() if is_from_json_source(k)})
+    new_code_info, code_info_id_map = remap_ids(code_info)

     for c in new_code_info.values():
         c["id"] = code_info_id_map[c["id"]]
setta/database/db/projects/saveAs.py
CHANGED
@@ -1,12 +1,10 @@
 import copy

-from ..sections.jsonSource import remove_json_source_data
 from .copy import copy_project_config, copy_project_details
 from .save import save_project_details


 def save_as_new_project_config(db, project, new_config_name, with_refs):
-    remove_json_source_data(project)
     if not with_refs:
         project_to_save = copy_project_details(
             project, new_config_name, do_create_new_id=True
@@ -21,7 +19,6 @@ def save_as_new_project_config(db, project, new_config_name, with_refs):


 def save_as_existing_project_config(db, project, config_name):
-    remove_json_source_data(project)
     query = """
         SELECT *
         FROM ProjectConfig
setta/database/db/projects/utils.py
CHANGED
@@ -43,8 +43,8 @@ def remove_empty(x):
     return {k: v for k, v in x.items() if len(v) > 0}


-def filter_data_for_json_export(p
-    remove_json_source_data(p
+def filter_data_for_json_export(p):
+    remove_json_source_data(p)

     p["projectConfig"] = filter_dict(
         p["projectConfig"],
setta/database/db/sectionVariants/copy.py
CHANGED
@@ -2,11 +2,20 @@ from setta.database.utils import remap_ids, rename_keys
 from setta.utils.generate_memorable_string import generate_memorable_string


-def copy_section_variants(
+def copy_section_variants(
+    sections, section_variants, code_info_id_map, code_info_col_id_map
+):
     new_section_variants, section_variant_id_map = remap_ids(section_variants)
+    keep_old_name = set()
+    for s in sections.values():
+        if not s["jsonSource"]:
+            continue
+        for v in s["variantIds"]:
+            keep_old_name.add(section_variant_id_map[v])

-    for obj in new_section_variants.
-
+    for id, obj in new_section_variants.items():
+        if id not in keep_old_name:
+            obj["name"] = generate_memorable_string()
         obj["values"] = rename_keys(obj["values"], code_info_id_map)
         if obj["codeInfoColId"]:
             obj["codeInfoColId"] = code_info_col_id_map[obj["codeInfoColId"]]
setta/database/db/sections/copy.py
CHANGED
@@ -18,6 +18,9 @@ def copy_sections(
     for section in new_sections.values():
         section["id"] = section_id_map[section["id"]]
         section["variantId"] = section_variant_id_map[section["variantId"]]
+        section["defaultVariantId"] = section_variant_id_map[
+            section["defaultVariantId"]
+        ]
         section["uiTypeId"] = ui_type_id_map[section["uiTypeId"]]
         if section["uiTypeColId"]:
             section["uiTypeColId"] = ui_type_col_id_map[section["uiTypeColId"]]
@@ -41,7 +44,7 @@ def copy_sections_and_other_info(x):
         x["codeInfoCols"], code_info_id_map
     )
     new_section_variants, section_variant_id_map = copy_section_variants(
-        x["sectionVariants"], code_info_id_map, code_info_col_id_map
+        x["sections"], x["sectionVariants"], code_info_id_map, code_info_col_id_map
     )
     new_ui_types, ui_type_id_map = copy_ui_types(x["uiTypes"])
     new_ui_type_cols, ui_type_col_id_map = copy_ui_type_cols(
setta/database/db/sections/jsonSource.py
CHANGED
@@ -1,8 +1,13 @@
 import json
 from collections import defaultdict

-from setta.utils.constants import BASE_UI_TYPE_IDS, C
-from setta.utils.utils import
+from setta.utils.constants import BASE_UI_TYPE_IDS, C
+from setta.utils.utils import (
+    recursive_dict_merge,
+    replace_null_keys_with_none,
+    save_json_to_file,
+    try_json,
+)


 def save_json_source_data(p, section_ids=None, forking_from=None):
@@ -19,24 +24,29 @@ def save_json_source_data(p, section_ids=None, forking_from=None):
         with open(forking_from, "r") as f:
             forking_from_data = json.load(f)

+    p["codeInfoCols"] = replace_null_keys_with_none(p["codeInfoCols"])
+
+    ancestor_paths = build_ancestor_paths(p["codeInfo"], p["codeInfoCols"])
+
     for s in sections.values():
+        if not s["jsonSource"] or s["jsonSourceMissing"]:
+            continue
+
         for variantId in s["variantIds"]:
             variant = p["sectionVariants"][variantId]
-
+            codeInfoColId = variant["codeInfoColId"]
+            codeInfoCol = p["codeInfoCols"][codeInfoColId]
             filename = variant["name"]
-            for k, children in codeInfoCol["children"].items():
-                if is_from_json_source(k):
-                    metadata = json.loads(k.removeprefix(C.JSON_SOURCE_PREFIX))
-                    key_path = metadata["key"]
-                    value = try_getting_value(variant, k, children)
-
-                    current_dict = add_key_path_to_dict(
-                        to_be_saved[filename], key_path[:-1]
-                    )

-
-
-
+            recursively_add_keys(
+                p,
+                variant,
+                codeInfoCol,
+                to_be_saved[filename],
+                None,
+                s["jsonSourceKeys"],
+                ancestor_paths,
+            )

     # Make sure the jsonSourceKeys are present.
     # (They might not be because they are completely empty)
@@ -50,12 +60,60 @@ def save_json_source_data(p, section_ids=None, forking_from=None):
     return to_be_saved


-def
-
-
-
-
-
+def build_ancestor_paths(codeInfo, codeInfoCols):
+    parent_map = {}
+    for col in codeInfoCols.values():
+        for parent_id, children in col["children"].items():
+            for child_id in children:
+                parent_map[(codeInfo[child_id]["jsonSource"], child_id)] = parent_id
+
+    ancestor_paths = {}
+    for node_id in codeInfo:
+        if node_id not in ancestor_paths:
+            path = []
+            current_id = node_id
+
+            # Traverse up to build the path
+            while current_id is not None:
+                if current_id in codeInfo:  # Skip if not a valid codeInfo node
+                    name = codeInfo[current_id]["name"]
+                    path.insert(0, name)
+
+                    # Get parent using the map
+                    parent_id = parent_map.get(
+                        (codeInfo[current_id]["jsonSource"], current_id)
+                    )
+                    current_id = parent_id
+                else:
+                    break
+
+            ancestor_paths[node_id] = path
+
+    return ancestor_paths
+
+
+def recursively_add_keys(
+    p, variant, codeInfoCol, input_dict, codeInfoId, jsonSourceKeys, ancestor_paths
+):
+    for k in codeInfoCol["children"][codeInfoId]:
+        children = codeInfoCol["children"][k]
+        json_source = p["codeInfo"][k].get("jsonSource")
+
+        if json_source:
+            # Get pre-computed key path
+            key_path = [*jsonSourceKeys, *ancestor_paths[k]]
+            value = try_getting_value(variant, k, children)
+
+            current_dict = add_key_path_to_dict(input_dict, key_path[:-1])
+
+            # Set the value at the final position
+            if key_path:  # Only set if we have a path
+                current_dict[key_path[-1]] = value
+
+        # Continue recursion regardless of whether this node has a jsonSource
+        recursively_add_keys(
+            p, variant, codeInfoCol, input_dict, k, jsonSourceKeys, ancestor_paths
+        )


 def add_key_path_to_dict(output, key_path):
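Taken together, `build_ancestor_paths` records each param's chain of names by walking a child-to-parent map, and `recursively_add_keys` uses those paths to rebuild a nested dict that can be written back to the JSON file. A rough standalone sketch of the idea, with simplified data shapes (a flat `code_info` dict, a single `children` map, and a plain `values` dict) rather than the real project structures:

```python
# Simplified sketch of the ancestor-path + nested-rebuild idea from the diff.
code_info = {
    "a": {"name": "optimizer"},
    "b": {"name": "lr"},
    "c": {"name": "weight_decay"},
}
children = {None: ["a"], "a": ["b", "c"], "b": [], "c": []}
values = {"b": 0.001, "c": 0.01}

def ancestor_path(node_id):
    # Walk upward through the parent map, collecting names root-first.
    parent = {child: p for p, kids in children.items() for child in kids}
    path = []
    while node_id is not None:
        path.insert(0, code_info[node_id]["name"])
        node_id = parent.get(node_id)
    return path

def add_keys(node_id, out):
    # Mirror of the recursive walk: leaves get values, dict nodes recurse.
    for k in children[node_id]:
        path = ancestor_path(k)
        d = out
        for name in path[:-1]:
            d = d.setdefault(name, {})
        if children[k]:
            add_keys(k, out)
        else:
            d[path[-1]] = values.get(k, "")

result = {}
add_keys(None, result)
print(result)  # {'optimizer': {'lr': 0.001, 'weight_decay': 0.01}}
```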
@@ -67,36 +125,43 @@ def add_key_path_to_dict(output, key_path):
     return output


-def
-    if
-
-
+def try_getting_value(variant, codeInfoId, codeInfoChildren):
+    if len(codeInfoChildren) == 0:
+        if codeInfoId in variant["values"]:
+            return try_json(variant["values"][codeInfoId]["value"])
+        return ""
+    return {}
+

+def condition_keep_code_info(codeInfo, jsonCodeInfoWithUIType):
+    if not codeInfo:
+        return False
+    return not codeInfo["jsonSource"] or codeInfo["id"] in jsonCodeInfoWithUIType

-
+
+def remove_json_source_data(p):
     for variant in p["sectionVariants"].values():
         variant["values"] = {
-            k: v
+            k: v
+            for k, v in variant["values"].items()
+            if not p["codeInfo"][k]["jsonSource"]
         }

     jsonCodeInfoWithUIType = set()
-
-    for
-
-
-
-
-
-
-    )
-    jsonCodeInfoWithUIType.add(paramInfoId)
+    for uiTypeCol in p["uiTypeCols"].values():
+        for paramInfoId, uiTypeInfo in uiTypeCol.items():
+            # we want to know which json source params have an associated uiTypeId
+            # only if it's not the base TEXT type, since that's the default
+            if (
+                p["codeInfo"][paramInfoId]["jsonSource"]
+                and uiTypeInfo["uiTypeId"] != BASE_UI_TYPE_IDS[C.TEXT]
+            ):
+                jsonCodeInfoWithUIType.add(paramInfoId)

     p["codeInfo"] = {
         k: v
         for k, v in p["codeInfo"].items()
-        if condition_keep_code_info(
-            k, jsonCodeInfoWithUIType, keepCodeInfoThatHaveUITypes
-        )
+        if condition_keep_code_info(v, jsonCodeInfoWithUIType)
     }

     for codeInfoColId in p["codeInfoCols"].keys():
@@ -106,7 +171,8 @@ def remove_json_source_data(p, keepCodeInfoThatHaveUITypes=True):
             for k, v in codeInfoCol["children"].items()
             if k is None
             or condition_keep_code_info(
-                k,
+                p["codeInfo"].get(k),
+                jsonCodeInfoWithUIType,
             )
         }
         for id, children in codeInfoCol["children"].items():
@@ -114,6 +180,7 @@ def remove_json_source_data(p, keepCodeInfoThatHaveUITypes=True):
                 c
                 for c in children
                 if condition_keep_code_info(
-                    c,
+                    p["codeInfo"].get(c),
+                    jsonCodeInfoWithUIType,
                 )
             ]
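The reworked `remove_json_source_data` drops JSON-sourced params unless they carry a non-default UI type, and `condition_keep_code_info` now receives the codeInfo entry itself (or `None`) so missing entries are filtered out safely. A small illustration of that rule, with made-up ids and a placeholder constant standing in for `BASE_UI_TYPE_IDS[C.TEXT]`:

```python
# Placeholder constant; the real code compares against BASE_UI_TYPE_IDS[C.TEXT].
BASE_TEXT_UI_TYPE = "text-ui-type-id"

def condition_keep_code_info(code_info, json_params_with_ui_type):
    if not code_info:
        return False
    return not code_info["jsonSource"] or code_info["id"] in json_params_with_ui_type

code_info = {
    "p1": {"id": "p1", "jsonSource": None},        # regular param: always kept
    "p2": {"id": "p2", "jsonSource": "cfg.json"},  # JSON param with default TEXT ui type
    "p3": {"id": "p3", "jsonSource": "cfg.json"},  # JSON param with a custom ui type
}
ui_types = {"p2": BASE_TEXT_UI_TYPE, "p3": "slider-ui-type-id"}

with_ui_type = {
    pid for pid, t in ui_types.items()
    if code_info[pid]["jsonSource"] and t != BASE_TEXT_UI_TYPE
}
kept = {k: v for k, v in code_info.items() if condition_keep_code_info(v, with_ui_type)}
print(sorted(kept))  # ['p1', 'p3']
```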
setta/database/db/sections/load.py
CHANGED
@@ -7,10 +7,10 @@ from collections import defaultdict

 from setta.database.db.artifacts.load import load_artifact_groups
 from setta.database.db.codeInfo.utils import new_code_info_col, with_code_info_defaults
+from setta.database.db.sections.jsonSource import build_ancestor_paths
 from setta.database.db.sections.utils import with_section_defaults
 from setta.database.db.sectionVariants.utils import new_ev_entry, new_section_variant
 from setta.database.utils import create_new_id
-from setta.utils.constants import C

 from ..sectionVariants.load import load_section_variants
 from ..uiTypes.load import load_uitypecols, load_uitypes
@@ -214,34 +214,9 @@ def load_json_sources_into_data_structures(
             f'Attempting to read {s["jsonSource"]} with keys {s["jsonSourceKeys"]}'
         )
         new_data = load_json_source(s["jsonSource"], s["jsonSourceKeys"])
-
-
-
-            variantId = None
-            for vid in s["variantIds"]:
-                if sectionVariants[vid]["name"] == filename:
-                    variantId = vid
-                    break
-            if not variantId:
-                variantId, section_variant = new_section_variant(
-                    name=filename,
-                )
-                s["variantIds"].append(variantId)
-                sectionVariants[variantId] = section_variant
-
-            curr_section_variant = sectionVariants[variantId]
-            curr_section_variant["values"] = data["sectionVariantValues"]
-            codeInfoColId = curr_section_variant["codeInfoColId"]
-
-            if not codeInfoColId:
-                codeInfoColId = create_new_id()
-                curr_section_variant["codeInfoColId"] = codeInfoColId
-                codeInfoCols[codeInfoColId] = new_code_info_col()
-
-            codeInfoCols[codeInfoColId]["children"] = data["codeInfoColChildren"]
-
-            s["configLanguage"] = "json"
-            filenames_loaded.add(filename)
+        filenames_loaded.update(
+            merge_into_existing(new_data, s, sectionVariants, codeInfo, codeInfoCols)
+        )

         # delete variants that aren't associated with a loaded file
         to_delete = []
@@ -281,6 +256,74 @@ def load_json_sources_into_data_structures(
         s["defaultVariantId"] = s["variantId"]


+def merge_into_existing(new_data, section, sectionVariants, codeInfo, codeInfoCols):
+    filenames_loaded = set()
+    jsonSourceMetadata_to_id = {}
+    ancestor_paths = build_ancestor_paths(codeInfo, codeInfoCols)
+    for id, info in codeInfo.items():
+        jsonSourceMetadata_to_id[
+            createMetadataJsonString(info["jsonSource"], ancestor_paths[id])
+        ] = id
+
+    for filename, data in new_data.items():
+        replacements = {}
+        new_ancestor_paths = build_ancestor_paths(
+            data["codeInfo"], {None: {"children": data["codeInfoColChildren"]}}
+        )
+        for newId, newInfo in data["codeInfo"].items():
+            existingId = jsonSourceMetadata_to_id.get(
+                createMetadataJsonString(
+                    newInfo["jsonSource"], new_ancestor_paths[newId]
+                )
+            )
+            if existingId:
+                replacements[newId] = existingId
+            else:
+                codeInfo[newId] = newInfo
+
+        for newId, existingId in replacements.items():
+            del data["codeInfo"][newId]
+            data["codeInfoColChildren"][existingId] = [
+                replacements.get(x, x) for x in data["codeInfoColChildren"][newId]
+            ]
+            data["codeInfoColChildren"][None] = [
+                replacements.get(x, x) for x in data["codeInfoColChildren"][None]
+            ]
+            del data["codeInfoColChildren"][newId]
+            data["sectionVariantValues"][existingId] = data["sectionVariantValues"][
+                newId
+            ]
+            del data["sectionVariantValues"][newId]
+
+        variantId = None
+        for vid in section["variantIds"]:
+            if sectionVariants[vid]["name"] == filename:
+                variantId = vid
+                break
+        if not variantId:
+            variantId, section_variant = new_section_variant(
+                name=filename,
+            )
+            section["variantIds"].append(variantId)
+            sectionVariants[variantId] = section_variant
+
+        curr_section_variant = sectionVariants[variantId]
+        curr_section_variant["values"] = data["sectionVariantValues"]
+        codeInfoColId = curr_section_variant["codeInfoColId"]
+
+        if not codeInfoColId:
+            codeInfoColId = create_new_id()
+            curr_section_variant["codeInfoColId"] = codeInfoColId
+            codeInfoCols[codeInfoColId] = new_code_info_col()
+
+        codeInfoCols[codeInfoColId]["children"] = data["codeInfoColChildren"]
+
+        section["configLanguage"] = "json"
+        filenames_loaded.add(filename)
+
+    return filenames_loaded
+
+
 def load_json_source(filename_glob, jsonSourceKeys):
     new_data = {}

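The new `merge_into_existing` helper matches freshly parsed params against existing ones by a JSON string built from the source file and the param's ancestor path, reusing the existing id on a match instead of creating a duplicate. A toy illustration of that keying scheme (the ids and paths are invented):

```python
import json

def metadata_key(filename, path):
    # Same shape as createMetadataJsonString in the diff.
    return json.dumps({"filename": filename, "key": path})

existing = {metadata_key("cfg.json", ["optimizer", "lr"]): "existing-id-123"}

# A re-parsed param with the same file and key path maps back to the existing id;
# anything without a match would be added as a new codeInfo entry instead.
reused_id = existing.get(metadata_key("cfg.json", ["optimizer", "lr"]))
print(reused_id)  # existing-id-123
```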
@@ -289,83 +332,97 @@ def load_json_source(filename_glob, jsonSourceKeys):
         try:
             with open(filename, "r") as f:
                 jsonSourceData = json.load(f)
-        except:
+        except json.JSONDecodeError:
+            jsonSourceData = {}
+        except FileNotFoundError:
             logger.debug(f"couldn't find: {filename}")
             continue

-        new_data[filename] =
-
-            "codeInfoColChildren": {},
-            "sectionVariantValues": {},
-        }
-
-        try:
-            for k in jsonSourceKeys:
-                jsonSourceData = jsonSourceData[k]
-        except:
-            # TODO print warning or something
-            pass
-
-        process_json_object(
-            new_data, jsonSourceData, filename, filename_glob, jsonSourceKeys
+        new_data[filename] = process_json_object(
+            jsonSourceData, filename, jsonSourceKeys
         )

-
-
-
-
-
-
+    return new_data
+
+
+def process_json_object(jsonSourceData, filename, jsonSourceKeys):
+    new_data = {
+        "codeInfo": {},
+        "codeInfoColChildren": {},
+        "sectionVariantValues": {},
+    }
+
+    try:
+        for k in jsonSourceKeys:
+            jsonSourceData = jsonSourceData[k]
+    except:
+        # TODO print warning or something
+        pass
+
+    metadataToId = {}
+
+    highest_key = process_json_object_helper(
+        new_data, jsonSourceData, filename, jsonSourceKeys, metadataToId
+    )
+
+    if len(jsonSourceKeys) > 0:
+        # point directly from None (the root) to the children
+        codeInfoChildren = new_data["codeInfoColChildren"]
+        codeInfoChildren[None] = codeInfoChildren[highest_key]
+        del codeInfoChildren[highest_key]

     return new_data


-def
+def process_json_object_helper(output, obj, filename, current_path, metadataToId):
     if not isinstance(obj, dict):
         return

     children_keys = []
     for k, v in obj.items():
         path = current_path + [k]
-
-
-        )
-        children_keys.append(full_key)
+        paramInfoId, is_dict = create_json_code_info(filename, k, v, output)
+        metadataToId[createMetadataJsonString(filename, path)] = paramInfoId
+        children_keys.append(paramInfoId)
         if is_dict:
-
+            process_json_object_helper(output, v, filename, path, metadataToId)

     parent_id = None
     if len(current_path) > 0:
-
+        metadata = createMetadataJsonString(filename, current_path)
+        parent_id = metadataToId.get(metadata)
+        if not parent_id:
+            parent_id = create_new_id()
+            metadataToId[metadata] = parent_id

-    output[
+    output["codeInfoColChildren"][parent_id] = children_keys
+    return parent_id


-def create_json_code_info(filename,
-
+def create_json_code_info(filename, key, value, output):
+    paramInfoId = create_new_id()
     # Create code info entry
-    output[
-        id=
+    output["codeInfo"][paramInfoId] = with_code_info_defaults(
+        id=paramInfoId,
+        name=key,
+        editable=True,
+        jsonSource=filename,
     )
-    output[
+    output["codeInfoColChildren"][paramInfoId] = []

     is_dict = isinstance(value, dict)
     # Create variant value entry
     if is_dict:
         # For objects, store empty value and process children
-        output[
+        output["sectionVariantValues"][paramInfoId] = new_ev_entry()
     else:
         # For non-objects, store the value directly
-        output[
+        output["sectionVariantValues"][paramInfoId] = new_ev_entry(
             value=json.dumps(value)
         )

-    return
+    return paramInfoId, is_dict


-def
-
-    key = json.dumps(
-        {"filenameGlob": filename_glob, "key": path}, separators=(",", ":")
-    )
-    return f"{C.JSON_SOURCE_PREFIX}{key}"
+def createMetadataJsonString(filename, path):
+    return json.dumps({"filename": filename, "key": path})
setta/database/settings_file.py
CHANGED