setta 0.0.8.dev1-py3-none-any.whl → 0.0.9.dev1-py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of setta might be problematic.

Files changed (32)
  1. setta/__init__.py +1 -1
  2. setta/code_gen/export_selected.py +6 -4
  3. setta/code_gen/python/generate_code.py +2 -0
  4. setta/database/db/codeInfo/copy.py +1 -4
  5. setta/database/db/projects/saveAs.py +0 -3
  6. setta/database/db/projects/utils.py +2 -2
  7. setta/database/db/sectionVariants/copy.py +12 -3
  8. setta/database/db/sections/copy.py +4 -1
  9. setta/database/db/sections/jsonSource.py +109 -42
  10. setta/database/db/sections/load.py +145 -73
  11. setta/database/settings_file.py +1 -1
  12. setta/lsp/file_watcher.py +0 -16
  13. setta/lsp/specific_file_watcher.py +278 -0
  14. setta/lsp/utils.py +20 -0
  15. setta/routers/dependencies.py +4 -0
  16. setta/routers/projects.py +1 -1
  17. setta/routers/sections.py +28 -5
  18. setta/server.py +6 -0
  19. setta/static/constants/constants.json +3 -1
  20. setta/static/constants/defaultValues.json +3 -2
  21. setta/static/constants/settingsProject.json +200 -29
  22. setta/static/frontend/assets/{index-c59176d8.css → index-cf887608.css} +1 -1
  23. setta/static/frontend/assets/{index-20612afa.js → index-e049efee.js} +157 -157
  24. setta/static/frontend/index.html +2 -2
  25. setta/utils/constants.py +1 -4
  26. setta/utils/websocket_manager.py +5 -0
  27. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/METADATA +10 -5
  28. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/RECORD +32 -31
  29. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/WHEEL +1 -1
  30. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/LICENSE +0 -0
  31. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/entry_points.txt +0 -0
  32. {setta-0.0.8.dev1.dist-info → setta-0.0.9.dev1.dist-info}/top_level.txt +0 -0
setta/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.0.8.dev1"
+ __version__ = "0.0.9.dev1"
setta/code_gen/export_selected.py CHANGED
@@ -549,7 +549,7 @@ class ExporterForInMemoryFn:
              C.LIST_ROOT,
              C.DICT_ROOT,
              C.GROUP,
-             # C.IMAGE,
+             C.IMAGE,
              # C.CHART,
              C.DRAW,
              C.CHAT,
@@ -588,9 +588,11 @@ class ExporterForInMemoryFn:
                  value[child_name] = self.export_section(
                      c, f'{name}["{child_name}"]'
                  )
-         # elif type in [C.IMAGE, C.CHART]:
-         #     value = get_artifacts(self.p, id)
-         #     self.create_var_mapping((id, "artifact"), name)
+         elif type in [C.IMAGE]:
+             artifacts = get_artifacts(self.p, id)
+             img = artifacts[0]["value"] if len(artifacts) > 0 else None
+             value = {"image": img}
+             self.create_var_mapping((id, "image"), f'{name}["image"]')
          elif type == C.DRAW:
              value = {"drawing": get_drawing(self.p, id)}
              self.create_var_mapping((id, "drawing"), f'{name}["drawing"]')
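
Note on the re-enabled C.IMAGE branch: it exports only the first artifact's value under an "image" key, falling back to None. A minimal sketch of that selection logic, assuming get_artifacts returns a list of dicts with a "value" key (as the hunk above implies); the artifact data is made up:

    # Stand-in for the selection logic in the new elif branch.
    def export_image_value(artifacts):
        img = artifacts[0]["value"] if len(artifacts) > 0 else None
        return {"image": img}

    print(export_image_value([{"value": "<base64 png>"}]))  # {'image': '<base64 png>'}
    print(export_image_value([]))                           # {'image': None}
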
setta/code_gen/python/generate_code.py CHANGED
@@ -258,6 +258,8 @@ def convert_var_names_to_readable_form(
          # only create nice_var_name if we haven't already created it for this var_name
          if var_name not in var_name_to_nice_var_name_mapping:
              nice_name = exporter_obj.output[var_name]["name"]
+             # TODO: do the same thing on the frontend to find naming conflicts
+             nice_name = nice_name.replace(" ", "_")
              nice_var_name = create_nice_var_name(top_level_symbols, nice_name)
              top_level_symbols.add(nice_var_name)
              var_name_to_nice_var_name_mapping[var_name] = nice_var_name
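
Note on the new nice_name.replace(" ", "_"): it turns display names containing spaces into valid Python identifiers before uniquification. A small sketch; uniquify() below is a hypothetical stand-in for setta's create_nice_var_name, shown only to illustrate where the replace fits:

    def uniquify(top_level_symbols, nice_name):
        candidate, i = nice_name, 1
        while candidate in top_level_symbols:
            candidate = f"{nice_name}_{i}"
            i += 1
        return candidate

    top_level_symbols = {"learning_rate"}
    nice_name = "learning rate".replace(" ", "_")  # the new normalization step
    print(uniquify(top_level_symbols, nice_name))  # learning_rate_1
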
setta/database/db/codeInfo/copy.py CHANGED
@@ -1,11 +1,8 @@
  from setta.database.utils import remap_ids, rename_keys
- from setta.utils.constants import is_from_json_source


  def copy_code_info(code_info):
-     ids_to_rename = [k for k in code_info.keys() if not is_from_json_source(k)]
-     new_code_info, code_info_id_map = remap_ids(code_info, ids_to_rename)
-     code_info_id_map.update({k: k for k in code_info.keys() if is_from_json_source(k)})
+     new_code_info, code_info_id_map = remap_ids(code_info)

      for c in new_code_info.values():
          c["id"] = code_info_id_map[c["id"]]
setta/database/db/projects/saveAs.py CHANGED
@@ -1,12 +1,10 @@
  import copy

- from ..sections.jsonSource import remove_json_source_data
  from .copy import copy_project_config, copy_project_details
  from .save import save_project_details


  def save_as_new_project_config(db, project, new_config_name, with_refs):
-     remove_json_source_data(project)
      if not with_refs:
          project_to_save = copy_project_details(
              project, new_config_name, do_create_new_id=True
@@ -21,7 +19,6 @@ def save_as_new_project_config(db, project, new_config_name, with_refs):


  def save_as_existing_project_config(db, project, config_name):
-     remove_json_source_data(project)
      query = """
          SELECT *
          FROM ProjectConfig
setta/database/db/projects/utils.py CHANGED
@@ -43,8 +43,8 @@ def remove_empty(x):
      return {k: v for k, v in x.items() if len(v) > 0}


- def filter_data_for_json_export(p, keepCodeInfoThatHaveUITypes):
-     remove_json_source_data(p, keepCodeInfoThatHaveUITypes)
+ def filter_data_for_json_export(p):
+     remove_json_source_data(p)

      p["projectConfig"] = filter_dict(
          p["projectConfig"],
setta/database/db/sectionVariants/copy.py CHANGED
@@ -2,11 +2,20 @@ from setta.database.utils import remap_ids, rename_keys
  from setta.utils.generate_memorable_string import generate_memorable_string


- def copy_section_variants(section_variants, code_info_id_map, code_info_col_id_map):
+ def copy_section_variants(
+     sections, section_variants, code_info_id_map, code_info_col_id_map
+ ):
      new_section_variants, section_variant_id_map = remap_ids(section_variants)
+     keep_old_name = set()
+     for s in sections.values():
+         if not s["jsonSource"]:
+             continue
+         for v in s["variantIds"]:
+             keep_old_name.add(section_variant_id_map[v])

-     for obj in new_section_variants.values():
-         obj["name"] = generate_memorable_string()
+     for id, obj in new_section_variants.items():
+         if id not in keep_old_name:
+             obj["name"] = generate_memorable_string()
          obj["values"] = rename_keys(obj["values"], code_info_id_map)
          if obj["codeInfoColId"]:
              obj["codeInfoColId"] = code_info_col_id_map[obj["codeInfoColId"]]
setta/database/db/sections/copy.py CHANGED
@@ -18,6 +18,9 @@ def copy_sections(
      for section in new_sections.values():
          section["id"] = section_id_map[section["id"]]
          section["variantId"] = section_variant_id_map[section["variantId"]]
+         section["defaultVariantId"] = section_variant_id_map[
+             section["defaultVariantId"]
+         ]
          section["uiTypeId"] = ui_type_id_map[section["uiTypeId"]]
          if section["uiTypeColId"]:
              section["uiTypeColId"] = ui_type_col_id_map[section["uiTypeColId"]]
@@ -41,7 +44,7 @@ def copy_sections_and_other_info(x):
          x["codeInfoCols"], code_info_id_map
      )
      new_section_variants, section_variant_id_map = copy_section_variants(
-         x["sectionVariants"], code_info_id_map, code_info_col_id_map
+         x["sections"], x["sectionVariants"], code_info_id_map, code_info_col_id_map
      )
      new_ui_types, ui_type_id_map = copy_ui_types(x["uiTypes"])
      new_ui_type_cols, ui_type_col_id_map = copy_ui_type_cols(
setta/database/db/sections/jsonSource.py CHANGED
@@ -1,8 +1,13 @@
  import json
  from collections import defaultdict

- from setta.utils.constants import BASE_UI_TYPE_IDS, C, is_from_json_source
- from setta.utils.utils import recursive_dict_merge, save_json_to_file, try_json
+ from setta.utils.constants import BASE_UI_TYPE_IDS, C
+ from setta.utils.utils import (
+     recursive_dict_merge,
+     replace_null_keys_with_none,
+     save_json_to_file,
+     try_json,
+ )


  def save_json_source_data(p, section_ids=None, forking_from=None):
@@ -19,24 +24,29 @@ def save_json_source_data(p, section_ids=None, forking_from=None):
          with open(forking_from, "r") as f:
              forking_from_data = json.load(f)

+     p["codeInfoCols"] = replace_null_keys_with_none(p["codeInfoCols"])
+
+     ancestor_paths = build_ancestor_paths(p["codeInfo"], p["codeInfoCols"])
+
      for s in sections.values():
+         if not s["jsonSource"] or s["jsonSourceMissing"]:
+             continue
+
          for variantId in s["variantIds"]:
              variant = p["sectionVariants"][variantId]
-             codeInfoCol = p["codeInfoCols"][variant["codeInfoColId"]]
+             codeInfoColId = variant["codeInfoColId"]
+             codeInfoCol = p["codeInfoCols"][codeInfoColId]
              filename = variant["name"]
-             for k, children in codeInfoCol["children"].items():
-                 if is_from_json_source(k):
-                     metadata = json.loads(k.removeprefix(C.JSON_SOURCE_PREFIX))
-                     key_path = metadata["key"]
-                     value = try_getting_value(variant, k, children)
-
-                     current_dict = add_key_path_to_dict(
-                         to_be_saved[filename], key_path[:-1]
-                     )

-                     # Set the value at the final position
-                     if key_path:  # Only set if we have a path
-                         current_dict[key_path[-1]] = value
+             recursively_add_keys(
+                 p,
+                 variant,
+                 codeInfoCol,
+                 to_be_saved[filename],
+                 None,
+                 s["jsonSourceKeys"],
+                 ancestor_paths,
+             )

      # Make sure the jsonSourceKeys are present.
      # (They might not be because they are completely empty)
@@ -50,12 +60,60 @@ def save_json_source_data(p, section_ids=None, forking_from=None):
      return to_be_saved


- def try_getting_value(variant, codeInfoId, codeInfoChildren):
-     if len(codeInfoChildren) == 0:
-         if codeInfoId in variant["values"]:
-             return try_json(variant["values"][codeInfoId]["value"])
-         return ""
-     return {}
+ def build_ancestor_paths(codeInfo, codeInfoCols):
+     parent_map = {}
+     for col in codeInfoCols.values():
+         for parent_id, children in col["children"].items():
+             for child_id in children:
+                 parent_map[(codeInfo[child_id]["jsonSource"], child_id)] = parent_id
+
+     ancestor_paths = {}
+     for node_id in codeInfo:
+         if node_id not in ancestor_paths:
+             path = []
+             current_id = node_id
+
+             # Traverse up to build the path
+             while current_id is not None:
+                 if current_id in codeInfo:  # Skip if not a valid codeInfo node
+                     name = codeInfo[current_id]["name"]
+                     path.insert(0, name)
+
+                     # Get parent using the map
+                     parent_id = parent_map.get(
+                         (codeInfo[current_id]["jsonSource"], current_id)
+                     )
+                     current_id = parent_id
+                 else:
+                     break
+
+             ancestor_paths[node_id] = path
+
+     return ancestor_paths
+
+
+ def recursively_add_keys(
+     p, variant, codeInfoCol, input_dict, codeInfoId, jsonSourceKeys, ancestor_paths
+ ):
+     for k in codeInfoCol["children"][codeInfoId]:
+         children = codeInfoCol["children"][k]
+         json_source = p["codeInfo"][k].get("jsonSource")
+
+         if json_source:
+             # Get pre-computed key path
+             key_path = [*jsonSourceKeys, *ancestor_paths[k]]
+             value = try_getting_value(variant, k, children)
+
+             current_dict = add_key_path_to_dict(input_dict, key_path[:-1])
+
+             # Set the value at the final position
+             if key_path:  # Only set if we have a path
+                 current_dict[key_path[-1]] = value
+
+         # Continue recursion regardless of whether this node has a jsonSource
+         recursively_add_keys(
+             p, variant, codeInfoCol, input_dict, k, jsonSourceKeys, ancestor_paths
+         )


  def add_key_path_to_dict(output, key_path):
@@ -67,36 +125,43 @@ def add_key_path_to_dict(output, key_path):
      return output


- def condition_keep_code_info(k, jsonCodeInfoWithUIType, keepCodeInfoThatHaveUITypes):
-     if keepCodeInfoThatHaveUITypes:
-         return k in jsonCodeInfoWithUIType or not is_from_json_source(k)
-     return not is_from_json_source(k)
+ def try_getting_value(variant, codeInfoId, codeInfoChildren):
+     if len(codeInfoChildren) == 0:
+         if codeInfoId in variant["values"]:
+             return try_json(variant["values"][codeInfoId]["value"])
+         return ""
+     return {}
+

+ def condition_keep_code_info(codeInfo, jsonCodeInfoWithUIType):
+     if not codeInfo:
+         return False
+     return not codeInfo["jsonSource"] or codeInfo["id"] in jsonCodeInfoWithUIType

- def remove_json_source_data(p, keepCodeInfoThatHaveUITypes=True):
+
+ def remove_json_source_data(p):
      for variant in p["sectionVariants"].values():
          variant["values"] = {
-             k: v for k, v in variant["values"].items() if not is_from_json_source(k)
+             k: v
+             for k, v in variant["values"].items()
+             if not p["codeInfo"][k]["jsonSource"]
          }

      jsonCodeInfoWithUIType = set()
-     if keepCodeInfoThatHaveUITypes:
-         for uiTypeCol in p["uiTypeCols"].values():
-             for paramInfoId, uiTypeInfo in uiTypeCol.items():
-                 # we want to know which json source params have an associated uiTypeId
-                 # only if it's not the base TEXT type, since that's the default
-                 if (
-                     is_from_json_source(paramInfoId)
-                     and uiTypeInfo["uiTypeId"] != BASE_UI_TYPE_IDS[C.TEXT]
-                 ):
-                     jsonCodeInfoWithUIType.add(paramInfoId)
+     for uiTypeCol in p["uiTypeCols"].values():
+         for paramInfoId, uiTypeInfo in uiTypeCol.items():
+             # we want to know which json source params have an associated uiTypeId
+             # only if it's not the base TEXT type, since that's the default
+             if (
+                 p["codeInfo"][paramInfoId]["jsonSource"]
+                 and uiTypeInfo["uiTypeId"] != BASE_UI_TYPE_IDS[C.TEXT]
+             ):
+                 jsonCodeInfoWithUIType.add(paramInfoId)

      p["codeInfo"] = {
          k: v
          for k, v in p["codeInfo"].items()
-         if condition_keep_code_info(
-             k, jsonCodeInfoWithUIType, keepCodeInfoThatHaveUITypes
-         )
+         if condition_keep_code_info(v, jsonCodeInfoWithUIType)
      }

      for codeInfoColId in p["codeInfoCols"].keys():
@@ -106,7 +171,8 @@ def remove_json_source_data(p, keepCodeInfoThatHaveUITypes=True):
              for k, v in codeInfoCol["children"].items()
              if k is None
              or condition_keep_code_info(
-                 k, jsonCodeInfoWithUIType, keepCodeInfoThatHaveUITypes
+                 p["codeInfo"].get(k),
+                 jsonCodeInfoWithUIType,
              )
          }
          for id, children in codeInfoCol["children"].items():
@@ -114,6 +180,7 @@ def remove_json_source_data(p, keepCodeInfoThatHaveUITypes=True):
                  c
                  for c in children
                  if condition_keep_code_info(
-                     c, jsonCodeInfoWithUIType, keepCodeInfoThatHaveUITypes
+                     p["codeInfo"].get(c),
+                     jsonCodeInfoWithUIType,
                  )
              ]
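
Note: the structural shift in this file is that json-source params are no longer identified by a JSON_SOURCE_PREFIX-encoded key; their location in the saved file is recovered from the source filename plus the ancestor name path. A toy call to the new build_ancestor_paths, assuming setta 0.0.9.dev1 is importable; only the fields the function reads are filled in, and the data is made up:

    from setta.database.db.sections.jsonSource import build_ancestor_paths

    codeInfo = {
        "a": {"name": "model", "jsonSource": "cfg.json"},
        "b": {"name": "lr", "jsonSource": "cfg.json"},
    }
    codeInfoCols = {"col1": {"children": {None: ["a"], "a": ["b"], "b": []}}}

    print(build_ancestor_paths(codeInfo, codeInfoCols))
    # {'a': ['model'], 'b': ['model', 'lr']}
    # recursively_add_keys then prepends the section's jsonSourceKeys and writes
    # b's value at ["model"]["lr"] in the saved json.
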
setta/database/db/sections/load.py CHANGED
@@ -7,10 +7,10 @@ from collections import defaultdict

  from setta.database.db.artifacts.load import load_artifact_groups
  from setta.database.db.codeInfo.utils import new_code_info_col, with_code_info_defaults
+ from setta.database.db.sections.jsonSource import build_ancestor_paths
  from setta.database.db.sections.utils import with_section_defaults
  from setta.database.db.sectionVariants.utils import new_ev_entry, new_section_variant
  from setta.database.utils import create_new_id
- from setta.utils.constants import C

  from ..sectionVariants.load import load_section_variants
  from ..uiTypes.load import load_uitypecols, load_uitypes
@@ -210,40 +210,22 @@ def load_json_sources_into_data_structures(
          if v["jsonSource"] and ((not section_ids) or k in section_ids)
      }
      for s in sections.values():
+         logger.debug(
+             f'Attempting to read {s["jsonSource"]} with keys {s["jsonSourceKeys"]}'
+         )
          new_data = load_json_source(s["jsonSource"], s["jsonSourceKeys"])
-         for filename, data in new_data.items():
-             codeInfo.update(data["codeInfo"])
-             variantId = None
-             for vid in s["variantIds"]:
-                 if sectionVariants[vid]["name"] == filename:
-                     variantId = vid
-                     break
-             if not variantId:
-                 variantId, section_variant = new_section_variant(
-                     name=filename,
-                 )
-                 s["variantIds"].append(variantId)
-                 sectionVariants[variantId] = section_variant
-
-             curr_section_variant = sectionVariants[variantId]
-             curr_section_variant["values"] = data["sectionVariantValues"]
-             codeInfoColId = curr_section_variant["codeInfoColId"]
-
-             if not codeInfoColId:
-                 codeInfoColId = create_new_id()
-                 curr_section_variant["codeInfoColId"] = codeInfoColId
-                 codeInfoCols[codeInfoColId] = new_code_info_col()
-
-             codeInfoCols[codeInfoColId]["children"] = data["codeInfoColChildren"]
-
-             s["configLanguage"] = "json"
-             filenames_loaded.add(filename)
+         filenames_loaded.update(
+             merge_into_existing(new_data, s, sectionVariants, codeInfo, codeInfoCols)
+         )

      # delete variants that aren't associated with a loaded file
      to_delete = []
      for s in sections.values():
          for vid in s["variantIds"]:
              if sectionVariants[vid]["name"] not in filenames_loaded:
+                 logger.debug(
+                     f'Removing variant {sectionVariants[vid]["name"]} because the associated json was not found'
+                 )
                  to_delete.append(vid)

      for vid in to_delete:
@@ -255,17 +237,93 @@
          s["jsonSourceMissing"] = False
          s["variantIds"] = [v for v in s["variantIds"] if v in sectionVariants]
          if len(s["variantIds"]) == 0:
+             logger.debug("Section has no variantIds. Creating new section variant.")
              variantId, variant = new_section_variant()
-             s["variantIds"].append(variantId)
              sectionVariants[variantId] = variant
+             s["variantId"] = variantId
+             s["variantIds"].append(variantId)
              s["jsonSourceMissing"] = True
          elif s["variantId"] not in s["variantIds"]:
+             logger.debug(
+                 "Selected variantId is not in list of variantIds. Changing selected variantId"
+             )
              s["variantId"] = s["variantIds"][0]

          if s["defaultVariantId"] not in s["variantIds"]:
+             logger.debug(
+                 "Default variantId is not in list of variantIds. Changing default variantId"
+             )
              s["defaultVariantId"] = s["variantId"]


+ def merge_into_existing(new_data, section, sectionVariants, codeInfo, codeInfoCols):
+     filenames_loaded = set()
+     jsonSourceMetadata_to_id = {}
+     ancestor_paths = build_ancestor_paths(codeInfo, codeInfoCols)
+     for id, info in codeInfo.items():
+         jsonSourceMetadata_to_id[
+             createMetadataJsonString(info["jsonSource"], ancestor_paths[id])
+         ] = id
+
+     for filename, data in new_data.items():
+         replacements = {}
+         new_ancestor_paths = build_ancestor_paths(
+             data["codeInfo"], {None: {"children": data["codeInfoColChildren"]}}
+         )
+         for newId, newInfo in data["codeInfo"].items():
+             existingId = jsonSourceMetadata_to_id.get(
+                 createMetadataJsonString(
+                     newInfo["jsonSource"], new_ancestor_paths[newId]
+                 )
+             )
+             if existingId:
+                 replacements[newId] = existingId
+             else:
+                 codeInfo[newId] = newInfo
+
+         for newId, existingId in replacements.items():
+             del data["codeInfo"][newId]
+             data["codeInfoColChildren"][existingId] = [
+                 replacements.get(x, x) for x in data["codeInfoColChildren"][newId]
+             ]
+             data["codeInfoColChildren"][None] = [
+                 replacements.get(x, x) for x in data["codeInfoColChildren"][None]
+             ]
+             del data["codeInfoColChildren"][newId]
+             data["sectionVariantValues"][existingId] = data["sectionVariantValues"][
+                 newId
+             ]
+             del data["sectionVariantValues"][newId]
+
+         variantId = None
+         for vid in section["variantIds"]:
+             if sectionVariants[vid]["name"] == filename:
+                 variantId = vid
+                 break
+         if not variantId:
+             variantId, section_variant = new_section_variant(
+                 name=filename,
+             )
+             section["variantIds"].append(variantId)
+             sectionVariants[variantId] = section_variant
+
+         curr_section_variant = sectionVariants[variantId]
+         curr_section_variant["values"] = data["sectionVariantValues"]
+         codeInfoColId = curr_section_variant["codeInfoColId"]
+
+         if not codeInfoColId:
+             codeInfoColId = create_new_id()
+             curr_section_variant["codeInfoColId"] = codeInfoColId
+             codeInfoCols[codeInfoColId] = new_code_info_col()
+
+         codeInfoCols[codeInfoColId]["children"] = data["codeInfoColChildren"]
+
+         section["configLanguage"] = "json"
+         filenames_loaded.add(filename)
+
+     return filenames_loaded
+
+
  def load_json_source(filename_glob, jsonSourceKeys):
      new_data = {}

@@ -274,83 +332,97 @@ def load_json_source(filename_glob, jsonSourceKeys):
          try:
              with open(filename, "r") as f:
                  jsonSourceData = json.load(f)
-         except:
+         except json.JSONDecodeError:
+             jsonSourceData = {}
+         except FileNotFoundError:
              logger.debug(f"couldn't find: {filename}")
              continue

-         new_data[filename] = {
-             "codeInfo": {},
-             "codeInfoColChildren": {},
-             "sectionVariantValues": {},
-         }
-
-         try:
-             for k in jsonSourceKeys:
-                 jsonSourceData = jsonSourceData[k]
-         except:
-             # TODO print warning or something
-             pass
-
-         process_json_object(
-             new_data, jsonSourceData, filename, filename_glob, jsonSourceKeys
+         new_data[filename] = process_json_object(
+             jsonSourceData, filename, jsonSourceKeys
          )

-         if len(jsonSourceKeys) > 0:
-             # point directly from None (the root) to the children
-             highest_key = create_json_code_info_key(filename_glob, jsonSourceKeys)
-             codeInfoChildren = new_data[filename]["codeInfoColChildren"]
-             codeInfoChildren[None] = codeInfoChildren[highest_key]
-             del codeInfoChildren[highest_key]
+     return new_data
+
+
+ def process_json_object(jsonSourceData, filename, jsonSourceKeys):
+     new_data = {
+         "codeInfo": {},
+         "codeInfoColChildren": {},
+         "sectionVariantValues": {},
+     }
+
+     try:
+         for k in jsonSourceKeys:
+             jsonSourceData = jsonSourceData[k]
+     except:
+         # TODO print warning or something
+         pass
+
+     metadataToId = {}
+
+     highest_key = process_json_object_helper(
+         new_data, jsonSourceData, filename, jsonSourceKeys, metadataToId
+     )
+
+     if len(jsonSourceKeys) > 0:
+         # point directly from None (the root) to the children
+         codeInfoChildren = new_data["codeInfoColChildren"]
+         codeInfoChildren[None] = codeInfoChildren[highest_key]
+         del codeInfoChildren[highest_key]

      return new_data


- def process_json_object(output, obj, filename, filename_glob, current_path):
+ def process_json_object_helper(output, obj, filename, current_path, metadataToId):
      if not isinstance(obj, dict):
          return

      children_keys = []
      for k, v in obj.items():
          path = current_path + [k]
-         full_key, is_dict = create_json_code_info(
-             filename, filename_glob, path, k, v, output
-         )
-         children_keys.append(full_key)
+         paramInfoId, is_dict = create_json_code_info(filename, k, v, output)
+         metadataToId[createMetadataJsonString(filename, path)] = paramInfoId
+         children_keys.append(paramInfoId)
          if is_dict:
-             process_json_object(output, v, filename, filename_glob, path)
+             process_json_object_helper(output, v, filename, path, metadataToId)

      parent_id = None
      if len(current_path) > 0:
-         parent_id = create_json_code_info_key(filename_glob, current_path)
+         metadata = createMetadataJsonString(filename, current_path)
+         parent_id = metadataToId.get(metadata)
+         if not parent_id:
+             parent_id = create_new_id()
+             metadataToId[metadata] = parent_id

-     output[filename]["codeInfoColChildren"][parent_id] = children_keys
+     output["codeInfoColChildren"][parent_id] = children_keys
+     return parent_id


- def create_json_code_info(filename, filename_glob, path, key, value, output):
-     full_key = create_json_code_info_key(filename_glob, path)
+ def create_json_code_info(filename, key, value, output):
+     paramInfoId = create_new_id()
      # Create code info entry
-     output[filename]["codeInfo"][full_key] = with_code_info_defaults(
-         id=full_key, name=key
+     output["codeInfo"][paramInfoId] = with_code_info_defaults(
+         id=paramInfoId,
+         name=key,
+         editable=True,
+         jsonSource=filename,
      )
-     output[filename]["codeInfoColChildren"][full_key] = []
+     output["codeInfoColChildren"][paramInfoId] = []

      is_dict = isinstance(value, dict)
      # Create variant value entry
      if is_dict:
          # For objects, store empty value and process children
-         output[filename]["sectionVariantValues"][full_key] = new_ev_entry()
+         output["sectionVariantValues"][paramInfoId] = new_ev_entry()
      else:
          # For non-objects, store the value directly
-         output[filename]["sectionVariantValues"][full_key] = new_ev_entry(
+         output["sectionVariantValues"][paramInfoId] = new_ev_entry(
              value=json.dumps(value)
          )

-     return full_key, is_dict
+     return paramInfoId, is_dict


- def create_json_code_info_key(filename_glob, path):
-     # specify separators to make json.dumps equivalent to JSON.stringify
-     key = json.dumps(
-         {"filenameGlob": filename_glob, "key": path}, separators=(",", ":")
-     )
-     return f"{C.JSON_SOURCE_PREFIX}{key}"
+ def createMetadataJsonString(filename, path):
+     return json.dumps({"filename": filename, "key": path})
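
Note: merge_into_existing is what keeps reloads idempotent: a json-source param is matched by its source filename plus its key path, so re-reading the same file reuses existing codeInfo ids instead of creating duplicates. A condensed sketch of the matching rule, with made-up ids; metadata_key mirrors createMetadataJsonString above:

    import json

    def metadata_key(filename, path):
        return json.dumps({"filename": filename, "key": path})

    jsonSourceMetadata_to_id = {
        metadata_key("train.json", ["model", "lr"]): "existing-id-1",
    }

    incoming = metadata_key("train.json", ["model", "lr"])
    print(jsonSourceMetadata_to_id.get(incoming))  # existing-id-1 -> reuse, don't duplicate
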
setta/database/settings_file.py CHANGED
@@ -110,5 +110,5 @@ class MetaSettingsFile:

      def save_settings_project(self, p):
          save_json_source_data(p)
-         filter_data_for_json_export(p, keepCodeInfoThatHaveUITypes=False)
+         filter_data_for_json_export(p)
          save_json_to_file(self.path_meta_settings, p)
setta/lsp/file_watcher.py CHANGED
@@ -53,22 +53,6 @@ class LSPFileWatcher:
          self.observer.stop()
          self.observer.join()

-     def is_watching(self, path: str) -> bool:
-         """
-         Check if a path is being watched.
-
-         Args:
-             path: Path to check
-
-         Returns:
-             bool: True if the path is being watched
-         """
-         absolute_path = os.path.abspath(path)
-         return any(
-             absolute_path.startswith(watched_path)
-             for watched_path in self.watched_paths
-         )
-

  class LSPEventHandler(FileSystemEventHandler):
      def __init__(self, callback, loop):