vfbquery 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -102,6 +102,26 @@ def format_for_readme(data):
     except Exception as e:
         return f"Error formatting JSON: {str(e)}"
 
+def sort_rows_in_data(data):
+    """Sort rows in data structures by id to ensure consistent ordering"""
+    if isinstance(data, dict):
+        result = {}
+        for k, v in data.items():
+            if k == 'rows' and isinstance(v, list):
+                # Sort rows by id if they have id field
+                try:
+                    sorted_rows = sorted(v, key=lambda x: x.get('id', '') if isinstance(x, dict) else str(x))
+                    result[k] = sorted_rows
+                except (TypeError, AttributeError):
+                    result[k] = v
+            else:
+                result[k] = sort_rows_in_data(v)
+        return result
+    elif isinstance(data, list):
+        return [sort_rows_in_data(item) for item in data]
+    else:
+        return data
+
 def remove_nulls(data):
     if isinstance(data, dict):
         new_dict = {}
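
The heart of the new sort_rows_in_data helper is its sort key, which orders dict rows by their 'id' field and falls back to the string form of anything else. A minimal standalone sketch of that key (the sample row ids are made up for illustration):

# The sort key used by sort_rows_in_data: dict rows sort by 'id' (empty string
# if missing); non-dict entries fall back to their string form.
rows = [{'id': 'VFB_00101567'}, {'id': 'VFB_00000001'}, 'stray value']
ordered = sorted(rows, key=lambda x: x.get('id', '') if isinstance(x, dict) else str(x))
print([r if isinstance(r, str) else r['id'] for r in ordered])
# ['VFB_00000001', 'VFB_00101567', 'stray value']

This is what makes repeated runs produce 'rows' in a stable order regardless of how the backend happens to return them.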
@@ -124,199 +144,102 @@ def remove_nulls(data):
 def main():
     init(autoreset=True)
 
-    # Import the results from generated files
+    # Import the python code blocks
     try:
-        from test_results import results as json_blocks
-        from test_examples import results as python_blocks
+        from .test_examples_code import results as python_blocks
     except ImportError as e:
         print(f"{Fore.RED}Error importing test files: {e}{Style.RESET_ALL}")
         sys.exit(1)
 
     print(f'Found {len(python_blocks)} Python code blocks')
-    print(f'Found {len(json_blocks)} JSON blocks')
-
-    if len(python_blocks) != len(json_blocks):
-        print(f"{Fore.RED}Error: Number of Python blocks ({len(python_blocks)}) doesn't match JSON blocks ({len(json_blocks)}){Style.RESET_ALL}")
-        sys.exit(1)
 
     failed = False
 
-    for i, (python_code, expected_json) in enumerate(zip(python_blocks, json_blocks)):
-        python_code = stringify_numeric_keys(python_code)
-        expected_json = stringify_numeric_keys(expected_json)
+    for i, python_code in enumerate(python_blocks):
 
-        # Apply remove_nulls to both dictionaries before diffing
-        python_code_filtered = remove_nulls(python_code)
-        expected_json_filtered = remove_nulls(expected_json)
-        diff = DeepDiff(expected_json_filtered, python_code_filtered,
-                        ignore_order=True,
-                        ignore_numeric_type_changes=True,
-                        report_repetition=True,
-                        verbose_level=2)
+        print(f'\n{Fore.CYAN}Example #{i+1}:{Style.RESET_ALL}')
+        print(f' README query: {python_code}')
 
-        if diff:
-            failed = True
-            print(f'\n{Fore.RED}Error in example #{i+1}:{Style.RESET_ALL}')
+        # Execute the python code and get result
+        try:
+            # Evaluate the code to get the result
+            result = eval(python_code)
 
-            # Print a cleaner diff output with context
-            if 'dictionary_item_added' in diff:
-                print(f'\n{Fore.GREEN}Added keys:{Style.RESET_ALL}')
-                for item in diff['dictionary_item_added']:
-                    key = item.replace('root', '')
-                    path_parts = key.strip('[]').split('][')
-
-                    # Get the actual value that was added
-                    current = python_code
-                    for part in path_parts:
-                        if part.startswith("'") and part.endswith("'"):
-                            part = part.strip("'")
-                        elif part.startswith('"') and part.endswith('"'):
-                            part = part.strip('"')
-                        try:
-                            if part.startswith('number:'):
-                                part = float(part.split(':')[1])
-                            current = current[part]
-                        except (KeyError, TypeError):
-                            current = '[Unable to access path]'
-                            break
-
-                    # Show the key and a brief representation of its value
-                    print(f' {Fore.GREEN}+{key}: {get_brief_dict_representation(current)}{Style.RESET_ALL}')
-
-            if 'dictionary_item_removed' in diff:
-                print(f'\n{Fore.RED}Removed keys:{Style.RESET_ALL}')
-                for item in diff['dictionary_item_removed']:
-                    key = item.replace('root', '')
-                    path_parts = key.strip('[]').split('][')
-
-                    # Get the actual value that was removed
-                    current = expected_json
-                    for part in path_parts:
-                        if part.startswith("'") and part.endswith("'"):
-                            part = part.strip("'")
-                        elif part.startswith('"') and part.endswith('"'):
-                            part = part.strip('"')
-                        try:
-                            if part.startswith('number:'):
-                                part = float(part.split(':')[1])
-                            current = current[part]
-                        except (KeyError, TypeError):
-                            current = '[Unable to access path]'
-                            break
-
-                    print(f' {Fore.RED}-{key}: {get_brief_dict_representation(current)}{Style.RESET_ALL}')
-
-            if 'values_changed' in diff:
-                print(f'\n{Fore.YELLOW}Changed values:{Style.RESET_ALL}')
-                for key, value in diff['values_changed'].items():
-                    path = key.replace('root', '')
-                    old_val = value.get('old_value', 'N/A')
-                    new_val = value.get('new_value', 'N/A')
-                    print(f' {Fore.YELLOW}{path}:{Style.RESET_ALL}')
-                    print(f' {Fore.RED}- {old_val}{Style.RESET_ALL}')
-                    print(f' {Fore.GREEN}+ {new_val}{Style.RESET_ALL}')
+            # Validate structure based on function
+            if 'get_term_info' in python_code:
+                # Should be a dict with specific keys
+                if not isinstance(result, dict):
+                    print(f'{Fore.RED}get_term_info should return a dict{Style.RESET_ALL}')
+                    failed = True
+                    continue
+
+                expected_keys = ['IsIndividual', 'IsClass', 'Images', 'Examples', 'Domains', 'Licenses', 'Publications', 'Synonyms']
+                for key in expected_keys:
+                    if key not in result:
+                        print(f'{Fore.RED}Missing key: {key}{Style.RESET_ALL}')
+                        failed = True
+                    elif key in ['IsIndividual', 'IsClass'] and not isinstance(result[key], bool):
+                        print(f'{Fore.RED}Key {key} is not bool: {type(result[key])}{Style.RESET_ALL}')
+                        failed = True
+
+                if 'SuperTypes' in result and not isinstance(result['SuperTypes'], list):
+                    print(f'{Fore.RED}SuperTypes is not list{Style.RESET_ALL}')
+                    failed = True
+
+                if 'Queries' in result and not isinstance(result['Queries'], list):
+                    print(f'{Fore.RED}Queries is not list{Style.RESET_ALL}')
+                    failed = True
 
-            if 'iterable_item_added' in diff:
-                print(f'\n{Fore.GREEN}Added list items:{Style.RESET_ALL}')
-                for key, value in diff['iterable_item_added'].items():
-                    path = key.replace('root', '')
-                    # Show the actual content for complex items
-                    if isinstance(value, (dict, list)):
-                        print(f' {Fore.GREEN}+{path}:{Style.RESET_ALL}')
-                        if isinstance(value, dict):
-                            for k, v in value.items():
-                                brief_v = get_brief_dict_representation(v)
-                                print(f' {Fore.GREEN}+{k}: {brief_v}{Style.RESET_ALL}')
-                        else:
-                            # Fixed the problematic line by breaking it into simpler parts
-                            items = value[:3]
-                            items_str = ", ".join([get_brief_dict_representation(item) for item in items])
-                            ellipsis = "..." if len(value) > 3 else ""
-                            print(f' {Fore.GREEN}[{items_str}{ellipsis}]{Style.RESET_ALL}')
-                    else:
-                        print(f' {Fore.GREEN}+{path}: {value}{Style.RESET_ALL}')
+            elif 'get_instances' in python_code:
+                # Should be a list of dicts or a dict with rows
+                if isinstance(result, list):
+                    if len(result) > 0 and not isinstance(result[0], dict):
+                        print(f'{Fore.RED}get_instances items should be dicts{Style.RESET_ALL}')
+                        failed = True
+                elif isinstance(result, dict):
+                    # Check if it has 'rows' key
+                    if 'rows' not in result:
+                        print(f'{Fore.RED}get_instances dict should have "rows" key{Style.RESET_ALL}')
+                        failed = True
+                    elif not isinstance(result['rows'], list):
+                        print(f'{Fore.RED}get_instances "rows" should be list{Style.RESET_ALL}')
+                        failed = True
+                else:
+                    print(f'{Fore.RED}get_instances should return a list or dict, got {type(result)}{Style.RESET_ALL}')
+                    failed = True
+                    continue
 
-            if 'iterable_item_removed' in diff:
-                print(f'\n{Fore.RED}Removed list items:{Style.RESET_ALL}')
-                for key, value in diff['iterable_item_removed'].items():
-                    path = key.replace('root', '')
-                    # Show the actual content for complex items
-                    if isinstance(value, (dict, list)):
-                        print(f' {Fore.RED}-{path}:{Style.RESET_ALL}')
-                        if isinstance(value, dict):
-                            for k, v in value.items():
-                                brief_v = get_brief_dict_representation(v)
-                                print(f' {Fore.RED}-{k}: {brief_v}{Style.RESET_ALL}')
-                        else:
-                            # Fixed the problematic line by breaking it into simpler parts
-                            items = value[:3]
-                            items_str = ", ".join([get_brief_dict_representation(item) for item in items])
-                            ellipsis = "..." if len(value) > 3 else ""
-                            print(f' {Fore.RED}[{items_str}{ellipsis}]{Style.RESET_ALL}')
-                    else:
-                        print(f' {Fore.RED}-{path}: {value}{Style.RESET_ALL}')
-
-            # For comparing complex row objects that have significant differences
-            if 'iterable_item_added' in diff and 'iterable_item_removed' in diff:
-                added_rows = [(k, v) for k, v in diff['iterable_item_added'].items() if 'rows' in k]
-                removed_rows = [(k, v) for k, v in diff['iterable_item_removed'].items() if 'rows' in k]
+            elif 'get_templates' in python_code:
+                # Should be a dict with rows
+                if not isinstance(result, dict):
+                    print(f'{Fore.RED}get_templates should return a dict{Style.RESET_ALL}')
+                    failed = True
+                    continue
 
-                if added_rows and removed_rows:
-                    print(f'\n{Fore.YELLOW}Row differences (sample):{Style.RESET_ALL}')
-                    # Compare up to 2 rows to show examples of the differences
-                    for i in range(min(2, len(added_rows), len(removed_rows))):
-                        added_key, added_val = added_rows[i]
-                        removed_key, removed_val = removed_rows[i]
-
-                        if isinstance(added_val, dict) and isinstance(removed_val, dict):
-                            # Compare the two row objects and show key differences
-                            row_diff = compare_objects(removed_val, added_val, f'Row {i}')
-                            if row_diff:
-                                print(f' {Fore.YELLOW}Row {i} differences:{Style.RESET_ALL}')
-                                for line in row_diff:
-                                    print(f' {line}')
+                if 'rows' not in result:
+                    print(f'{Fore.RED}get_templates dict should have "rows" key{Style.RESET_ALL}')
+                    failed = True
+                elif not isinstance(result['rows'], list):
+                    print(f'{Fore.RED}get_templates "rows" should be list{Style.RESET_ALL}')
+                    failed = True
 
-            if 'type_changes' in diff:
-                print(f'\n{Fore.YELLOW}Type changes:{Style.RESET_ALL}')
-                for key, value in diff['type_changes'].items():
-                    path = key.replace('root', '')
-                    old_type = type(value.get('old_value', 'N/A')).__name__
-                    new_type = type(value.get('new_value', 'N/A')).__name__
-                    old_val = value.get('old_value', 'N/A')
-                    new_val = value.get('new_value', 'N/A')
-                    print(f' {Fore.YELLOW}{path}:{Style.RESET_ALL}')
-                    print(f' {Fore.RED}- {old_type}: {str(old_val)[:100] + "..." if len(str(old_val)) > 100 else old_val}{Style.RESET_ALL}')
-                    print(f' {Fore.GREEN}+ {new_type}: {str(new_val)[:100] + "..." if len(str(new_val)) > 100 else new_val}{Style.RESET_ALL}')
-
-            # Print a summary of the differences
-            print(f'\n{Fore.YELLOW}Summary of differences:{Style.RESET_ALL}')
-            add_keys = len(diff.get('dictionary_item_added', []))
-            add_items = len(diff.get('iterable_item_added', {}))
-            rem_keys = len(diff.get('dictionary_item_removed', []))
-            rem_items = len(diff.get('iterable_item_removed', {}))
-            changed_vals = len(diff.get('values_changed', {}))
-            type_changes = len(diff.get('type_changes', {}))
+            else:
+                print(f'{Fore.RED}Unknown function in code{Style.RESET_ALL}')
+                failed = True
+                continue
 
-            print(f' {Fore.GREEN}Added:{Style.RESET_ALL} {add_keys} keys, {add_items} list items')
-            print(f' {Fore.RED}Removed:{Style.RESET_ALL} {rem_keys} keys, {rem_items} list items')
-            print(f' {Fore.YELLOW}Changed:{Style.RESET_ALL} {changed_vals} values, {type_changes} type changes')
-
-            # After printing the summary, add the formatted output for README
-            print(f'\n{Fore.CYAN}Suggested README update for example #{i+1}:{Style.RESET_ALL}')
+            if not failed:
+                print(f'{Fore.GREEN}Structure validation passed{Style.RESET_ALL}')
 
-            # Mark a clear copy-paste section
-            print(f'\n{Fore.CYAN}--- COPY FROM HERE ---{Style.RESET_ALL}')
-            print(format_for_readme(python_code).replace('\033[36m', '').replace('\033[0m', ''))
-            print(f'{Fore.CYAN}--- END COPY ---{Style.RESET_ALL}')
-
-        else:
-            print(f'\n{Fore.GREEN}Example #{i+1}: ✓ PASS{Style.RESET_ALL}')
-
+        except Exception as e:
+            print(f'{Fore.RED}Error executing code: {e}{Style.RESET_ALL}')
+            failed = True
+
     if failed:
-        print(f'\n{Fore.RED}Some examples failed. Please check the differences above.{Style.RESET_ALL}')
+        print(f'\n{Fore.RED}Some tests failed{Style.RESET_ALL}')
         sys.exit(1)
     else:
-        print(f'\n{Fore.GREEN}All examples passed!{Style.RESET_ALL}')
+        print(f'\n{Fore.GREEN}All tests passed{Style.RESET_ALL}')
 
 if __name__ == "__main__":
     main()
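
The rewritten main() above no longer diffs results against stored JSON blocks; it evaluates each README code block and only checks the shape of what comes back. A minimal self-contained sketch of that eval-and-validate pattern, using a stub in place of the real query function (the stub and the term id are illustrative only):

def get_term_info(term_id):
    """Stub standing in for vfbquery.get_term_info, just for this sketch."""
    return {'IsIndividual': False, 'IsClass': True, 'Id': term_id}

python_blocks = ["get_term_info('FBbt_00003748')"]  # blocks as they would appear in the README

failed = False
for i, code in enumerate(python_blocks):
    result = eval(code)  # run the README snippet, then check its shape rather than exact values
    if 'get_term_info' in code:
        if not isinstance(result, dict):
            print(f'Example #{i + 1}: expected a dict, got {type(result)}')
            failed = True
        elif not all(isinstance(result.get(k), bool) for k in ('IsIndividual', 'IsClass')):
            print(f'Example #{i + 1}: IsIndividual/IsClass should be booleans')
            failed = True

print('Some examples failed' if failed else 'All examples passed')

The trade-off is that exact values can drift with the live data without breaking the check, while genuinely malformed results (wrong type, missing keys) still fail.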
@@ -15,9 +15,9 @@ class NeuronsPartHereTest(unittest.TestCase):
         """Set up test fixtures"""
         self.medulla_id = 'FBbt_00003748'
         # Expected count based on VFB data (as of test creation)
-        # Allowing tolerance for data updates
-        self.expected_count = 471
-        self.count_tolerance = 5 # Allow ±5 for data updates
+        # Data can grow over time, so we test for minimum expected count
+        self.expected_count = 470 # Minimum expected count (actual was 472)
+        self.count_tolerance = 5 # Allow some tolerance for variations
 
     def test_neurons_part_here_returns_results(self):
         """Test that NeuronsPartHere query returns results for medulla"""
@@ -49,23 +49,22 @@ class NeuronsPartHereTest(unittest.TestCase):
         )
 
         actual_count = len(results_df)
-        count_diff = abs(actual_count - self.expected_count)
+        count_diff = actual_count - self.expected_count
 
-        print(f"Expected: {self.expected_count} results")
+        print(f"Expected: at least {self.expected_count} results")
         print(f"Actual: {actual_count} results")
-        print(f"Difference: {count_diff}")
 
-        # Allow some tolerance for data updates
-        self.assertLessEqual(
-            count_diff,
-            self.count_tolerance,
-            f"Result count {actual_count} differs from expected {self.expected_count} by more than {self.count_tolerance}"
+        # Data can grow over time, so we require at least the expected minimum
+        self.assertGreaterEqual(
+            actual_count,
+            self.expected_count,
+            f"Result count {actual_count} is less than expected minimum {self.expected_count}"
         )
 
         if count_diff > 0:
-            print(f" Count differs by {count_diff} (within tolerance of {self.count_tolerance})")
+            print(f" Count increased by {count_diff} (data growth)")
         else:
-            print(f"✓ Exact count match: {actual_count}")
+            print(f"✓ Minimum count met: {actual_count}")
 
     def test_neurons_part_here_result_structure(self):
         """Test that results have the expected structure with required columns"""
@@ -53,13 +53,9 @@ class QueryPerformanceTest(unittest.TestCase):
 
     @classmethod
     def setUpClass(cls):
-        """Enable caching for performance tests"""
-        # Import caching module
-        from vfbquery import cache_enhancements
-
-        # Enable caching to speed up repeated queries
-        cache_enhancements.enable_vfbquery_caching()
-        print("\n🔥 Caching enabled for performance tests")
+        """Set up for performance tests"""
+        # SOLR caching is enabled by default
+        print("\n🔥 SOLR caching enabled for performance tests")
 
     def setUp(self):
         """Set up test data"""
vfbquery/__init__.py CHANGED
@@ -1,55 +1,67 @@
 from .vfb_queries import *
 from .solr_result_cache import get_solr_cache
 
-# Caching enhancements (optional import - don't break if dependencies missing)
+# SOLR-based caching (simplified single-layer approach)
 try:
-    from .cache_enhancements import (
-        enable_vfbquery_caching,
-        disable_vfbquery_caching,
-        clear_vfbquery_cache,
-        get_vfbquery_cache_stats,
-        set_cache_ttl,
-        set_cache_memory_limit,
-        set_cache_max_items,
-        enable_disk_cache,
-        disable_disk_cache,
-        get_cache_config,
-        CacheConfig
-    )
     from .cached_functions import (
         get_term_info_cached,
-        get_instances_cached,
-        patch_vfbquery_with_caching,
-        unpatch_vfbquery_caching
+        get_instances_cached,
+        get_templates_cached,
+        get_related_anatomy_cached,
+        get_similar_neurons_cached,
+        get_individual_neuron_inputs_cached,
+        get_expression_overlaps_here_cached,
+        get_neurons_with_part_in_cached,
+        get_neurons_with_synapses_in_cached,
+        get_neurons_with_presynaptic_terminals_in_cached,
+        get_neurons_with_postsynaptic_terminals_in_cached,
+        get_components_of_cached,
+        get_parts_of_cached,
+        get_subclasses_of_cached,
+        get_neuron_classes_fasciculating_here_cached,
+        get_tracts_nerves_innervating_here_cached,
+        get_lineage_clones_in_cached,
+        get_neuron_neuron_connectivity_cached,
+        get_neuron_region_connectivity_cached,
+        get_images_neurons_cached,
+        get_images_that_develop_from_cached,
+        get_expression_pattern_fragments_cached,
+        get_anatomy_scrnaseq_cached,
+        get_cluster_expression_cached,
+        get_expression_cluster_cached,
+        get_scrnaseq_dataset_data_cached,
+        get_similar_morphology_cached,
+        get_similar_morphology_part_of_cached,
+        get_similar_morphology_part_of_exp_cached,
+        get_similar_morphology_nb_cached,
+        get_similar_morphology_nb_exp_cached,
+        get_similar_morphology_userdata_cached,
+        get_painted_domains_cached,
+        get_dataset_images_cached,
+        get_all_aligned_images_cached,
+        get_aligned_datasets_cached,
+        get_all_datasets_cached,
+        get_terms_for_pub_cached,
+        get_transgene_expression_here_cached,
     )
     __caching_available__ = True
-
-    # Enable caching by default with 3-month TTL and 2GB memory cache
+
+    # Enable SOLR caching by default with 3-month TTL
     import os
 
     # Check if caching should be disabled via environment variable
     cache_disabled = os.getenv('VFBQUERY_CACHE_ENABLED', 'true').lower() in ('false', '0', 'no', 'off')
 
     if not cache_disabled:
-        # Enable caching with VFB_connect-like defaults
-        enable_vfbquery_caching(
-            cache_ttl_hours=2160, # 3 months (90 days)
-            memory_cache_size_mb=2048, # 2GB memory cache
-            max_items=10000, # Max 10k items as safeguard
-            disk_cache_enabled=True # Persistent across sessions
-        )
-
-        # Automatically patch existing functions for transparent caching
+        # Import and patch functions with caching
+        from .cached_functions import patch_vfbquery_with_caching
         patch_vfbquery_with_caching()
-
-        print("VFBquery: Caching enabled by default (3-month TTL, 2GB memory)")
+        print("VFBquery: SOLR caching enabled by default (3-month TTL)")
         print(" Disable with: export VFBQUERY_CACHE_ENABLED=false")
-
+
 except ImportError:
     __caching_available__ = False
-    print("VFBquery: Caching not available (dependencies missing)")
-
-# Convenience function for clearing SOLR cache entries
+    print("VFBquery: Caching not available (dependencies missing)")# Convenience function for clearing SOLR cache entries
 def clear_solr_cache(query_type: str, term_id: str) -> bool:
     """
     Clear a specific SOLR cache entry to force refresh
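
From the user's side, the net effect of the block above is that caching is switched on at import time unless the environment says otherwise, and individual SOLR entries can be refreshed with clear_solr_cache. A short usage sketch (the 'term_info' query_type value is an assumption; check clear_solr_cache's docstring for the names it actually accepts):

import os

# Opting out entirely: must be set before vfbquery is imported, because
# __init__.py reads VFBQUERY_CACHE_ENABLED at import time.
os.environ['VFBQUERY_CACHE_ENABLED'] = 'false'

import vfbquery

# Alternatively, with caching left on, force one stale entry to refresh.
# 'term_info' is an assumed query_type; see the clear_solr_cache docstring.
if getattr(vfbquery, '__solr_caching_available__', False):
    vfbquery.clear_solr_cache('term_info', 'FBbt_00003748')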
@@ -83,4 +95,4 @@ except ImportError:
     __solr_caching_available__ = False
 
 # Version information
-__version__ = "0.5.0"
+__version__ = "0.5.1"