praisonaiagents 0.0.22__py3-none-any.whl → 0.0.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- praisonaiagents/agent/agent.py +22 -33
- praisonaiagents/agents/agents.py +18 -4
- praisonaiagents/tools/__init__.py +165 -2
- praisonaiagents/tools/arxiv_tools.py +292 -0
- praisonaiagents/tools/calculator_tools.py +278 -0
- praisonaiagents/tools/csv_tools.py +266 -0
- praisonaiagents/tools/duckdb_tools.py +268 -0
- praisonaiagents/tools/duckduckgo_tools.py +52 -0
- praisonaiagents/tools/excel_tools.py +310 -0
- praisonaiagents/tools/file_tools.py +274 -0
- praisonaiagents/tools/json_tools.py +515 -0
- praisonaiagents/tools/newspaper_tools.py +354 -0
- praisonaiagents/tools/pandas_tools.py +326 -0
- praisonaiagents/tools/python_tools.py +423 -0
- praisonaiagents/tools/shell_tools.py +278 -0
- praisonaiagents/tools/spider_tools.py +431 -0
- praisonaiagents/tools/test.py +56 -0
- praisonaiagents/tools/tools.py +5 -36
- praisonaiagents/tools/wikipedia_tools.py +272 -0
- praisonaiagents/tools/xml_tools.py +498 -0
- praisonaiagents/tools/yaml_tools.py +417 -0
- praisonaiagents/tools/yfinance_tools.py +213 -0
- {praisonaiagents-0.0.22.dist-info → praisonaiagents-0.0.24.dist-info}/METADATA +1 -1
- praisonaiagents-0.0.24.dist-info/RECORD +42 -0
- praisonaiagents-0.0.22.dist-info/RECORD +0 -24
- {praisonaiagents-0.0.22.dist-info → praisonaiagents-0.0.24.dist-info}/WHEEL +0 -0
- {praisonaiagents-0.0.22.dist-info → praisonaiagents-0.0.24.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,515 @@
|
|
1
|
+
"""Tools for working with JSON files.
|
2
|
+
|
3
|
+
Usage:
|
4
|
+
from praisonaiagents.tools import json_tools
|
5
|
+
data = json_tools.read_json("data.json")
|
6
|
+
|
7
|
+
or
|
8
|
+
from praisonaiagents.tools import read_json, write_json, merge_json
|
9
|
+
data = read_json("data.json")
|
10
|
+
"""
|
11
|
+
|
12
|
+
import logging
|
13
|
+
from typing import List, Dict, Union, Optional, Any, Tuple
|
14
|
+
from importlib import util
|
15
|
+
import json
|
16
|
+
from datetime import datetime
|
17
|
+
|
18
|
+
class JSONTools:
    """Tools for reading, writing, merging, validating, analyzing,
    and transforming JSON files and in-memory JSON data.

    All public methods report failures by logging and returning a benign
    value (an ``{"error": ...}`` dict, ``False``, or ``{}``) rather than
    raising, so they are safe to call from agent tool pipelines.
    """

    def __init__(self):
        """Initialize JSONTools (stateless; nothing to set up)."""
        pass

    def read_json(
        self,
        filepath: str,
        encoding: str = 'utf-8',
        validate_schema: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Read a JSON file with optional schema validation.

        Args:
            filepath: Path to JSON file
            encoding: File encoding
            validate_schema: Optional JSON schema for validation

        Returns:
            Dict with JSON data, or {"error": message} on failure
        """
        try:
            # Read JSON file
            with open(filepath, 'r', encoding=encoding) as f:
                data = json.load(f)

            # Validate against schema if provided. jsonschema is an
            # optional dependency, so probe for it before importing.
            if validate_schema:
                if util.find_spec('jsonschema') is None:
                    error_msg = "jsonschema package is not available. Please install it using: pip install jsonschema"
                    logging.error(error_msg)
                    return {"error": error_msg}
                import jsonschema
                try:
                    jsonschema.validate(instance=data, schema=validate_schema)
                except jsonschema.exceptions.ValidationError as e:
                    error_msg = f"JSON validation failed: {str(e)}"
                    logging.error(error_msg)
                    return {"error": error_msg}

            return data

        except Exception as e:
            error_msg = f"Error reading JSON file {filepath}: {str(e)}"
            logging.error(error_msg)
            return {"error": error_msg}

    def write_json(
        self,
        data: Union[Dict[str, Any], List[Any]],
        filepath: str,
        encoding: str = 'utf-8',
        indent: int = 2,
        sort_keys: bool = False,
        ensure_ascii: bool = False
    ) -> bool:
        """Write data to a JSON file.

        Args:
            data: Data to write
            filepath: Output file path
            encoding: File encoding
            indent: Number of spaces for indentation
            sort_keys: Sort dictionary keys
            ensure_ascii: Escape non-ASCII characters

        Returns:
            True if successful, False otherwise
        """
        try:
            with open(filepath, 'w', encoding=encoding) as f:
                json.dump(
                    data,
                    f,
                    indent=indent,
                    sort_keys=sort_keys,
                    ensure_ascii=ensure_ascii
                )
            return True
        except Exception as e:
            error_msg = f"Error writing JSON file {filepath}: {str(e)}"
            logging.error(error_msg)
            return False

    def merge_json(
        self,
        files: List[str],
        output_file: str,
        merge_arrays: bool = True,
        overwrite_duplicates: bool = True
    ) -> bool:
        """Merge multiple JSON files.

        Args:
            files: List of JSON files to merge
            output_file: Output file path
            merge_arrays: Merge arrays instead of overwriting
            overwrite_duplicates: Overwrite duplicate keys

        Returns:
            True if successful, False otherwise
        """
        try:
            if len(files) < 2:
                raise ValueError("At least two files are required for merging")

            # Read first file as the base of the merge
            result = self.read_json(files[0])

            # Fold each remaining file into the accumulated result
            for file in files[1:]:
                data = self.read_json(file)
                result = self._deep_merge(
                    result,
                    data,
                    merge_arrays=merge_arrays,
                    overwrite_duplicates=overwrite_duplicates
                )

            # Write merged result
            return self.write_json(result, output_file)
        except Exception as e:
            error_msg = f"Error merging JSON files: {str(e)}"
            logging.error(error_msg)
            return False

    def _deep_merge(
        self,
        dict1: Dict[str, Any],
        dict2: Dict[str, Any],
        merge_arrays: bool = True,
        overwrite_duplicates: bool = True
    ) -> Dict[str, Any]:
        """Deep merge two dictionaries without mutating either input.

        Args:
            dict1: First dictionary
            dict2: Second dictionary
            merge_arrays: Merge arrays instead of overwriting
            overwrite_duplicates: Overwrite duplicate keys

        Returns:
            Merged dictionary
        """
        result = dict1.copy()

        for key, value in dict2.items():
            if key in result:
                if isinstance(result[key], dict) and isinstance(value, dict):
                    result[key] = self._deep_merge(
                        result[key],
                        value,
                        merge_arrays=merge_arrays,
                        overwrite_duplicates=overwrite_duplicates
                    )
                elif isinstance(result[key], list) and isinstance(value, list):
                    if merge_arrays:
                        # Build a NEW list rather than extending in place:
                        # result is a shallow copy, so .extend() would
                        # mutate the list inside the caller's dict1.
                        result[key] = result[key] + value
                    elif overwrite_duplicates:
                        result[key] = value
                elif overwrite_duplicates:
                    result[key] = value
            else:
                result[key] = value

        return result

    def validate_json(
        self,
        data: Union[Dict[str, Any], str],
        schema: Dict[str, Any]
    ) -> Tuple[bool, Optional[str]]:
        """Validate JSON data against a schema.

        Args:
            data: JSON data or filepath
            schema: JSON schema for validation

        Returns:
            Tuple of (is_valid, error_message)
        """
        try:
            if util.find_spec('jsonschema') is None:
                error_msg = "jsonschema package is not available. Please install it using: pip install jsonschema"
                logging.error(error_msg)
                return False, error_msg
            import jsonschema

            # Load data if filepath provided
            if isinstance(data, str):
                with open(data, 'r') as f:
                    data = json.load(f)

            # Catch the validation error in an inner try so that the
            # except clause only references jsonschema after it has
            # definitely been imported (avoids a NameError if anything
            # earlier in the outer try raises).
            try:
                jsonschema.validate(instance=data, schema=schema)
            except jsonschema.exceptions.ValidationError as e:
                error_msg = f"JSON validation failed: {str(e)}"
                logging.error(error_msg)
                return False, error_msg

            return True, None

        except Exception as e:
            error_msg = f"Error validating JSON: {str(e)}"
            logging.error(error_msg)
            return False, error_msg

    def analyze_json(
        self,
        data: Union[Dict[str, Any], str],
        max_depth: int = 10
    ) -> Dict[str, Any]:
        """Analyze JSON data structure.

        Args:
            data: JSON data or filepath
            max_depth: Maximum depth to analyze

        Returns:
            Dict with analysis results ({} on error, for backward
            compatibility with existing callers)
        """
        try:
            # Load data if filepath provided
            if isinstance(data, str):
                data = self.read_json(data)

            def analyze_value(value: Any, depth: int = 0) -> Dict[str, Any]:
                if depth >= max_depth:
                    return {'type': 'max_depth_reached'}

                result = {'type': type(value).__name__}

                if isinstance(value, dict):
                    result['size'] = len(value)
                    result['keys'] = list(value.keys())
                    if depth < max_depth - 1:
                        result['children'] = {
                            k: analyze_value(v, depth + 1)
                            for k, v in value.items()
                        }

                elif isinstance(value, list):
                    result['length'] = len(value)
                    if value:
                        result['element_types'] = list(set(
                            type(x).__name__ for x in value
                        ))
                        if depth < max_depth - 1:
                            # Only sample the first 5 elements to keep the
                            # report small for large arrays.
                            result['sample_elements'] = [
                                analyze_value(x, depth + 1)
                                for x in value[:5]
                            ]

                elif isinstance(value, (int, float)):
                    result.update({
                        'value': value,
                        'is_integer': isinstance(value, int)
                    })

                elif isinstance(value, str):
                    result.update({
                        'length': len(value),
                        'sample': value[:100] if len(value) > 100 else value
                    })

                return result

            return {
                'analysis_time': datetime.now().isoformat(),
                'structure': analyze_value(data)
            }
        except Exception as e:
            error_msg = f"Error analyzing JSON: {str(e)}"
            logging.error(error_msg)
            return {}

    def transform_json(
        self,
        data: Union[Dict[str, Any], str],
        transformations: List[Dict[str, Any]]
    ) -> Dict[str, Any]:
        """Transform JSON data using a list of operations.

        Supported operations: 'set', 'delete', 'rename', 'move'. Each
        transformation is a dict with 'operation', a dotted 'path', and
        (where applicable) a 'value'.

        Args:
            data: JSON data or filepath
            transformations: List of transformation operations

        Returns:
            Transformed JSON data (the input is never modified)
        """
        try:
            # Load data if filepath provided
            if isinstance(data, str):
                data = self.read_json(data)

            # Deep-copy so nested set/delete/rename/move operations
            # cannot mutate the caller's original structure (a shallow
            # .copy() shares the nested dicts the helpers write into).
            from copy import deepcopy
            result = deepcopy(data)

            for transform in transformations:
                op = transform.get('operation')
                path = transform.get('path', '').split('.')
                value = transform.get('value')

                if op == 'set':
                    self._set_value(result, path, value)
                elif op == 'delete':
                    self._delete_value(result, path)
                elif op == 'rename':
                    old_path = path
                    new_path = value.split('.')
                    self._rename_key(result, old_path, new_path)
                elif op == 'move':
                    old_path = path
                    new_path = value.split('.')
                    self._move_value(result, old_path, new_path)

            return result
        except Exception as e:
            error_msg = f"Error transforming JSON: {str(e)}"
            logging.error(error_msg)
            return data

    def _set_value(self, data: Dict[str, Any], path: List[str], value: Any):
        """Set a value at the specified path, creating intermediate dicts."""
        current = data
        for key in path[:-1]:
            if key not in current:
                current[key] = {}
            current = current[key]
        current[path[-1]] = value

    def _delete_value(self, data: Dict[str, Any], path: List[str]):
        """Delete a value at the specified path; silently no-op if absent."""
        current = data
        for key in path[:-1]:
            if key not in current:
                return
            current = current[key]
        if path[-1] in current:
            del current[path[-1]]

    def _rename_key(
        self,
        data: Dict[str, Any],
        old_path: List[str],
        new_path: List[str]
    ):
        """Rename a key at the specified path.

        NOTE: a stored value of None is indistinguishable from "missing"
        here (see _get_value), so None values are not renamed.
        """
        value = self._get_value(data, old_path)
        if value is not None:
            self._delete_value(data, old_path)
            self._set_value(data, new_path, value)

    def _move_value(
        self,
        data: Dict[str, Any],
        old_path: List[str],
        new_path: List[str]
    ):
        """Move a value from one path to another (alias for rename)."""
        self._rename_key(data, old_path, new_path)

    def _get_value(
        self,
        data: Dict[str, Any],
        path: List[str]
    ) -> Optional[Any]:
        """Get a value at the specified path, or None if the path is absent."""
        current = data
        for key in path:
            if key not in current:
                return None
            current = current[key]
        return current
|
392
|
+
|
393
|
+
# Create instance for direct function access.
# A single module-level JSONTools instance backs the functional API below,
# so callers can `from praisonaiagents.tools import read_json` and use the
# bound methods without instantiating the class themselves.
_json_tools = JSONTools()
read_json = _json_tools.read_json
write_json = _json_tools.write_json
merge_json = _json_tools.merge_json
validate_json = _json_tools.validate_json
analyze_json = _json_tools.analyze_json
transform_json = _json_tools.transform_json
|
401
|
+
|
402
|
+
if __name__ == "__main__":
    # Example usage / smoke-test demonstration of the module-level API.
    # NOTE(review): this writes test1.json, test2.json and merged.json into
    # the current working directory and does not clean them up afterwards.
    print("\n==================================================")
    print("JSONTools Demonstration")
    print("==================================================\n")

    # Sample data
    data1 = {
        'id': 1,
        'name': 'Alice',
        'scores': [95, 87, 92],
        'details': {
            'age': 25,
            'city': 'New York'
        }
    }

    data2 = {
        'id': 2,
        'name': 'Bob',
        'scores': [88, 90, 85],
        'details': {
            'age': 30,
            'country': 'USA'
        }
    }

    # 1. Write JSON files
    print("1. Writing JSON Files")
    print("------------------------------")
    success = write_json(data1, 'test1.json')
    print(f"First file written: {success}")
    success = write_json(data2, 'test2.json')
    print(f"Second file written: {success}")
    print()

    # 2. Read JSON file
    print("2. Reading JSON File")
    print("------------------------------")
    data = read_json('test1.json')
    print("Contents of test1.json:")
    print(json.dumps(data, indent=2))
    print()

    # 3. Merge JSON files
    print("3. Merging JSON Files")
    print("------------------------------")
    success = merge_json(['test1.json', 'test2.json'], 'merged.json')
    print(f"Files merged: {success}")
    if success:
        print("Merged contents:")
        print(json.dumps(read_json('merged.json'), indent=2))
    print()

    # 4. Validate JSON
    # NOTE(review): requires the optional jsonschema package; without it
    # validate_json reports (False, <install hint>) rather than raising.
    print("4. Validating JSON")
    print("------------------------------")
    schema = {
        'type': 'object',
        'properties': {
            'id': {'type': 'integer'},
            'name': {'type': 'string'},
            'scores': {
                'type': 'array',
                'items': {'type': 'number'}
            },
            'details': {
                'type': 'object',
                'properties': {
                    'age': {'type': 'integer'},
                    'city': {'type': 'string'}
                }
            }
        },
        'required': ['id', 'name']
    }

    is_valid, error = validate_json(data1, schema)
    print(f"Validation result: {is_valid}")
    if error:
        print(f"Validation error: {error}")
    print()

    # 5. Analyze JSON
    print("5. Analyzing JSON")
    print("------------------------------")
    analysis = analyze_json(data1)
    print("Analysis results:")
    print(json.dumps(analysis, indent=2))
    print()

    # 6. Transform JSON
    print("6. Transforming JSON")
    print("------------------------------")
    transformations = [
        {
            'operation': 'set',
            'path': 'details.status',
            'value': 'active'
        },
        {
            'operation': 'rename',
            'path': 'details.city',
            'value': 'details.location'
        }
    ]

    transformed = transform_json(data1, transformations)
    print("Transformed data:")
    print(json.dumps(transformed, indent=2))

    print("\n==================================================")
    print("Demonstration Complete")
    print("==================================================")
|