claude-mpm 4.17.0__py3-none-any.whl → 4.17.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of claude-mpm might be problematic. Click here for more details.
- claude_mpm/VERSION +1 -1
- claude_mpm/skills/bundled/api-documentation.md +393 -0
- claude_mpm/skills/bundled/async-testing.md +571 -0
- claude_mpm/skills/bundled/code-review.md +143 -0
- claude_mpm/skills/bundled/database-migration.md +199 -0
- claude_mpm/skills/bundled/docker-containerization.md +194 -0
- claude_mpm/skills/bundled/express-local-dev.md +1429 -0
- claude_mpm/skills/bundled/fastapi-local-dev.md +1199 -0
- claude_mpm/skills/bundled/git-workflow.md +414 -0
- claude_mpm/skills/bundled/imagemagick.md +204 -0
- claude_mpm/skills/bundled/json-data-handling.md +223 -0
- claude_mpm/skills/bundled/nextjs-local-dev.md +807 -0
- claude_mpm/skills/bundled/pdf.md +141 -0
- claude_mpm/skills/bundled/performance-profiling.md +567 -0
- claude_mpm/skills/bundled/refactoring-patterns.md +180 -0
- claude_mpm/skills/bundled/security-scanning.md +327 -0
- claude_mpm/skills/bundled/systematic-debugging.md +473 -0
- claude_mpm/skills/bundled/test-driven-development.md +378 -0
- claude_mpm/skills/bundled/vite-local-dev.md +1061 -0
- claude_mpm/skills/bundled/web-performance-optimization.md +2305 -0
- claude_mpm/skills/bundled/xlsx.md +157 -0
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/METADATA +1 -1
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/RECORD +27 -7
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/WHEEL +0 -0
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.17.0.dist-info → claude_mpm-4.17.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
---
|
|
2
|
+
skill_id: json-data-handling
|
|
3
|
+
skill_version: 0.1.0
|
|
4
|
+
description: Working effectively with JSON data structures.
|
|
5
|
+
updated_at: 2025-10-30T17:00:00Z
|
|
6
|
+
tags: [json, data, parsing, serialization]
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
# JSON Data Handling
|
|
10
|
+
|
|
11
|
+
Working effectively with JSON data structures.
|
|
12
|
+
|
|
13
|
+
## Python
|
|
14
|
+
|
|
15
|
+
### Basic Operations
|
|
16
|
+
|
|
17
|
+
```python
|
|
18
|
+
import json
|
|
19
|
+
|
|
20
|
+
# Parse JSON string
|
|
21
|
+
data = json.loads('{"name": "John", "age": 30}')
|
|
22
|
+
|
|
23
|
+
# Convert to JSON string
|
|
24
|
+
json_str = json.dumps(data)
|
|
25
|
+
|
|
26
|
+
# Pretty print
|
|
27
|
+
json_str = json.dumps(data, indent=2)
|
|
28
|
+
|
|
29
|
+
# Read from file
|
|
30
|
+
with open('data.json', 'r') as f:
|
|
31
|
+
data = json.load(f)
|
|
32
|
+
|
|
33
|
+
# Write to file
|
|
34
|
+
with open('output.json', 'w') as f:
|
|
35
|
+
json.dump(data, f, indent=2)
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
### Advanced
|
|
39
|
+
|
|
40
|
+
```python
|
|
41
|
+
# Custom encoder for datetime
|
|
42
|
+
from datetime import datetime
|
|
43
|
+
|
|
44
|
+
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, obj):
        # Anything that is not a datetime falls through to the base class,
        # which raises TypeError for unsupported types (standard behavior).
        if not isinstance(obj, datetime):
            return super().default(obj)
        return obj.isoformat()
|
|
49
|
+
|
|
50
|
+
json_str = json.dumps({'date': datetime.now()}, cls=DateTimeEncoder)
|
|
51
|
+
|
|
52
|
+
# Skip dict keys that aren't str/int/float/bool/None instead of raising TypeError
|
|
53
|
+
json.dumps(data, skipkeys=True)
|
|
54
|
+
|
|
55
|
+
# Sort keys
|
|
56
|
+
json.dumps(data, sort_keys=True)
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## JavaScript
|
|
60
|
+
|
|
61
|
+
### Basic Operations
|
|
62
|
+
|
|
63
|
+
```javascript
|
|
64
|
+
// Parse JSON string
const data = JSON.parse('{"name": "John", "age": 30}');

// Convert to JSON string
const jsonStr = JSON.stringify(data);

// Pretty print (third argument is the indent width)
const prettyJsonStr = JSON.stringify(data, null, 2);

// Read from file (Node.js)
const fs = require('fs');
const fileData = JSON.parse(fs.readFileSync('data.json', 'utf8'));

// Write to file
fs.writeFileSync('output.json', JSON.stringify(fileData, null, 2));
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Advanced
|
|
82
|
+
|
|
83
|
+
```javascript
|
|
84
|
+
// Custom replacer
|
|
85
|
+
const jsonStr = JSON.stringify(data, (key, value) => {
|
|
86
|
+
if (typeof value === 'bigint') {
|
|
87
|
+
return value.toString();
|
|
88
|
+
}
|
|
89
|
+
return value;
|
|
90
|
+
});
|
|
91
|
+
|
|
92
|
+
// Filter properties
|
|
93
|
+
const filtered = JSON.stringify(data, ['name', 'age']);
|
|
94
|
+
|
|
95
|
+
// Handle circular references
|
|
96
|
+
// Build a JSON.stringify replacer that omits any object already visited,
// so circular structures serialize without throwing.
const getCircularReplacer = () => {
  const seen = new WeakSet();
  return (key, value) => {
    // Primitives and null pass through untouched.
    if (typeof value !== 'object' || value === null) {
      return value;
    }
    if (seen.has(value)) {
      return undefined; // drop the property that would create a cycle
    }
    seen.add(value);
    return value;
  };
};
|
|
106
|
+
JSON.stringify(circularObj, getCircularReplacer());
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
## Common Patterns
|
|
110
|
+
|
|
111
|
+
### Validation
|
|
112
|
+
|
|
113
|
+
```python
|
|
114
|
+
from jsonschema import validate
|
|
115
|
+
|
|
116
|
+
schema = {
|
|
117
|
+
"type": "object",
|
|
118
|
+
"properties": {
|
|
119
|
+
"name": {"type": "string"},
|
|
120
|
+
"age": {"type": "number", "minimum": 0}
|
|
121
|
+
},
|
|
122
|
+
"required": ["name", "age"]
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
# Validate
|
|
126
|
+
validate(instance=data, schema=schema)
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
### Deep Merge
|
|
130
|
+
|
|
131
|
+
```python
|
|
132
|
+
def deep_merge(dict1, dict2):
    """Recursively merge ``dict2`` into a shallow copy of ``dict1``.

    Values from ``dict2`` win on conflict, except that when both sides
    hold a dict at the same key the two dicts are merged recursively.
    Neither input dict is mutated.
    """
    merged = dict1.copy()
    for key, incoming in dict2.items():
        existing = merged[key] if key in merged else None
        if isinstance(existing, dict) and isinstance(incoming, dict):
            merged[key] = deep_merge(existing, incoming)
        else:
            merged[key] = incoming
    return merged
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
### Nested Access
|
|
143
|
+
|
|
144
|
+
```python
|
|
145
|
+
# Safe nested access
def get_nested(data, *keys, default=None):
    """Walk nested containers by successive keys/indexes.

    Returns ``default`` as soon as any step is missing, out of range,
    or the current value is not subscriptable.
    """
    current = data
    for key in keys:
        try:
            current = current[key]
        except (KeyError, TypeError, IndexError):
            return default
    return current
|
|
153
|
+
|
|
154
|
+
# Usage
|
|
155
|
+
value = get_nested(data, 'user', 'address', 'city', default='Unknown')
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
### Transform Keys
|
|
159
|
+
|
|
160
|
+
```python
|
|
161
|
+
# Convert snake_case to camelCase
def to_camel_case(snake_str):
    """Turn a snake_case identifier into camelCase (first word unchanged)."""
    head, *rest = snake_str.split('_')
    return head + ''.join(part.title() for part in rest)

def transform_keys(obj):
    """Recursively rewrite every dict key from snake_case to camelCase.

    Dicts and lists are rebuilt (not mutated); all other values are
    returned unchanged.
    """
    if isinstance(obj, dict):
        return {to_camel_case(key): transform_keys(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [transform_keys(item) for item in obj]
    return obj
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
## Best Practices
|
|
175
|
+
|
|
176
|
+
### ✅ DO
|
|
177
|
+
|
|
178
|
+
```python
|
|
179
|
+
# Use context managers for files
|
|
180
|
+
with open('data.json', 'r') as f:
|
|
181
|
+
data = json.load(f)
|
|
182
|
+
|
|
183
|
+
# Handle exceptions
|
|
184
|
+
try:
|
|
185
|
+
data = json.loads(json_str)
|
|
186
|
+
except json.JSONDecodeError as e:
|
|
187
|
+
print(f"Invalid JSON: {e}")
|
|
188
|
+
|
|
189
|
+
# Validate structure
|
|
190
|
+
assert 'required_field' in data
|
|
191
|
+
```
|
|
192
|
+
|
|
193
|
+
### ❌ DON'T
|
|
194
|
+
|
|
195
|
+
```python
|
|
196
|
+
# Don't parse untrusted JSON without validation
|
|
197
|
+
data = json.loads(user_input) # Validate first!
|
|
198
|
+
|
|
199
|
+
# Don't load huge files at once
|
|
200
|
+
# Use streaming for large files
|
|
201
|
+
|
|
202
|
+
# Don't use eval() as alternative to json.loads()
|
|
203
|
+
data = eval(json_str) # NEVER DO THIS!
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
## Streaming Large JSON
|
|
207
|
+
|
|
208
|
+
```python
|
|
209
|
+
import ijson
|
|
210
|
+
|
|
211
|
+
# Stream large JSON file
|
|
212
|
+
with open('large_data.json', 'rb') as f:
|
|
213
|
+
objects = ijson.items(f, 'item')
|
|
214
|
+
for obj in objects:
|
|
215
|
+
process(obj)
|
|
216
|
+
```
|
|
217
|
+
|
|
218
|
+
## Remember
|
|
219
|
+
- Always validate JSON structure
|
|
220
|
+
- Handle parse errors gracefully
|
|
221
|
+
- Use schemas for complex structures
|
|
222
|
+
- Stream large JSON files
|
|
223
|
+
- Pretty print for debugging
|