json-repair 0.55.0__py3-none-any.whl → 0.55.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- json_repair/json_parser.py +7 -9
- json_repair/json_repair.py +10 -13
- json_repair/parse_comment.py +1 -2
- json_repair/parse_number.py +5 -6
- json_repair/parse_object.py +43 -8
- json_repair/parse_string.py +23 -25
- json_repair/utils/object_comparer.py +1 -1
- json_repair/utils/string_file_wrapper.py +40 -35
- {json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/METADATA +12 -3
- json_repair-0.55.2.dist-info/RECORD +22 -0
- {json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/WHEEL +1 -1
- json_repair-0.55.0.dist-info/RECORD +0 -22
- {json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/entry_points.txt +0 -0
- {json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/licenses/LICENSE +0 -0
- {json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/top_level.txt +0 -0
json_repair/json_parser.py
CHANGED
```diff
@@ -108,8 +108,7 @@ class JSONParser:
             raise ValueError("Multiple top-level JSON elements found in strict mode.")
         if self.logging:
             return json, self.logger
-
-        return json
+        return json
 
     def parse_json(
         self,
@@ -120,24 +119,23 @@
         if char is None:
             return ""
         # <object> starts with '{'
-
+        if char == "{":
             self.index += 1
             return self.parse_object()
         # <array> starts with '['
-
+        if char == "[":
             self.index += 1
             return self.parse_array()
         # <string> starts with a quote
-
+        if not self.context.empty and (char in STRING_DELIMITERS or char.isalpha()):
             return self.parse_string()
         # <number> starts with [0-9] or minus
-
+        if not self.context.empty and (char.isdigit() or char == "-" or char == "."):
             return self.parse_number()
-
+        if char in ["#", "/"]:
             return self.parse_comment()
         # If everything else fails, we just ignore and move on
-
-        self.index += 1
+        self.index += 1
 
     def get_char_at(self, count: int = 0) -> str | None:
         # Why not use something simpler? Because try/except in python is a faster alternative to an "if" statement that is often True
```
json_repair/json_repair.py
CHANGED
```diff
@@ -25,6 +25,7 @@ All supported use cases are in the unit tests
 import argparse
 import json
 import sys
+from pathlib import Path
 from typing import Any, Literal, TextIO, overload
 
 from .json_parser import JSONParser
@@ -103,7 +104,7 @@ def repair_json(
             return parsed_json, []
         return parsed_json
     # Avoid returning only a pair of quotes if it's an empty string
-
+    if parsed_json == "":
         return ""
     return json.dumps(parsed_json, **json_dumps_args)
 
@@ -170,7 +171,7 @@ def load(
 
 
 def from_file(
-    filename: str,
+    filename: str | Path,
     skip_json_loads: bool = False,
     logging: bool = False,
     chunk_length: int = 0,
@@ -180,7 +181,7 @@ def from_file(
     This function is a wrapper around `load()` so you can pass the filename as string
 
     Args:
-        filename (str): The name of the file containing JSON data to load and repair.
+        filename (str | Path): The name of the file containing JSON data to load and repair.
         skip_json_loads (bool, optional): If True, skip calling the built-in json.loads() function to verify that the json is valid before attempting to repair. Defaults to False.
         logging (bool, optional): If True, return a tuple with the repaired json and a log of all repair actions. Defaults to False.
         chunk_length (int, optional): Size in bytes of the file chunks to read at once. Defaults to 1MB.
@@ -189,8 +190,8 @@ def from_file(
     Returns:
         Union[JSONReturnType, Tuple[JSONReturnType, List[Dict[str, str]]]]: The repaired JSON object or a tuple with the repaired JSON object and repair log.
     """
-    with
-
+    with Path(filename).open() as fd:
+        return load(
             fd=fd,
             skip_json_loads=skip_json_loads,
             logging=logging,
@@ -198,8 +199,6 @@ def from_file(
             strict=strict,
         )
 
-    return jsonobj
-
 
 def cli(inline_args: list[str] | None = None) -> int:
     """
@@ -259,7 +258,7 @@ def cli(inline_args: list[str] | None = None) -> int:
         help="Raise on duplicate keys, missing separators, empty keys/values, and other unrecoverable structures instead of repairing them",
     )
 
-    args = parser.parse_args(
+    args = parser.parse_args(inline_args)
 
     # Inline mode requires a filename, so error out if none was provided.
     if args.inline and not args.filename:  # pragma: no cover
@@ -270,9 +269,7 @@ def cli(inline_args: list[str] | None = None) -> int:
         print("Error: You cannot pass both --inline and --output", file=sys.stderr)
         sys.exit(1)
 
-    ensure_ascii =
-    if args.ensure_ascii:
-        ensure_ascii = True
+    ensure_ascii = args.ensure_ascii
 
     try:
         # Use from_file if a filename is provided; otherwise read from stdin.
@@ -282,11 +279,11 @@ def cli(inline_args: list[str] | None = None) -> int:
             data = sys.stdin.read()
         result = loads(data, strict=args.strict)
         if args.inline or args.output:
-            with
+            with Path(args.output or args.filename).open(mode="w") as fd:
                 json.dump(result, fd, indent=args.indent, ensure_ascii=ensure_ascii)
         else:
             print(json.dumps(result, indent=args.indent, ensure_ascii=ensure_ascii))
-    except
+    except (OSError, TypeError, ValueError) as e:  # pragma: no cover
         print(f"Error: {str(e)}", file=sys.stderr)
         return 1
 
```
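With the signature change above, `from_file()` accepts a `pathlib.Path` as well as a `str`. A minimal usage sketch; the file path is hypothetical, and the top-level import assumes `from_file` is re-exported by the package as its documentation describes:

```python
from pathlib import Path

from json_repair import from_file

# Hypothetical data file; per the new signature a Path works as well as a str.
repaired = from_file(Path("out") / "llm_output.json", skip_json_loads=True)
print(repaired)
```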
json_repair/parse_comment.py
CHANGED
json_repair/parse_number.py
CHANGED
```diff
@@ -20,20 +20,19 @@ def parse_number(self: "JSONParser") -> JSONReturnType:
         number_str += char
         self.index += 1
         char = self.get_char_at()
+    if (self.get_char_at() or "").isalpha():
+        # this was a string instead, sorry
+        self.index -= len(number_str)
+        return self.parse_string()
     if number_str and number_str[-1] in "-eE/,":
         # The number ends with a non valid character for a number/currency, rolling back one
         number_str = number_str[:-1]
         self.index -= 1
-    elif (self.get_char_at() or "").isalpha():
-        # this was a string instead, sorry
-        self.index -= len(number_str)
-        return self.parse_string()
    try:
        if "," in number_str:
            return number_str
        if "." in number_str or "e" in number_str or "E" in number_str:
            return float(number_str)
-
-        return int(number_str)
+        return int(number_str)
    except ValueError:
        return number_str
```
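The hunk above moves the alphabetic lookahead ahead of the trailing-character rollback, so a token that merely looks numeric is handed back to `parse_string()` before any characters are stripped. A standalone paraphrase of that ordering, not the library's code verbatim:

```python
def finish_number(number_str: str, next_char: str) -> tuple[str, str]:
    """Mirror the reordered tail checks of parse_number."""
    if next_char.isalpha():
        # The accumulated token was really the start of a string: back off entirely.
        return "reparse_as_string", number_str
    if number_str and number_str[-1] in "-eE/,":
        # Drop a dangling '-', 'e', 'E', '/' or ',' before converting.
        number_str = number_str[:-1]
    return "number", number_str


print(finish_number("1-800-", "F"))  # ('reparse_as_string', '1-800-')
print(finish_number("12e", " "))     # ('number', '12')
```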
json_repair/parse_object.py
CHANGED
```diff
@@ -42,7 +42,7 @@ def parse_object(self: "JSONParser") -> JSONReturnType:
         # Is this an array?
         # Need to check if the previous parsed value contained in obj is an array and in that case parse and merge the two
         prev_key = list(obj.keys())[-1] if obj else None
-        if prev_key and isinstance(obj[prev_key], list):
+        if prev_key and isinstance(obj[prev_key], list) and not self.strict:
             # If the previous key's value is an array, parse the new array and merge
             self.index += 1
             new_array = self.parse_array()
@@ -50,14 +50,49 @@ def parse_object(self: "JSONParser") -> JSONReturnType:
             # Merge and flatten the arrays
             prev_value = obj[prev_key]
             if isinstance(prev_value, list):
-                prev_value
-
+                list_lengths = [len(item) for item in prev_value if isinstance(item, list)]
+                expected_len = (
+                    list_lengths[0]
+                    if list_lengths and all(length == list_lengths[0] for length in list_lengths)
+                    else None
                 )
-
-
-
-
-
+                if expected_len:
+                    # Matrix-style JSON: list of uniform-length rows.
+                    # Repair a missing inner "[" by regrouping trailing scalar cells into rows.
+                    tail = []
+                    while prev_value and not isinstance(prev_value[-1], list):
+                        tail.append(prev_value.pop())
+                    if tail:
+                        tail.reverse()
+                        if len(tail) % expected_len == 0:
+                            self.log(
+                                "While parsing an object we found row values without an inner array, grouping them into rows",
+                            )
+                            for i in range(0, len(tail), expected_len):
+                                prev_value.append(tail[i : i + expected_len])
+                        else:
+                            prev_value.extend(tail)
+                    # Keep incoming rows as rows instead of flattening them into the table.
+                    if new_array:
+                        if all(isinstance(item, list) for item in new_array):
+                            self.log(
+                                "While parsing an object we found additional rows, appending them without flattening",
+                            )
+                            prev_value.extend(new_array)
+                        else:
+                            prev_value.append(new_array)
+                else:
+                    # Fallback to legacy merge behavior when not a uniform row list or in strict mode.
+                    prev_value.extend(
+                        new_array[0]
+                        if len(new_array) == 1 and isinstance(new_array[0], list)
+                        else new_array
+                    )
+            self.skip_whitespaces()
+            if self.get_char_at() == ",":
+                self.index += 1
+                self.skip_whitespaces()
+            continue
             raw_key = self.parse_string()
             assert isinstance(raw_key, str)
             key = raw_key
```
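To make the new regrouping step concrete, here is a standalone sketch of the same list manipulation, lifted out of the parser as plain Python mirroring the added lines above rather than calling the module itself:

```python
def regroup_trailing_scalars(prev_value: list, expected_len: int) -> None:
    """Mirror of the added repair step: pop trailing non-list cells and, if they
    divide evenly into rows of expected_len, append them back as rows."""
    tail = []
    while prev_value and not isinstance(prev_value[-1], list):
        tail.append(prev_value.pop())
    if tail:
        tail.reverse()
        if len(tail) % expected_len == 0:
            for i in range(0, len(tail), expected_len):
                prev_value.append(tail[i : i + expected_len])
        else:
            prev_value.extend(tail)


rows = [[1, 2, 3], 4, 5, 6]  # a matrix value whose second row lost its inner "["
regroup_trailing_scalars(rows, expected_len=3)
print(rows)  # [[1, 2, 3], [4, 5, 6]]
```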
json_repair/parse_string.py
CHANGED
```diff
@@ -76,15 +76,14 @@ def parse_string(self: "JSONParser") -> JSONReturnType:
     ):
         self.index += 1
         return ""
-
+    if self.get_char_at(1) == lstring_delimiter:
         # There's something fishy about this, we found doubled quotes and then again quotes
         self.log(
             "While parsing a string, we found a doubled quote and then a quote again, ignoring it",
         )
         if self.strict:
             raise ValueError("Found doubled quotes followed by another quote.")
-
-        return ""
+        return ""
     # Find the next delimiter
     i = self.skip_to_character(character=rstring_delimiter, idx=1)
     next_c = self.get_char_at(i)
@@ -111,7 +110,7 @@ def parse_string(self: "JSONParser") -> JSONReturnType:
             )
             self.index += 1
             return ""
-
+        if next_c not in [",", "]", "}"]:
             self.log(
                 "While parsing a string, we found a doubled quote but it was a mistake, removing one quote",
             )
@@ -135,7 +134,7 @@ def parse_string(self: "JSONParser") -> JSONReturnType:
                     "While parsing a string missing the left delimiter in object key context, we found a :, stopping here",
                 )
                 break
-
+            if self.context.current == ContextValues.ARRAY and char in ["]", ","]:
                 self.log(
                     "While parsing a string missing the left delimiter in array context, we found a ] or ,, stopping here",
                 )
@@ -190,25 +189,24 @@ def parse_string(self: "JSONParser") -> JSONReturnType:
                     if next_c:
                         # OK then this is a systemic issue with the output
                         break
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        break
+                    # skip any whitespace first
+                    i = self.scroll_whitespaces(idx=1)
+                    # We couldn't find any rstring_delimeter before the end of the string
+                    # check if this is the last string of an object and therefore we can keep going
+                    # make an exception if this is the last char before the closing brace
+                    j = self.skip_to_character(character="}", idx=i)
+                    if j - i > 1:
+                        # Ok it's not right after the comma
+                        # Let's ignore
+                        rstring_delimiter_missing = False
+                    # Check that j was not out of bound
+                    elif self.get_char_at(j):
+                        # Check for an unmatched opening brace in string_acc
+                        for c in reversed(string_acc):
+                            if c == "{":
+                                # Ok then this is part of the string
+                                rstring_delimiter_missing = False
+                                break
                     if rstring_delimiter_missing:
                         self.log(
                             "While parsing a string missing the left delimiter in object value context, we found a , or } and we couldn't determine that a right delimiter was present. Stopping here",
@@ -267,7 +265,7 @@ def parse_string(self: "JSONParser") -> JSONReturnType:
                 self.index += 1
                 char = self.get_char_at()
                 continue
-
+            if char in ["u", "x"]:
                 # If we find a unicode escape sequence, normalize it
                 num_chars = 4 if char == "u" else 2
                 next_chars = self.json_str[self.index + 1 : self.index + 1 + num_chars]
```
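The reworked block above belongs to the heuristics for strings that lose their closing quote right before a `}` (the "missing parentheses or quotes" case mentioned in the README). A hedged illustration of that input shape; the exact output of this version is not verified here:

```python
from json_repair import repair_json

# Closing quote dropped just before the closing brace.
print(repair_json('{"key": "value with no end quote}'))
# Expected shape of the repair: {"key": "value with no end quote"}
```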
json_repair/utils/string_file_wrapper.py
CHANGED
```diff
@@ -82,18 +82,7 @@ class StringFileWrapper:
         # self.buffers[index]: the row in the array of length 1MB, index is `i` modulo CHUNK_LENGTH
         # self.buffures[index][j]: the column of the row that is `i` remainder CHUNK_LENGTH
         if isinstance(index, slice):
-
-            start = 0 if index.start is None else index.start
-            stop = total_len if index.stop is None else index.stop
-            step = 1 if index.step is None else index.step
-
-            if start < 0:
-                start += total_len
-            if stop < 0:
-                stop += total_len
-
-            start = max(start, 0)
-            stop = min(stop, total_len)
+            start, stop, step = self._normalize_slice(index)
 
             if step == 0:
                 raise ValueError("slice step cannot be zero")
@@ -102,29 +91,14 @@ class StringFileWrapper:
 
             if start >= stop:
                 return ""
-
-
-
-
-
-
-
-
-                buffer = self.get_buffer(buffer_index)
-                return buffer[start_mod:stop_mod]
-
-            start_slice = self.get_buffer(buffer_index)[start_mod:]
-            end_slice = self.get_buffer(buffer_end)[:stop_mod]
-            middle_slices = [self.get_buffer(i) for i in range(buffer_index + 1, buffer_end)]
-            return start_slice + "".join(middle_slices) + end_slice
-        else:
-            if index < 0:
-                index += len(self)
-            if index < 0:
-                raise IndexError("string index out of range")
-            buffer_index = index // self.buffer_length
-            buffer = self.get_buffer(buffer_index)
-            return buffer[index % self.buffer_length]
+            return self._slice_from_buffers(start, stop)
+        if index < 0:
+            index += len(self)
+        if index < 0:
+            raise IndexError("string index out of range")
+        buffer_index = index // self.buffer_length
+        buffer = self.get_buffer(buffer_index)
+        return buffer[index % self.buffer_length]
 
     def __len__(self) -> int:
         """
@@ -140,6 +114,37 @@ class StringFileWrapper:
         assert self.length is not None
         return self.length
 
+    def _normalize_slice(self, index: slice) -> tuple[int, int, int]:
+        total_len = len(self)
+        start = 0 if index.start is None else index.start
+        stop = total_len if index.stop is None else index.stop
+        step = 1 if index.step is None else index.step
+
+        if start < 0:
+            start += total_len
+        if stop < 0:
+            stop += total_len
+
+        start = max(start, 0)
+        stop = min(stop, total_len)
+        return start, stop, step
+
+    def _slice_from_buffers(self, start: int, stop: int) -> str:
+        buffer_index = start // self.buffer_length
+        buffer_end = (stop - 1) // self.buffer_length
+        start_mod = start % self.buffer_length
+        stop_mod = stop % self.buffer_length
+        if stop_mod == 0 and stop > start:
+            stop_mod = self.buffer_length
+        if buffer_index == buffer_end:
+            buffer = self.get_buffer(buffer_index)
+            return buffer[start_mod:stop_mod]
+
+        start_slice = self.get_buffer(buffer_index)[start_mod:]
+        end_slice = self.get_buffer(buffer_end)[:stop_mod]
+        middle_slices = [self.get_buffer(i) for i in range(buffer_index + 1, buffer_end)]
+        return start_slice + "".join(middle_slices) + end_slice
+
     def __setitem__(self, index: int | slice, value: str) -> None:  # pragma: no cover
         """
         Set a character or a slice of characters in the file.
```
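The two helpers extracted above separate slice normalization from buffer stitching. A self-contained sketch of the same stitching arithmetic over a toy list of fixed-size chunks; the chunk size and data are illustrative, not the wrapper's real lazily loaded 1MB buffers:

```python
CHUNK = 4
chunks = ["abcd", "efgh", "ijkl"]  # stands in for the file buffers loaded on demand


def slice_from_chunks(start: int, stop: int) -> str:
    first, last = start // CHUNK, (stop - 1) // CHUNK
    start_mod, stop_mod = start % CHUNK, stop % CHUNK
    if stop_mod == 0 and stop > start:
        stop_mod = CHUNK                      # slice ends exactly on a chunk boundary
    if first == last:
        return chunks[first][start_mod:stop_mod]
    middle = "".join(chunks[i] for i in range(first + 1, last))
    return chunks[first][start_mod:] + middle + chunks[last][:stop_mod]


print(slice_from_chunks(2, 10))  # "cdefghij", same as "".join(chunks)[2:10]
```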
{json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: json_repair
-Version: 0.55.0
+Version: 0.55.2
 Summary: A package to repair broken json strings
 Author-email: Stefano Baccianella <4247706+mangiucugna@users.noreply.github.com>
 License-Expression: MIT
@@ -22,14 +22,20 @@ Dynamic: license-file
 [](https://github.com/sponsors/mangiucugna)
 [](https://github.com/mangiucugna/json_repair/stargazers)
 
+English | [中文](README.zh.md)
 
 This simple package can be used to fix an invalid json string. To know all cases in which this package will work, check out the unit test.
 
 
 
 ---
-
-
+
+# Think about sponsoring this library!
+This library is free for everyone and it's maintained and developed as a side project so, if you find this library useful for your work, consider becoming a sponsor via this link: https://github.com/sponsors/mangiucugna
+
+## Premium sponsors
+- [Icana-AI](https://github.com/Icana-AI) Makers of CallCoach, the world's best Call Centre AI Coach. Visit [https://www.icana.ai/](https://www.icana.ai/)
+- [mjharte](https://github.com/mjharte)
 
 ---
 
@@ -268,6 +274,9 @@ If something is wrong (a missing parentheses or quotes for example) it will use
 
 I am sure some corner cases will be missing, if you have examples please open an issue or even better push a PR
 
+# Contributing
+If you want to contribute, start with `CONTRIBUTING.md` and read the Code Wiki writeup for a tour of the codebase and key entry points: https://codewiki.google/github.com/mangiucugna/json_repair
+
 # How to develop
 Just create a virtual environment with `requirements.txt`, the setup uses [pre-commit](https://pre-commit.com/) to make sure all tests are run.
 
```
json_repair-0.55.2.dist-info/RECORD
ADDED
```diff
@@ -0,0 +1,22 @@
+json_repair/__init__.py,sha256=JQ4Nm8YzR8Id2a527Ql0Az-rKapTp8DCMPKybLtQ620,180
+json_repair/__main__.py,sha256=EsJb-y89uZEvGQQg1GdIDWzfDwfOMvVekKEtdguQXCM,67
+json_repair/json_parser.py,sha256=Xilflg_rBhbqLckOYL7Tai3pDCOnZFa9CR9i-LAIIaY,8545
+json_repair/json_repair.py,sha256=5RQ51UwxJiduskHCWBHRyij53ddzLrHGa3Blgm7pOw8,12717
+json_repair/parse_array.py,sha256=rZfnRiS86vBATOUHqSx2T5fE79Ndlk2NoTsg9Wek7l4,2239
+json_repair/parse_comment.py,sha256=ozOPPZBI1pyHxN98vTtmtMNNbdg4dOjvC06dkuXSzMs,2646
+json_repair/parse_number.py,sha256=j-tYSnQbXOVENk4aJEeNY6wfAGUEjgLzQz5T5Nnvuj0,1296
+json_repair/parse_object.py,sha256=hU7cDlXRk6cUzyaLHGzG6OlmGkyDxOWwCZIceXgSP8s,10221
+json_repair/parse_string.py,sha256=Kq_u7Ya5ioctsOueQh1eY_fpXHWGLfW2193tJ7yesB4,26151
+json_repair/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+json_repair/parse_string_helpers/parse_boolean_or_null.py,sha256=pGmH1QATBls70kTvUlJv4F8NiPaBWcyGhRL03sTOnto,871
+json_repair/parse_string_helpers/parse_json_llm_block.py,sha256=wPSm-8RY30Ek8HxzjCkCRtdLq4-Cez-PJB3vOk_vP3w,670
+json_repair/utils/constants.py,sha256=cv2gvyosuq0me0600WyTysM9avrtfXPuXYR26tawcuo,158
+json_repair/utils/json_context.py,sha256=WsMOjqpGSr6aaDONcrk8UFtTurzWon2Qq9AoBBYseoI,934
+json_repair/utils/object_comparer.py,sha256=eKpIi9pMitZ3T_X1gIDlZv0-aPuvZqxSTphjIYD-8ys,1714
+json_repair/utils/string_file_wrapper.py,sha256=dUYoSaMn1ahl72B1BfnNe0iqlV1wsxiZQK3gB0oHcyg,7054
+json_repair-0.55.2.dist-info/licenses/LICENSE,sha256=wrjQo8MhNrNCicXtMe3MHmS-fx8AmQk1ue8AQwiiFV8,1076
+json_repair-0.55.2.dist-info/METADATA,sha256=RHNReNcfM19tOSRDFVSDKy_Z0v3aMkIebsMTdfCk-gQ,12787
+json_repair-0.55.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+json_repair-0.55.2.dist-info/entry_points.txt,sha256=SNfge3zPSP-ASqriYU9r3NAPaXdseYr7ciPMKdV2uSw,57
+json_repair-0.55.2.dist-info/top_level.txt,sha256=7-VZwZN2CgB_n0NlSLk-rEUFh8ug21lESbsblOYuZqw,12
+json_repair-0.55.2.dist-info/RECORD,,
```
json_repair-0.55.0.dist-info/RECORD
DELETED
```diff
@@ -1,22 +0,0 @@
-json_repair/__init__.py,sha256=JQ4Nm8YzR8Id2a527Ql0Az-rKapTp8DCMPKybLtQ620,180
-json_repair/__main__.py,sha256=EsJb-y89uZEvGQQg1GdIDWzfDwfOMvVekKEtdguQXCM,67
-json_repair/json_parser.py,sha256=1C24BNRNOg-hwJzLsCWuW75xYctm0haSynrDsGCy3d8,8595
-json_repair/json_repair.py,sha256=iT-OJgpBnKUJVIV4IUlXmMUkOyW6bNnKCZLB7Fys8hk,12758
-json_repair/parse_array.py,sha256=rZfnRiS86vBATOUHqSx2T5fE79Ndlk2NoTsg9Wek7l4,2239
-json_repair/parse_comment.py,sha256=MUDxrx8BFfAaKvx6x4gWviJNvwRi2yv5qnrR6honmas,2660
-json_repair/parse_number.py,sha256=0pq0IVasNzjWCN6771DZQ1wS4SJafhm-uVry-qxBel4,1316
-json_repair/parse_object.py,sha256=DH5xAqXIAM0hBhUUblfADuEs2JM8jnvd73h-KbFDcbQ,7859
-json_repair/parse_string.py,sha256=Dad1iQZDhF5MGAQ5eYs55O810yV1HCYPBkcHYH5AVBU,26275
-json_repair/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-json_repair/parse_string_helpers/parse_boolean_or_null.py,sha256=pGmH1QATBls70kTvUlJv4F8NiPaBWcyGhRL03sTOnto,871
-json_repair/parse_string_helpers/parse_json_llm_block.py,sha256=wPSm-8RY30Ek8HxzjCkCRtdLq4-Cez-PJB3vOk_vP3w,670
-json_repair/utils/constants.py,sha256=cv2gvyosuq0me0600WyTysM9avrtfXPuXYR26tawcuo,158
-json_repair/utils/json_context.py,sha256=WsMOjqpGSr6aaDONcrk8UFtTurzWon2Qq9AoBBYseoI,934
-json_repair/utils/object_comparer.py,sha256=XKV3MRab8H7_v4sm-wpEa5le0XX9OeycWo5S-MFm-GI,1716
-json_repair/utils/string_file_wrapper.py,sha256=jaS1vl_6qRQLJ-r37wZgMRtUJR6MLgNIGMVLqeGYBSk,6901
-json_repair-0.55.0.dist-info/licenses/LICENSE,sha256=wrjQo8MhNrNCicXtMe3MHmS-fx8AmQk1ue8AQwiiFV8,1076
-json_repair-0.55.0.dist-info/METADATA,sha256=HQTIQfL3kWWZ5fZ1fzht8cMo6pa7tzF46Ygy1P_udEY,12222
-json_repair-0.55.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-json_repair-0.55.0.dist-info/entry_points.txt,sha256=SNfge3zPSP-ASqriYU9r3NAPaXdseYr7ciPMKdV2uSw,57
-json_repair-0.55.0.dist-info/top_level.txt,sha256=7-VZwZN2CgB_n0NlSLk-rEUFh8ug21lESbsblOYuZqw,12
-json_repair-0.55.0.dist-info/RECORD,,
```
{json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/entry_points.txt
File without changes

{json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/licenses/LICENSE
File without changes

{json_repair-0.55.0.dist-info → json_repair-0.55.2.dist-info}/top_level.txt
File without changes