config2py 0.1.45__py3-none-any.whl → 0.1.46__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- config2py/__init__.py +1 -0
- config2py/codecs.py +723 -0
- config2py/tests/test_codecs.py +378 -0
- config2py/util.py +1 -1
- {config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/METADATA +26 -1
- {config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/RECORD +8 -6
- {config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/WHEEL +1 -1
- {config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/licenses/LICENSE +0 -0
config2py/__init__.py
CHANGED
config2py/codecs.py
ADDED
@@ -0,0 +1,723 @@
"""Extension-based codec registries for configuration file parsing.

This module provides a flexible pattern for encoding and decoding configuration files
based on their file extensions. It includes codecs for bytes <-> JSON-friendly Python types.

Examples:
    >>> # Basic usage
    >>> data = {'name': 'config2py', 'version': '1.0'}
    >>>
    >>> # Encode to bytes
    >>> encoded = encode_by_extension('config.json', data)
    >>> assert isinstance(encoded, bytes)
    >>>
    >>> # Decode from bytes
    >>> decoded = decode_by_extension('config.json', encoded)
    >>> assert decoded == data
    >>>
    >>> # Register custom codec
    >>> @register_decoder('.custom')
    ... def decode_custom(data: bytes) -> dict:
    ...     return {'custom': data.decode()}
    >>>
    >>> @register_encoder('.custom')
    ... def encode_custom(obj: dict) -> bytes:
    ...     return obj.get('custom', '').encode()

The module automatically registers codecs for standard formats (json, toml, ini, etc.)
and conditionally registers codecs that require third-party libraries (yaml, json5, etc.).
"""

from typing import Callable, TypeVar, Any, Optional
import json
import pickle
import csv
import io
from configparser import ConfigParser
from pathlib import Path

__all__ = [
    # Core functions
    "decode_by_extension",
    "encode_by_extension",
    "get_extension",
    # Registration functions
    "register_codec",
    "register_decoder",
    "register_encoder",
    # Registry access
    "list_registered_extensions",
    "is_extension_registered",
    "get_codec_info",
]

VT = TypeVar("VT")
EncodedT = TypeVar("EncodedT")

# --------------------------------------------------------------------------------------
# Core Registry
# --------------------------------------------------------------------------------------

EXTENSION_TO_DECODER: dict[str, Callable[[bytes], Any]] = {}
EXTENSION_TO_ENCODER: dict[str, Callable[[Any], bytes]] = {}

# Track which codecs require optional dependencies
_CODEC_DEPENDENCIES: dict[str, str] = {}


def get_extension(key: str) -> str:
    """Extract extension from a key (filename, path, etc.).

    Args:
        key: A string that may contain a file extension

    Returns:
        Extension without the dot, or empty string if no extension found

    Examples:
        >>> get_extension('config.json')
        'json'
        >>> get_extension('/path/to/data.yaml')
        'yaml'
        >>> get_extension('no_extension')
        ''
        >>> get_extension('.env')
        'env'
        >>> get_extension('/path/to/.env')
        'env'
    """
    if not key or "." not in key:
        return ""

    # Get the basename (last part of path)
    basename = Path(key).name

    # Handle dotfiles (e.g., .env, .gitignore)
    if basename.startswith(".") and basename.count(".") == 1:
        return basename.lstrip(".").lower()

    # Normal case: extract extension using Path.suffix
    suffix = Path(key).suffix
    return suffix.lstrip(".").lower() if suffix else ""


def decode_by_extension(key: str, data: bytes) -> Any:
    """Decode data based on key's extension.

    Args:
        key: Key or filename with extension
        data: Bytes to decode

    Returns:
        Decoded Python object

    Raises:
        ValueError: If no decoder registered for extension

    Examples:
        >>> data = b'{"key": "value"}'
        >>> decode_by_extension('config.json', data)
        {'key': 'value'}
    """
    ext = get_extension(key)
    ext_with_dot = f".{ext}" if ext else ""
    decoder = EXTENSION_TO_DECODER.get(ext_with_dot)
    if decoder is None:
        available = ", ".join(sorted(EXTENSION_TO_DECODER.keys()))
        raise ValueError(
            f"No decoder registered for extension: '{ext_with_dot}'. "
            f"Available: {available}"
        )
    return decoder(data)


def encode_by_extension(key: str, obj: Any) -> bytes:
    """Encode object based on key's extension.

    Args:
        key: Key or filename with extension
        obj: Python object to encode

    Returns:
        Encoded bytes

    Raises:
        ValueError: If no encoder registered for extension

    Examples:
        >>> obj = {'key': 'value'}
        >>> encoded = encode_by_extension('config.json', obj)
        >>> assert b'"key"' in encoded
    """
    ext = get_extension(key)
    ext_with_dot = f".{ext}" if ext else ""
    encoder = EXTENSION_TO_ENCODER.get(ext_with_dot)
    if encoder is None:
        available = ", ".join(sorted(EXTENSION_TO_ENCODER.keys()))
        raise ValueError(
            f"No encoder registered for extension: '{ext_with_dot}'. "
            f"Available: {available}"
        )
    return encoder(obj)


# --------------------------------------------------------------------------------------
# Registration Functions
# --------------------------------------------------------------------------------------


def register_codec(
    extension: str,
    *,
    encoder: Optional[Callable[[Any], bytes]] = None,
    decoder: Optional[Callable[[bytes], Any]] = None,
    overwrite: bool = False,
    dependency: Optional[str] = None,
):
    """Register encoder and/or decoder for an extension.

    Args:
        extension: File extension (with or without leading dot)
        encoder: Function to encode objects to bytes
        decoder: Function to decode bytes to objects
        overwrite: Whether to overwrite existing codec
        dependency: Optional package name required for this codec

    Raises:
        ValueError: If codec already registered and overwrite=False

    Examples:
        >>> def my_encoder(obj): return str(obj).encode()
        >>> def my_decoder(data): return eval(data.decode())
        >>> register_codec('.custom', encoder=my_encoder, decoder=my_decoder, overwrite=True)
    """
    if not extension.startswith("."):
        extension = f".{extension}"
    extension = extension.lower()

    if not overwrite:
        if encoder and extension in EXTENSION_TO_ENCODER:
            raise ValueError(f"Encoder for '{extension}' already registered")
        if decoder and extension in EXTENSION_TO_DECODER:
            raise ValueError(f"Decoder for '{extension}' already registered")

    if encoder:
        EXTENSION_TO_ENCODER[extension] = encoder
    if decoder:
        EXTENSION_TO_DECODER[extension] = decoder
    if dependency:
        _CODEC_DEPENDENCIES[extension] = dependency


def register_decoder(extension: str, *, overwrite: bool = False):
    """Decorator to register a decoder function.

    Args:
        extension: File extension (with or without leading dot)
        overwrite: Whether to overwrite existing decoder

    Returns:
        Decorator function

    Examples:
        >>> @register_decoder('.custom', overwrite=True)
        ... def decode_custom(data: bytes) -> dict:
        ...     return {'data': data.decode()}
    """
    if not extension.startswith("."):
        extension = f".{extension}"
    extension = extension.lower()

    def decorator(func: Callable[[bytes], Any]) -> Callable:
        if not overwrite and extension in EXTENSION_TO_DECODER:
            raise ValueError(f"Decoder for '{extension}' already registered")
        EXTENSION_TO_DECODER[extension] = func
        return func

    return decorator


def register_encoder(extension: str, *, overwrite: bool = False):
    """Decorator to register an encoder function.

    Args:
        extension: File extension (with or without leading dot)
        overwrite: Whether to overwrite existing encoder

    Returns:
        Decorator function

    Examples:
        >>> @register_encoder('.custom', overwrite=True)
        ... def encode_custom(obj: dict) -> bytes:
        ...     return obj.get('data', '').encode()
    """
    if not extension.startswith("."):
        extension = f".{extension}"
    extension = extension.lower()

    def decorator(func: Callable[[Any], bytes]) -> Callable:
        if not overwrite and extension in EXTENSION_TO_ENCODER:
            raise ValueError(f"Encoder for '{extension}' already registered")
        EXTENSION_TO_ENCODER[extension] = func
        return func

    return decorator


# --------------------------------------------------------------------------------------
# Registry Introspection
# --------------------------------------------------------------------------------------


def list_registered_extensions() -> list[str]:
    """List all registered extensions.

    Returns:
        Sorted list of registered extensions

    Examples:
        >>> extensions = list_registered_extensions()
        >>> '.json' in extensions
        True
    """
    all_extensions = set(EXTENSION_TO_DECODER.keys()) | set(EXTENSION_TO_ENCODER.keys())
    return sorted(all_extensions)


def is_extension_registered(extension: str) -> bool:
    """Check if an extension has any codec registered.

    Args:
        extension: File extension (with or without leading dot)

    Returns:
        True if decoder or encoder is registered

    Examples:
        >>> is_extension_registered('.json')
        True
        >>> is_extension_registered('.nonexistent')
        False
    """
    if not extension.startswith("."):
        extension = f".{extension}"
    extension = extension.lower()
    return extension in EXTENSION_TO_DECODER or extension in EXTENSION_TO_ENCODER


def get_codec_info(extension: str) -> dict[str, Any]:
    """Get information about a registered codec.

    Args:
        extension: File extension (with or without leading dot)

    Returns:
        Dictionary with codec information

    Examples:
        >>> info = get_codec_info('.json')
        >>> info['has_encoder']
        True
        >>> info['has_decoder']
        True
    """
    if not extension.startswith("."):
        extension = f".{extension}"
    extension = extension.lower()

    return {
        "extension": extension,
        "has_encoder": extension in EXTENSION_TO_ENCODER,
        "has_decoder": extension in EXTENSION_TO_DECODER,
        "dependency": _CODEC_DEPENDENCIES.get(extension),
    }


# --------------------------------------------------------------------------------------
# Standard Library Codecs (Always Available)
# --------------------------------------------------------------------------------------

# JSON - Most common config format
register_codec(
    ".json",
    encoder=lambda obj: json.dumps(obj, indent=2, ensure_ascii=False).encode("utf-8"),
    decoder=lambda data: json.loads(data.decode("utf-8")),
)

# Pickle - Python object serialization (not text-based, but useful)
register_codec(
    ".pkl",
    encoder=pickle.dumps,
    decoder=pickle.loads,
)
register_codec(".pickle", encoder=pickle.dumps, decoder=pickle.loads)

# Plain text
register_codec(
    ".txt",
    encoder=lambda obj: str(obj).encode("utf-8"),
    decoder=lambda data: data.decode("utf-8"),
)


# Line-separated values (LSV) - one value per line
def _lsv_encoder(obj: Any) -> bytes:
    """Encode list/iterable to line-separated values."""
    if isinstance(obj, (list, tuple)):
        return "\n".join(str(item) for item in obj).encode("utf-8")
    elif isinstance(obj, str):
        return obj.encode("utf-8")
    else:
        return str(obj).encode("utf-8")


def _lsv_decoder(data: bytes) -> list[str]:
    """Decode line-separated values to list."""
    text = data.decode("utf-8")
    return [line.strip() for line in text.splitlines() if line.strip()]


register_codec(".lsv", encoder=_lsv_encoder, decoder=_lsv_decoder)
register_codec(".list", encoder=_lsv_encoder, decoder=_lsv_decoder)
register_codec(".lines", encoder=_lsv_encoder, decoder=_lsv_decoder)


# CSV - Comma-separated values
def _csv_encoder(obj: Any) -> bytes:
    """Encode list of dicts or list of lists to CSV."""
    output = io.StringIO()

    if not obj:
        return b""

    if isinstance(obj, dict):
        # Single dict - treat as one row
        obj = [obj]

    if isinstance(obj[0], dict):
        # List of dicts
        fieldnames = list(obj[0].keys())
        writer = csv.DictWriter(output, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(obj)
    else:
        # List of lists/tuples
        writer = csv.writer(output)
        writer.writerows(obj)

    return output.getvalue().encode("utf-8")


def _csv_decoder(data: bytes) -> list[dict[str, str]]:
    """Decode CSV to list of dicts."""
    text = data.decode("utf-8")
    reader = csv.DictReader(io.StringIO(text))
    return list(reader)


register_codec(".csv", encoder=_csv_encoder, decoder=_csv_decoder)


# TSV - Tab-separated values
def _tsv_encoder(obj: Any) -> bytes:
    """Encode list of dicts or list of lists to TSV."""
    output = io.StringIO()

    if not obj:
        return b""

    if isinstance(obj, dict):
        obj = [obj]

    if isinstance(obj[0], dict):
        fieldnames = list(obj[0].keys())
        writer = csv.DictWriter(output, fieldnames=fieldnames, delimiter="\t")
        writer.writeheader()
        writer.writerows(obj)
    else:
        writer = csv.writer(output, delimiter="\t")
        writer.writerows(obj)

    return output.getvalue().encode("utf-8")


def _tsv_decoder(data: bytes) -> list[dict[str, str]]:
    """Decode TSV to list of dicts."""
    text = data.decode("utf-8")
    reader = csv.DictReader(io.StringIO(text), delimiter="\t")
    return list(reader)


register_codec(".tsv", encoder=_tsv_encoder, decoder=_tsv_decoder)
register_codec(".tab", encoder=_tsv_encoder, decoder=_tsv_decoder)


# INI/CFG - ConfigParser format
def _ini_encoder(obj: dict) -> bytes:
    """Encode dict to INI format."""
    config = ConfigParser()
    for section, values in obj.items():
        config[section] = {k: str(v) for k, v in values.items()}
    output = io.StringIO()
    config.write(output)
    return output.getvalue().encode("utf-8")


def _ini_decoder(data: bytes) -> dict[str, dict[str, str]]:
    """Decode INI format to nested dict."""
    config = ConfigParser()
    config.read_string(data.decode("utf-8"))
    return {section: dict(config[section]) for section in config.sections()}


register_codec(".ini", encoder=_ini_encoder, decoder=_ini_decoder)
register_codec(".cfg", encoder=_ini_encoder, decoder=_ini_decoder)
register_codec(".conf", encoder=_ini_encoder, decoder=_ini_decoder)

# XML - Basic XML support using ElementTree
try:
    import xml.etree.ElementTree as ET

    def _xml_encoder(obj: dict) -> bytes:
        """Encode dict to simple XML format."""

        def dict_to_xml(tag: str, d: dict) -> ET.Element:
            """Convert dict to XML element."""
            elem = ET.Element(tag)
            for key, val in d.items():
                child = ET.SubElement(elem, key)
                if isinstance(val, dict):
                    for k, v in val.items():
                        subchild = ET.SubElement(child, k)
                        subchild.text = str(v)
                elif isinstance(val, (list, tuple)):
                    for item in val:
                        item_elem = ET.SubElement(child, "item")
                        item_elem.text = str(item)
                else:
                    child.text = str(val)
            return elem

        root = dict_to_xml("root", obj)
        return ET.tostring(root, encoding="utf-8", xml_declaration=True)

    def _xml_decoder(data: bytes) -> dict:
        """Decode XML to dict."""

        def xml_to_dict(elem: ET.Element) -> dict:
            """Convert XML element to dict."""
            result = {}
            for child in elem:
                if len(child) == 0:
                    # Leaf node
                    result[child.tag] = child.text
                else:
                    # Has children
                    if child.tag not in result:
                        result[child.tag] = {}
                    child_dict = xml_to_dict(child)
                    if isinstance(result[child.tag], dict):
                        result[child.tag].update(child_dict)
                    else:
                        result[child.tag] = child_dict
            return result

        root = ET.fromstring(data)
        return xml_to_dict(root)

    register_codec(".xml", encoder=_xml_encoder, decoder=_xml_decoder)
except ImportError:
    pass

# --------------------------------------------------------------------------------------
# TOML - Conditionally available based on Python version
# --------------------------------------------------------------------------------------

# Try Python 3.11+ built-in tomllib for reading
try:
    import tomllib

    register_codec(
        ".toml",
        decoder=lambda data: tomllib.loads(data.decode("utf-8")),
    )
except ImportError:
    # Try tomli for older Python versions
    try:
        import tomli

        register_codec(
            ".toml",
            decoder=lambda data: tomli.loads(data.decode("utf-8")),
            dependency="tomli",
        )
    except ImportError:
        pass

# Try tomli_w for TOML writing (not in stdlib)
try:
    import tomli_w

    # Register encoder (or update if decoder already registered)
    if ".toml" in EXTENSION_TO_DECODER:
        EXTENSION_TO_ENCODER[".toml"] = lambda obj: tomli_w.dumps(obj).encode("utf-8")
    else:
        register_codec(
            ".toml",
            encoder=lambda obj: tomli_w.dumps(obj).encode("utf-8"),
            dependency="tomli_w",
        )
except ImportError:
    pass

# --------------------------------------------------------------------------------------
# Third-Party Codecs (Conditionally Registered)
# --------------------------------------------------------------------------------------

# YAML - Requires PyYAML
try:
    import yaml

    def _yaml_encoder(obj: Any) -> bytes:
        """Encode object to YAML."""
        return yaml.dump(obj, default_flow_style=False, allow_unicode=True).encode(
            "utf-8"
        )

    def _yaml_decoder(data: bytes) -> Any:
        """Decode YAML to Python object."""
        return yaml.safe_load(data.decode("utf-8"))

    register_codec(
        ".yaml", encoder=_yaml_encoder, decoder=_yaml_decoder, dependency="pyyaml"
    )
    register_codec(
        ".yml", encoder=_yaml_encoder, decoder=_yaml_decoder, dependency="pyyaml"
    )
except ImportError:
    pass

# ENV - Environment files (requires python-dotenv for full support)
try:
    from dotenv import dotenv_values

    def _env_encoder(obj: dict) -> bytes:
        """Encode dict to .env format."""
        lines = []
        for key, value in obj.items():
            # Quote values with spaces or special chars
            if isinstance(value, str) and (
                " " in value or '"' in value or "'" in value
            ):
                value = f'"{value}"'
            lines.append(f"{key}={value}")
        return "\n".join(lines).encode("utf-8")

    def _env_decoder(data: bytes) -> dict:
        """Decode .env file to dict."""
        # Write to temp StringIO for dotenv_values
        return dotenv_values(stream=io.StringIO(data.decode("utf-8")))

    register_codec(
        ".env", encoder=_env_encoder, decoder=_env_decoder, dependency="python-dotenv"
    )
except ImportError:
    # Fallback simple .env parser without dotenv
    def _env_encoder_simple(obj: dict) -> bytes:
        """Simple .env encoder without python-dotenv."""
        lines = []
        for key, value in obj.items():
            if isinstance(value, str) and (
                " " in value or '"' in value or "'" in value
            ):
                value = f'"{value}"'
            lines.append(f"{key}={value}")
        return "\n".join(lines).encode("utf-8")

    def _env_decoder_simple(data: bytes) -> dict:
        """Simple .env decoder without python-dotenv."""
        result = {}
        for line in data.decode("utf-8").splitlines():
            line = line.strip()
            if line and not line.startswith("#"):
                if "=" in line:
                    key, _, value = line.partition("=")
                    key = key.strip()
                    value = value.strip().strip('"').strip("'")
                    result[key] = value
        return result

    register_codec(".env", encoder=_env_encoder_simple, decoder=_env_decoder_simple)

# JSON5 - More lenient JSON (requires json5 package)
try:
    import json5

    register_codec(
        ".json5",
        encoder=lambda obj: json5.dumps(obj, indent=2).encode("utf-8"),
        decoder=lambda data: json5.loads(data.decode("utf-8")),
        dependency="json5",
    )
except ImportError:
    pass

# Properties files (Java-style)
try:
    from jproperties import Properties

    def _properties_encoder(obj: dict) -> bytes:
        """Encode dict to .properties format."""
        props = Properties()
        for key, value in obj.items():
            props[key] = str(value)
        output = io.BytesIO()
        props.store(output, encoding="utf-8")
        return output.getvalue()

    def _properties_decoder(data: bytes) -> dict:
        """Decode .properties file to dict."""
        props = Properties()
        props.load(io.BytesIO(data), encoding="utf-8")
        return {k: v.data for k, v in props.items()}

    register_codec(
        ".properties",
        encoder=_properties_encoder,
        decoder=_properties_decoder,
        dependency="jproperties",
    )
except ImportError:
    # Fallback simple properties parser
    def _properties_encoder_simple(obj: dict) -> bytes:
        """Simple .properties encoder."""
        lines = []
        for key, value in obj.items():
            # Escape special characters
            value_str = str(value).replace("\\", "\\\\").replace("\n", "\\n")
            lines.append(f"{key}={value_str}")
        return "\n".join(lines).encode("utf-8")

    def _properties_decoder_simple(data: bytes) -> dict:
        """Simple .properties decoder."""
        result = {}
        for line in data.decode("utf-8").splitlines():
            line = line.strip()
            if line and not line.startswith("#") and not line.startswith("!"):
                if "=" in line or ":" in line:
                    # Support both = and : as separators
                    sep = "=" if "=" in line else ":"
                    key, _, value = line.partition(sep)
                    key = key.strip()
                    value = value.strip()
                    # Unescape
                    value = value.replace("\\n", "\n").replace("\\\\", "\\")
                    result[key] = value
        return result

    register_codec(
        ".properties",
        encoder=_properties_encoder_simple,
        decoder=_properties_decoder_simple,
    )
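
The listing above is the entire new module. As a quick orientation (not part of the diff itself), the sketch below drives the registry the way the module docstring describes: round-tripping a dict through the stdlib-backed JSON codec and then registering a custom codec. The ".keyval" extension, its filenames, and its one-line encoder/decoder are hypothetical, invented here for illustration.

    from config2py.codecs import (
        decode_by_extension,
        encode_by_extension,
        register_codec,
    )

    # Round-trip a settings dict through the stdlib-backed JSON codec.
    settings = {"name": "config2py", "retries": 3}
    blob = encode_by_extension("settings.json", settings)   # -> bytes
    assert decode_by_extension("settings.json", blob) == settings

    # Teach the registry a project-specific ".keyval" format (hypothetical):
    # one "key=value" pair per line.
    register_codec(
        ".keyval",
        encoder=lambda d: "\n".join(f"{k}={v}" for k, v in d.items()).encode("utf-8"),
        decoder=lambda b: dict(
            line.split("=", 1) for line in b.decode("utf-8").splitlines() if line
        ),
        overwrite=True,
    )
    assert decode_by_extension("app.keyval", b"host=localhost\nport=8080") == {
        "host": "localhost",
        "port": "8080",
    }

Because dispatch is a plain dict lookup keyed on the lowercased extension, registering a codec once is enough for both encode_by_extension and decode_by_extension to pick it up.
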
config2py/tests/test_codecs.py
ADDED
@@ -0,0 +1,378 @@
"""Tests for config2py.codecs module."""

import pytest
from config2py import codecs


class TestCodecRegistry:
    """Test codec registration and introspection."""

    def test_list_registered_extensions(self):
        """Test listing all registered extensions."""
        extensions = codecs.list_registered_extensions()
        assert isinstance(extensions, list)
        assert '.json' in extensions
        assert '.ini' in extensions
        assert '.csv' in extensions

    def test_is_extension_registered(self):
        """Test checking if extension is registered."""
        assert codecs.is_extension_registered('.json')
        assert codecs.is_extension_registered('json')  # without dot
        assert not codecs.is_extension_registered('.nonexistent')

    def test_get_codec_info(self):
        """Test getting codec information."""
        info = codecs.get_codec_info('.json')
        assert info['extension'] == '.json'
        assert info['has_encoder'] is True
        assert info['has_decoder'] is True
        assert info['dependency'] is None  # json is stdlib

    def test_get_extension(self):
        """Test extension extraction."""
        assert codecs.get_extension('config.json') == 'json'
        assert codecs.get_extension('/path/to/file.yaml') == 'yaml'
        assert codecs.get_extension('no_extension') == ''
        assert codecs.get_extension('file.tar.gz') == 'gz'


class TestJSONCodec:
    """Test JSON encoding/decoding."""

    def test_json_roundtrip(self):
        """Test JSON encode -> decode roundtrip."""
        data = {'name': 'test', 'value': 42, 'nested': {'key': 'value'}}
        encoded = codecs.encode_by_extension('config.json', data)
        assert isinstance(encoded, bytes)
        decoded = codecs.decode_by_extension('config.json', encoded)
        assert decoded == data

    def test_json_with_unicode(self):
        """Test JSON with unicode characters."""
        data = {'message': 'Hello 世界', 'emoji': '🎉'}
        encoded = codecs.encode_by_extension('data.json', data)
        decoded = codecs.decode_by_extension('data.json', encoded)
        assert decoded == data


class TestPickleCodec:
    """Test pickle encoding/decoding."""

    def test_pickle_roundtrip(self):
        """Test pickle encode -> decode roundtrip."""
        data = {'list': [1, 2, 3], 'tuple': (4, 5, 6), 'set': {7, 8, 9}}
        encoded = codecs.encode_by_extension('data.pkl', data)
        decoded = codecs.decode_by_extension('data.pkl', encoded)
        assert decoded == data

    def test_pickle_alias(self):
        """Test .pickle extension alias."""
        data = {'key': 'value'}
        encoded = codecs.encode_by_extension('data.pickle', data)
        decoded = codecs.decode_by_extension('data.pickle', encoded)
        assert decoded == data


class TestTextCodec:
    """Test plain text encoding/decoding."""

    def test_text_roundtrip(self):
        """Test text encode -> decode roundtrip."""
        text = "Hello, World!\nThis is a test."
        encoded = codecs.encode_by_extension('file.txt', text)
        decoded = codecs.decode_by_extension('file.txt', encoded)
        assert decoded == text


class TestLSVCodec:
    """Test line-separated values encoding/decoding."""

    def test_lsv_list_roundtrip(self):
        """Test LSV with list input."""
        data = ['apple', 'banana', 'cherry']
        encoded = codecs.encode_by_extension('data.lsv', data)
        decoded = codecs.decode_by_extension('data.lsv', encoded)
        assert decoded == data

    def test_lsv_aliases(self):
        """Test .list and .lines aliases."""
        data = ['one', 'two', 'three']

        # .list extension
        encoded = codecs.encode_by_extension('data.list', data)
        decoded = codecs.decode_by_extension('data.list', encoded)
        assert decoded == data

        # .lines extension
        encoded = codecs.encode_by_extension('data.lines', data)
        decoded = codecs.decode_by_extension('data.lines', encoded)
        assert decoded == data


class TestCSVCodec:
    """Test CSV encoding/decoding."""

    def test_csv_dict_list_roundtrip(self):
        """Test CSV with list of dicts."""
        data = [
            {'name': 'Alice', 'age': '30'},
            {'name': 'Bob', 'age': '25'},
        ]
        encoded = codecs.encode_by_extension('data.csv', data)
        decoded = codecs.decode_by_extension('data.csv', encoded)
        # CSV decoder returns all values as strings
        assert decoded == data

    def test_csv_empty(self):
        """Test CSV with empty data."""
        encoded = codecs.encode_by_extension('data.csv', [])
        assert encoded == b''


class TestTSVCodec:
    """Test TSV encoding/decoding."""

    def test_tsv_roundtrip(self):
        """Test TSV encode -> decode roundtrip."""
        data = [
            {'col1': 'value1', 'col2': 'value2'},
            {'col1': 'value3', 'col2': 'value4'},
        ]
        encoded = codecs.encode_by_extension('data.tsv', data)
        decoded = codecs.decode_by_extension('data.tsv', encoded)
        assert decoded == data

    def test_tsv_tab_alias(self):
        """Test .tab extension alias."""
        data = [{'a': '1', 'b': '2'}]
        encoded = codecs.encode_by_extension('data.tab', data)
        decoded = codecs.decode_by_extension('data.tab', encoded)
        assert decoded == data


class TestINICodec:
    """Test INI/CFG encoding/decoding."""

    def test_ini_roundtrip(self):
        """Test INI encode -> decode roundtrip."""
        data = {
            'section1': {'key1': 'value1', 'key2': 'value2'},
            'section2': {'key3': 'value3'},
        }
        encoded = codecs.encode_by_extension('config.ini', data)
        decoded = codecs.decode_by_extension('config.ini', encoded)
        assert decoded == data

    def test_cfg_alias(self):
        """Test .cfg extension."""
        data = {'database': {'host': 'localhost', 'port': '5432'}}
        encoded = codecs.encode_by_extension('config.cfg', data)
        decoded = codecs.decode_by_extension('config.cfg', encoded)
        assert decoded == data

    def test_conf_alias(self):
        """Test .conf extension."""
        data = {'settings': {'debug': 'true'}}
        encoded = codecs.encode_by_extension('app.conf', data)
        decoded = codecs.decode_by_extension('app.conf', encoded)
        assert decoded == data


class TestXMLCodec:
    """Test XML encoding/decoding (if available)."""

    def test_xml_roundtrip(self):
        """Test XML encode -> decode roundtrip."""
        if not codecs.is_extension_registered('.xml'):
            pytest.skip("XML codec not available")

        data = {'config': {'setting1': 'value1', 'setting2': 'value2'}}
        encoded = codecs.encode_by_extension('config.xml', data)
        decoded = codecs.decode_by_extension('config.xml', encoded)
        # Basic structure should match
        assert 'config' in decoded


class TestENVCodec:
    """Test .env file encoding/decoding."""

    def test_env_roundtrip(self):
        """Test .env encode -> decode roundtrip."""
        data = {
            'DATABASE_URL': 'postgresql://localhost/db',
            'API_KEY': 'secret123',
            'DEBUG': 'true',
        }
        encoded = codecs.encode_by_extension('.env', data)
        decoded = codecs.decode_by_extension('.env', encoded)
        assert decoded == data

    def test_env_with_spaces(self):
        """Test .env with values containing spaces."""
        data = {'MESSAGE': 'Hello World', 'PATH': '/usr/bin:/usr/local/bin'}
        encoded = codecs.encode_by_extension('.env', data)
        decoded = codecs.decode_by_extension('.env', encoded)
        # Values should be preserved (quotes may be added/removed)
        assert 'MESSAGE' in decoded
        assert 'PATH' in decoded


class TestPropertiesCodec:
    """Test .properties file encoding/decoding."""

    def test_properties_roundtrip(self):
        """Test .properties encode -> decode roundtrip."""
        data = {
            'app.name': 'MyApp',
            'app.version': '1.0.0',
            'server.port': '8080',
        }
        encoded = codecs.encode_by_extension('config.properties', data)
        decoded = codecs.decode_by_extension('config.properties', encoded)
        assert decoded == data


class TestCustomCodecRegistration:
    """Test custom codec registration."""

    def test_register_codec_function(self):
        """Test registering codec via function."""

        def custom_encoder(obj):
            return f"CUSTOM:{obj}".encode()

        def custom_decoder(data):
            return data.decode().replace('CUSTOM:', '')

        codecs.register_codec(
            '.custom',
            encoder=custom_encoder,
            decoder=custom_decoder,
            overwrite=True,
        )

        data = "test data"
        encoded = codecs.encode_by_extension('file.custom', data)
        decoded = codecs.decode_by_extension('file.custom', encoded)
        assert decoded == data

        # Cleanup
        del codecs.EXTENSION_TO_ENCODER['.custom']
        del codecs.EXTENSION_TO_DECODER['.custom']

    def test_register_codec_decorator(self):
        """Test registering codec via decorators."""

        @codecs.register_encoder('.test')
        def encode_test(obj):
            return f"TEST:{obj}".encode()

        @codecs.register_decoder('.test')
        def decode_test(data):
            return data.decode().replace('TEST:', '')

        data = "hello"
        encoded = codecs.encode_by_extension('data.test', data)
        decoded = codecs.decode_by_extension('data.test', encoded)
        assert decoded == data

        # Cleanup
        del codecs.EXTENSION_TO_ENCODER['.test']
        del codecs.EXTENSION_TO_DECODER['.test']

    def test_register_duplicate_raises_error(self):
        """Test that registering duplicate codec raises error."""
        # JSON is already registered
        with pytest.raises(ValueError, match="already registered"):
            codecs.register_codec('.json', encoder=lambda x: b'')

    def test_register_with_overwrite(self):
        """Test that overwrite=True allows replacing codec."""

        def new_encoder(obj):
            return b'NEW'

        # This should not raise
        codecs.register_codec(
            '.json',
            encoder=new_encoder,
            overwrite=True,
        )

        # Restore original
        codecs.register_codec(
            '.json',
            encoder=lambda obj: codecs.json.dumps(
                obj, indent=2, ensure_ascii=False
            ).encode('utf-8'),
            overwrite=True,
        )


class TestErrorHandling:
    """Test error handling."""

    def test_unknown_extension_encoder(self):
        """Test encoding with unknown extension raises error."""
        with pytest.raises(ValueError, match="No encoder registered"):
            codecs.encode_by_extension('file.unknown', {})

    def test_unknown_extension_decoder(self):
        """Test decoding with unknown extension raises error."""
        with pytest.raises(ValueError, match="No decoder registered"):
            codecs.decode_by_extension('file.unknown', b'')


class TestConditionalCodecs:
    """Test conditionally available codecs."""

    def test_toml_if_available(self):
        """Test TOML codec if available."""
        if not codecs.is_extension_registered('.toml'):
            pytest.skip("TOML codec not available")

        data = {'table': {'key': 'value', 'number': 42}}

        has_encoder = '.toml' in codecs.EXTENSION_TO_ENCODER
        has_decoder = '.toml' in codecs.EXTENSION_TO_DECODER

        if has_encoder and has_decoder:
            encoded = codecs.encode_by_extension('config.toml', data)
            decoded = codecs.decode_by_extension('config.toml', encoded)
            assert decoded == data
        elif has_decoder:
            # tomllib (3.11+) or tomli available for reading, but no tomli_w for writing
            toml_bytes = b'[table]\nkey = "value"\nnumber = 42\n'
            decoded = codecs.decode_by_extension('config.toml', toml_bytes)
            assert decoded == data

    def test_yaml_if_available(self):
        """Test YAML codec if available."""
        if not codecs.is_extension_registered('.yaml'):
            pytest.skip("YAML codec not available")

        data = {'key': 'value', 'list': [1, 2, 3], 'nested': {'a': 'b'}}
        encoded = codecs.encode_by_extension('config.yaml', data)
        decoded = codecs.decode_by_extension('config.yaml', encoded)
        assert decoded == data

    def test_yml_alias_if_available(self):
        """Test .yml alias if YAML available."""
        if not codecs.is_extension_registered('.yml'):
            pytest.skip("YAML codec not available")

        data = {'test': 'data'}
        encoded = codecs.encode_by_extension('config.yml', data)
        decoded = codecs.decode_by_extension('config.yml', encoded)
        assert decoded == data

    def test_json5_if_available(self):
        """Test JSON5 codec if available."""
        if not codecs.is_extension_registered('.json5'):
            pytest.skip("JSON5 codec not available")

        data = {'key': 'value', 'number': 123}
        encoded = codecs.encode_by_extension('config.json5', data)
        decoded = codecs.decode_by_extension('config.json5', encoded)
        assert decoded == data
config2py/util.py
CHANGED
@@ -470,7 +470,7 @@ def get_app_folder(
     app_data_folder_does_not_exist = not os.path.isdir(app_data_path)
 
     if app_data_folder_does_not_exist and ensure_exists:
-        setup_callback(app_data_path
+        setup_callback(app_data_path)
     return app_data_path
 
 
{config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: config2py
-Version: 0.1.45
+Version: 0.1.46
 Summary: Simplified reading and writing configurations from various sources and formats
 Project-URL: Homepage, https://github.com/i2mint/config2py
 License: Apache-2.0
@@ -9,6 +9,20 @@ Requires-Python: >=3.10
 Requires-Dist: dol
 Requires-Dist: i2
 Requires-Dist: importlib-resources; python_version < '3.9'
+Provides-Extra: all
+Requires-Dist: jproperties>=2.1.0; extra == 'all'
+Requires-Dist: json5>=0.9.0; extra == 'all'
+Requires-Dist: python-dotenv>=1.0.0; extra == 'all'
+Requires-Dist: pyyaml>=6.0; extra == 'all'
+Requires-Dist: tomli-w>=1.0.0; extra == 'all'
+Requires-Dist: tomli>=2.0.0; (python_version < '3.11') and extra == 'all'
+Provides-Extra: all-codecs
+Requires-Dist: jproperties>=2.1.0; extra == 'all-codecs'
+Requires-Dist: json5>=0.9.0; extra == 'all-codecs'
+Requires-Dist: python-dotenv>=1.0.0; extra == 'all-codecs'
+Requires-Dist: pyyaml>=6.0; extra == 'all-codecs'
+Requires-Dist: tomli-w>=1.0.0; extra == 'all-codecs'
+Requires-Dist: tomli>=2.0.0; (python_version < '3.11') and extra == 'all-codecs'
 Provides-Extra: dev
 Requires-Dist: pytest-cov>=4.0; extra == 'dev'
 Requires-Dist: pytest>=7.0; extra == 'dev'
@@ -16,6 +30,17 @@ Requires-Dist: ruff>=0.1.0; extra == 'dev'
 Provides-Extra: docs
 Requires-Dist: sphinx-rtd-theme>=1.0; extra == 'docs'
 Requires-Dist: sphinx>=6.0; extra == 'docs'
+Provides-Extra: env
+Requires-Dist: python-dotenv>=1.0.0; extra == 'env'
+Provides-Extra: json5
+Requires-Dist: json5>=0.9.0; extra == 'json5'
+Provides-Extra: properties
+Requires-Dist: jproperties>=2.1.0; extra == 'properties'
+Provides-Extra: toml
+Requires-Dist: tomli-w>=1.0.0; extra == 'toml'
+Requires-Dist: tomli>=2.0.0; (python_version < '3.11') and extra == 'toml'
+Provides-Extra: yaml
+Requires-Dist: pyyaml>=6.0; extra == 'yaml'
 Description-Content-Type: text/markdown
 
 # config2py
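
The new extras above (yaml, toml, json5, env, properties, plus the umbrella all and all-codecs) correspond to the conditional imports in codecs.py: when an optional package is missing, the matching codec is simply not registered, or a simpler built-in fallback is used for .env and .properties. A minimal sketch (not part of the package) of how downstream code can adapt at runtime, using only the introspection helpers shown earlier; the filename and data are hypothetical:

    from config2py import codecs

    # Pick a format based on what the current environment can actually handle:
    # '.yaml' is only registered when PyYAML is importable (the 'yaml' extra),
    # while '.json' is always there because it is stdlib-backed.
    preferred = ".yaml" if codecs.is_extension_registered(".yaml") else ".json"
    filename = f"settings{preferred}"  # hypothetical file name

    data = {"host": "localhost", "port": 5432}
    blob = codecs.encode_by_extension(filename, data)
    assert codecs.decode_by_extension(filename, blob) == data

    # get_codec_info also reports which optional package a codec came from, e.g.
    # {'extension': '.yaml', 'has_encoder': True, 'has_decoder': True, 'dependency': 'pyyaml'}
    print(codecs.get_codec_info(preferred))
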
{config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/RECORD
CHANGED
@@ -1,16 +1,18 @@
-config2py/__init__.py,sha256
+config2py/__init__.py,sha256=XR2UBVjQaraafNr9qynIH41O7OeFtQYD07zZo5Og2i0,1044
 config2py/base.py,sha256=8qVfdQzfX7OABFUU_drtgQXC6gsAegRctYVyZHjidkY,15883
+config2py/codecs.py,sha256=LFO_rDYwgJ6S1dfZQcNk4mDu5CHXkc01p-cxW69TVQo,22655
 config2py/errors.py,sha256=QdwGsoJhv6LHDHp-_yyz4oUg1Fgu4S-S7O2nuA0a5cw,203
 config2py/s_configparser.py,sha256=yI3u_CbhfClHQ47_JLKPOvOXkAYYFKxLzv8Q9NuO030,15851
 config2py/sync_store.py,sha256=V1Vkhw1ofGF_MppGlpL1B8BUWHIhPapos7-dzsPB3N8,13954
 config2py/tools.py,sha256=kGy7iktvWyUof5yPt2qA7DMowipRhSKXam4Mh4TN2Ck,9207
-config2py/util.py,sha256=
+config2py/util.py,sha256=2R9pCwi2_5nPcslq_OFyonhZLbWIMCUBuBeA3ZbGhZU,23288
 config2py/scrap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 config2py/tests/__init__.py,sha256=VI7rHivkD2Vh3z9tmEdDsq-KKLV2_YkJyj9IeW-FyLE,330
+config2py/tests/test_codecs.py,sha256=uvz_DL881FRh5Oo_JnPbHKZYhlNuIdjMfKpa_xGvXdA,13392
 config2py/tests/test_sync_store.py,sha256=StErw7UeIE01bROkppdyQYKWRgD4Ew5TQ_ZBw4R2wIk,7916
 config2py/tests/test_tools.py,sha256=sdiBNTavuzxW2AsqBRTO9U21iWig5DEyV38r6lmaZak,3728
 config2py/tests/utils_for_testing.py,sha256=-CB1e0l71yknCtKEn5hU3luaTRqrboZwRS4pQo7cGTU,212
-config2py-0.1.
-config2py-0.1.
-config2py-0.1.
-config2py-0.1.
+config2py-0.1.46.dist-info/METADATA,sha256=6Bs9Ot8I9XdTXY86XDCOfcndbQnnwUQC6R-oiJ0ASew,18130
+config2py-0.1.46.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+config2py-0.1.46.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+config2py-0.1.46.dist-info/RECORD,,
{config2py-0.1.45.dist-info → config2py-0.1.46.dist-info}/licenses/LICENSE
File without changes