config2py 0.1.42__tar.gz → 0.1.43__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {config2py-0.1.42/config2py.egg-info → config2py-0.1.43}/PKG-INFO +80 -6
- config2py-0.1.42/PKG-INFO → config2py-0.1.43/README.md +74 -14
- {config2py-0.1.42 → config2py-0.1.43}/config2py/__init__.py +1 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/base.py +6 -8
- {config2py-0.1.42 → config2py-0.1.43}/config2py/s_configparser.py +6 -5
- config2py-0.1.43/config2py/sync_store.py +380 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/tests/__init__.py +2 -0
- config2py-0.1.43/config2py/tests/test_sync_store.py +316 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/tests/utils_for_testing.py +2 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/tools.py +1 -1
- {config2py-0.1.42 → config2py-0.1.43}/config2py/util.py +8 -8
- config2py-0.1.42/README.md → config2py-0.1.43/config2py.egg-info/PKG-INFO +88 -4
- {config2py-0.1.42 → config2py-0.1.43}/config2py.egg-info/SOURCES.txt +2 -0
- {config2py-0.1.42 → config2py-0.1.43}/setup.cfg +1 -1
- {config2py-0.1.42 → config2py-0.1.43}/LICENSE +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/errors.py +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/scrap/__init__.py +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py/tests/test_tools.py +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py.egg-info/dependency_links.txt +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py.egg-info/not-zip-safe +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py.egg-info/requires.txt +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/config2py.egg-info/top_level.txt +0 -0
- {config2py-0.1.42 → config2py-0.1.43}/setup.py +0 -0
|
@@ -1,12 +1,16 @@
|
|
|
1
|
-
Metadata-Version: 2.
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
2
|
Name: config2py
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.43
|
|
4
4
|
Summary: Simplified reading and writing configurations from various sources and formats
|
|
5
5
|
Home-page: https://github.com/i2mint/config2py
|
|
6
6
|
License: apache-2.0
|
|
7
7
|
Platform: any
|
|
8
8
|
Description-Content-Type: text/markdown
|
|
9
9
|
License-File: LICENSE
|
|
10
|
+
Requires-Dist: dol
|
|
11
|
+
Requires-Dist: i2
|
|
12
|
+
Requires-Dist: importlib_resources; python_version < "3.9"
|
|
13
|
+
Dynamic: license-file
|
|
10
14
|
|
|
11
15
|
# config2py
|
|
12
16
|
|
|
@@ -207,6 +211,80 @@ It will return the value that the user entered last time, without prompting the
|
|
|
207
211
|
user again.
|
|
208
212
|
|
|
209
213
|
|
|
214
|
+
## SyncStore: Auto-Syncing Key-Value Stores
|
|
215
|
+
|
|
216
|
+
### Overview
|
|
217
|
+
|
|
218
|
+
`SyncStore` provides MutableMapping interfaces that automatically persist changes to backing storage. Changes sync immediately by default, or can be deferred using a context manager for efficient batch operations.
|
|
219
|
+
|
|
220
|
+
### Basic Usage
|
|
221
|
+
|
|
222
|
+
```python
|
|
223
|
+
from config2py.sync_store import FileStore, JsonStore
|
|
224
|
+
|
|
225
|
+
# Auto-detected from .json extension
|
|
226
|
+
config = FileStore('config.json')
|
|
227
|
+
config['api_key'] = 'secret' # Syncs immediately
|
|
228
|
+
|
|
229
|
+
# Batch operations (deferred sync)
|
|
230
|
+
with config:
|
|
231
|
+
config['a'] = 1
|
|
232
|
+
config['b'] = 2
|
|
233
|
+
config['c'] = 3
|
|
234
|
+
# Syncs once on exit
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
### Nested Sections
|
|
238
|
+
|
|
239
|
+
```python
|
|
240
|
+
# Work with specific section via key_path
|
|
241
|
+
db_config = FileStore('config.json', key_path='database')
|
|
242
|
+
db_config['host'] = 'localhost' # Only affects database section
|
|
243
|
+
|
|
244
|
+
# Dotted notation for deep nesting
|
|
245
|
+
items = FileStore('config.json', key_path='app.settings.items')
|
|
246
|
+
items['item1'] = 'value'
|
|
247
|
+
```
|
|
248
|
+
|
|
249
|
+
### Supported Formats
|
|
250
|
+
|
|
251
|
+
Auto-detected by extension:
|
|
252
|
+
- `.json` - JSON (stdlib)
|
|
253
|
+
- `.ini`, `.cfg` - INI files (stdlib)
|
|
254
|
+
- `.yaml`, `.yml` - YAML (if PyYAML installed)
|
|
255
|
+
- `.toml` - TOML (if tomli/tomllib installed)
|
|
256
|
+
|
|
257
|
+
Register custom formats:
|
|
258
|
+
```python
|
|
259
|
+
from sync_store import register_extension
|
|
260
|
+
|
|
261
|
+
register_extension('.custom', my_loader, my_dumper)
|
|
262
|
+
store = FileStore('data.custom')
|
|
263
|
+
```
|
|
264
|
+
|
|
265
|
+
### Custom Backing Storage
|
|
266
|
+
|
|
267
|
+
```python
|
|
268
|
+
from config2py.sync_store import SyncStore
|
|
269
|
+
|
|
270
|
+
# Any backing storage via loader/dumper
|
|
271
|
+
def my_loader():
|
|
272
|
+
return fetch_from_database()
|
|
273
|
+
|
|
274
|
+
def my_dumper(data):
|
|
275
|
+
save_to_database(data)
|
|
276
|
+
|
|
277
|
+
store = SyncStore(my_loader, my_dumper)
|
|
278
|
+
store['key'] = 'value' # Calls my_dumper
|
|
279
|
+
```
|
|
280
|
+
|
|
281
|
+
### Key Classes
|
|
282
|
+
|
|
283
|
+
- **`SyncStore`** - Base class with loader/dumper functions
|
|
284
|
+
- **`FileStore`** - File-based with extension detection and key_path
|
|
285
|
+
- **`JsonStore`** - Explicit JSON with sensible defaults
|
|
286
|
+
|
|
287
|
+
|
|
210
288
|
# A few notable tools you can import from config2py
|
|
211
289
|
|
|
212
290
|
* `get_config`: Get a config value from a list of sources. See more below.
|
|
@@ -337,7 +415,3 @@ s['SOME_KEY']
|
|
|
337
415
|
|
|
338
416
|
More on that another day...
|
|
339
417
|
|
|
340
|
-
|
|
341
|
-
```python
|
|
342
|
-
|
|
343
|
-
```
|
|
@@ -1,13 +1,3 @@
|
|
|
1
|
-
Metadata-Version: 2.1
|
|
2
|
-
Name: config2py
|
|
3
|
-
Version: 0.1.42
|
|
4
|
-
Summary: Simplified reading and writing configurations from various sources and formats
|
|
5
|
-
Home-page: https://github.com/i2mint/config2py
|
|
6
|
-
License: apache-2.0
|
|
7
|
-
Platform: any
|
|
8
|
-
Description-Content-Type: text/markdown
|
|
9
|
-
License-File: LICENSE
|
|
10
|
-
|
|
11
1
|
# config2py
|
|
12
2
|
|
|
13
3
|
Simplified reading and writing configurations from various sources and formats.
|
|
@@ -207,6 +197,80 @@ It will return the value that the user entered last time, without prompting the
|
|
|
207
197
|
user again.
|
|
208
198
|
|
|
209
199
|
|
|
200
|
+
## SyncStore: Auto-Syncing Key-Value Stores
|
|
201
|
+
|
|
202
|
+
### Overview
|
|
203
|
+
|
|
204
|
+
`SyncStore` provides MutableMapping interfaces that automatically persist changes to backing storage. Changes sync immediately by default, or can be deferred using a context manager for efficient batch operations.
|
|
205
|
+
|
|
206
|
+
### Basic Usage
|
|
207
|
+
|
|
208
|
+
```python
|
|
209
|
+
from config2py.sync_store import FileStore, JsonStore
|
|
210
|
+
|
|
211
|
+
# Auto-detected from .json extension
|
|
212
|
+
config = FileStore('config.json')
|
|
213
|
+
config['api_key'] = 'secret' # Syncs immediately
|
|
214
|
+
|
|
215
|
+
# Batch operations (deferred sync)
|
|
216
|
+
with config:
|
|
217
|
+
config['a'] = 1
|
|
218
|
+
config['b'] = 2
|
|
219
|
+
config['c'] = 3
|
|
220
|
+
# Syncs once on exit
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
### Nested Sections
|
|
224
|
+
|
|
225
|
+
```python
|
|
226
|
+
# Work with specific section via key_path
|
|
227
|
+
db_config = FileStore('config.json', key_path='database')
|
|
228
|
+
db_config['host'] = 'localhost' # Only affects database section
|
|
229
|
+
|
|
230
|
+
# Dotted notation for deep nesting
|
|
231
|
+
items = FileStore('config.json', key_path='app.settings.items')
|
|
232
|
+
items['item1'] = 'value'
|
|
233
|
+
```
|
|
234
|
+
|
|
235
|
+
### Supported Formats
|
|
236
|
+
|
|
237
|
+
Auto-detected by extension:
|
|
238
|
+
- `.json` - JSON (stdlib)
|
|
239
|
+
- `.ini`, `.cfg` - INI files (stdlib)
|
|
240
|
+
- `.yaml`, `.yml` - YAML (if PyYAML installed)
|
|
241
|
+
- `.toml` - TOML (if tomli/tomllib installed)
|
|
242
|
+
|
|
243
|
+
Register custom formats:
|
|
244
|
+
```python
|
|
245
|
+
from sync_store import register_extension
|
|
246
|
+
|
|
247
|
+
register_extension('.custom', my_loader, my_dumper)
|
|
248
|
+
store = FileStore('data.custom')
|
|
249
|
+
```
|
|
250
|
+
|
|
251
|
+
### Custom Backing Storage
|
|
252
|
+
|
|
253
|
+
```python
|
|
254
|
+
from config2py.sync_store import SyncStore
|
|
255
|
+
|
|
256
|
+
# Any backing storage via loader/dumper
|
|
257
|
+
def my_loader():
|
|
258
|
+
return fetch_from_database()
|
|
259
|
+
|
|
260
|
+
def my_dumper(data):
|
|
261
|
+
save_to_database(data)
|
|
262
|
+
|
|
263
|
+
store = SyncStore(my_loader, my_dumper)
|
|
264
|
+
store['key'] = 'value' # Calls my_dumper
|
|
265
|
+
```
|
|
266
|
+
|
|
267
|
+
### Key Classes
|
|
268
|
+
|
|
269
|
+
- **`SyncStore`** - Base class with loader/dumper functions
|
|
270
|
+
- **`FileStore`** - File-based with extension detection and key_path
|
|
271
|
+
- **`JsonStore`** - Explicit JSON with sensible defaults
|
|
272
|
+
|
|
273
|
+
|
|
210
274
|
# A few notable tools you can import from config2py
|
|
211
275
|
|
|
212
276
|
* `get_config`: Get a config value from a list of sources. See more below.
|
|
@@ -337,7 +401,3 @@ s['SOME_KEY']
|
|
|
337
401
|
|
|
338
402
|
More on that another day...
|
|
339
403
|
|
|
340
|
-
|
|
341
|
-
```python
|
|
342
|
-
|
|
343
|
-
```
|
|
@@ -4,26 +4,24 @@ Base for getting configs from various sources and formats
|
|
|
4
4
|
|
|
5
5
|
from collections import ChainMap
|
|
6
6
|
from typing import (
|
|
7
|
-
Callable,
|
|
8
7
|
Type,
|
|
9
8
|
Tuple,
|
|
10
9
|
KT,
|
|
11
10
|
VT,
|
|
12
11
|
Any,
|
|
13
|
-
Iterable,
|
|
14
12
|
Protocol,
|
|
15
13
|
Union,
|
|
16
14
|
runtime_checkable,
|
|
17
15
|
Optional,
|
|
18
|
-
MutableMapping,
|
|
19
16
|
)
|
|
17
|
+
from collections.abc import Callable, Iterable, MutableMapping
|
|
20
18
|
from dataclasses import dataclass
|
|
21
19
|
from functools import lru_cache, partial
|
|
22
20
|
|
|
23
21
|
from config2py.util import always_true, ask_user_for_input, no_default, not_found
|
|
24
22
|
from config2py.errors import ConfigNotFound
|
|
25
23
|
|
|
26
|
-
Exceptions =
|
|
24
|
+
Exceptions = tuple[type[Exception], ...]
|
|
27
25
|
|
|
28
26
|
|
|
29
27
|
@runtime_checkable
|
|
@@ -104,8 +102,8 @@ def get_config(
|
|
|
104
102
|
sources: Sources = None,
|
|
105
103
|
*,
|
|
106
104
|
default: VT = no_default,
|
|
107
|
-
egress:
|
|
108
|
-
val_is_valid:
|
|
105
|
+
egress: GetConfigEgress | None = None,
|
|
106
|
+
val_is_valid: Callable[[VT], bool] | None = always_true,
|
|
109
107
|
config_not_found_exceptions: Exceptions = (Exception,),
|
|
110
108
|
):
|
|
111
109
|
"""Get a config value from a list of sources
|
|
@@ -374,7 +372,7 @@ def ask_user_for_key(
|
|
|
374
372
|
save_to: SaveTo = None,
|
|
375
373
|
save_condition=is_not_empty,
|
|
376
374
|
user_asker=ask_user_for_input,
|
|
377
|
-
egress:
|
|
375
|
+
egress: Callable | None = None,
|
|
378
376
|
):
|
|
379
377
|
if key is None:
|
|
380
378
|
return partial(
|
|
@@ -399,7 +397,7 @@ def user_gettable(
|
|
|
399
397
|
save_to: SaveTo = None,
|
|
400
398
|
*,
|
|
401
399
|
prompt_template="Enter a value for {}: ",
|
|
402
|
-
egress:
|
|
400
|
+
egress: Callable | None = None,
|
|
403
401
|
user_asker=ask_user_for_input,
|
|
404
402
|
val_is_valid: Callable[[VT], bool] = is_not_empty,
|
|
405
403
|
config_not_found_exceptions: Exceptions = (Exception,),
|
|
@@ -299,11 +299,11 @@ class ConfigStore(ConfigParserStore):
|
|
|
299
299
|
|
|
300
300
|
@persist_after_operation
|
|
301
301
|
def __setitem__(self, k, v):
|
|
302
|
-
super(
|
|
302
|
+
super().__setitem__(k, v)
|
|
303
303
|
|
|
304
304
|
@persist_after_operation
|
|
305
305
|
def __delitem__(self, k):
|
|
306
|
-
super(
|
|
306
|
+
super().__delitem__(k)
|
|
307
307
|
|
|
308
308
|
# __setitem__ = super_and_persist(ConfigParser, '__setitem__')
|
|
309
309
|
# __delitem__ = super_and_persist(ConfigParser, '__delitem__')
|
|
@@ -388,13 +388,14 @@ class ConfigReader(ConfigStore):
|
|
|
388
388
|
# return super()._obj_of_data(data)
|
|
389
389
|
|
|
390
390
|
|
|
391
|
-
from typing import
|
|
391
|
+
from typing import Union
|
|
392
|
+
from collections.abc import Mapping, Iterable, Generator
|
|
392
393
|
import re
|
|
393
394
|
|
|
394
395
|
|
|
395
396
|
# TODO: postprocess_ini_section_items and preprocess_ini_section_items: Add comma separated possibility?
|
|
396
397
|
# TODO: Find out if configparse has an option to do this processing alreadys
|
|
397
|
-
def postprocess_ini_section_items(items:
|
|
398
|
+
def postprocess_ini_section_items(items: Mapping | Iterable) -> Generator:
|
|
398
399
|
r"""Transform newline-separated string values into actual list of strings (assuming that intent)
|
|
399
400
|
|
|
400
401
|
>>> section_from_ini = {
|
|
@@ -417,7 +418,7 @@ def postprocess_ini_section_items(items: Union[Mapping, Iterable]) -> Generator:
|
|
|
417
418
|
|
|
418
419
|
|
|
419
420
|
# TODO: Find out if configparse has an option to do this processing alreadys
|
|
420
|
-
def preprocess_ini_section_items(items:
|
|
421
|
+
def preprocess_ini_section_items(items: Mapping | Iterable) -> Generator:
|
|
421
422
|
"""Transform list values into newline-separated strings, in view of writing the value to a ini formatted section
|
|
422
423
|
>>> section = {
|
|
423
424
|
... 'name': 'aspyre',
|
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Synchronized key-value stores with automatic persistence.
|
|
3
|
+
|
|
4
|
+
Provides MutableMapping interfaces that automatically sync changes to their backing
|
|
5
|
+
storage. Supports deferred sync via context manager for batch operations.
|
|
6
|
+
|
|
7
|
+
>>> import tempfile
|
|
8
|
+
>>> import json
|
|
9
|
+
>>>
|
|
10
|
+
>>> # Basic usage
|
|
11
|
+
>>> with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
|
|
12
|
+
... _ = f.write('{"key": "value"}')
|
|
13
|
+
... temp_file = f.name
|
|
14
|
+
>>>
|
|
15
|
+
>>> store = FileStore(temp_file)
|
|
16
|
+
>>> store['new_key'] = 'new_value' # Auto-syncs immediately
|
|
17
|
+
>>> assert 'new_key' in store
|
|
18
|
+
>>>
|
|
19
|
+
>>> # Batch operations with context manager
|
|
20
|
+
>>> with store:
|
|
21
|
+
... store['a'] = 1
|
|
22
|
+
... store['b'] = 2
|
|
23
|
+
... store['c'] = 3
|
|
24
|
+
... # No sync until context exit
|
|
25
|
+
>>>
|
|
26
|
+
>>> import os
|
|
27
|
+
>>> os.unlink(temp_file)
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
from typing import Callable, Any, Iterator, Union, Tuple, Optional, Dict
|
|
31
|
+
from collections.abc import MutableMapping
|
|
32
|
+
from pathlib import Path
|
|
33
|
+
import json
|
|
34
|
+
from functools import reduce
|
|
35
|
+
|
|
36
|
+
__all__ = [
|
|
37
|
+
"SyncStore",
|
|
38
|
+
"FileStore",
|
|
39
|
+
"JsonStore",
|
|
40
|
+
"register_extension",
|
|
41
|
+
"get_format_handlers",
|
|
42
|
+
]
|
|
43
|
+
|
|
44
|
+
# Note: Independent module. No imports from config2py, dol etc.
|
|
45
|
+
# TODO: Do we want to use more stuff from config2py, dol, etc.?
|
|
46
|
+
|
|
47
|
+
# Type aliases
|
|
48
|
+
KeyPath = Union[str, Tuple[str, ...], None]
|
|
49
|
+
Loader = Callable[[], dict]
|
|
50
|
+
Dumper = Callable[[dict], None]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# --------------------------------------------------------------------------------------
|
|
54
|
+
# Extension Registry
|
|
55
|
+
|
|
56
|
+
_extension_registry: Dict[str, Tuple[Callable, Callable]] = {}
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def register_extension(ext: str, loader: Callable, dumper: Callable) -> None:
|
|
60
|
+
"""Register loader/dumper for a file extension."""
|
|
61
|
+
_extension_registry[ext.lower()] = (loader, dumper)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def get_format_handlers(
|
|
65
|
+
filepath: Union[str, Path],
|
|
66
|
+
) -> Optional[Tuple[Callable, Callable]]:
|
|
67
|
+
"""Get loader/dumper for a file based on extension."""
|
|
68
|
+
ext = Path(filepath).suffix.lower()
|
|
69
|
+
return _extension_registry.get(ext)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
# Register standard formats
|
|
73
|
+
register_extension(".json", json.loads, json.dumps)
|
|
74
|
+
|
|
75
|
+
# TODO: Use register-if-available pattern (with context managers.. implemented somewhere...)
|
|
76
|
+
|
|
77
|
+
# ConfigParser for .ini and .cfg
|
|
78
|
+
try:
|
|
79
|
+
from configparser import ConfigParser
|
|
80
|
+
import io
|
|
81
|
+
|
|
82
|
+
def _ini_loader(content: str) -> dict:
|
|
83
|
+
parser = ConfigParser()
|
|
84
|
+
parser.read_string(content)
|
|
85
|
+
return {section: dict(parser[section]) for section in parser.sections()}
|
|
86
|
+
|
|
87
|
+
def _ini_dumper(data: dict) -> str:
|
|
88
|
+
parser = ConfigParser()
|
|
89
|
+
for section, values in data.items():
|
|
90
|
+
parser[section] = values
|
|
91
|
+
output = io.StringIO()
|
|
92
|
+
parser.write(output)
|
|
93
|
+
return output.getvalue()
|
|
94
|
+
|
|
95
|
+
register_extension(".ini", _ini_loader, _ini_dumper)
|
|
96
|
+
register_extension(".cfg", _ini_loader, _ini_dumper)
|
|
97
|
+
except ImportError:
|
|
98
|
+
pass
|
|
99
|
+
|
|
100
|
+
# YAML support (optional)
|
|
101
|
+
try:
|
|
102
|
+
import yaml
|
|
103
|
+
|
|
104
|
+
register_extension(".yaml", yaml.safe_load, yaml.dump)
|
|
105
|
+
register_extension(".yml", yaml.safe_load, yaml.dump)
|
|
106
|
+
except ImportError:
|
|
107
|
+
pass
|
|
108
|
+
|
|
109
|
+
# TOML support (optional)
|
|
110
|
+
try:
|
|
111
|
+
import tomllib # Python 3.11+
|
|
112
|
+
import tomli_w
|
|
113
|
+
|
|
114
|
+
register_extension(".toml", tomllib.loads, tomli_w.dumps)
|
|
115
|
+
except ImportError:
|
|
116
|
+
try:
|
|
117
|
+
import tomli
|
|
118
|
+
import tomli_w
|
|
119
|
+
|
|
120
|
+
register_extension(".toml", tomli.loads, tomli_w.dumps)
|
|
121
|
+
except ImportError:
|
|
122
|
+
pass
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
# --------------------------------------------------------------------------------------
|
|
126
|
+
# Helper functions for nested key paths
|
|
127
|
+
|
|
128
|
+
# TODO: Consider using dol.paths
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _normalize_key_path(key_path: KeyPath) -> Tuple[str, ...]:
|
|
132
|
+
"""Normalize key_path to tuple of strings."""
|
|
133
|
+
if key_path is None or key_path == ():
|
|
134
|
+
return ()
|
|
135
|
+
if isinstance(key_path, str):
|
|
136
|
+
return tuple(key_path.split(".")) if "." in key_path else (key_path,)
|
|
137
|
+
return tuple(key_path)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def _get_nested(data: dict, path: Tuple[str, ...]) -> Any:
|
|
141
|
+
"""Get value from nested dict using path."""
|
|
142
|
+
if not path:
|
|
143
|
+
return data
|
|
144
|
+
return reduce(lambda d, key: d[key], path, data)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def _set_nested(data: dict, path: Tuple[str, ...], value: Any) -> dict:
|
|
148
|
+
"""Set value in nested dict, creating intermediate dicts as needed."""
|
|
149
|
+
if not path:
|
|
150
|
+
return value
|
|
151
|
+
|
|
152
|
+
result = data.copy() if isinstance(data, dict) else {}
|
|
153
|
+
current = result
|
|
154
|
+
|
|
155
|
+
for key in path[:-1]:
|
|
156
|
+
if key not in current or not isinstance(current[key], dict):
|
|
157
|
+
current[key] = {}
|
|
158
|
+
current = current[key]
|
|
159
|
+
|
|
160
|
+
current[path[-1]] = value
|
|
161
|
+
return result
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
# --------------------------------------------------------------------------------------
|
|
165
|
+
# Core Classes
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class SyncStore(MutableMapping):
|
|
169
|
+
"""
|
|
170
|
+
A MutableMapping that automatically syncs changes to backing storage.
|
|
171
|
+
|
|
172
|
+
Supports deferred sync via context manager for efficient batch operations.
|
|
173
|
+
|
|
174
|
+
Args:
|
|
175
|
+
loader: Function that returns the current data as a dict
|
|
176
|
+
dumper: Function that persists the data dict to storage
|
|
177
|
+
|
|
178
|
+
Example:
|
|
179
|
+
>>> def my_loader():
|
|
180
|
+
... return {'x': 1}
|
|
181
|
+
>>>
|
|
182
|
+
>>> data_holder = []
|
|
183
|
+
>>> def my_dumper(data):
|
|
184
|
+
... data_holder.clear()
|
|
185
|
+
... data_holder.append(data.copy())
|
|
186
|
+
>>>
|
|
187
|
+
>>> store = SyncStore(my_loader, my_dumper)
|
|
188
|
+
>>> store['y'] = 2 # Auto-syncs
|
|
189
|
+
>>> data_holder[0]
|
|
190
|
+
{'x': 1, 'y': 2}
|
|
191
|
+
>>>
|
|
192
|
+
>>> # Batch with context manager
|
|
193
|
+
>>> with store:
|
|
194
|
+
... store['a'] = 1
|
|
195
|
+
... store['b'] = 2
|
|
196
|
+
... # Not synced yet
|
|
197
|
+
>>> data_holder[0] # Now synced
|
|
198
|
+
{'x': 1, 'y': 2, 'a': 1, 'b': 2}
|
|
199
|
+
"""
|
|
200
|
+
|
|
201
|
+
def __init__(self, loader: Loader, dumper: Dumper):
|
|
202
|
+
self._loader = loader
|
|
203
|
+
self._dumper = dumper
|
|
204
|
+
self._data = None
|
|
205
|
+
self._auto_sync = True
|
|
206
|
+
self._needs_flush = False
|
|
207
|
+
self._load()
|
|
208
|
+
|
|
209
|
+
def _load(self):
|
|
210
|
+
"""Load data from backing storage."""
|
|
211
|
+
self._data = self._loader()
|
|
212
|
+
|
|
213
|
+
def _mark_dirty(self):
|
|
214
|
+
"""Mark data as changed and sync if auto_sync is enabled."""
|
|
215
|
+
self._needs_flush = True
|
|
216
|
+
if self._auto_sync:
|
|
217
|
+
self.flush()
|
|
218
|
+
|
|
219
|
+
def flush(self):
|
|
220
|
+
"""Sync data to backing storage if changes exist."""
|
|
221
|
+
if self._needs_flush:
|
|
222
|
+
self._dumper(self._data)
|
|
223
|
+
self._needs_flush = False
|
|
224
|
+
|
|
225
|
+
def __enter__(self):
|
|
226
|
+
"""Enter deferred sync mode."""
|
|
227
|
+
self._auto_sync = False
|
|
228
|
+
return self
|
|
229
|
+
|
|
230
|
+
def __exit__(self, *args):
|
|
231
|
+
"""Exit deferred sync mode and flush changes."""
|
|
232
|
+
self.flush()
|
|
233
|
+
self._auto_sync = True
|
|
234
|
+
|
|
235
|
+
def __getitem__(self, key):
|
|
236
|
+
return self._data[key]
|
|
237
|
+
|
|
238
|
+
def __setitem__(self, key, value):
|
|
239
|
+
self._data[key] = value
|
|
240
|
+
self._mark_dirty()
|
|
241
|
+
|
|
242
|
+
def __delitem__(self, key):
|
|
243
|
+
del self._data[key]
|
|
244
|
+
self._mark_dirty()
|
|
245
|
+
|
|
246
|
+
def __iter__(self):
|
|
247
|
+
return iter(self._data)
|
|
248
|
+
|
|
249
|
+
def __len__(self):
|
|
250
|
+
return len(self._data)
|
|
251
|
+
|
|
252
|
+
def __repr__(self):
|
|
253
|
+
return f"{self.__class__.__name__}({len(self._data)} items)"
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
class FileStore(SyncStore):
|
|
257
|
+
"""
|
|
258
|
+
A SyncStore backed by a file with automatic format detection.
|
|
259
|
+
|
|
260
|
+
Supports nested key paths for working with specific sections.
|
|
261
|
+
|
|
262
|
+
Args:
|
|
263
|
+
filepath: Path to file (supports ~ expansion)
|
|
264
|
+
key_path: Optional nested path to operate on
|
|
265
|
+
loader: Optional custom loader (auto-detected from extension if not provided)
|
|
266
|
+
dumper: Optional custom dumper (auto-detected from extension if not provided)
|
|
267
|
+
mode: File read mode ('r' for text, 'rb' for binary)
|
|
268
|
+
dump_kwargs: Additional kwargs for dumper
|
|
269
|
+
|
|
270
|
+
Example:
|
|
271
|
+
>>> import tempfile
|
|
272
|
+
>>> with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
|
|
273
|
+
... _ = f.write('{"section": {"key": "value"}}')
|
|
274
|
+
... temp_file = f.name
|
|
275
|
+
>>>
|
|
276
|
+
>>> # Work with nested section
|
|
277
|
+
>>> section = FileStore(temp_file, key_path='section')
|
|
278
|
+
>>> section['key']
|
|
279
|
+
'value'
|
|
280
|
+
>>> section['new'] = 'data'
|
|
281
|
+
>>>
|
|
282
|
+
>>> import os
|
|
283
|
+
>>> os.unlink(temp_file)
|
|
284
|
+
"""
|
|
285
|
+
|
|
286
|
+
def __init__(
|
|
287
|
+
self,
|
|
288
|
+
filepath: Union[str, Path],
|
|
289
|
+
*,
|
|
290
|
+
key_path: KeyPath = None,
|
|
291
|
+
loader: Optional[Callable[[str], dict]] = None,
|
|
292
|
+
dumper: Optional[Callable[[dict], str]] = None,
|
|
293
|
+
mode: str = "r",
|
|
294
|
+
dump_kwargs: Optional[dict] = None,
|
|
295
|
+
):
|
|
296
|
+
self.filepath = Path(filepath).expanduser()
|
|
297
|
+
self.key_path = _normalize_key_path(key_path)
|
|
298
|
+
self.mode = mode
|
|
299
|
+
self.dump_kwargs = dump_kwargs or {}
|
|
300
|
+
|
|
301
|
+
# Auto-detect format if not provided
|
|
302
|
+
if loader is None or dumper is None:
|
|
303
|
+
handlers = get_format_handlers(self.filepath)
|
|
304
|
+
if handlers is None:
|
|
305
|
+
raise ValueError(
|
|
306
|
+
f"No format handler registered for {self.filepath.suffix}. "
|
|
307
|
+
f"Provide explicit loader/dumper or register the extension."
|
|
308
|
+
)
|
|
309
|
+
auto_loader, auto_dumper = handlers
|
|
310
|
+
loader = loader or auto_loader
|
|
311
|
+
dumper = dumper or auto_dumper
|
|
312
|
+
|
|
313
|
+
self._file_loader = loader
|
|
314
|
+
self._file_dumper = dumper
|
|
315
|
+
|
|
316
|
+
# Create loader/dumper closures for SyncStore
|
|
317
|
+
super().__init__(loader=self._load_from_file, dumper=self._dump_to_file)
|
|
318
|
+
|
|
319
|
+
def _load_from_file(self) -> dict:
|
|
320
|
+
"""Read and parse file, returning the section specified by key_path."""
|
|
321
|
+
with open(self.filepath, self.mode) as f:
|
|
322
|
+
content = f.read()
|
|
323
|
+
data = self._file_loader(content)
|
|
324
|
+
return _get_nested(data, self.key_path)
|
|
325
|
+
|
|
326
|
+
def _dump_to_file(self, section_data: dict) -> None:
|
|
327
|
+
"""Write data to file, updating only the section specified by key_path."""
|
|
328
|
+
if not self.key_path:
|
|
329
|
+
# No key_path, write entire data
|
|
330
|
+
content = self._file_dumper(section_data, **self.dump_kwargs)
|
|
331
|
+
else:
|
|
332
|
+
# Have key_path, need to merge with full file content
|
|
333
|
+
with open(self.filepath, self.mode) as f:
|
|
334
|
+
full_data = self._file_loader(f.read())
|
|
335
|
+
full_data = _set_nested(full_data, self.key_path, section_data)
|
|
336
|
+
content = self._file_dumper(full_data, **self.dump_kwargs)
|
|
337
|
+
|
|
338
|
+
write_mode = "w" if "b" not in self.mode else "wb"
|
|
339
|
+
with open(self.filepath, write_mode) as f:
|
|
340
|
+
f.write(content)
|
|
341
|
+
|
|
342
|
+
def __repr__(self):
|
|
343
|
+
key_path_str = f", key_path={self.key_path!r}" if self.key_path else ""
|
|
344
|
+
return f"{self.__class__.__name__}({self.filepath!r}{key_path_str})"
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
class JsonStore(FileStore):
|
|
348
|
+
"""
|
|
349
|
+
A FileStore specialized for JSON files.
|
|
350
|
+
|
|
351
|
+
Pre-configured with json.loads/dumps and sensible defaults.
|
|
352
|
+
|
|
353
|
+
Args:
|
|
354
|
+
filepath: Path to JSON file
|
|
355
|
+
key_path: Optional nested path to operate on
|
|
356
|
+
indent: JSON indentation (default: 2)
|
|
357
|
+
ensure_ascii: Whether to escape non-ASCII (default: False)
|
|
358
|
+
**dump_kwargs: Additional kwargs for json.dumps
|
|
359
|
+
"""
|
|
360
|
+
|
|
361
|
+
def __init__(
|
|
362
|
+
self,
|
|
363
|
+
filepath: Union[str, Path],
|
|
364
|
+
*,
|
|
365
|
+
key_path: KeyPath = None,
|
|
366
|
+
indent: int = 2,
|
|
367
|
+
ensure_ascii: bool = False,
|
|
368
|
+
**dump_kwargs,
|
|
369
|
+
):
|
|
370
|
+
dump_kwargs.setdefault("indent", indent)
|
|
371
|
+
dump_kwargs.setdefault("ensure_ascii", ensure_ascii)
|
|
372
|
+
|
|
373
|
+
super().__init__(
|
|
374
|
+
filepath,
|
|
375
|
+
loader=json.loads,
|
|
376
|
+
dumper=json.dumps,
|
|
377
|
+
key_path=key_path,
|
|
378
|
+
mode="r",
|
|
379
|
+
dump_kwargs=dump_kwargs,
|
|
380
|
+
)
|
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
"""Tests for sync_store module."""
|
|
2
|
+
|
|
3
|
+
import tempfile
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from config2py.sync_store import SyncStore, FileStore, JsonStore, register_extension
|
|
7
|
+
|
|
8
|
+
try:
|
|
9
|
+
import pytest
|
|
10
|
+
|
|
11
|
+
PYTEST_AVAILABLE = True
|
|
12
|
+
except ImportError:
|
|
13
|
+
PYTEST_AVAILABLE = False
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def test_sync_store_basic_operations():
|
|
17
|
+
"""Test basic MutableMapping operations."""
|
|
18
|
+
data_holder = [{}]
|
|
19
|
+
|
|
20
|
+
def loader():
|
|
21
|
+
return data_holder[0].copy()
|
|
22
|
+
|
|
23
|
+
def dumper(data):
|
|
24
|
+
data_holder[0] = data.copy()
|
|
25
|
+
|
|
26
|
+
store = SyncStore(loader, dumper)
|
|
27
|
+
|
|
28
|
+
# Set
|
|
29
|
+
store["key"] = "value"
|
|
30
|
+
assert store["key"] == "value"
|
|
31
|
+
assert data_holder[0]["key"] == "value"
|
|
32
|
+
|
|
33
|
+
# Delete
|
|
34
|
+
del store["key"]
|
|
35
|
+
assert "key" not in store
|
|
36
|
+
assert "key" not in data_holder[0]
|
|
37
|
+
|
|
38
|
+
# Iteration
|
|
39
|
+
store["a"] = 1
|
|
40
|
+
store["b"] = 2
|
|
41
|
+
assert set(store) == {"a", "b"}
|
|
42
|
+
|
|
43
|
+
# Length
|
|
44
|
+
assert len(store) == 2
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def test_sync_store_auto_sync():
|
|
48
|
+
"""Test that changes auto-sync by default."""
|
|
49
|
+
data_holder = [{}]
|
|
50
|
+
sync_count = [0]
|
|
51
|
+
|
|
52
|
+
def loader():
|
|
53
|
+
return data_holder[0].copy()
|
|
54
|
+
|
|
55
|
+
def dumper(data):
|
|
56
|
+
data_holder[0] = data.copy()
|
|
57
|
+
sync_count[0] += 1
|
|
58
|
+
|
|
59
|
+
store = SyncStore(loader, dumper)
|
|
60
|
+
|
|
61
|
+
store["a"] = 1
|
|
62
|
+
assert sync_count[0] == 1
|
|
63
|
+
|
|
64
|
+
store["b"] = 2
|
|
65
|
+
assert sync_count[0] == 2
|
|
66
|
+
|
|
67
|
+
del store["a"]
|
|
68
|
+
assert sync_count[0] == 3
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def test_sync_store_context_manager():
|
|
72
|
+
"""Test deferred sync with context manager."""
|
|
73
|
+
data_holder = [{}]
|
|
74
|
+
sync_count = [0]
|
|
75
|
+
|
|
76
|
+
def loader():
|
|
77
|
+
return data_holder[0].copy()
|
|
78
|
+
|
|
79
|
+
def dumper(data):
|
|
80
|
+
data_holder[0] = data.copy()
|
|
81
|
+
sync_count[0] += 1
|
|
82
|
+
|
|
83
|
+
store = SyncStore(loader, dumper)
|
|
84
|
+
|
|
85
|
+
# Batch operations
|
|
86
|
+
with store:
|
|
87
|
+
store["a"] = 1
|
|
88
|
+
store["b"] = 2
|
|
89
|
+
store["c"] = 3
|
|
90
|
+
assert sync_count[0] == 0 # Not synced yet
|
|
91
|
+
|
|
92
|
+
assert sync_count[0] == 1 # Synced once on exit
|
|
93
|
+
assert data_holder[0] == {"a": 1, "b": 2, "c": 3}
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def test_sync_store_manual_flush():
|
|
97
|
+
"""Test manual flush() call."""
|
|
98
|
+
data_holder = [{}]
|
|
99
|
+
sync_count = [0]
|
|
100
|
+
|
|
101
|
+
def loader():
|
|
102
|
+
return data_holder[0].copy()
|
|
103
|
+
|
|
104
|
+
def dumper(data):
|
|
105
|
+
data_holder[0] = data.copy()
|
|
106
|
+
sync_count[0] += 1
|
|
107
|
+
|
|
108
|
+
store = SyncStore(loader, dumper)
|
|
109
|
+
|
|
110
|
+
with store:
|
|
111
|
+
store["a"] = 1
|
|
112
|
+
assert sync_count[0] == 0
|
|
113
|
+
|
|
114
|
+
store.flush() # Manual flush
|
|
115
|
+
assert sync_count[0] == 1
|
|
116
|
+
|
|
117
|
+
store["b"] = 2
|
|
118
|
+
assert sync_count[0] == 1 # Still in context, not auto-synced
|
|
119
|
+
|
|
120
|
+
assert sync_count[0] == 2 # Final flush on exit
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def test_file_store_json():
|
|
124
|
+
"""Test FileStore with JSON file."""
|
|
125
|
+
with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
|
|
126
|
+
f.write('{"key": "value"}')
|
|
127
|
+
temp_file = f.name
|
|
128
|
+
|
|
129
|
+
try:
|
|
130
|
+
store = FileStore(temp_file)
|
|
131
|
+
|
|
132
|
+
# Read
|
|
133
|
+
assert store["key"] == "value"
|
|
134
|
+
|
|
135
|
+
# Write
|
|
136
|
+
store["new_key"] = "new_value"
|
|
137
|
+
|
|
138
|
+
# Verify persistence
|
|
139
|
+
with open(temp_file) as f:
|
|
140
|
+
data = json.load(f)
|
|
141
|
+
assert data["new_key"] == "new_value"
|
|
142
|
+
|
|
143
|
+
# Delete
|
|
144
|
+
del store["key"]
|
|
145
|
+
assert "key" not in store
|
|
146
|
+
|
|
147
|
+
finally:
|
|
148
|
+
Path(temp_file).unlink()
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def test_file_store_key_path():
    """A key_path-scoped FileStore touches only its own section of the file."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write('{"section1": {"a": 1}, "section2": {"b": 2}}')
        temp_file = f.name

    try:
        # Scope the store to section1 only.
        scoped = FileStore(temp_file, key_path="section1")

        assert scoped["a"] == 1
        scoped["c"] = 3
        assert "c" in scoped

        # The sibling section must remain intact on disk.
        with open(temp_file) as f:
            on_disk = json.load(f)
        assert on_disk["section2"] == {"b": 2}
        assert on_disk["section1"]["c"] == 3

    finally:
        Path(temp_file).unlink()
|
175
|
+
def test_file_store_dotted_key_path():
    """Dotted key_path notation navigates nested dictionaries."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write('{"a": {"b": {"c": {}}}}')
        temp_file = f.name

    try:
        deep = FileStore(temp_file, key_path="a.b.c")
        deep["key"] = "value"

        # The write should land at the deeply nested location.
        with open(temp_file) as f:
            on_disk = json.load(f)
        assert on_disk["a"]["b"]["c"]["key"] == "value"

    finally:
        Path(temp_file).unlink()
|
193
|
+
def test_json_store():
    """JsonStore writes back with its default pretty-printed formatting."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write('{"key": "value"}')
        temp_file = f.name

    try:
        store = JsonStore(temp_file)
        store["new"] = "data"

        # Default formatting (indent=2) should leave indentation in the file.
        with open(temp_file) as f:
            raw = f.read()
        assert " " in raw

    finally:
        Path(temp_file).unlink()
|
212
|
+
def test_file_store_batch_operations():
    """Many writes inside a context manager all land in one deferred sync."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write("{}")
        temp_file = f.name

    try:
        store = FileStore(temp_file)

        # Batch mode: the whole loop produces a single write on exit.
        with store:
            for i in range(100):
                store[f"key_{i}"] = i

        # Every key from the batch must have been persisted.
        with open(temp_file) as f:
            on_disk = json.load(f)
        assert len(on_disk) == 100
        assert on_disk["key_42"] == 42

    finally:
        Path(temp_file).unlink()
|
236
|
+
def test_extension_registry():
    """A registered custom extension drives FileStore loading/dumping."""

    def parse_custom(content):
        # Wrap the raw text so we can observe the loader being used.
        return {"loaded": content}

    def serialize_custom(data):
        return str(data)

    register_extension(".custom", parse_custom, serialize_custom)

    with tempfile.NamedTemporaryFile(mode="w", suffix=".custom", delete=False) as f:
        f.write("test content")
        temp_file = f.name

    try:
        store = FileStore(temp_file)
        assert store["loaded"] == "test content"

    finally:
        Path(temp_file).unlink()
|
260
|
+
def test_store_repr():
    """repr() of the store classes names the class, file, and key_path."""
    state = [{}]
    store = SyncStore(lambda: state[0], lambda d: None)
    assert "SyncStore" in repr(store)

    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        f.write('{"section": {}}')  # Include section key
        temp_file = f.name

    try:
        plain = FileStore(temp_file)
        assert "FileStore" in repr(plain)
        assert temp_file in repr(plain)

        scoped = FileStore(temp_file, key_path="section")
        assert "key_path" in repr(scoped)

    finally:
        Path(temp_file).unlink()
|
282
|
+
if __name__ == "__main__":
    if PYTEST_AVAILABLE:
        # Propagate pytest's exit status so CI/shell callers see failures.
        raise SystemExit(pytest.main([__file__, "-v"]))
    else:
        # Minimal fallback runner for environments without pytest.
        print("Running tests without pytest...")
        tests = [
            test_sync_store_basic_operations,
            test_sync_store_auto_sync,
            test_sync_store_context_manager,
            test_sync_store_manual_flush,
            test_file_store_json,
            test_file_store_key_path,
            test_file_store_dotted_key_path,
            test_json_store,
            test_file_store_batch_operations,
            test_extension_registry,
            test_store_repr,
        ]

        failed = []
        for test in tests:
            try:
                test()
                print(f"✓ {test.__name__}")
            except Exception as e:
                print(f"✗ {test.__name__}: {e}")
                failed.append((test.__name__, e))

        print(f"\n{len(tests) - len(failed)}/{len(tests)} tests passed")
        if failed:
            print("\nFailed tests:")
            for name, error in failed:
                print(f"  {name}: {error}")
            # `exit()` is a site-module convenience and may be missing in
            # frozen apps or `python -S` runs; SystemExit is always defined.
            raise SystemExit(1)
|
|
@@ -9,7 +9,8 @@ import ast
|
|
|
9
9
|
from collections import ChainMap, namedtuple
|
|
10
10
|
from pathlib import Path
|
|
11
11
|
from functools import partial
|
|
12
|
-
from typing import Optional, Union, Any,
|
|
12
|
+
from typing import Optional, Union, Any, Set, Literal, get_args
|
|
13
|
+
from collections.abc import Callable
|
|
13
14
|
from types import SimpleNamespace
|
|
14
15
|
import getpass
|
|
15
16
|
|
|
@@ -119,7 +120,7 @@ def ask_user_for_input(
|
|
|
119
120
|
|
|
120
121
|
# Note: Could be made more efficient, but this is good enough (for now)
|
|
121
122
|
def extract_variable_declarations(
|
|
122
|
-
string: str, expand:
|
|
123
|
+
string: str, expand: dict | bool | None = None
|
|
123
124
|
) -> dict:
|
|
124
125
|
"""
|
|
125
126
|
Reads the contents of a config file, extracting Unix-style environment variable
|
|
@@ -402,7 +403,7 @@ def _default_folder_setup(directory_path: str) -> None:
|
|
|
402
403
|
This is the default setup callback for directories managed by config2py.
|
|
403
404
|
"""
|
|
404
405
|
if not os.path.isdir(directory_path):
|
|
405
|
-
os.
|
|
406
|
+
os.makedirs(directory_path, exist_ok=True)
|
|
406
407
|
# Add a hidden file to annotate the directory as one managed by config2py.
|
|
407
408
|
# This helps distinguish it from directories created by other programs
|
|
408
409
|
# (this can be useful to avoid conflicts).
|
|
@@ -466,11 +467,10 @@ def get_app_folder(
|
|
|
466
467
|
app_data_path = os.path.join(
|
|
467
468
|
get_app_rootdir(folder_kind, ensure_exists=ensure_exists), app_name
|
|
468
469
|
)
|
|
469
|
-
|
|
470
|
-
# process_path(app_data_path, ensure_dir_exists=True)
|
|
470
|
+
app_data_folder_does_not_exist = not os.path.isdir(app_data_path)
|
|
471
471
|
|
|
472
|
-
if
|
|
473
|
-
setup_callback(app_data_path)
|
|
472
|
+
if app_data_folder_does_not_exist and ensure_exists:
|
|
473
|
+
setup_callback(app_data_path, ensure_exists=ensure_exists)
|
|
474
474
|
return app_data_path
|
|
475
475
|
|
|
476
476
|
|
|
@@ -554,7 +554,7 @@ def is_repl():
|
|
|
554
554
|
return False
|
|
555
555
|
|
|
556
556
|
|
|
557
|
-
is_repl.repl_conditions:
|
|
557
|
+
is_repl.repl_conditions: set[Callable] = _repl_conditions # type: ignore
|
|
558
558
|
|
|
559
559
|
|
|
560
560
|
def _value_node_is_instance_of(
|
|
@@ -1,3 +1,17 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: config2py
|
|
3
|
+
Version: 0.1.43
|
|
4
|
+
Summary: Simplified reading and writing configurations from various sources and formats
|
|
5
|
+
Home-page: https://github.com/i2mint/config2py
|
|
6
|
+
License: apache-2.0
|
|
7
|
+
Platform: any
|
|
8
|
+
Description-Content-Type: text/markdown
|
|
9
|
+
License-File: LICENSE
|
|
10
|
+
Requires-Dist: dol
|
|
11
|
+
Requires-Dist: i2
|
|
12
|
+
Requires-Dist: importlib_resources; python_version < "3.9"
|
|
13
|
+
Dynamic: license-file
|
|
14
|
+
|
|
1
15
|
# config2py
|
|
2
16
|
|
|
3
17
|
Simplified reading and writing configurations from various sources and formats.
|
|
@@ -197,6 +211,80 @@ It will return the value that the user entered last time, without prompting the
|
|
|
197
211
|
user again.
|
|
198
212
|
|
|
199
213
|
|
|
214
|
+
## SyncStore: Auto-Syncing Key-Value Stores
|
|
215
|
+
|
|
216
|
+
### Overview
|
|
217
|
+
|
|
218
|
+
`SyncStore` provides MutableMapping interfaces that automatically persist changes to backing storage. Changes sync immediately by default, or can be deferred using a context manager for efficient batch operations.
|
|
219
|
+
|
|
220
|
+
### Basic Usage
|
|
221
|
+
|
|
222
|
+
```python
|
|
223
|
+
from config2py.sync_store import FileStore, JsonStore
|
|
224
|
+
|
|
225
|
+
# Auto-detected from .json extension
|
|
226
|
+
config = FileStore('config.json')
|
|
227
|
+
config['api_key'] = 'secret' # Syncs immediately
|
|
228
|
+
|
|
229
|
+
# Batch operations (deferred sync)
|
|
230
|
+
with config:
|
|
231
|
+
config['a'] = 1
|
|
232
|
+
config['b'] = 2
|
|
233
|
+
config['c'] = 3
|
|
234
|
+
# Syncs once on exit
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
### Nested Sections
|
|
238
|
+
|
|
239
|
+
```python
|
|
240
|
+
# Work with specific section via key_path
|
|
241
|
+
db_config = FileStore('config.json', key_path='database')
|
|
242
|
+
db_config['host'] = 'localhost' # Only affects database section
|
|
243
|
+
|
|
244
|
+
# Dotted notation for deep nesting
|
|
245
|
+
items = FileStore('config.json', key_path='app.settings.items')
|
|
246
|
+
items['item1'] = 'value'
|
|
247
|
+
```
|
|
248
|
+
|
|
249
|
+
### Supported Formats
|
|
250
|
+
|
|
251
|
+
Auto-detected by extension:
|
|
252
|
+
- `.json` - JSON (stdlib)
|
|
253
|
+
- `.ini`, `.cfg` - INI files (stdlib)
|
|
254
|
+
- `.yaml`, `.yml` - YAML (if PyYAML installed)
|
|
255
|
+
- `.toml` - TOML (if tomli/tomllib installed)
|
|
256
|
+
|
|
257
|
+
Register custom formats:
|
|
258
|
+
```python
|
|
259
|
+
from config2py.sync_store import register_extension
|
|
260
|
+
|
|
261
|
+
register_extension('.custom', my_loader, my_dumper)
|
|
262
|
+
store = FileStore('data.custom')
|
|
263
|
+
```
|
|
264
|
+
|
|
265
|
+
### Custom Backing Storage
|
|
266
|
+
|
|
267
|
+
```python
|
|
268
|
+
from config2py.sync_store import SyncStore
|
|
269
|
+
|
|
270
|
+
# Any backing storage via loader/dumper
|
|
271
|
+
def my_loader():
|
|
272
|
+
return fetch_from_database()
|
|
273
|
+
|
|
274
|
+
def my_dumper(data):
|
|
275
|
+
save_to_database(data)
|
|
276
|
+
|
|
277
|
+
store = SyncStore(my_loader, my_dumper)
|
|
278
|
+
store['key'] = 'value' # Calls my_dumper
|
|
279
|
+
```
|
|
280
|
+
|
|
281
|
+
### Key Classes
|
|
282
|
+
|
|
283
|
+
- **`SyncStore`** - Base class with loader/dumper functions
|
|
284
|
+
- **`FileStore`** - File-based with extension detection and key_path
|
|
285
|
+
- **`JsonStore`** - Explicit JSON with sensible defaults
|
|
286
|
+
|
|
287
|
+
|
|
200
288
|
# A few notable tools you can import from config2py
|
|
201
289
|
|
|
202
290
|
* `get_config`: Get a config value from a list of sources. See more below.
|
|
@@ -327,7 +415,3 @@ s['SOME_KEY']
|
|
|
327
415
|
|
|
328
416
|
More on that another day...
|
|
329
417
|
|
|
330
|
-
|
|
331
|
-
```python
|
|
332
|
-
|
|
333
|
-
```
|
|
@@ -6,6 +6,7 @@ config2py/__init__.py
|
|
|
6
6
|
config2py/base.py
|
|
7
7
|
config2py/errors.py
|
|
8
8
|
config2py/s_configparser.py
|
|
9
|
+
config2py/sync_store.py
|
|
9
10
|
config2py/tools.py
|
|
10
11
|
config2py/util.py
|
|
11
12
|
config2py.egg-info/PKG-INFO
|
|
@@ -16,5 +17,6 @@ config2py.egg-info/requires.txt
|
|
|
16
17
|
config2py.egg-info/top_level.txt
|
|
17
18
|
config2py/scrap/__init__.py
|
|
18
19
|
config2py/tests/__init__.py
|
|
20
|
+
config2py/tests/test_sync_store.py
|
|
19
21
|
config2py/tests/test_tools.py
|
|
20
22
|
config2py/tests/utils_for_testing.py
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|