jsonjsdb 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jsonjsdb-0.1.0/.gitignore +11 -0
- jsonjsdb-0.1.0/LICENSE +21 -0
- jsonjsdb-0.1.0/PKG-INFO +145 -0
- jsonjsdb-0.1.0/README.md +121 -0
- jsonjsdb-0.1.0/pyproject.toml +61 -0
- jsonjsdb-0.1.0/src/jsonjsdb/__init__.py +10 -0
- jsonjsdb-0.1.0/src/jsonjsdb/database.py +111 -0
- jsonjsdb-0.1.0/src/jsonjsdb/loader.py +55 -0
- jsonjsdb-0.1.0/src/jsonjsdb/py.typed +0 -0
- jsonjsdb-0.1.0/src/jsonjsdb/table.py +227 -0
- jsonjsdb-0.1.0/src/jsonjsdb/types.py +9 -0
- jsonjsdb-0.1.0/src/jsonjsdb/writer.py +71 -0
jsonjsdb-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright © 2026-present Bassim Matar - datannur
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
|
13
|
+
all copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
21
|
+
THE SOFTWARE.
|
jsonjsdb-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: jsonjsdb
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Python library for JSONJS database loading
|
|
5
|
+
Project-URL: Homepage, https://github.com/datannur/jsonjsdb
|
|
6
|
+
Project-URL: Repository, https://github.com/datannur/jsonjsdb
|
|
7
|
+
Project-URL: Documentation, https://github.com/datannur/jsonjsdb#readme
|
|
8
|
+
Project-URL: Issues, https://github.com/datannur/jsonjsdb/issues
|
|
9
|
+
Author: datannur
|
|
10
|
+
License: MIT
|
|
11
|
+
License-File: LICENSE
|
|
12
|
+
Classifier: Development Status :: 3 - Alpha
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
21
|
+
Requires-Python: >=3.9
|
|
22
|
+
Requires-Dist: polars>=1.0.0
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
|
|
25
|
+
# jsonjsdb
|
|
26
|
+
|
|
27
|
+
[](https://pypi.org/project/jsonjsdb/)
|
|
28
|
+
[](https://pypi.org/project/jsonjsdb/)
|
|
29
|
+
[](https://github.com/datannur/jsonjsdb/actions/workflows/ci.yml)
|
|
30
|
+
[](https://codecov.io/gh/datannur/jsonjsdb)
|
|
31
|
+
[](https://opensource.org/licenses/MIT)
|
|
32
|
+
|
|
33
|
+
Python library for JSONJS databases with full CRUD support and relational queries.
|
|
34
|
+
|
|
35
|
+
## Features
|
|
36
|
+
|
|
37
|
+
- **Read & Write**: Full CRUD operations
|
|
38
|
+
- **Typed API**: Optional TypedDict support with autocompletion
|
|
39
|
+
- **Relational queries**: `having.{table}(id)` for one-to-many and many-to-many
|
|
40
|
+
- **Filtering**: `where()` with operators (`==`, `!=`, `>`, `in`, `is_null`, etc.)
|
|
41
|
+
- **TypeScript compatible**: Same file format as the TypeScript jsonjsdb library
|
|
42
|
+
|
|
43
|
+
## Installation
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
pip install jsonjsdb
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
## Quick Start
|
|
50
|
+
|
|
51
|
+
```python
|
|
52
|
+
from jsonjsdb import Jsonjsdb
|
|
53
|
+
|
|
54
|
+
db = Jsonjsdb("path/to/db")
|
|
55
|
+
|
|
56
|
+
# Read
|
|
57
|
+
user = db["user"].get("user_1")
|
|
58
|
+
active = db["user"].where("status", "==", "active")
|
|
59
|
+
|
|
60
|
+
# Write
|
|
61
|
+
db["user"].add({"id": "u1", "name": "Alice", "tag_ids": []})
|
|
62
|
+
db["user"].update("u1", name="Alice Updated")
|
|
63
|
+
db.save()
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
## Typed Access
|
|
67
|
+
|
|
68
|
+
```python
|
|
69
|
+
from typing import TypedDict
|
|
70
|
+
from jsonjsdb import Jsonjsdb, Table
|
|
71
|
+
|
|
72
|
+
class User(TypedDict):
|
|
73
|
+
id: str
|
|
74
|
+
name: str
|
|
75
|
+
tag_ids: list[str]
|
|
76
|
+
|
|
77
|
+
class MyDB(Jsonjsdb):
|
|
78
|
+
user: Table[User]
|
|
79
|
+
|
|
80
|
+
db = MyDB("path/to/db")
|
|
81
|
+
user = db.user.get("user_1") # Returns User | None (with autocompletion)
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
## API Reference
|
|
85
|
+
|
|
86
|
+
### CRUD
|
|
87
|
+
|
|
88
|
+
```python
|
|
89
|
+
db.user.add({"id": "u1", "name": "Alice", ...}) # Add row (id required)
|
|
90
|
+
db.user.add_all([...]) # Add multiple rows
|
|
91
|
+
|
|
92
|
+
db.user.get("u1") # → User | None
|
|
93
|
+
db.user.all() # → list[User]
|
|
94
|
+
|
|
95
|
+
db.user.update("u1", name="New Name") # Update fields
|
|
96
|
+
db.user.remove("u1") # → bool
|
|
97
|
+
db.user.remove_all(["u1", "u2"]) # → int (count)
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
### Filtering
|
|
101
|
+
|
|
102
|
+
```python
|
|
103
|
+
db.user.where("status", "==", "active") # Equality
|
|
104
|
+
db.user.where("age", ">", 18) # Comparison (>, >=, <, <=)
|
|
105
|
+
db.user.where("status", "in", ["a", "b"]) # In list
|
|
106
|
+
db.user.where("email", "is_null") # Null check (is_not_null)
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
### Relations
|
|
110
|
+
|
|
111
|
+
```python
|
|
112
|
+
db.email.having.user("user_1") # One-to-many: where user_id == "user_1"
|
|
113
|
+
db.user.having.tag("tag_1") # Many-to-many: where tag_ids contains "tag_1"
|
|
114
|
+
db.folder.having.parent("folder_1") # Hierarchy: where parent_id == "folder_1"
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
### Save / New Database
|
|
118
|
+
|
|
119
|
+
```python
|
|
120
|
+
db.save() # Save to original path
|
|
121
|
+
db.save("new/path") # Save to new location
|
|
122
|
+
|
|
123
|
+
db = MyDB() # Create empty in-memory DB
|
|
124
|
+
db.user.add({...})
|
|
125
|
+
db.save("path/to/db") # Path required on first save
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
## File Format
|
|
129
|
+
|
|
130
|
+
- `__table__.json` — Index of tables with metadata
|
|
131
|
+
- `{table}.json` — Data as array of objects
|
|
132
|
+
- `{table}.json.js` — Same data for browser (JavaScript)
|
|
133
|
+
|
|
134
|
+
### Column Conventions
|
|
135
|
+
|
|
136
|
+
| Column | Description |
|
|
137
|
+
|--------|-------------|
|
|
138
|
+
| `id` | Primary key (always string) |
|
|
139
|
+
| `xxx_id` | Foreign key to table `xxx` |
|
|
140
|
+
| `xxx_ids` | Many-to-many (comma-separated in file, `list[str]` in API) |
|
|
141
|
+
| `parent_id` | Self-reference for hierarchies |
|
|
142
|
+
|
|
143
|
+
## License
|
|
144
|
+
|
|
145
|
+
MIT
|
jsonjsdb-0.1.0/README.md
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
# jsonjsdb
|
|
2
|
+
|
|
3
|
+
[](https://pypi.org/project/jsonjsdb/)
|
|
4
|
+
[](https://pypi.org/project/jsonjsdb/)
|
|
5
|
+
[](https://github.com/datannur/jsonjsdb/actions/workflows/ci.yml)
|
|
6
|
+
[](https://codecov.io/gh/datannur/jsonjsdb)
|
|
7
|
+
[](https://opensource.org/licenses/MIT)
|
|
8
|
+
|
|
9
|
+
Python library for JSONJS databases with full CRUD support and relational queries.
|
|
10
|
+
|
|
11
|
+
## Features
|
|
12
|
+
|
|
13
|
+
- **Read & Write**: Full CRUD operations
|
|
14
|
+
- **Typed API**: Optional TypedDict support with autocompletion
|
|
15
|
+
- **Relational queries**: `having.{table}(id)` for one-to-many and many-to-many
|
|
16
|
+
- **Filtering**: `where()` with operators (`==`, `!=`, `>`, `in`, `is_null`, etc.)
|
|
17
|
+
- **TypeScript compatible**: Same file format as the TypeScript jsonjsdb library
|
|
18
|
+
|
|
19
|
+
## Installation
|
|
20
|
+
|
|
21
|
+
```bash
|
|
22
|
+
pip install jsonjsdb
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## Quick Start
|
|
26
|
+
|
|
27
|
+
```python
|
|
28
|
+
from jsonjsdb import Jsonjsdb
|
|
29
|
+
|
|
30
|
+
db = Jsonjsdb("path/to/db")
|
|
31
|
+
|
|
32
|
+
# Read
|
|
33
|
+
user = db["user"].get("user_1")
|
|
34
|
+
active = db["user"].where("status", "==", "active")
|
|
35
|
+
|
|
36
|
+
# Write
|
|
37
|
+
db["user"].add({"id": "u1", "name": "Alice", "tag_ids": []})
|
|
38
|
+
db["user"].update("u1", name="Alice Updated")
|
|
39
|
+
db.save()
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## Typed Access
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
from typing import TypedDict
|
|
46
|
+
from jsonjsdb import Jsonjsdb, Table
|
|
47
|
+
|
|
48
|
+
class User(TypedDict):
|
|
49
|
+
id: str
|
|
50
|
+
name: str
|
|
51
|
+
tag_ids: list[str]
|
|
52
|
+
|
|
53
|
+
class MyDB(Jsonjsdb):
|
|
54
|
+
user: Table[User]
|
|
55
|
+
|
|
56
|
+
db = MyDB("path/to/db")
|
|
57
|
+
user = db.user.get("user_1") # Returns User | None (with autocompletion)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## API Reference
|
|
61
|
+
|
|
62
|
+
### CRUD
|
|
63
|
+
|
|
64
|
+
```python
|
|
65
|
+
db.user.add({"id": "u1", "name": "Alice", ...}) # Add row (id required)
|
|
66
|
+
db.user.add_all([...]) # Add multiple rows
|
|
67
|
+
|
|
68
|
+
db.user.get("u1") # → User | None
|
|
69
|
+
db.user.all() # → list[User]
|
|
70
|
+
|
|
71
|
+
db.user.update("u1", name="New Name") # Update fields
|
|
72
|
+
db.user.remove("u1") # → bool
|
|
73
|
+
db.user.remove_all(["u1", "u2"]) # → int (count)
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Filtering
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
db.user.where("status", "==", "active") # Equality
|
|
80
|
+
db.user.where("age", ">", 18) # Comparison (>, >=, <, <=)
|
|
81
|
+
db.user.where("status", "in", ["a", "b"]) # In list
|
|
82
|
+
db.user.where("email", "is_null") # Null check (is_not_null)
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
### Relations
|
|
86
|
+
|
|
87
|
+
```python
|
|
88
|
+
db.email.having.user("user_1") # One-to-many: where user_id == "user_1"
|
|
89
|
+
db.user.having.tag("tag_1") # Many-to-many: where tag_ids contains "tag_1"
|
|
90
|
+
db.folder.having.parent("folder_1") # Hierarchy: where parent_id == "folder_1"
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
### Save / New Database
|
|
94
|
+
|
|
95
|
+
```python
|
|
96
|
+
db.save() # Save to original path
|
|
97
|
+
db.save("new/path") # Save to new location
|
|
98
|
+
|
|
99
|
+
db = MyDB() # Create empty in-memory DB
|
|
100
|
+
db.user.add({...})
|
|
101
|
+
db.save("path/to/db") # Path required on first save
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
## File Format
|
|
105
|
+
|
|
106
|
+
- `__table__.json` — Index of tables with metadata
|
|
107
|
+
- `{table}.json` — Data as array of objects
|
|
108
|
+
- `{table}.json.js` — Same data for browser (JavaScript)
|
|
109
|
+
|
|
110
|
+
### Column Conventions
|
|
111
|
+
|
|
112
|
+
| Column | Description |
|
|
113
|
+
|--------|-------------|
|
|
114
|
+
| `id` | Primary key (always string) |
|
|
115
|
+
| `xxx_id` | Foreign key to table `xxx` |
|
|
116
|
+
| `xxx_ids` | Many-to-many (comma-separated in file, `list[str]` in API) |
|
|
117
|
+
| `parent_id` | Self-reference for hierarchies |
|
|
118
|
+
|
|
119
|
+
## License
|
|
120
|
+
|
|
121
|
+
MIT
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "jsonjsdb"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Python library for JSONJS database loading"
|
|
9
|
+
authors = [{ name = "datannur" }]
|
|
10
|
+
readme = "README.md"
|
|
11
|
+
requires-python = ">=3.9"
|
|
12
|
+
license = { text = "MIT" }
|
|
13
|
+
classifiers = [
|
|
14
|
+
"Development Status :: 3 - Alpha",
|
|
15
|
+
"License :: OSI Approved :: MIT License",
|
|
16
|
+
"Programming Language :: Python :: 3",
|
|
17
|
+
"Programming Language :: Python :: 3.9",
|
|
18
|
+
"Programming Language :: Python :: 3.10",
|
|
19
|
+
"Programming Language :: Python :: 3.11",
|
|
20
|
+
"Programming Language :: Python :: 3.12",
|
|
21
|
+
"Programming Language :: Python :: 3.13",
|
|
22
|
+
"Programming Language :: Python :: 3.14",
|
|
23
|
+
]
|
|
24
|
+
dependencies = ["polars>=1.0.0"]
|
|
25
|
+
|
|
26
|
+
[project.urls]
|
|
27
|
+
Homepage = "https://github.com/datannur/jsonjsdb"
|
|
28
|
+
Repository = "https://github.com/datannur/jsonjsdb"
|
|
29
|
+
Documentation = "https://github.com/datannur/jsonjsdb#readme"
|
|
30
|
+
Issues = "https://github.com/datannur/jsonjsdb/issues"
|
|
31
|
+
|
|
32
|
+
[dependency-groups]
|
|
33
|
+
dev = [
|
|
34
|
+
"pytest",
|
|
35
|
+
"pytest-cov",
|
|
36
|
+
"ruff",
|
|
37
|
+
"pyright>=1.1.400",
|
|
38
|
+
]
|
|
39
|
+
|
|
40
|
+
[tool.pytest.ini_options]
|
|
41
|
+
testpaths = ["tests"]
|
|
42
|
+
|
|
43
|
+
[tool.ruff]
|
|
44
|
+
target-version = "py39"
|
|
45
|
+
|
|
46
|
+
[tool.hatch.build.targets.wheel]
|
|
47
|
+
packages = ["src/jsonjsdb"]
|
|
48
|
+
|
|
49
|
+
[tool.hatch.build.targets.sdist]
|
|
50
|
+
include = ["src/jsonjsdb"]
|
|
51
|
+
|
|
52
|
+
[tool.pyright]
|
|
53
|
+
pythonVersion = "3.9"
|
|
54
|
+
typeCheckingMode = "standard"
|
|
55
|
+
|
|
56
|
+
[tool.coverage.run]
|
|
57
|
+
source = ["src/jsonjsdb"]
|
|
58
|
+
branch = true
|
|
59
|
+
|
|
60
|
+
[tool.coverage.report]
|
|
61
|
+
exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:", "@overload", "\\.\\.\\."]
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"""JSONJS database library for Python."""
|
|
2
|
+
|
|
3
|
+
from importlib.metadata import version
|
|
4
|
+
|
|
5
|
+
from .database import Jsonjsdb
|
|
6
|
+
from .table import Table
|
|
7
|
+
from .types import ID, Operator, TableRow
|
|
8
|
+
|
|
9
|
+
__version__ = version("jsonjsdb")
|
|
10
|
+
__all__ = ["Jsonjsdb", "Table", "ID", "Operator", "TableRow"]
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""Main Jsonjsdb database class."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, get_type_hints
|
|
7
|
+
|
|
8
|
+
from .loader import load_table, load_table_index
|
|
9
|
+
from .table import Table
|
|
10
|
+
from .types import TableRow
|
|
11
|
+
from .writer import write_table_index, write_table_json, write_table_jsonjs
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Jsonjsdb:
    """JSONJS database with typed table access.

    Tables are loaded eagerly from ``{path}/__table__.json`` (the index)
    plus one ``{name}.json`` file per table, and held in memory.

    Usage (untyped):
        db = Jsonjsdb("path/to/db")
        user = db["user"].get("user_1")

    Usage (typed):
        class MyDB(Jsonjsdb):
            user: Table[User]
            tag: Table[Tag]

        db = MyDB("path/to/db")
        user = db.user.get("user_1")  # Typed as User | None
    """

    def __init__(self, path: str | Path | None = None) -> None:
        # path may be None (or empty) for an in-memory database; save()
        # then requires an explicit path on its first call.
        self._path: Path | None = Path(path) if path else None
        self._tables: dict[str, Table[Any]] = {}

        if self._path:
            self._load_from_path(self._path)

        # Must run after disk loading so annotated attributes bind to the
        # already-loaded tables; annotated-but-missing tables start empty.
        self._init_typed_tables()

    def _load_from_path(self, path: Path) -> None:
        """Load all tables from disk.

        Raises:
            FileNotFoundError: if `path` or its `__table__.json` is missing.
        """
        if not path.exists():
            raise FileNotFoundError(f"Database path does not exist: {path}")

        table_index_path = path / "__table__.json"
        if not table_index_path.exists():
            raise FileNotFoundError(f"Missing __table__.json in {path}")

        table_index = load_table_index(table_index_path)

        for entry in table_index:
            name = str(entry["name"])
            json_path = path / f"{name}.json"

            # An index entry whose data file is absent is skipped silently.
            if json_path.exists():
                df = load_table(json_path)
                self._tables[name] = Table(name, self, df)

    def _init_typed_tables(self) -> None:
        """Initialize Table attributes from type annotations on subclasses."""
        # get_type_hints resolves string annotations (the file uses
        # `from __future__ import annotations`), yielding e.g. Table[User].
        hints = get_type_hints(self.__class__)

        for attr_name, hint in hints.items():
            # Table[User].__origin__ is Table; plain attrs have no __origin__.
            origin = getattr(hint, "__origin__", None)
            if origin is Table:
                if attr_name in self._tables:
                    setattr(self, attr_name, self._tables[attr_name])
                else:
                    # Annotated but not on disk: create an empty table so
                    # attribute access always works.
                    table = Table(attr_name, self)
                    self._tables[attr_name] = table
                    setattr(self, attr_name, table)

    def __getitem__(self, table_name: str) -> Table[TableRow]:
        """Access a table by name (untyped).

        Raises:
            KeyError: if no table with that name is loaded.
        """
        if table_name not in self._tables:
            raise KeyError(f"Table '{table_name}' not found")
        return self._tables[table_name]

    @property
    def path(self) -> Path | None:
        # The database directory, or None for an unsaved in-memory DB.
        return self._path

    @property
    def tables(self) -> list[str]:
        """List of loaded table names."""
        return list(self._tables.keys())

    def save(self, path: str | Path | None = None) -> None:
        """Save all tables to disk.

        If path is provided, saves to that location and updates self._path.
        If path is None, saves to the original path (must exist).

        Raises:
            ValueError: if no path was given and none was set at load time.
        """
        save_path = Path(path) if path else self._path

        if save_path is None:
            raise ValueError(
                "No path specified. Provide a path or load from an existing database first."
            )

        save_path.mkdir(parents=True, exist_ok=True)

        # Empty tables are neither written nor indexed; only tables with
        # rows appear in __table__.json.
        table_names = []
        for name, table in self._tables.items():
            if not table.df.is_empty():
                write_table_json(table.df, save_path / f"{name}.json")
                write_table_jsonjs(table.df, name, save_path / f"{name}.json.js")
                table_names.append(name)

        write_table_index(table_names, save_path / "__table__.json")

        self._path = save_path
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
"""Load JSON files into Polars DataFrames."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import polars as pl
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def load_table(path: Path) -> pl.DataFrame:
    """Read a JSON table file and normalize it into a Polars DataFrame.

    Normalization rules:
    - The 'id' column is cast to string.
    - Each '*_ids' column is expanded from a comma-separated string into
      list[str] (see _convert_ids_column).
    """
    df = pl.read_json(path)

    if df.is_empty():
        return df

    def _normalize(name: str) -> pl.Expr:
        # Pick the per-column transform; unrecognized columns pass through.
        if name == "id":
            return pl.col("id").cast(pl.Utf8).alias("id")
        if name.endswith("_ids"):
            return _convert_ids_column(name, df.schema[name])
        return pl.col(name)

    return df.select([_normalize(name) for name in df.columns])
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _convert_ids_column(col_name: str, col_type: pl.DataType) -> pl.Expr:
    """Convert a *_ids column from comma-separated string to list[str]."""
    col = pl.col(col_name)

    # Non-string columns are passed through untouched.
    if col_type not in (pl.Utf8, pl.String):
        return col

    empty_list = pl.lit([]).cast(pl.List(pl.Utf8))
    is_blank = col.is_null() | (col == "")
    # Blank/null cells become an empty list; everything else splits on ','.
    return (
        pl.when(is_blank).then(empty_list).otherwise(col.str.split(",")).alias(col_name)
    )
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def load_table_index(path: Path) -> list[dict[str, Any]]:
    """Load __table__.json which contains table metadata.

    Returns:
        The parsed list of index entries (each has at least a "name" key).
    """
    import json

    # utf-8 is explicit so reads match what write_table_index produces,
    # independent of the platform's default locale encoding.
    with open(path, encoding="utf-8") as f:
        return json.load(f)
|
|
File without changes
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
"""Table class wrapping a Polars DataFrame."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import TYPE_CHECKING, Any, Generic, TypeVar, overload
|
|
6
|
+
|
|
7
|
+
import polars as pl
|
|
8
|
+
|
|
9
|
+
from .types import ID, Operator
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from .database import Jsonjsdb
|
|
13
|
+
|
|
14
|
+
T = TypeVar("T")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Table(Generic[T]):
    """A table backed by a Polars DataFrame.

    T is the row type (typically a TypedDict); rows are plain dicts at
    runtime. The 'id' column is the primary key and always a string.
    """

    def __init__(
        self,
        name: str,
        db: Jsonjsdb | None = None,
        df: pl.DataFrame | None = None,
    ) -> None:
        self._name = name
        self._db = db
        # A freshly created table starts as a column-less empty frame.
        self._df = df if df is not None else pl.DataFrame()

    @property
    def name(self) -> str:
        """Table name (matches the {name}.json file on disk)."""
        return self._name

    @property
    def df(self) -> pl.DataFrame:
        """Underlying Polars DataFrame (internal storage representation)."""
        return self._df

    @property
    def having(self) -> HavingProxy[T]:
        """Access relational queries via having.{table}(id).

        Raises:
            RuntimeError: if this table was created without a database.
        """
        if self._db is None:
            raise RuntimeError("Cannot use 'having' without a database context")
        return HavingProxy(self, self._db)

    def get(self, id: ID) -> T | None:
        """Get a single row by ID, or None if not found."""
        # Guard: an empty table has no 'id' column, so filtering would
        # raise a ColumnNotFound error instead of returning None.
        if self._df.is_empty():
            return None
        result = self._df.filter(pl.col("id") == id)
        if result.is_empty():
            return None
        return self._row_to_dict(result.row(0, named=True))

    def all(self) -> list[T]:
        """Get all rows as a list of dicts."""
        return [self._row_to_dict(row) for row in self._df.iter_rows(named=True)]

    @overload
    def where(self, column: str, op: Operator, value: Any) -> list[T]: ...

    @overload
    def where(self, column: str, op: Operator) -> list[T]: ...

    def where(self, column: str, op: Operator, value: Any = None) -> list[T]:
        """Filter rows by a condition.

        Operators: ==, !=, >, >=, <, <=, in, is_null, is_not_null

        Raises:
            ValueError: if op is not one of the supported operators.
        """
        # Guard: an empty table has no columns; filtering would raise.
        if self._df.is_empty():
            return []

        col = pl.col(column)
        expr: pl.Expr

        if op == "==":
            expr = col == value
        elif op == "!=":
            expr = col != value
        elif op == ">":
            expr = col > value
        elif op == ">=":
            expr = col >= value
        elif op == "<":
            expr = col < value
        elif op == "<=":
            expr = col <= value
        elif op == "in":
            expr = col.is_in(value)
        elif op == "is_null":
            expr = col.is_null()
        elif op == "is_not_null":
            expr = col.is_not_null()
        else:
            raise ValueError(f"Unknown operator: {op}")

        result = self._df.filter(expr)
        return [self._row_to_dict(row) for row in result.iter_rows(named=True)]

    def _row_to_dict(self, row: dict[str, Any]) -> T:
        """Convert a Polars row dict to API dict (T)."""
        return row  # type: ignore[return-value]

    # --- CRUD operations ---

    def add(self, row: T) -> None:
        """Add a single row. Raises ValueError if id missing or already exists."""
        row_dict: dict[str, Any] = row  # type: ignore[assignment]

        if "id" not in row_dict:
            raise ValueError("Row must have an 'id' field")

        row_id = str(row_dict["id"])
        if (
            not self._df.is_empty()
            and not self._df.filter(pl.col("id") == row_id).is_empty()
        ):
            raise ValueError(f"Row with id '{row_id}' already exists")

        prepared = self._prepare_row_for_storage(row_dict)
        new_df = pl.DataFrame([prepared])

        if self._df.is_empty():
            self._df = new_df
        else:
            # 'diagonal' fills columns missing from either side with null,
            # so rows need not share an identical schema.
            self._df = pl.concat([self._df, new_df], how="diagonal")

    def add_all(self, rows: list[T]) -> None:
        """Add multiple rows (each validated individually, in order)."""
        for row in rows:
            self.add(row)

    def update(self, id: ID, **kwargs: Any) -> None:
        """Update a row by ID. Raises KeyError if not found.

        Only keys that are existing columns are applied; unknown kwargs
        are silently ignored.
        """
        if self._df.is_empty() or self._df.filter(pl.col("id") == id).is_empty():
            raise KeyError(f"Row with id '{id}' not found")

        updates: list[pl.Expr] = []
        for col_name in self._df.columns:
            if col_name in kwargs:
                value = kwargs[col_name]
                # NOTE(review): pl.lit(..., allow_object=True) may not
                # round-trip list values for *_ids columns — confirm.
                updates.append(
                    pl.when(pl.col("id") == id)
                    .then(pl.lit(value, allow_object=True))
                    .otherwise(pl.col(col_name))
                    .alias(col_name)
                )
            else:
                updates.append(pl.col(col_name))

        self._df = self._df.select(updates)

    def remove(self, id: ID) -> bool:
        """Remove a row by ID. Returns True if removed, False if not found."""
        if self._df.is_empty():
            return False

        original_len = len(self._df)
        self._df = self._df.filter(pl.col("id") != id)
        return len(self._df) < original_len

    def remove_all(self, ids: list[ID]) -> int:
        """Remove multiple rows by ID. Returns count of removed rows."""
        if self._df.is_empty():
            return 0

        original_len = len(self._df)
        self._df = self._df.filter(~pl.col("id").is_in(ids))
        return original_len - len(self._df)

    def _prepare_row_for_storage(self, row: dict[str, Any]) -> dict[str, Any]:
        """Prepare row for internal DataFrame storage.

        - Converts id to string
        - Keeps *_ids as lists (DataFrame stores them as List(String))
        """
        prepared = {}
        for key, value in row.items():
            if key == "id":
                prepared[key] = str(value)
            else:
                prepared[key] = value
        return prepared
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
class HavingProxy(Generic[T]):
    """Proxy for relational queries: table.having.{target}(id).

    Detects relation type automatically:
    - If {target}_id exists → one-to-many (filter by foreign key)
    - If {target}_ids exists → many-to-many (filter by list contains)
    - Special case: 'parent' → looks for parent_id (self-reference)
    """

    def __init__(self, table: Table[T], db: Jsonjsdb) -> None:
        self._table = table
        self._db = db

    def __getattr__(self, target: str) -> _RelationQuery[T]:
        """Treat any attribute access as a relation lookup.

        Raises:
            AttributeError: for underscore-prefixed names, so introspection
                machinery (copy, pickle, hasattr probes for dunders) is not
                handed a bogus _RelationQuery.
        """
        if target.startswith("_"):
            raise AttributeError(target)
        return _RelationQuery(self._table, self._db, target)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
class _RelationQuery(Generic[T]):
    """Callable that executes the relation query."""

    def __init__(self, table: Table[T], db: Jsonjsdb, target: str) -> None:
        self._table = table
        self._db = db
        self._target = target

    def __call__(self, id: ID) -> list[T]:
        """Run the relation query, returning all matching rows.

        Raises:
            AttributeError: if the table has neither a '{target}_id' nor a
                '{target}_ids' column.
        """
        columns = self._table.df.columns

        # 'parent' needs no special handling: it resolves to parent_id like
        # any other target, which is what gives self-referencing hierarchies.

        # {target}_id → one-to-many: rows whose foreign key equals the id.
        fk_col = f"{self._target}_id"
        if fk_col in columns:
            return self._table.where(fk_col, "==", id)

        # {target}_ids → many-to-many: rows whose id-list contains the id.
        fk_ids_col = f"{self._target}_ids"
        if fk_ids_col in columns:
            result = self._table.df.filter(pl.col(fk_ids_col).list.contains(id))
            return [
                self._table._row_to_dict(row) for row in result.iter_rows(named=True)
            ]

        raise AttributeError(
            f"No relation '{self._target}' found in table '{self._table.name}'. "
            f"Expected column '{fk_col}' or '{fk_ids_col}'."
        )
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""Write Polars DataFrames to JSON and JSON.js files."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import time
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import polars as pl
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def write_table_json(df: pl.DataFrame, path: Path) -> None:
    """Write a DataFrame to a JSON file (array of objects).

    List columns are flattened to comma-separated strings first so the
    on-disk format matches the TypeScript jsonjsdb library.
    """
    prepared_df = _prepare_df_for_write(df)
    rows = _df_to_json_rows(prepared_df)
    # utf-8 is forced: with ensure_ascii=False the output can contain
    # arbitrary Unicode, which the platform default encoding (e.g. cp1252
    # on Windows) may not be able to represent.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(rows, f, indent=2, ensure_ascii=False)
        f.write("\n")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def write_table_jsonjs(df: pl.DataFrame, table_name: str, path: Path) -> None:
    """Write a DataFrame to a JSON.js file (array of arrays format).

    The first row holds the column names; subsequent rows hold values.
    The file assigns the array to jsonjs.data['{table_name}'] so a browser
    can load it via a plain <script> tag.
    """
    prepared_df = _prepare_df_for_write(df)
    columns = prepared_df.columns
    rows: list[list[Any]] = [columns]

    for row in prepared_df.iter_rows():
        rows.append(list(row))

    json_array = json.dumps(rows, ensure_ascii=False, separators=(",", ":"))
    content = f"jsonjs.data['{table_name}'] = {json_array}\n"

    # utf-8 is forced so ensure_ascii=False output is portable across
    # platforms (the locale default encoding may not handle it).
    with open(path, "w", encoding="utf-8") as f:
        f.write(content)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def write_table_index(tables: list[str], path: Path) -> None:
    """Write __table__.json with table metadata.

    Each entry records the table name and a shared 'last_modif' unix
    timestamp; entries are sorted by name for deterministic output.
    """
    now = int(time.time())
    entries = [{"name": name, "last_modif": now} for name in sorted(tables)]

    # utf-8 is forced so ensure_ascii=False output is portable across
    # platforms (the locale default encoding may not handle it).
    with open(path, "w", encoding="utf-8") as f:
        json.dump(entries, f, indent=2, ensure_ascii=False)
        f.write("\n")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _prepare_df_for_write(df: pl.DataFrame) -> pl.DataFrame:
    """Prepare DataFrame for writing: convert List columns to comma-separated strings."""

    def _serialize(name: str) -> pl.Expr:
        dtype = df.schema[name]
        if not isinstance(dtype, pl.List):
            return pl.col(name)
        # Lists are stored on disk as comma-joined strings ("" for null).
        return (
            pl.col(name)
            .cast(pl.List(pl.Utf8))
            .list.join(",")
            .fill_null("")
            .alias(name)
        )

    return df.select([_serialize(name) for name in df.columns])
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _df_to_json_rows(df: pl.DataFrame) -> list[dict[str, Any]]:
    """Convert DataFrame to list of dicts for JSON serialization."""
    # iter_rows(named=True) already yields one dict per row; materialize
    # directly instead of an append loop.
    return list(df.iter_rows(named=True))
|