idli 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
idli-0.1.0/.gitignore ADDED
@@ -0,0 +1,5 @@
1
+ venv/
2
+ __pycache__/
3
+
4
+ .env
5
+ tinker.py
idli-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,164 @@
1
+ Metadata-Version: 2.4
2
+ Name: idli
3
+ Version: 0.1.0
4
+ Summary: Nag-free PostgreSQL ORM
5
+ Project-URL: Homepage, https://github.com/kdqed/zaturn
6
+ Project-URL: Issues, https://github.com/kdqed/zaturn/issues
7
+ Requires-Python: >=3.13
8
+ Description-Content-Type: text/markdown
9
+ Requires-Dist: psycopg-pool>=3.2.8
10
+ Requires-Dist: psycopg[binary]>=3.2.13
11
+
12
+ # Coming Soon: A Python ORM That Treats You Like It's 2026
13
+
14
+ Alchemy is complex, but Idli is simple.
15
+
16
+ ## Have Some Idli
17
+
18
+ (Doesn't work yet)
19
+
20
+ ```bash
21
+ $ pip install idli
22
+ ```
23
+
24
+ ```bash
25
+ $ uv add idli
26
+ ```
27
+
28
+ ## Goals
29
+
30
+ - Act as a simple data persistence & query layer for simple Python apps (typical CRUD apps).
31
+ - Primarily support solo devs and small teams who iterate fast. Not intended for those who need 4 approvals to create a new column.
32
+ - Be as declarative as possible.
33
+ - Keep your data layer code minimal and elegant.
34
+ - Manage as much database administration as possible within the application.
35
+ - Handle migrations natively, with least nagging.
36
+ - Be framework agnostic.
37
+ - Support both `sync` and `async`.
38
+ - Be well documented.
39
+
40
+ ## What Could It Be Like?
41
+
42
+ ```python
43
+ from datetime import datetime
44
+ import uuid
45
+
46
+ from idli import connect
47
+
48
+ db = connect('postgresql://user:pwd@localhost/somedb')
49
+
50
+
51
+ @db.Model
52
+ class Task:
53
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
54
+ title: str
55
+ description: str | None # nullable column because None is an allowed type
56
+ status: str = 'todo' # initialize with default value as 'todo'
57
+ created: datetime = datetime.now
58
+ updated: datetime | None
59
+
60
+
61
+ task = Task(
62
+ title = "Ship this ORM",
63
+ description = "Before the year ends",
64
+ )
65
+ task.save()
66
+
67
+ # tomorrow
68
+ task = Task.select(title = "Ship this ORM").one()
69
+ task.status = 'doing'
70
+ task.save()
71
+
72
+ # few weeks later
73
+ task = Task.select(title = "Ship this ORM").one()
74
+ task.update(status = 'done') # using .update will update the status and save it.
75
+
76
+ # next year
77
+ pending_tasks = Task.select(status__neq='done')
78
+ for task in pending_tasks:
79
+ print(task.title, ',', 'Pending Since:', datetime.now()-task.created)
80
+
81
+ ```
82
+
83
+ ## Migrations
84
+
85
+ Apart from Django ORM, there is no other Python ORM that I know of that handles database migrations natively. Even with that, I'm too lazy to 'make migrations', check them into my VCS, run them, etc. It's okay to be lazy and prioritize other things in life. Hence, Idli will support auto-migrations for non-destructive migrations. Destructive migrations will have to be done by hand. Suppose the above data model has to be extended a few days later:
86
+
87
+ ```python
88
+ from datetime import datetime
89
+ import uuid
90
+
91
+ from idli import connect, PrimaryKey, Index
92
+
93
+ db = connect(
94
+ 'postgresql://user:pwd@localhost/somedb',
95
+ sambar_dip = True # this will automatically create tables, columns, and indexes defined below
96
+ )
97
+
98
+
99
+ @db.Model
100
+ class Task:
101
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
102
+ title: str
103
+ description: str | None # nullable column because None is an allowed type
104
+ status: str = 'todo' # initialize with default value as 'todo'
105
+ created: datetime = datetime.now
106
+ updated: datetime | None
107
+ owner: User # newly created column referencing another table
108
+
109
+ __idli__ = [
110
+ Index('owner', '-created') # newly created Index
111
+ ]
112
+
113
+
114
+ @db.Model
115
+ class User: # new table
116
+ username: str
117
+ full_name: str
118
+ email: str
119
+ send_task_reminders: bool = False
120
+
121
+ __idli__ = [
122
+ PrimaryKey('username')
123
+ ]
124
+ ```
125
+
126
+ This is inspired by GORM for Golang:
127
+
128
+ > NOTE: AutoMigrate will create tables, missing foreign keys, constraints, columns and indexes. It will change existing column’s type if its size, precision changed, or if it’s changing from non-nullable to nullable. It WON’T delete unused columns to protect your data.
129
+ > \- (from GORM docs: https://gorm.io/docs/migration.html)
130
+
131
+
132
+ ## Async
133
+
134
+ ```python
135
+ import asyncio
136
+ from datetime import datetime
137
+ import uuid
138
+
139
+ from idli import async_connect
140
+
141
+ db = async_connect('postgresql://user:pwd@localhost/somedb')
142
+
143
+
144
+ @db.Model
145
+ class Task:
146
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
147
+ title: str
148
+ description: str | None # nullable column because None is an allowed type
149
+ status: str = 'todo' # initialize with default value as 'todo'
150
+ created: datetime = datetime.now
151
+ updated: datetime | None
152
+
153
+
154
+ async def main():
155
+ task = Task(
156
+ title = "Ship this ORM",
157
+ description = "Before the year ends",
158
+ )
159
+ await task.save()
160
+
161
+
162
+ asyncio.run(main())
163
+ ```
164
+
idli-0.1.0/README.md ADDED
@@ -0,0 +1,153 @@
1
+ # Coming Soon: A Python ORM That Treats You Like It's 2026
2
+
3
+ Alchemy is complex, but Idli is simple.
4
+
5
+ ## Have Some Idli
6
+
7
+ (Doesn't work yet)
8
+
9
+ ```bash
10
+ $ pip install idli
11
+ ```
12
+
13
+ ```bash
14
+ $ uv add idli
15
+ ```
16
+
17
+ ## Goals
18
+
19
+ - Act as a simple data persistence & query layer for simple Python apps (typical CRUD apps).
20
+ - Primarily support solo devs and small teams who iterate fast. Not intended for those who need 4 approvals to create a new column.
21
+ - Be as declarative as possible.
22
+ - Keep your data layer code minimal and elegant.
23
+ - Manage as much database administration as possible within the application.
24
+ - Handle migrations natively, with least nagging.
25
+ - Be framework agnostic.
26
+ - Support both `sync` and `async`.
27
+ - Be well documented.
28
+
29
+ ## What Could It Be Like?
30
+
31
+ ```python
32
+ from datetime import datetime
33
+ import uuid
34
+
35
+ from idli import connect
36
+
37
+ db = connect('postgresql://user:pwd@localhost/somedb')
38
+
39
+
40
+ @db.Model
41
+ class Task:
42
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
43
+ title: str
44
+ description: str | None # nullable column because None is an allowed type
45
+ status: str = 'todo' # initialize with default value as 'todo'
46
+ created: datetime = datetime.now
47
+ updated: datetime | None
48
+
49
+
50
+ task = Task(
51
+ title = "Ship this ORM",
52
+ description = "Before the year ends",
53
+ )
54
+ task.save()
55
+
56
+ # tomorrow
57
+ task = Task.select(title = "Ship this ORM").one()
58
+ task.status = 'doing'
59
+ task.save()
60
+
61
+ # few weeks later
62
+ task = Task.select(title = "Ship this ORM").one()
63
+ task.update(status = 'done') # using .update will update the status and save it.
64
+
65
+ # next year
66
+ pending_tasks = Task.select(status__neq='done')
67
+ for task in pending_tasks:
68
+ print(task.title, ',', 'Pending Since:', datetime.now()-task.created)
69
+
70
+ ```
71
+
72
+ ## Migrations
73
+
74
+ Apart from Django ORM, there is no other Python ORM that I know of that handles database migrations natively. Even with that, I'm too lazy to 'make migrations', check them into my VCS, run them, etc. It's okay to be lazy and prioritize other things in life. Hence, Idli will support auto-migrations for non-destructive migrations. Destructive migrations will have to be done by hand. Suppose the above data model has to be extended a few days later:
75
+
76
+ ```python
77
+ from datetime import datetime
78
+ import uuid
79
+
80
+ from idli import connect, PrimaryKey, Index
81
+
82
+ db = connect(
83
+ 'postgresql://user:pwd@localhost/somedb',
84
+ sambar_dip = True # this will automatically create tables, columns, and indexes defined below
85
+ )
86
+
87
+
88
+ @db.Model
89
+ class Task:
90
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
91
+ title: str
92
+ description: str | None # nullable column because None is an allowed type
93
+ status: str = 'todo' # initialize with default value as 'todo'
94
+ created: datetime = datetime.now
95
+ updated: datetime | None
96
+ owner: User # newly created column referencing another table
97
+
98
+ __idli__ = [
99
+ Index('owner', '-created') # newly created Index
100
+ ]
101
+
102
+
103
+ @db.Model
104
+ class User: # new table
105
+ username: str
106
+ full_name: str
107
+ email: str
108
+ send_task_reminders: bool = False
109
+
110
+ __idli__ = [
111
+ PrimaryKey('username')
112
+ ]
113
+ ```
114
+
115
+ This is inspired by GORM for Golang:
116
+
117
+ > NOTE: AutoMigrate will create tables, missing foreign keys, constraints, columns and indexes. It will change existing column’s type if its size, precision changed, or if it’s changing from non-nullable to nullable. It WON’T delete unused columns to protect your data.
118
+ > \- (from GORM docs: https://gorm.io/docs/migration.html)
119
+
120
+
121
+ ## Async
122
+
123
+ ```python
124
+ import asyncio
125
+ from datetime import datetime
126
+ import uuid
127
+
128
+ from idli import async_connect
129
+
130
+ db = async_connect('postgresql://user:pwd@localhost/somedb')
131
+
132
+
133
+ @db.Model
134
+ class Task:
135
+ id: uuid.UUID = uuid.uuid7 # initialize with function value
136
+ title: str
137
+ description: str | None # nullable column because None is an allowed type
138
+ status: str = 'todo' # initialize with default value as 'todo'
139
+ created: datetime = datetime.now
140
+ updated: datetime | None
141
+
142
+
143
+ async def main():
144
+ task = Task(
145
+ title = "Ship this ORM",
146
+ description = "Before the year ends",
147
+ )
148
+ await task.save()
149
+
150
+
151
+ asyncio.run(main())
152
+ ```
153
+
@@ -0,0 +1,7 @@
1
+ from idli.connection import Connection
2
+ from idli.helpers import (
3
+ AutoInt,
4
+ AutoUUID,
5
+ BTreeIndex,
6
+ PrimaryKey,
7
+ )
@@ -0,0 +1,220 @@
1
+ import atexit
2
+ import inspect
3
+ import re
4
+ from typing import Optional, Union, get_args, get_type_hints
5
+
6
+ import psycopg
7
+ from psycopg.rows import dict_row
8
+ from psycopg_pool import ConnectionPool
9
+
10
+ from idli import model_methods
11
+ from idli import sql_factory
12
+ from idli.errors import *
13
+ from idli.helpers import *
14
+ from idli.internal import Column, Table
15
+
16
+
17
class Connection:
    """Pool-backed PostgreSQL connection that registers Idli models.

    Decorating a class with :meth:`Model` maps it to a table. When
    ``sambar_dip`` is True, non-destructive auto-migration is performed:
    missing tables, columns, primary keys, and indexes are created.
    With ``sambar_dip=False`` any mismatch between the model and the
    database raises instead.
    """

    def __init__(self, db_uri: str, sambar_dip: bool = False):
        # Open the pool eagerly and make sure it is closed at exit.
        self._pool = ConnectionPool(db_uri, open=True)
        atexit.register(self._pool.close)

        self._sambar_dip = sambar_dip

        # Snapshot the live schema once so models can be reconciled
        # against it when they are decorated.
        self.load_tables()
        self.load_columns()
        self.load_indexes()

    def exec_sql(self, *args):
        """Execute a statement on a pooled connection; return its cursor."""
        with self._pool.connection() as conn:
            return conn.execute(*args)

    def exec_sql_to_dict_rows(self, *args):
        """Execute a statement and return a cursor yielding dict rows.

        NOTE(review): callers fetch from this cursor after the pooled
        connection has been returned; this relies on psycopg's default
        client-side cursor loading the full result set during execute().
        """
        with self._pool.connection() as conn:
            cur = conn.cursor(row_factory=dict_row)
            return cur.execute(*args)

    def load_tables(self):
        """Cache existing table names as a {name: Table} mapping."""
        rows = self.exec_sql_to_dict_rows(sql_factory.list_tables()).fetchall()
        self.__db_tables__ = {row['table_name']: Table(row['table_name']) for row in rows}

    def load_columns(self):
        """Attach existing columns to their cached Table objects."""
        rows = self.exec_sql_to_dict_rows(sql_factory.list_columns()).fetchall()
        for row in rows:
            table = self.__db_tables__.get(row['table_name'])
            if table is not None:
                table.add_column(Column.from_db_row(**row))

    def load_indexes(self):
        """Cache existing indexes keyed by index name."""
        rows = self.exec_sql_to_dict_rows(sql_factory.list_indexes()).fetchall()
        self.__db_indexes__ = {row['indexname']: row for row in rows}

    def _ensure_table(self, cls):
        """Create the model's table if missing, or raise when not allowed."""
        table_name = cls.__table__.name
        if table_name not in self.__db_tables__:
            if self._sambar_dip:
                self.exec_sql(sql_factory.create_table(table_name))
                self.__db_tables__[table_name] = Table(table_name)
            else:
                # BUGFIX: previously interpolated the nonexistent
                # cls.__tablename__ attribute, which raised
                # AttributeError instead of TableNotFoundError.
                raise TableNotFoundError(
                    f'Table {table_name} for model {cls.__name__} does not exist on database'
                )

    def _build_column_model(self, cls):
        """Translate the class's type hints into Column objects.

        ``X | None`` annotations become nullable columns; a class-level
        attribute value becomes the column default (or default factory).
        """
        for key, val in get_type_hints(cls).items():
            if key.startswith('__'):
                # Dunder entries like __idli__ are directives, not columns.
                continue

            type_args = get_args(val)
            if type_args:
                if len(type_args) == 1:
                    col_class = type_args[0]
                    nullable = False
                elif len(type_args) == 2 and type_args[0] is type(None):
                    col_class = type_args[1]
                    nullable = True
                elif len(type_args) == 2 and type_args[1] is type(None):
                    col_class = type_args[0]
                    nullable = True
                else:
                    # BUGFIX: unions of two non-None types (e.g. `int | str`)
                    # previously fell through and raised UnboundLocalError.
                    raise InvalidColumnTypeError(
                        f"Unsupported type annotation for column '{key}': {val}"
                    )
            else:
                col_class = val
                nullable = False

            # NOTE(review): getattr default of None cannot distinguish
            # "no default" from an explicit None default.
            default = getattr(cls, key, None)

            cls.__table__.add_column(Column.from_py_model(
                table_name = cls.__table__.name,
                name = key,
                column_class = col_class,
                nullable = nullable,
                default = default,
            ))

    def _reconcile_columns(self, cls):
        """Compare model columns with the database; migrate or raise."""
        # Loop-invariant: the db-side table is the same for every column.
        db_table = self.__db_tables__[cls.__table__.name]

        for column in cls.__table__.columns.values():
            db_column = db_table.columns.get(column.name)

            if db_column is None:
                # Column is new: create it, or refuse without sambar_dip.
                if self._sambar_dip:
                    self.exec_sql(sql_factory.create_column(column))
                else:
                    raise ColumnNotFoundError(f"Column '{column.name}' does not exist in table '{cls.__table__.name}'")
                continue

            # Type changes are never auto-migrated.
            if db_column.column_type != column.column_type:
                raise ColumnTypeMismatchError(f"Column '{column.name}' is type '{db_column.column_type}' on database")

            if db_column.nullable == False and column.nullable == True:
                if self._sambar_dip:
                    self.exec_sql(sql_factory.make_column_nullable(column))
                else:
                    raise ColumnNotNullableError(f"Changing column '{column.name}' to nullable is not supported with sambar_dip=False")
            if db_column.nullable == True and column.nullable == False:
                # Potentially destructive (existing NULL rows) -> never automatic.
                raise ColumnNullableError(f"Changing column '{column.name}' to not nullable is not supported")

            if db_column.default != column.default:
                if self._sambar_dip:
                    self.exec_sql(sql_factory.set_default_column_value(column))
                else:
                    raise ColumnDefaultMismatchError(f"Defined default value for column '{column.name}' does not match with the database")

    def _handle_directives(self, cls):
        """Read the optional __idli__ list for primary-key and index directives."""
        cls.__primary_key__ = ['id']  # default PK when no PrimaryKey directive
        cls.__indexes__ = {}

        for directive in getattr(cls, '__idli__', []):
            if type(directive) is PrimaryKey:
                cls.__primary_key__ = directive.columns
            if type(directive) is BTreeIndex:
                cls.__indexes__[f'{cls.__table__.name}_{directive.name_hash}'] = directive

    def _reconcile_primary_key(self, cls):
        """Create or replace the table's primary key to match the model."""
        defined_pk_columns = cls.__primary_key__

        constraint = self.exec_sql_to_dict_rows(
            sql_factory.get_primary_key_constraint_name(cls.__table__.name),
        ).fetchall()

        if not constraint:
            # No PK on the table yet: just create the defined one.
            self.exec_sql(sql_factory.create_primary_key(
                table_name = cls.__table__.name,
                columns = defined_pk_columns,
            ))
            return

        constraint_name = constraint[0]['constraint_name']

        rows = self.exec_sql_to_dict_rows(
            sql_factory.get_primary_key_columns(constraint_name)
        ).fetchall()
        existing_pk_columns = [row['column_name'] for row in rows]

        # Order matters for composite keys, so compare as sequences.
        if defined_pk_columns != existing_pk_columns:
            self.exec_sql(sql_factory.drop_constraint(
                table_name = cls.__table__.name,
                constraint_name = constraint_name,
            ))
            self.exec_sql(sql_factory.create_primary_key(
                table_name = cls.__table__.name,
                columns = defined_pk_columns,
            ))

    def _reconcile_indexes(self, cls):
        """Create any declared indexes that the database does not have yet."""
        for name, idx in cls.__indexes__.items():
            if name in self.__db_indexes__:
                continue
            if type(idx) is BTreeIndex:
                self.exec_sql(sql_factory.create_btree_index(
                    table_name = cls.__table__.name,
                    columns = idx.columns,
                    index_name = name,
                ))

    def Model(self, cls):
        """Class decorator: map *cls* to a table and attach ORM methods.

        The table name is the snake_case form of the class name.
        """
        # CamelCase -> snake_case (two passes handle acronym runs).
        s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', cls.__name__)
        s2 = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1)
        cls.__table__ = Table(s2.lower())

        self._ensure_table(cls)
        self._build_column_model(cls)
        self._reconcile_columns(cls)
        self._handle_directives(cls)
        self._reconcile_primary_key(cls)
        self._reconcile_indexes(cls)

        # Instance-level persistence methods.
        cls._connection = self
        cls.__init__ = model_methods.__init__
        cls._save_existing = model_methods._save_existing
        cls._save_new = model_methods._save_new
        cls.delete = model_methods.delete
        cls.save = model_methods.save
        cls.update = model_methods.update

        # Class-level query methods.
        cls._obj_from_dict = classmethod(model_methods._obj_from_dict)
        cls.count = classmethod(model_methods.count)
        cls.select = classmethod(model_methods.select)

        return cls
@@ -0,0 +1,27 @@
1
class CannotBeNoneError(Exception):
    """A required (non-nullable) value was None."""


class ColumnDefaultMismatchError(Exception):
    """Model's declared column default does not match the database default."""


class ColumnNotFoundError(Exception):
    """Model declares a column that does not exist in the database table."""


# BUGFIX: this class was previously a second definition of
# ColumnNullableError (defined twice in a row), while
# ColumnNotNullableError — which Connection._reconcile_columns raises —
# was never defined, so raising it produced a NameError.
class ColumnNotNullableError(Exception):
    """Changing a column to nullable requires sambar_dip=True."""


class ColumnNullableError(Exception):
    """Changing a nullable column to NOT NULL is never done automatically."""


class ColumnTypeMismatchError(Exception):
    """Model's declared column type does not match the database type."""


class InvalidColumnTypeError(Exception):
    """A column annotation cannot be mapped to a database type."""


class InvalidValueTypeError(Exception):
    """A value's type does not match its column's declared type."""


class TableNotFoundError(Exception):
    """Model's table does not exist and sambar_dip is False."""
27
+
@@ -0,0 +1,28 @@
1
+
2
+
3
class AutoInt:
    """Marker class for column declarations.

    NOTE(review): presumably flags an auto-incrementing integer column;
    not referenced in the visible code — confirm intended semantics.
    """
5
+
6
+
7
class AutoUUID:
    """Marker class for column declarations.

    NOTE(review): presumably flags an auto-generated UUID column;
    not referenced in the visible code — confirm intended semantics.
    """
9
+
10
+
11
class BTreeIndex:
    """Declares a B-tree index over the given column names.

    A leading ``-`` on a column name marks descending order
    (e.g. ``BTreeIndex('owner', '-created')``).
    """

    def __init__(self, *columns):
        # Preserve declaration order; order is significant for the index.
        self.columns = list(columns)

    @property
    def name_hash(self):
        """Deterministic name fragment for this index.

        Each column contributes an 'a' (ascending) or 'd' (descending)
        prefix plus its bare name; parts are joined with '_' and the
        suffix '_btree' is appended.
        """
        encoded = []
        for col in self.columns:
            if col.startswith('-'):
                encoded.append('d' + col[1:])
            else:
                encoded.append('a' + col)
        return '_'.join(encoded) + '_btree'
23
+
24
+
25
class PrimaryKey:
    """Declares which column(s) form the table's primary key.

    Column order is preserved, which matters for composite keys.
    """

    def __init__(self, *columns):
        self.columns = [name for name in columns]