coffy-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
coffy/__init__.py ADDED
@@ -0,0 +1,2 @@
+ # coffy/__init__.py
+ # author: nsarathy
coffy/graph/__init__.py ADDED
@@ -0,0 +1,4 @@
+ # coffy/graph/__init__.py
+ # author: nsarathy
+
+ from .graphdb_nx import GraphDB
coffy/graph/graphdb_nx.py ADDED
@@ -0,0 +1,99 @@
+ import networkx as nx
+
+ class GraphDB:
+     def __init__(self, directed=False):
+         self.g = nx.DiGraph() if directed else nx.Graph()
+
+     # Node operations
+     def add_node(self, node_id, **attrs):
+         self.g.add_node(node_id, **attrs)
+
+     def add_nodes(self, nodes):
+         for node in nodes:
+             self.add_node(node["id"], **{k: v for k, v in node.items() if k != "id"})
+
+     def get_node(self, node_id):
+         return self.g.nodes[node_id]
+
+     def remove_node(self, node_id):
+         self.g.remove_node(node_id)
+
+     # Relationship (edge) operations
+     def add_relationship(self, source, target, **attrs):
+         self.g.add_edge(source, target, **attrs)
+
+     def add_relationships(self, relationships):
+         for rel in relationships:
+             self.add_relationship(rel["source"], rel["target"],
+                                   **{k: v for k, v in rel.items() if k not in ["source", "target"]})
+
+     def get_relationship(self, source, target):
+         return self.g.get_edge_data(source, target)
+
+     def remove_relationship(self, source, target):
+         self.g.remove_edge(source, target)
+
+     # Basic queries
+     def neighbors(self, node_id):
+         return list(self.g.neighbors(node_id))
+
+     def degree(self, node_id):
+         return self.g.degree[node_id]
+
+     def has_node(self, node_id):
+         return self.g.has_node(node_id)
+
+     def has_relationship(self, u, v):
+         return self.g.has_edge(u, v)
+
+     # Advanced node search
+     def find_nodes(self, **conditions):
+         return [
+             {"id": n, **a} for n, a in self.g.nodes(data=True)
+             if self._match_conditions(a, conditions)
+         ]
+
+     def find_relationships(self, **conditions):
+         return [
+             {"source": u, "target": v, **a} for u, v, a in self.g.edges(data=True)
+             if self._match_conditions(a, conditions)
+         ]
+
+     def _match_conditions(self, attrs, conditions):
+         if not conditions:
+             return True
+         logic = conditions.pop("_logic", "and")
+         results = []
+
+         for key, expected in conditions.items():
+             actual = attrs.get(key)
+             if isinstance(expected, dict):
+                 for op, val in expected.items():
+                     if op == "gt": results.append(actual > val)
+                     elif op == "lt": results.append(actual < val)
+                     elif op == "gte": results.append(actual >= val)
+                     elif op == "lte": results.append(actual <= val)
+                     elif op == "ne": results.append(actual != val)
+                     elif op == "eq": results.append(actual == val)
+                     else: results.append(False)
+             else:
+                 results.append(actual == expected)
+
+         if logic == "or":
+             return any(results)
+         elif logic == "not":
+             return not all(results)
+         return all(results)
+
+     # Export
+     def nodes(self):
+         return [{"id": n, **a} for n, a in self.g.nodes(data=True)]
+
+     def relationships(self):
+         return [{"source": u, "target": v, **a} for u, v, a in self.g.edges(data=True)]
+
+     def to_dict(self):
+         return {
+             "nodes": self.nodes(),
+             "relationships": self.relationships()
+         }
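
The filtering API above accepts either literal attribute values or operator dictionaries (`gt`, `lt`, `gte`, `lte`, `ne`, `eq`) handled by `_match_conditions`. A minimal usage sketch, assuming the wheel is installed as `coffy`; the node ids and attributes are illustrative, and the example is editorial rather than part of the packaged files:

```python
# Minimal sketch of GraphDB filtering (assumes `pip install coffy`); data is illustrative.
from coffy.graph import GraphDB

g = GraphDB(directed=False)
g.add_nodes([
    {"id": "a", "name": "Ada", "age": 36},
    {"id": "b", "name": "Bob", "age": 29},
])
g.add_relationship("a", "b", type="knows", since=2020)

# Operator dictionaries are evaluated per attribute by _match_conditions.
print(g.find_nodes(age={"gte": 30}))                      # [{'id': 'a', 'name': 'Ada', 'age': 36}]
print(g.find_relationships(type="knows", since={"gt": 2019}))
print(g.to_dict())                                        # full export of nodes and relationships
```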
coffy/nosql/__init__.py ADDED
@@ -0,0 +1,7 @@
+ # coffy/nosql/__init__.py
+ # author: nsarathy
+
+ from .engine import CollectionManager
+
+ def db(collection_name: str, path: str = None):
+     return CollectionManager(collection_name, path=path)
coffy/nosql/engine.py ADDED
@@ -0,0 +1,304 @@
+ # coffy/nosql/engine.py
+ # author: nsarathy
+
+ import json
+ import os
+ import re
+
+ class QueryBuilder:
+     def __init__(self, documents, all_collections=None):
+         self.documents = documents
+         self.filters = []
+         self.current_field = None
+         self.all_collections = all_collections or {}
+         self._lookup_done = False
+         self._lookup_results = None
+
+     def where(self, field):
+         self.current_field = field
+         return self
+
+     # Comparison
+     def eq(self, value): return self._add_filter(lambda d: d.get(self.current_field) == value)
+     def ne(self, value): return self._add_filter(lambda d: d.get(self.current_field) != value)
+     def gt(self, value):
+         return self._add_filter(
+             lambda d: isinstance(d.get(self.current_field), (int, float)) and d.get(self.current_field) > value
+         )
+
+     def gte(self, value):
+         return self._add_filter(
+             lambda d: isinstance(d.get(self.current_field), (int, float)) and d.get(self.current_field) >= value
+         )
+
+     def lt(self, value):
+         return self._add_filter(
+             lambda d: isinstance(d.get(self.current_field), (int, float)) and d.get(self.current_field) < value
+         )
+
+     def lte(self, value):
+         return self._add_filter(
+             lambda d: isinstance(d.get(self.current_field), (int, float)) and d.get(self.current_field) <= value
+         )
+
+     def in_(self, values):
+         return self._add_filter(
+             lambda d: d.get(self.current_field) in values
+         )
+
+     def nin(self, values):
+         return self._add_filter(
+             lambda d: d.get(self.current_field) not in values
+         )
+
+     def matches(self, regex): return self._add_filter(lambda d: re.search(regex, str(d.get(self.current_field))))
+
+     def exists(self): return self._add_filter(lambda d: self.current_field in d)
+
+     # Logic grouping
+     def _and(self, *fns):
+         for fn in fns:
+             sub = QueryBuilder(self.documents, self.all_collections)
+             fn(sub)
+             self.filters.append(lambda d, fs=sub.filters: all(f(d) for f in fs))
+         return self
+
+     def _not(self, *fns):
+         for fn in fns:
+             sub = QueryBuilder(self.documents, self.all_collections)
+             fn(sub)
+             self.filters.append(lambda d, fs=sub.filters: not all(f(d) for f in fs))
+         return self
+
+     def _or(self, *fns):
+         chains = []
+         for fn in fns:
+             sub = QueryBuilder(self.documents, self.all_collections)
+             fn(sub)
+             chains.append(sub.filters)
+         self.filters.append(lambda d: any(all(f(d) for f in chain) for chain in chains))
+         return self
+
+     # Add filter
+     def _add_filter(self, fn):
+         negate = getattr(self, "_negate", False)
+         self._negate = False
+         self.filters.append(lambda d: not fn(d) if negate else fn(d))
+         return self
+
+     # Core execution
+     def run(self):
+         results = [doc for doc in self.documents if all(f(doc) for f in self.filters)]
+         if self._lookup_done:
+             results = self._lookup_results
+         return DocList(results)
+
+     def update(self, changes):
+         count = 0
+         for doc in self.documents:
+             if all(f(doc) for f in self.filters):
+                 doc.update(changes)
+                 count += 1
+         return {"updated": count}
+
+     def delete(self):
+         before = len(self.documents)
+         self.documents[:] = [doc for doc in self.documents if not all(f(doc) for f in self.filters)]
+         return {"deleted": before - len(self.documents)}
+
+     def replace(self, new_doc):
+         replaced = 0
+         for i, doc in enumerate(self.documents):
+             if all(f(doc) for f in self.filters):
+                 self.documents[i] = new_doc
+                 replaced += 1
+         return {"replaced": replaced}
+
+     def count(self): return len(self.run())
+     def first(self): return next(iter(self.run()), None)
+
+     # Aggregates
+     def sum(self, field):
+         return sum(doc.get(field, 0) for doc in self.run() if isinstance(doc.get(field), (int, float)))
+
+     def avg(self, field):
+         values = [doc.get(field) for doc in self.run() if isinstance(doc.get(field), (int, float))]
+         return sum(values) / len(values) if values else 0
+
+     def min(self, field):
+         values = [doc.get(field) for doc in self.run() if isinstance(doc.get(field), (int, float))]
+         return min(values) if values else None
+
+     def max(self, field):
+         values = [doc.get(field) for doc in self.run() if isinstance(doc.get(field), (int, float))]
+         return max(values) if values else None
+
+     # Lookup
+     def lookup(self, foreign_collection_name, local_key, foreign_key, as_field):
+         foreign_docs = self.all_collections.get(foreign_collection_name, [])
+         fk_map = {doc[foreign_key]: doc for doc in foreign_docs}
+         enriched = []
+         for doc in self.run():
+             joined = fk_map.get(doc.get(local_key))
+             if joined:
+                 doc = dict(doc)  # copy
+                 doc[as_field] = joined
+             enriched.append(doc)
+         self._lookup_done = True
+         self._lookup_results = enriched
+         return self
+
+     # Merge
+     def merge(self, fn):
+         docs = self._lookup_results if self._lookup_done else self.run()
+         merged = []
+         for doc in docs:
+             new_doc = dict(doc)
+             new_doc.update(fn(doc))
+             merged.append(new_doc)
+         self._lookup_done = True
+         self._lookup_results = merged
+         return self
+
+
+
+ _collection_registry = {}
+
+ class CollectionManager:
+     DEFAULT_DIR = os.path.join(os.getcwd(), "nosql_data")
+
+     def __init__(self, name: str, path: str = None):
+         self.name = name
+         self.in_memory = False
+
+         if path:
+             self.path = path
+         else:
+             os.makedirs(self.DEFAULT_DIR, exist_ok=True)
+             self.path = os.path.join(self.DEFAULT_DIR, f"{name}.json")
+             self.in_memory = True if name == ":memory:" else False
+
+         self.documents = []
+         self._load()
+         _collection_registry[name] = self.documents
+
+     def _load(self):
+         if self.in_memory:
+             self.documents = []
+         else:
+             try:
+                 with open(self.path, 'r', encoding='utf-8') as f:
+                     self.documents = json.load(f)
+             except FileNotFoundError:
+                 self.documents = []
+
+     def _save(self):
+         if not self.in_memory:
+             with open(self.path, 'w', encoding='utf-8') as f:
+                 json.dump(self.documents, f, indent=4)
+
+     def add(self, document: dict):
+         self.documents.append(document)
+         self._save()
+         return {"inserted": 1}
+
+     def add_many(self, docs: list[dict]):
+         self.documents.extend(docs)
+         self._save()
+         return {"inserted": len(docs)}
+
+     def where(self, field):
+         return QueryBuilder(self.documents, all_collections=_collection_registry).where(field)
+
+     def match_any(self, *conditions):
+         q = QueryBuilder(self.documents, all_collections=_collection_registry)
+         return q._or(*conditions)
+
+     def match_all(self, *conditions):
+         q = QueryBuilder(self.documents, all_collections=_collection_registry)
+         return q._and(*conditions)
+
+     def not_any(self, *conditions):
+         q = QueryBuilder(self.documents, all_collections=_collection_registry)
+         return q._not(lambda nq: nq._or(*conditions))
+
+     def lookup(self, *args, **kwargs):
+         return QueryBuilder(self.documents, all_collections=_collection_registry).lookup(*args, **kwargs)
+
+     def merge(self, *args, **kwargs):
+         return QueryBuilder(self.documents, all_collections=_collection_registry).merge(*args, **kwargs)
+
+     def sum(self, field):
+         return QueryBuilder(self.documents).sum(field)
+
+     def avg(self, field):
+         return QueryBuilder(self.documents).avg(field)
+
+     def min(self, field):
+         return QueryBuilder(self.documents).min(field)
+
+     def max(self, field):
+         return QueryBuilder(self.documents).max(field)
+
+     def count(self):
+         return QueryBuilder(self.documents).count()
+
+     def first(self):
+         return QueryBuilder(self.documents).first()
+
+     def clear(self):
+         count = len(self.documents)
+         self.documents = []
+         self._save()
+         return {"cleared": count}
+
+     def export(self, path):
+         with open(path, 'w', encoding='utf-8') as f:
+             json.dump(self.documents, f, indent=4)
+
+     def import_(self, path):
+         with open(path, 'r', encoding='utf-8') as f:
+             self.documents = json.load(f)
+         self._save()
+
+     def all(self): return self.documents
+     def count(self): return len(self.documents)
+
+     def save(self, path: str):
+         with open(path, 'w', encoding='utf-8') as f:
+             json.dump(self.documents, f, indent=4)
+
+     def all_docs(self):
+         return self.documents
+
+ class DocList:
+     def __init__(self, docs: list[dict]):
+         self._docs = docs
+
+     def __iter__(self):
+         return iter(self._docs)
+
+     def __getitem__(self, index):
+         return self._docs[index]
+
+     def __len__(self):
+         return len(self._docs)
+
+     def __repr__(self):
+         if not self._docs:
+             return "<empty result>"
+         keys = list(self._docs[0].keys())
+         header = " | ".join(keys)
+         line = "-+-".join("-" * len(k) for k in keys)
+         rows = []
+         for doc in self._docs:
+             row = " | ".join(str(doc.get(k, "")) for k in keys)
+             rows.append(row)
+         return f"{header}\n{line}\n" + "\n".join(rows)
+
+     def to_json(self, path: str):
+         with open(path, "w", encoding="utf-8") as f:
+             json.dump(self._docs, f, indent=4)
+
+     def as_list(self):
+         return self._docs
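
A minimal usage sketch of the `CollectionManager` / `QueryBuilder` chain above, assuming the wheel is installed as `coffy`; the collection name and JSON file name are illustrative, and the example is editorial rather than part of the packaged files:

```python
# Minimal sketch of the NoSQL engine: filters, logic grouping, and aggregates.
from coffy.nosql import db

users = db("users_demo", path="users_demo.json")   # JSON-backed collection (illustrative path)
users.clear()
users.add_many([
    {"id": 1, "name": "Neel", "age": 30, "city": "Austin"},
    {"id": 2, "name": "Tanaya", "age": 25, "city": "Seattle"},
])

# where(field) starts a QueryBuilder chain; run() returns a DocList.
print(users.where("age").gte(26).run())

# match_any builds an OR over sub-queries expressed as callables.
either = users.match_any(
    lambda q: q.where("city").eq("Austin"),
    lambda q: q.where("age").lt(26),
).run()
print(len(either))                                 # 2

# Aggregates evaluate over the filtered documents.
print(users.where("age").gt(0).avg("age"))         # 27.5
```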
coffy/sql/__init__.py ADDED
@@ -0,0 +1,10 @@
+ # coffy/sql/__init__.py
+ # author: nsarathy
+
+ from .engine import execute_query, initialize
+
+ def init(path: str = None):
+     initialize(path)
+
+ def query(sql: str):
+     return execute_query(sql)
Binary file
coffy/sql/engine.py ADDED
@@ -0,0 +1,33 @@
+ # coffy/sql/engine.py
+ # author: nsarathy
+
+ import sqlite3
+ from .sqldict import SQLDict
+
+ # Internal connection state
+ _connection = None
+ _cursor = None
+
+ def initialize(db_path=None):
+     """Initialize the database connection."""
+     global _connection, _cursor
+     if _connection:
+         return  # already initialized
+     _connection = sqlite3.connect(db_path or ":memory:")  # Uses in-memory DB if no path provided
+     _cursor = _connection.cursor()
+
+ def execute_query(sql: str):
+     if _connection is None:
+         initialize()  # uses in-memory if not initialized
+
+     try:
+         _cursor.execute(sql)
+         if sql.strip().lower().startswith("select"):
+             columns = [desc[0] for desc in _cursor.description]
+             rows = _cursor.fetchall()
+             return SQLDict([dict(zip(columns, row)) for row in rows])
+         else:
+             _connection.commit()
+             return {"status": "success", "rows_affected": _cursor.rowcount}
+     except Exception as e:
+         return {"status": "error", "message": str(e)}
coffy/sql/sqldict.py ADDED
@@ -0,0 +1,58 @@
+ # coffy/sql/sqldict.py
+ # author: nsarathy
+
+ from collections.abc import Sequence
+ import csv
+ import json
+
+ class SQLDict(Sequence):
+     def __init__(self, data):
+         self._data = data if isinstance(data, list) else [data]
+
+     def __getitem__(self, index):
+         return self._data[index]
+
+     def __len__(self):
+         return len(self._data)
+
+     def __repr__(self):
+         if not self._data:
+             return "<empty result>"
+
+         # Get all column names
+         columns = list(self._data[0].keys())
+         col_widths = {col: max(len(col), *(len(str(row[col])) for row in self._data)) for col in columns}
+
+         # Header
+         header = " | ".join(f"{col:<{col_widths[col]}}" for col in columns)
+         line = "-+-".join('-' * col_widths[col] for col in columns)
+
+         # Rows
+         rows = []
+         for row in self._data:
+             row_str = " | ".join(f"{str(row[col]):<{col_widths[col]}}" for col in columns)
+             rows.append(row_str)
+
+         return f"{header}\n{line}\n" + "\n".join(rows)
+
+     def as_list(self):
+         """Access raw list of dicts."""
+         return self._data
+
+     def to_csv(self, path: str):
+         """Write result to a CSV file."""
+         if not self._data:
+             raise ValueError("No data to write.")
+
+         with open(path, mode='w', newline='', encoding='utf-8') as file:
+             writer = csv.DictWriter(file, fieldnames=self._data[0].keys())
+             writer.writeheader()
+             writer.writerows(self._data)
+
+     def to_json(self, path: str):
+         """Write result to a JSON file."""
+         if not self._data:
+             raise ValueError("No data to write.")
+
+         with open(path, mode='w', encoding='utf-8') as file:
+             json.dump(self._data, file, indent=4)
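
A minimal usage sketch tying `initialize`/`execute_query` to the `SQLDict` result type above, via the `init`/`query` helpers exported by `coffy.sql`; the table and output file names are illustrative, and the example is editorial rather than part of the packaged files:

```python
# Minimal sketch of the SQL wrapper: SELECTs return a SQLDict, other statements a status dict.
from coffy.sql import init, query

init()                                              # no path -> in-memory SQLite database
query("CREATE TABLE users (id INTEGER, name TEXT)")
query("INSERT INTO users VALUES (1, 'Neel'), (2, 'Tanaya')")

result = query("SELECT * FROM users")               # SQLDict
print(result)                                       # aligned table via __repr__
print(result.as_list())                             # raw list of dicts

result.to_csv("users.csv")                          # export helpers on SQLDict
result.to_json("users.json")
```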
coffy-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,106 @@
+ Metadata-Version: 2.4
+ Name: coffy
+ Version: 0.1.0
+ Summary: Lightweight local NoSQL, SQL, and Graph embedded database engine
+ Author: nsarathy
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: networkx>=3.0
+ Dynamic: author
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: license-file
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # β˜• Coffy
+
+ **Coffy** is a lightweight embedded database engine for Python, designed for local-first apps, scripts, and tools. It includes:
+
+ - `coffy.nosql`: A simple JSON-backed NoSQL engine with a fluent, chainable query interface
+ - `coffy.sql`: A minimal wrapper over SQLite for executing raw SQL with clean tabular results
+ - `coffy.graph`: An in-memory graph engine built on `networkx` with advanced filtering and logic-based querying
+
+ No dependencies (except `networkx`). No boilerplate. Just data.
+
+ ---
+
+ ## πŸ”§ Install
+
+ ```bash
+ pip install coffy
+ ```
+
+ ---
+
+ ## πŸ“‚ Modules
+
+ ### `coffy.nosql`
+
+ - JSON-based collections with fluent `.where().eq().gt()...` query chaining
+ - Joins, updates, filters, aggregation, export/import
+ - All data saved to human-readable `.json` files
+
+ πŸ“„ [NoSQL Documentation β†’](./NOSQL_DOCS.md)
+
+ ---
+
+ ### `coffy.sql`
+
+ - SQLite-backed engine with raw SQL query support
+ - Outputs as readable tables or exportable lists
+ - Uses in-memory DB by default, or file-based if initialized with a path
+
+ πŸ“„ [SQL Documentation β†’](./SQL_DOCS.md)
+
+ ---
+
+ ### `coffy.graph`
+
+ - Wrapper around `networkx` with simplified node/relationship API
+ - Query nodes and relationships using filters like `gt`, `lt`, `eq`, `or`, `not`
+ - Returns clean structured output, easy to extend
+
+ πŸ“„ [Graph Documentation β†’](./GRAPH_DOCS.md)
+
+ ---
+
+ ## πŸ§ͺ Example
+
+ ```python
+ from coffy.nosql import db
+
+ users = db("users", path="users.json")
+ users.add({"id": 1, "name": "Neel"})
+ print(users.where("name").eq("Neel").first())
+ ```
+
+ ```python
+ from coffy.sql import init, query
+
+ init("app.db")
+ query("CREATE TABLE test (id INT, name TEXT)")
+ query("INSERT INTO test VALUES (1, 'Neel')")
+ print(query("SELECT * FROM test"))
+ ```
+
+ ```python
+ from coffy.graph import GraphDB
+
+ g = GraphDB(directed=True)
+ g.add_nodes([{"id": 1, "name": "Neel"}, {"id": 2, "name": "Tanaya"}])
+ g.add_relationships([{"source": 1, "target": 2, "type": "friend"}])
+ print(g.find_relationships(type="friend"))
+ ```
+
+ ---
+
+ ## πŸ“„ License
+
+ MIT Β© 2025 nsarathy
coffy-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,28 @@
+ coffy/__init__.py,sha256=Q5FwcCZtemm41gSD-n-t9zAipeh5XV7JuoBzEHze7J8,39
+ coffy/__pycache__/__init__.cpython-311.pyc,sha256=Mf7ImWR3gBHSLuRr-Wf__3OsjKGpdH8D5T2qm-em5AM,150
+ coffy/__pycache__/__init__.cpython-312.pyc,sha256=n_nvLEknScZM-FT6aNmDlpZGQd_ZHB3p_zRJirbI_Ic,146
+ coffy/graph/__init__.py,sha256=Z0cIgmxre3YgwBrstubB1PTElP5uatz3ZOEIuW9EwY4,80
+ coffy/graph/graphdb_nx.py,sha256=v-36PXkROACXqOANWGyoKt29TfNTkTS7wNq9EjA7OJg,3242
+ coffy/graph/__pycache__/__init__.cpython-312.pyc,sha256=GCuchZzMG77ILVDNh1wX5eerxwQlrnm0VGNAqjGITXg,199
+ coffy/graph/__pycache__/graphdb_nx.cpython-312.pyc,sha256=tPP_42IXY_lwkqWU0i3Ab58HtQxJCNDkk2mROmllKCM,6632
+ coffy/nosql/__init__.py,sha256=HN9UfUd9pooB7udUxNn3EAttEjGG394fECOXDb6SH60,197
+ coffy/nosql/engine.py,sha256=-rXYu4Ps6P_6C8nIbEpGRuDeJgAi33_FebGZNlM4jsg,10103
+ coffy/nosql/__pycache__/__init__.cpython-311.pyc,sha256=0dDD3adswuWrOOUVE_2KjO80zmOHkPAjW7pxMg4fukk,463
+ coffy/nosql/__pycache__/__init__.cpython-312.pyc,sha256=NdU26hkvPHawUktm4TUoEirspFgWDMaixaQV7cGAYIc,409
+ coffy/nosql/__pycache__/engine.cpython-311.pyc,sha256=lJm3ZWQbStmBV1p9pmXdrIBcf2PMPM0SGozUEHdInA8,27471
+ coffy/nosql/__pycache__/engine.cpython-312.pyc,sha256=YCxgeof-6PE5SlrmDLyYLu8967UXN42vpciaUAM5XCg,24241
+ coffy/sql/__init__.py,sha256=dXoCW3Qyk3WoYl-y2gHCc8YK6dAUt0Eaqhtk3PEDHMQ,202
+ coffy/sql/engine.py,sha256=q9A7zvg9JANpWAFvbw8wfGY5BTkvObpM3eoQ6dZYu_8,1065
+ coffy/sql/sqldict.py,sha256=0IxlCz1hi1zj5Q4_8If9YXwQBZGczpMir7PMXBOxbjs,1837
+ coffy/sql/__pycache__/__init__.cpython-311.pyc,sha256=6kqiDVzq_X9yFEMupVrLUCQL3JinwHVxDFXJSrme3pI,628
+ coffy/sql/__pycache__/__init__.cpython-312.pyc,sha256=jB-bTlCkiV0dNzISmDoBcqf861iQF6q2O14ylDO39yw,535
+ coffy/sql/__pycache__/engine.cpython-311.pyc,sha256=IbSKkWjikTnkXhGDgqJHo2n49SREzRULeUXQfAcFt_Q,2239
+ coffy/sql/__pycache__/engine.cpython-312.pyc,sha256=fKlvycKKYZOslQd6SDHexrWV_NDcPoqNQWoiS8do49w,1746
+ coffy/sql/__pycache__/io.cpython-312.pyc,sha256=TPkeJ3qUE_ZcvcykGIf-Yyc0__5FZKB1J95_2yTFrXY,2517
+ coffy/sql/__pycache__/sqldict.cpython-311.pyc,sha256=jJQDFH9ULzi2ay4pyXXye-W_PSjxuT_ULb20CdL8Ec0,5131
+ coffy/sql/__pycache__/sqldict.cpython-312.pyc,sha256=T4P9qMjH7dHqDTp3gzZQbhpdpfgdopmC3ccm8O8gooc,4339
+ coffy-0.1.0.dist-info/licenses/LICENSE,sha256=iRyxG6b7B-JPKuOcS7w5lDhrL0AD9fFSDUh6-KMKDO8,1068
+ coffy-0.1.0.dist-info/METADATA,sha256=swg1zpj6plEIbtzR5O7p6CzGxR4P5_3nVk1WSRKkyo4,2725
+ coffy-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ coffy-0.1.0.dist-info/top_level.txt,sha256=J1zGvXA_bfET3PsE4-qbFrtFMIN3bJNxBRMHKk6YIv0,6
+ coffy-0.1.0.dist-info/RECORD,,
coffy-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
coffy-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,7 @@
+ Copyright 2025 Neelesh Sarathy
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
coffy-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ coffy