hammad-python 0.0.11__py3-none-any.whl → 0.0.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. hammad/__init__.py +169 -56
  2. hammad/_core/__init__.py +1 -0
  3. hammad/_core/_utils/__init__.py +4 -0
  4. hammad/_core/_utils/_import_utils.py +182 -0
  5. hammad/ai/__init__.py +59 -0
  6. hammad/ai/_utils.py +142 -0
  7. hammad/ai/completions/__init__.py +44 -0
  8. hammad/ai/completions/client.py +729 -0
  9. hammad/ai/completions/create.py +686 -0
  10. hammad/ai/completions/types.py +711 -0
  11. hammad/ai/completions/utils.py +374 -0
  12. hammad/ai/embeddings/__init__.py +35 -0
  13. hammad/ai/embeddings/client/__init__.py +1 -0
  14. hammad/ai/embeddings/client/base_embeddings_client.py +26 -0
  15. hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +200 -0
  16. hammad/ai/embeddings/client/litellm_embeddings_client.py +288 -0
  17. hammad/ai/embeddings/create.py +159 -0
  18. hammad/ai/embeddings/types.py +69 -0
  19. hammad/base/__init__.py +35 -0
  20. hammad/{based → base}/fields.py +23 -23
  21. hammad/{based → base}/model.py +124 -14
  22. hammad/base/utils.py +280 -0
  23. hammad/cache/__init__.py +30 -12
  24. hammad/cache/base_cache.py +181 -0
  25. hammad/cache/cache.py +169 -0
  26. hammad/cache/decorators.py +261 -0
  27. hammad/cache/file_cache.py +80 -0
  28. hammad/cache/ttl_cache.py +74 -0
  29. hammad/cli/__init__.py +10 -2
  30. hammad/cli/{styles/animations.py → animations.py} +79 -23
  31. hammad/cli/{plugins/__init__.py → plugins.py} +85 -90
  32. hammad/cli/styles/__init__.py +50 -0
  33. hammad/cli/styles/settings.py +4 -0
  34. hammad/configuration/__init__.py +35 -0
  35. hammad/{data/types/files → configuration}/configuration.py +96 -7
  36. hammad/data/__init__.py +14 -26
  37. hammad/data/collections/__init__.py +4 -2
  38. hammad/data/collections/collection.py +300 -75
  39. hammad/data/collections/vector_collection.py +118 -12
  40. hammad/data/databases/__init__.py +2 -2
  41. hammad/data/databases/database.py +383 -32
  42. hammad/json/__init__.py +2 -2
  43. hammad/logging/__init__.py +13 -5
  44. hammad/logging/decorators.py +404 -2
  45. hammad/logging/logger.py +442 -22
  46. hammad/multimodal/__init__.py +24 -0
  47. hammad/{data/types/files → multimodal}/audio.py +21 -6
  48. hammad/{data/types/files → multimodal}/image.py +5 -5
  49. hammad/multithreading/__init__.py +304 -0
  50. hammad/pydantic/__init__.py +2 -2
  51. hammad/pydantic/converters.py +1 -1
  52. hammad/pydantic/models/__init__.py +2 -2
  53. hammad/text/__init__.py +59 -14
  54. hammad/text/converters.py +723 -0
  55. hammad/text/{utils/markdown/formatting.py → markdown.py} +25 -23
  56. hammad/text/text.py +12 -14
  57. hammad/types/__init__.py +11 -0
  58. hammad/{data/types/files → types}/file.py +18 -18
  59. hammad/typing/__init__.py +138 -84
  60. hammad/web/__init__.py +3 -2
  61. hammad/web/models.py +245 -0
  62. hammad/web/search/client.py +75 -23
  63. hammad/web/utils.py +14 -5
  64. hammad/yaml/__init__.py +2 -2
  65. hammad/yaml/converters.py +1 -1
  66. {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/METADATA +4 -1
  67. hammad_python-0.0.13.dist-info/RECORD +85 -0
  68. hammad/based/__init__.py +0 -52
  69. hammad/based/utils.py +0 -455
  70. hammad/cache/_cache.py +0 -746
  71. hammad/data/types/__init__.py +0 -33
  72. hammad/data/types/files/__init__.py +0 -1
  73. hammad/data/types/files/document.py +0 -195
  74. hammad/text/utils/__init__.py +0 -1
  75. hammad/text/utils/converters.py +0 -229
  76. hammad/text/utils/markdown/__init__.py +0 -1
  77. hammad/text/utils/markdown/converters.py +0 -506
  78. hammad_python-0.0.11.dist-info/RECORD +0 -65
  79. {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/WHEEL +0 -0
  80. {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/licenses/LICENSE +0 -0
hammad/data/databases/database.py CHANGED
@@ -6,6 +6,7 @@ from typing import (
     Dict,
     Optional,
     List,
+    Literal,
     TypeVar,
     Generic,
     Callable,
@@ -14,6 +15,25 @@ from typing import (
     TYPE_CHECKING,
 )
 from datetime import datetime, timezone, timedelta
+import json
+import os
+
+try:
+    from sqlalchemy import (
+        create_engine,
+        Column,
+        String,
+        Text,
+        DateTime,
+        Integer,
+        MetaData,
+        Table,
+    )
+    from sqlalchemy.orm import sessionmaker, declarative_base
+    from sqlalchemy.sql import select, insert, update, delete
+except ImportError:
+    # SQLAlchemy not available - file storage will not work
+    create_engine = None
 
 from ..collections.base_collection import BaseCollection, Filters, Schema
 from ..collections.collection import create_collection
@@ -26,6 +46,8 @@ __all__ = ("Database",)
 
 DatabaseEntryType = TypeVar("DatabaseEntryType", bound=Any)
 
+DatabaseLocation = Literal["memory", "file"]
+
 
 class Database(Generic[DatabaseEntryType]):
     """
@@ -40,15 +62,22 @@ class Database(Generic[DatabaseEntryType]):
     - TTL support and filtering
     """
 
-    def __init__(self, location: str = "memory", default_ttl: Optional[int] = None):
+    def __init__(
+        self,
+        location: DatabaseLocation = "memory",
+        path: str = "database.db",
+        default_ttl: Optional[int] = None,
+    ):
         """
         Initialize the database.
 
         Args:
-            location: Storage location ("memory" for in-memory, or path for persistent)
+            location: Storage location ("memory" for in-memory, "file" for persistent)
+            path: Path to the database file when using "file" location (default: "database.db")
             default_ttl: Default TTL for items in seconds
         """
         self.location = location
+        self.path = path
         self.default_ttl = default_ttl
 
         # Storage for traditional collections
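The constructor change above replaces the old free-form `location` string with a `Literal["memory", "file"]` plus a separate SQLite `path`. A minimal usage sketch of the new signature (the import path follows this module's location in the wheel; the package may also re-export `Database` elsewhere):

```python
from hammad.data.databases.database import Database

# In-memory database, same behavior as the 0.0.11 default.
mem_db = Database(default_ttl=3600)

# File-backed database persisted via SQLAlchemy to SQLite.
file_db = Database(location="file", path="data/app.db", default_ttl=3600)

# Because `location` is now a Literal, type checkers can flag typos
# such as Database(location="disk") before runtime.
print(file_db)  # repr now includes the file path for file-backed databases
```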
@@ -59,11 +88,208 @@ class Database(Generic[DatabaseEntryType]):
         # Registry for modern collections (searchable/vector)
         self._collections: Dict[str, BaseCollection] = {}
 
+        # SQLAlchemy setup for file storage
+        self._engine = None
+        self._SessionLocal = None
+        self._metadata = None
+        self._tables: Dict[str, Table] = {}
+
+        if location == "file":
+            self._init_file_storage()
+
+    def _init_file_storage(self) -> None:
+        """Initialize SQLAlchemy for file-based storage."""
+        if create_engine is None:
+            raise ImportError(
+                "SQLAlchemy is required for file storage. "
+                "Install with: pip install sqlalchemy"
+            )
+
+        # Create database directory if it doesn't exist
+        db_dir = os.path.dirname(os.path.abspath(self.path))
+        if db_dir and not os.path.exists(db_dir):
+            os.makedirs(db_dir)
+
+        # Create SQLAlchemy engine
+        self._engine = create_engine(f"sqlite:///{self.path}", echo=False)
+        self._SessionLocal = sessionmaker(bind=self._engine)
+        self._metadata = MetaData()
+
+        # Create default table
+        self._create_collection_table("default")
+
+    def _create_collection_table(self, collection_name: str) -> Table:
+        """Create a table for a collection."""
+        if collection_name in self._tables:
+            return self._tables[collection_name]
+
+        table = Table(
+            f"collection_{collection_name}",
+            self._metadata,
+            Column("id", String, primary_key=True),
+            Column("value", Text),
+            Column("filters", Text),
+            Column("created_at", DateTime),
+            Column("updated_at", DateTime),
+            Column("expires_at", DateTime, nullable=True),
+        )
+
+        self._tables[collection_name] = table
+
+        # Create table in database
+        if self._engine:
+            self._metadata.create_all(self._engine)
+
+        return table
+
+    def _get_from_file(
+        self, id: str, collection: str, filters: Optional[Filters] = None
+    ) -> Optional[DatabaseEntryType]:
+        """Get an item from file storage."""
+        if collection not in self._schemas:
+            return None
+
+        table = self._tables.get(collection)
+        if table is None:
+            return None
+
+        with self._SessionLocal() as session:
+            stmt = select(table).where(table.c.id == id)
+            result = session.execute(stmt).fetchone()
+
+            if not result:
+                return None
+
+            # Check expiration
+            if result.expires_at and self._is_expired(result.expires_at):
+                # Delete expired item
+                delete_stmt = delete(table).where(table.c.id == id)
+                session.execute(delete_stmt)
+                session.commit()
+                return None
+
+            # Check filters
+            stored_filters = json.loads(result.filters) if result.filters else {}
+            if not self._match_filters(stored_filters, filters):
+                return None
+
+            return json.loads(result.value)
+
+    def _add_to_file(
+        self,
+        entry: DatabaseEntryType,
+        id: Optional[str],
+        collection: str,
+        filters: Optional[Filters],
+        ttl: Optional[int],
+    ) -> None:
+        """Add an item to file storage."""
+        if collection not in self._schemas:
+            self.create_collection(collection)
+
+        table = self._tables.get(collection)
+        if table is None:
+            return
+
+        item_id = id or str(uuid.uuid4())
+        expires_at = self._calculate_expires_at(ttl)
+        now = datetime.now(timezone.utc)
+
+        with self._SessionLocal() as session:
+            # Check if item exists
+            existing = session.execute(
+                select(table).where(table.c.id == item_id)
+            ).fetchone()
+
+            if existing:
+                # Update existing item
+                stmt = (
+                    update(table)
+                    .where(table.c.id == item_id)
+                    .values(
+                        value=json.dumps(entry),
+                        filters=json.dumps(filters or {}),
+                        updated_at=now,
+                        expires_at=expires_at,
+                    )
+                )
+            else:
+                # Insert new item
+                stmt = insert(table).values(
+                    id=item_id,
+                    value=json.dumps(entry),
+                    filters=json.dumps(filters or {}),
+                    created_at=now,
+                    updated_at=now,
+                    expires_at=expires_at,
+                )
+
+            session.execute(stmt)
+            session.commit()
+
+    def _query_from_file(
+        self,
+        collection: str,
+        filters: Optional[Filters],
+        search: Optional[str],
+        limit: Optional[int],
+    ) -> List[DatabaseEntryType]:
+        """Query items from file storage."""
+        if collection not in self._schemas:
+            return []
+
+        table = self._tables.get(collection)
+        if table is None:
+            return []
+
+        with self._SessionLocal() as session:
+            stmt = select(table)
+
+            # Apply limit
+            if limit:
+                stmt = stmt.limit(limit)
+
+            results = session.execute(stmt).fetchall()
+
+            items = []
+            expired_ids = []
+
+            for result in results:
+                # Check expiration
+                if result.expires_at and self._is_expired(result.expires_at):
+                    expired_ids.append(result.id)
+                    continue
+
+                # Check filters
+                stored_filters = json.loads(result.filters) if result.filters else {}
+                if not self._match_filters(stored_filters, filters):
+                    continue
+
+                # Basic search implementation
+                value = json.loads(result.value)
+                if search:
+                    item_text = str(value).lower()
+                    if search.lower() not in item_text:
+                        continue
+
+                items.append(value)
+                if limit and len(items) >= limit:
+                    break
+
+            # Clean up expired items
+            if expired_ids:
+                delete_stmt = delete(table).where(table.c.id.in_(expired_ids))
+                session.execute(delete_stmt)
+                session.commit()
+
+        return items
+
     def __repr__(self) -> str:
         all_collections = set(self._schemas.keys()) | set(self._collections.keys())
-        return (
-            f"<Database location='{self.location}' collections={list(all_collections)}>"
-        )
+        location_info = f"location='{self.location}'"
+        if self.location == "file":
+            location_info += f" path='{self.path}'"
+        return f"<Database {location_info} collections={list(all_collections)}>"
 
     @overload
     def create_searchable_collection(
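These helpers serialize each entry to JSON in a `collection_<name>` table, store its filters alongside it, and lazily purge expired rows on read. The public `get`/`add`/`query` methods dispatch to them when `location == "file"` (see the hunks further below). A hedged round-trip sketch; the keyword names mirror the private helpers and are assumed to match the public methods:

```python
from hammad.data.databases.database import Database

db = Database(location="file", path="notes.db")

# Stored as a JSON-encoded row with its filters and an expiry timestamp.
db.add({"text": "hello"}, id="greeting", collection="notes",
       filters={"lang": "en"}, ttl=3600)

# Reads re-check expiry and filters before decoding the stored value.
item = db.get("greeting", collection="notes", filters={"lang": "en"})

# Queries scan the table, apply filters, and do a simple substring
# match of `search` against str(value).
results = db.query(collection="notes", search="hello", limit=10)
```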
@@ -92,13 +318,29 @@ class Database(Generic[DatabaseEntryType]):
         default_ttl: Optional[int] = None,
         distance_metric: Optional[Any] = None,
         embedding_function: Optional[Callable[[Any], List[float]]] = None,
+        model: Optional[str] = None,
+        # Common embedding parameters
+        format: bool = False,
+        # LiteLLM parameters
+        dimensions: Optional[int] = None,
+        encoding_format: Optional[str] = None,
+        timeout: Optional[int] = None,
+        api_base: Optional[str] = None,
+        api_version: Optional[str] = None,
+        api_key: Optional[str] = None,
+        api_type: Optional[str] = None,
+        caching: bool = False,
+        user: Optional[str] = None,
+        # FastEmbed parameters
+        parallel: Optional[int] = None,
+        batch_size: Optional[int] = None,
+        # Qdrant configuration parameters
         path: Optional[str] = None,
         host: Optional[str] = None,
         port: Optional[int] = None,
         grpc_port: Optional[int] = None,
         prefer_grpc: Optional[bool] = None,
-        api_key: Optional[str] = None,
-        timeout: Optional[float] = None,
+        qdrant_timeout: Optional[float] = None,
     ) -> "VectorCollection[DatabaseEntryType]":
         """Create a vector collection using Qdrant for semantic similarity search."""
         ...
@@ -142,15 +384,87 @@ class Database(Generic[DatabaseEntryType]):
         default_ttl: Optional[int] = None,
         distance_metric: Optional[Any] = None,
         embedding_function: Optional[Callable[[Any], List[float]]] = None,
+        model: Optional[str] = None,
+        # Common embedding parameters
+        format: bool = False,
+        # LiteLLM parameters
+        dimensions: Optional[int] = None,
+        encoding_format: Optional[str] = None,
+        timeout: Optional[int] = None,
+        api_base: Optional[str] = None,
+        api_version: Optional[str] = None,
+        api_key: Optional[str] = None,
+        api_type: Optional[str] = None,
+        caching: bool = False,
+        user: Optional[str] = None,
+        # FastEmbed parameters
+        parallel: Optional[int] = None,
+        batch_size: Optional[int] = None,
+        # Qdrant configuration parameters
         path: Optional[str] = None,
         host: Optional[str] = None,
         port: Optional[int] = None,
         grpc_port: Optional[int] = None,
         prefer_grpc: Optional[bool] = None,
-        api_key: Optional[str] = None,
-        timeout: Optional[float] = None,
+        qdrant_timeout: Optional[float] = None,
     ) -> "VectorCollection[DatabaseEntryType]":
-        """Create a vector collection using Qdrant for semantic similarity search."""
+        """Create a vector collection using Qdrant for semantic similarity search.
+
+        Args:
+            model: Model name (e.g., 'fastembed/BAAI/bge-small-en-v1.5', 'openai/text-embedding-3-small')
+            format: Whether to format each non-string input as a markdown string
+
+            # LiteLLM-specific parameters:
+            dimensions: The dimensions of the embedding
+            encoding_format: The encoding format (e.g. "float", "base64")
+            timeout: The timeout for embedding requests
+            api_base: API base URL for remote models
+            api_version: The version of the embedding API
+            api_key: API key for remote models
+            api_type: The type of the embedding API
+            caching: Whether to cache embeddings
+            user: The user for the embedding
+
+            # FastEmbed-specific parameters:
+            parallel: Number of parallel processes for embedding
+            batch_size: Batch size for embedding
+
+            # Qdrant configuration parameters:
+            path: Path for local Qdrant storage
+            host: Qdrant server host
+            port: Qdrant server port
+            grpc_port: Qdrant gRPC port
+            prefer_grpc: Whether to prefer gRPC over HTTP
+            qdrant_timeout: Request timeout for Qdrant operations
+        """
+
+        # Build qdrant config, using database defaults and unified path
+        qdrant_config = getattr(self, "_default_qdrant_settings", {}).copy()
+
+        # Override with method parameters if provided
+        if path is not None:
+            qdrant_config["path"] = path
+        elif host is not None:
+            qdrant_config["host"] = host
+        elif (
+            self.location == "file"
+            and "path" not in qdrant_config
+            and "host" not in qdrant_config
+        ):
+            # Use unified path approach for file storage
+            qdrant_path = self.path.replace(".db", f"_qdrant_{name}")
+            qdrant_config["path"] = qdrant_path
+
+        # Override other parameters
+        if port is not None:
+            qdrant_config["port"] = port
+        if grpc_port is not None:
+            qdrant_config["grpc_port"] = grpc_port
+        if prefer_grpc is not None:
+            qdrant_config["prefer_grpc"] = prefer_grpc
+        if qdrant_timeout is not None:
+            qdrant_config["timeout"] = qdrant_timeout
+
         collection = create_collection(
             "vector",
             name,
@@ -160,13 +474,29 @@ class Database(Generic[DatabaseEntryType]):
             storage_backend=self,
             distance_metric=distance_metric,
             embedding_function=embedding_function,
-            path=path,
-            host=host,
-            port=port,
-            grpc_port=grpc_port,
-            prefer_grpc=prefer_grpc,
-            api_key=api_key,
+            model=model,
+            # Common embedding parameters
+            format=format,
+            # LiteLLM parameters
+            dimensions=dimensions,
+            encoding_format=encoding_format,
             timeout=timeout,
+            api_base=api_base,
+            api_version=api_version,
+            api_key=api_key,
+            api_type=api_type,
+            caching=caching,
+            user=user,
+            # FastEmbed parameters
+            parallel=parallel,
+            batch_size=batch_size,
+            # Qdrant config
+            path=qdrant_config.get("path"),
+            host=qdrant_config.get("host"),
+            port=qdrant_config.get("port"),
+            grpc_port=qdrant_config.get("grpc_port"),
+            prefer_grpc=qdrant_config.get("prefer_grpc"),
+            qdrant_timeout=qdrant_config.get("timeout"),
         )
         self._collections[name] = collection
         return collection
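`create_vector_collection` now accepts an embedding `model` plus LiteLLM/FastEmbed options, and the Qdrant request timeout is renamed `qdrant_timeout` so it no longer collides with the embedding `timeout`. When neither `path` nor `host` is supplied on a file-backed database, local Qdrant storage is derived from the SQLite path as `<path minus .db>_qdrant_<name>`. A hedged sketch of both styles (the model names are the examples from the docstring; exact collection behavior depends on the vector backend):

```python
from hammad.data.databases.database import Database

db = Database(location="file", path="app.db")

# Local FastEmbed model; with no explicit `path`/`host`, Qdrant data
# would be placed under "app_qdrant_docs".
docs = db.create_vector_collection(
    "docs",
    model="fastembed/BAAI/bge-small-en-v1.5",
    batch_size=64,
)

# Remote embedding model via LiteLLM, against a Qdrant server.
remote_docs = db.create_vector_collection(
    "remote_docs",
    model="openai/text-embedding-3-small",
    dimensions=512,
    api_key="...",          # embedding API key (LiteLLM parameter)
    host="localhost",
    port=6333,
    qdrant_timeout=10.0,
)
```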
@@ -185,7 +515,11 @@ class Database(Generic[DatabaseEntryType]):
         """Create a traditional collection (backward compatibility)."""
         self._schemas[name] = schema
         self._collection_ttls[name] = default_ttl
-        self._storage.setdefault(name, {})
+
+        if self.location == "file":
+            self._create_collection_table(name)
+        else:
+            self._storage.setdefault(name, {})
 
     def _calculate_expires_at(self, ttl: Optional[int]) -> Optional[datetime]:
         """Calculate expiry time based on TTL."""
@@ -233,7 +567,11 @@ class Database(Generic[DatabaseEntryType]):
         finally:
             coll._storage_backend = original_backend
 
-        # Traditional collection logic
+        # File storage
+        if self.location == "file":
+            return self._get_from_file(id, collection, filters)
+
+        # Traditional in-memory collection logic
         if collection not in self._schemas:
             return None
 
@@ -275,7 +613,12 @@ class Database(Generic[DatabaseEntryType]):
             coll._storage_backend = original_backend
             return
 
-        # Traditional collection logic
+        # File storage
+        if self.location == "file":
+            self._add_to_file(entry, id, collection, filters, ttl)
+            return
+
+        # Traditional in-memory collection logic
         if collection not in self._schemas:
             self.create_collection(collection)
 
@@ -312,7 +655,11 @@ class Database(Generic[DatabaseEntryType]):
         finally:
             coll._storage_backend = original_backend
 
-        # Traditional collection logic
+        # File storage
+        if self.location == "file":
+            return self._query_from_file(collection, filters, search, limit)
+
+        # Traditional in-memory collection logic
         if collection not in self._schemas:
             return []
 
@@ -426,8 +773,9 @@ class Database(Generic[DatabaseEntryType]):
 @overload
 def create_database(
     type: Literal["searchable"],
-    location: str = "memory",
+    location: DatabaseLocation = "memory",
     *,
+    path: str = "database.db",
     default_ttl: Optional[int] = None,
     heap_size: Optional[int] = None,
     num_threads: Optional[int] = None,
@@ -441,10 +789,10 @@ def create_database(
 @overload
 def create_database(
     type: Literal["vector"],
-    location: str = "memory",
+    location: DatabaseLocation = "memory",
     *,
+    path: str = "database.db",
     default_ttl: Optional[int] = None,
-    path: Optional[str] = None,
     host: Optional[str] = None,
     port: Optional[int] = None,
     grpc_port: Optional[int] = None,
@@ -456,8 +804,9 @@
 
 def create_database(
     type: Literal["searchable", "vector"],
-    location: str = "memory",
+    location: DatabaseLocation = "memory",
     *,
+    path: str = "database.db",
     default_ttl: Optional[int] = None,
     # Tantivy parameters (searchable databases only)
     heap_size: Optional[int] = None,
@@ -467,7 +816,6 @@ def create_database(
     writer_memory: Optional[int] = None,
     reload_policy: Optional[str] = None,
     # Qdrant parameters (vector databases only)
-    path: Optional[str] = None,
     host: Optional[str] = None,
     port: Optional[int] = None,
     grpc_port: Optional[int] = None,
@@ -480,7 +828,8 @@ def create_database(
 
     Args:
         type: Type of database to create ("searchable" or "vector")
-        location: Database location (default: "memory")
+        location: Database location ("memory" or "file")
+        path: Path to the database file when using "file" location
         default_ttl: Default TTL for items in seconds
 
     Tantivy parameters (searchable databases only):
@@ -492,8 +841,7 @@ def create_database(
         reload_policy: Policy for reloading tantivy index
 
     Qdrant parameters (vector databases only):
-        path: Path for local Qdrant storage
-        host: Qdrant server host
+        host: Qdrant server host (if not provided, uses local storage with unified 'path')
         port: Qdrant server port
         grpc_port: Qdrant gRPC port
         prefer_grpc: Whether to prefer gRPC over HTTP
@@ -503,7 +851,7 @@ def create_database(
     Returns:
         A Database instance optimized for the specified collection type
     """
-    database = Database(location=location, default_ttl=default_ttl)
+    database = Database(location=location, path=path, default_ttl=default_ttl)
 
     # Store the database type for future collection creation optimization
    database._database_type = type
@@ -530,9 +878,12 @@ def create_database(
     elif type == "vector":
         # Build default qdrant settings from individual parameters
         qdrant_defaults = {}
-        if path is not None:
-            qdrant_defaults["path"] = path
-        if host is not None:
+        # Use the unified path for local Qdrant storage when no host is provided
+        if host is None and location == "file":
+            # For file storage, create a directory path for Qdrant
+            qdrant_path = path.replace(".db", "_qdrant")
+            qdrant_defaults["path"] = qdrant_path
+        elif host is not None:
             qdrant_defaults["host"] = host
         if port is not None:
             qdrant_defaults["port"] = port
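`create_database` now threads the same `location`/`path` pair through to `Database`, and when no Qdrant `host` is given on a file database it derives a local Qdrant directory from the SQLite path via `path.replace(".db", "_qdrant")`. A hedged usage sketch (the factory may also be re-exported from a higher-level package):

```python
from hammad.data.databases.database import create_database

# Searchable (tantivy-backed) database persisted to ./search.db.
search_db = create_database("searchable", location="file", path="search.db")

# Vector database: entries live in ./vectors.db and, with no `host`,
# local Qdrant storage defaults to ./vectors_qdrant.
vector_db = create_database("vector", location="file", path="vectors.db")

# Pointing at a Qdrant server instead of local storage.
remote_db = create_database(
    "vector", location="memory", host="localhost", port=6333
)
```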
hammad/json/__init__.py CHANGED
@@ -1,7 +1,7 @@
 """hammad.utils.json"""
 
 from typing import TYPE_CHECKING
-from ..based.utils import auto_create_lazy_loader
+from .._core._utils._import_utils import _auto_create_getattr_loader
 
 if TYPE_CHECKING:
     from .converters import (
@@ -13,7 +13,7 @@ if TYPE_CHECKING:
 __all__ = ("convert_to_json_schema", "encode_json", "decode_json")
 
 
-__getattr__ = auto_create_lazy_loader(__all__)
+__getattr__ = _auto_create_getattr_loader(__all__)
 
 
 def __dir__() -> list[str]:
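Both this module and `hammad/logging/__init__.py` below swap the removed `hammad.based.utils.auto_create_lazy_loader` for the internal `_auto_create_getattr_loader`. Either way, the mechanism is a PEP 562 module-level `__getattr__` that imports submodules only on first attribute access. A generic sketch of that pattern (illustrative only; `_auto_create_getattr_loader` presumably builds the name-to-submodule mapping automatically):

```python
# Sketch of a PEP 562 lazy loader for a package __init__.py.
from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .converters import convert_to_json_schema, decode_json, encode_json

__all__ = ("convert_to_json_schema", "encode_json", "decode_json")

# Hypothetical mapping from exported name to defining submodule.
_SUBMODULES = {name: ".converters" for name in __all__}


def __getattr__(name: str):
    # Import the submodule lazily the first time a name is requested.
    if name in _SUBMODULES:
        module = import_module(_SUBMODULES[name], __package__)
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


def __dir__() -> list[str]:
    return list(__all__)
```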
hammad/logging/__init__.py CHANGED
@@ -1,25 +1,33 @@
 """hammad.logging"""
 
 from typing import TYPE_CHECKING
-from ..based.utils import auto_create_lazy_loader
+from .._core._utils._import_utils import _auto_create_getattr_loader
 
 if TYPE_CHECKING:
-    from .logger import Logger, create_logger, create_logger_level
-    from .decorators import trace_function, trace_cls, trace
+    from .logger import Logger, create_logger, create_logger_level, LoggerLevelName
+    from .decorators import (
+        trace_function,
+        trace_cls,
+        trace,
+        trace_http,
+        install_trace_http,
+    )
 
 
 __all__ = (
     "Logger",
-    "LoggerLevel",
+    "LoggerLevelName",
     "create_logger",
     "create_logger_level",
     "trace_function",
     "trace_cls",
     "trace",
+    "trace_http",
+    "install_trace_http",
 )
 
 
-__getattr__ = auto_create_lazy_loader(__all__)
+__getattr__ = _auto_create_getattr_loader(__all__)
 
 
 def __dir__() -> list[str]:
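The logging package keeps the same lazy-loading shape while renaming `LoggerLevel` to `LoggerLevelName` and exporting the new HTTP tracing helpers. Their call signatures are not part of this diff, so only the exported names are shown here:

```python
# Importing through the package triggers the lazy __getattr__ loader;
# .logger and .decorators are only imported on first access.
from hammad.logging import (
    Logger,
    LoggerLevelName,
    create_logger,
    create_logger_level,
    trace,
    trace_cls,
    trace_function,
    trace_http,
    install_trace_http,
)
```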