erioon 0.0.7__tar.gz → 0.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: erioon
3
- Version: 0.0.7
3
+ Version: 0.0.8
4
4
  Summary: Erioon SDF for Python
5
5
  Author: Zyber Pireci
6
6
  Author-email: zyber.pireci@erioon.com
@@ -13,7 +13,7 @@ def Auth(credential_string):
13
13
 
14
14
  Example usage:
15
15
  >>> from erioon.auth import Auth
16
- >>> client = Auth("<EMAIL>:<PASSWORD>")
16
+ >>> client = Auth("<API_KEY>:<EMAIL>:<PASSWORD>")
17
17
  >>> print(client) # prints user_id if successful or error message if not
18
18
  """
19
19
  api, email, password = credential_string.split(":")
@@ -1,33 +1,21 @@
1
1
  import os
2
2
  import json
3
3
  import requests
4
- from werkzeug.security import generate_password_hash
4
+ from datetime import datetime, timezone
5
5
  from erioon.database import Database
6
6
 
7
7
  class ErioonClient:
8
8
  """
9
9
  Client SDK for interacting with the Erioon API.
10
10
 
11
- Handles user authentication, token caching, and accessing user databases.
12
-
13
- Attributes:
14
- email (str): User email for login.
15
- password (str): User password for login.
16
- base_url (str): Base URL of the Erioon API.
17
- user_id (str | None): Authenticated user ID.
18
- error (str | None): Stores error messages if login fails.
19
- token_path (str): Local path to cached authentication token.
11
+ Handles:
12
+ - User authentication with email/password and API key
13
+ - Token caching to avoid re-authenticating every time
14
+ - SAS token expiration detection and auto-renewal
15
+ - Access to user-specific databases
20
16
  """
21
17
 
22
18
  def __init__(self, api, email, password, base_url="https://sdk.erioon.com"):
23
- """
24
- Initialize ErioonClient instance, attempts to load cached token or perform login.
25
-
26
- Args:
27
- email (str): User email for authentication.
28
- password (str): User password for authentication.
29
- base_url (str, optional): Base API URL. Defaults to "https://sdk.erioon.com".
30
- """
31
19
  self.api = api
32
20
  self.email = email
33
21
  self.password = password
@@ -35,34 +23,23 @@ class ErioonClient:
35
23
  self.user_id = None
36
24
  self.error = None
37
25
  self.token_path = os.path.expanduser(f"~/.erioon_token_{self._safe_filename(email)}")
26
+ self.login_metadata = None
38
27
 
39
28
  try:
40
- metadata = self._load_or_login()
41
- self.user_id = metadata.get("_id")
42
- self.database = metadata.get("database")
43
- self.cluster = metadata.get("cluster")
44
- self.login_metadata = metadata
29
+ self.login_metadata = self._load_or_login()
30
+ self._update_metadata_fields()
45
31
  except Exception as e:
46
32
  self.error = str(e)
47
33
 
48
34
  def _safe_filename(self, text):
49
35
  """
50
- Converts a string into a safe filename by replacing non-alphanumeric chars with underscores.
51
-
52
- Args:
53
- text (str): Input string to convert.
54
-
55
- Returns:
56
- str: Sanitized filename-safe string.
36
+ Converts unsafe filename characters to underscores for cache file naming.
57
37
  """
58
38
  return "".join(c if c.isalnum() else "_" for c in text)
59
39
 
60
40
  def _do_login_and_cache(self):
61
41
  """
62
- Perform login to API and cache the metadata locally.
63
-
64
- Returns:
65
- dict: Login metadata including user_id, database, cluster.
42
+ Logs in to the API and writes the returned metadata (e.g. SAS token, user ID) to a local file.
66
43
  """
67
44
  metadata = self._login()
68
45
  with open(self.token_path, "w") as f:
@@ -71,68 +48,80 @@ class ErioonClient:
71
48
 
72
49
  def _load_or_login(self):
73
50
  """
74
- Load cached metadata or perform login.
75
-
76
- Returns:
77
- dict: Login metadata.
51
+ Tries to load the cached login metadata.
52
+ If token is expired or file does not exist, performs a fresh login.
78
53
  """
79
54
  if os.path.exists(self.token_path):
80
55
  with open(self.token_path, "r") as f:
81
56
  metadata = json.load(f)
82
- if "user_id" in metadata:
83
- return metadata
84
-
85
- return self._do_login_and_cache()
57
+ if self._is_sas_expired(metadata):
58
+ metadata = self._do_login_and_cache()
59
+ return metadata
60
+ else:
61
+ return self._do_login_and_cache()
86
62
 
87
63
  def _login(self):
88
64
  """
89
- Authenticate and return full login metadata.
90
-
91
- Returns:
92
- dict: Metadata with user_id, database, cluster, etc.
65
+ Sends login request to Erioon API using API key, email, and password.
66
+ Returns authentication metadata including SAS token.
93
67
  """
94
68
  url = f"{self.base_url}/login_with_credentials"
95
- payload = {"api_key": self.api,"email": self.email, "password": self.password}
69
+ payload = {"api_key": self.api, "email": self.email, "password": self.password}
96
70
  headers = {"Content-Type": "application/json"}
97
71
 
98
72
  response = requests.post(url, json=payload, headers=headers)
99
73
  if response.status_code == 200:
100
74
  data = response.json()
101
75
  self.login_metadata = data
76
+ self._update_metadata_fields()
102
77
  return data
103
78
  else:
104
79
  raise Exception("Invalid account")
105
80
 
81
+ def _update_metadata_fields(self):
82
+ """
83
+ Updates internal fields like user_id, database name, and cluster info
84
+ from login metadata.
85
+ """
86
+ if self.login_metadata:
87
+ self.user_id = self.login_metadata.get("_id")
88
+ self.database = self.login_metadata.get("database")
89
+ self.cluster = self.login_metadata.get("cluster")
106
90
 
107
91
  def _clear_cached_token(self):
108
92
  """
109
- Remove cached token file and reset user_id to None.
93
+ Clears the locally cached authentication token and resets internal state.
110
94
  """
111
95
  if os.path.exists(self.token_path):
112
96
  os.remove(self.token_path)
113
97
  self.user_id = None
98
+ self.login_metadata = None
114
99
 
115
- def __getitem__(self, db_id):
100
+ def _is_sas_expired(self, metadata):
116
101
  """
117
- Access a Database object by database ID.
118
-
119
- Args:
120
- db_id (str): The ID of the database to access.
102
+ Determines whether the SAS token has expired by comparing the 'sas_token_expiry'
103
+ or 'expiry' field with the current UTC time.
104
+ """
105
+ expiry_str = metadata.get("sas_token_expiry") or metadata.get("expiry")
121
106
 
122
- Returns:
123
- Database: An instance representing the database.
107
+ if not expiry_str:
108
+ return True
124
109
 
125
- Raises:
126
- ValueError: If client is not authenticated.
127
- Exception: For other API errors not related to database existence.
110
+ try:
111
+ expiry_dt = datetime.fromisoformat(expiry_str.replace("Z", "+00:00"))
112
+ now = datetime.now(timezone.utc)
113
+ return now >= expiry_dt
114
+ except Exception:
115
+ return True
128
116
 
129
- Handles:
130
- On database-related errors, tries to relogin once. If relogin fails, returns "Login error".
131
- If database still not found after relogin, returns a formatted error message.
117
+ def __getitem__(self, db_id):
118
+ """
119
+ Allows syntax like `client["my_database_id"]` to access a database.
120
+ If the token is expired or invalid, it attempts reauthentication.
132
121
  """
133
122
  if not self.user_id:
134
123
  raise ValueError("Client not authenticated. Cannot access database.")
135
-
124
+
136
125
  try:
137
126
  return self._get_database_info(db_id)
138
127
  except Exception as e:
@@ -140,29 +129,22 @@ class ErioonClient:
140
129
  if f"database with {db_id.lower()}" in err_msg or "database" in err_msg:
141
130
  self._clear_cached_token()
142
131
  try:
143
- self.user_id = self._do_login_and_cache()
132
+ self.login_metadata = self._do_login_and_cache()
133
+ self._update_metadata_fields()
144
134
  except Exception:
145
135
  return "Login error"
146
-
136
+
147
137
  try:
148
138
  return self._get_database_info(db_id)
149
139
  except Exception:
150
140
  return f"❌ Database with _id {db_id} ..."
151
141
  else:
152
142
  raise e
153
-
143
+
154
144
  def _get_database_info(self, db_id):
155
145
  """
156
- Helper method to fetch database info from API and instantiate a Database object.
157
-
158
- Args:
159
- db_id (str): The database ID to fetch.
160
-
161
- Returns:
162
- Database: Database instance with the fetched info.
163
-
164
- Raises:
165
- Exception: If API returns an error.
146
+ Sends a POST request to fetch metadata for a given database ID.
147
+ Returns a `Database` instance initialized with SAS URL and metadata.
166
148
  """
167
149
  payload = {"user_id": self.user_id, "db_id": db_id}
168
150
  headers = {"Content-Type": "application/json"}
@@ -171,11 +153,24 @@ class ErioonClient:
171
153
 
172
154
  if response.status_code == 200:
173
155
  db_info = response.json()
156
+
157
+ container_url = self.login_metadata.get("container_url")
158
+ sas_token = self.login_metadata.get("sas_token")
159
+
160
+ if not container_url or not sas_token:
161
+ raise Exception("Missing SAS URL components for storage access")
162
+
163
+ if not sas_token.startswith("?"):
164
+ sas_token = "?" + sas_token
165
+
166
+ sas_url = container_url.split("?")[0] + sas_token
167
+
174
168
  return Database(
175
169
  user_id=self.user_id,
176
170
  metadata=db_info,
177
171
  database=self.database,
178
- cluster=self.cluster
172
+ cluster=self.cluster,
173
+ sas_url=sas_url
179
174
  )
180
175
  else:
181
176
  try:
@@ -187,12 +182,12 @@ class ErioonClient:
187
182
 
188
183
  def __str__(self):
189
184
  """
190
- String representation: returns user_id if authenticated, else the error message.
185
+ Returns user_id or error string when printed.
191
186
  """
192
187
  return self.user_id if self.user_id else self.error
193
188
 
194
189
  def __repr__(self):
195
190
  """
196
- Developer-friendly string representation of the client instance.
191
+ Developer-friendly representation of the client.
197
192
  """
198
193
  return f"<ErioonClient user_id={self.user_id}>" if self.user_id else f"<ErioonClient error='{self.error}'>"
@@ -0,0 +1,233 @@
1
+ import json
2
+ from urllib.parse import urlparse
3
+ from erioon.read import handle_get_all, handle_get_data
4
+ from erioon.create import handle_insert_one, handle_insert_many
5
+ from erioon.delete import handle_delete_one, handle_delete_many
6
+ from erioon.update import handle_update_query
7
+ from erioon.ping import handle_connection_ping
8
+
9
+ class Collection:
10
+ def __init__(
11
+ self,
12
+ user_id,
13
+ db_id,
14
+ coll_id,
15
+ metadata,
16
+ database,
17
+ cluster,
18
+ sas_url,
19
+ ):
20
+
21
+ """
22
+ Initialize a Collection object that wraps Erioon collection access.
23
+
24
+ Args:
25
+ user_id (str): The authenticated user's ID.
26
+ db_id (str): The database ID.
27
+ coll_id (str): The collection ID.
28
+ metadata (dict): Metadata info about this collection (e.g., schema, indexing, etc.).
29
+ database (str): Name or ID of the database.
30
+ cluster (str): Cluster name or ID hosting the database.
31
+ sas_url (str): Full SAS URL used to access the storage container.
32
+ """
33
+
34
+ self.user_id = user_id
35
+ self.db_id = db_id
36
+ self.coll_id = coll_id
37
+ self.metadata = metadata
38
+ self.database = database
39
+ self.cluster = cluster
40
+
41
+ parsed_url = urlparse(sas_url.rstrip("/"))
42
+ container_name = parsed_url.path.lstrip("/").split("/")[0]
43
+ account_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
44
+ sas_token = parsed_url.query
45
+ self.container_url = f"{account_url}/{container_name}?{sas_token}"
46
+
47
+ def _print_loading(self):
48
+ """Prints a loading message (likely for UX in CLI or SDK usage)."""
49
+ print("Erioon is loading...")
50
+
51
+ def _is_read_only(self):
52
+ """Check if the current database is marked as read-only."""
53
+ return self.database == "read"
54
+
55
+ def _read_only_response(self):
56
+ """Standardized error response for blocked write operations."""
57
+ return "This user is not allowed to perform write operations.", 403
58
+
59
+ def get_all(self, limit=1000000):
60
+ """
61
+ Fetch all records from the collection (up to a limit).
62
+
63
+ Args:
64
+ limit (int): Max number of records to fetch.
65
+ Returns:
66
+ list: Collection of records.
67
+ """
68
+ self._print_loading()
69
+ result, status_code = handle_get_all(
70
+ user_id=self.user_id,
71
+ db_id=self.db_id,
72
+ coll_id=self.coll_id,
73
+ limit=limit,
74
+ container_url=self.container_url,
75
+ )
76
+ return result
77
+
78
+ def get_specific(self, filters: dict | None = None, limit: int = 1000):
79
+ """
80
+ Fetch records that match specific key-value filters.
81
+
82
+ Args:
83
+ filters (dict): Dictionary of exact match filters.
84
+ limit (int): Max number of matching records to return.
85
+
86
+ Returns:
87
+ list: Filtered records from the collection.
88
+ """
89
+ if limit > 500_000:
90
+ raise ValueError("Limit of 500,000 exceeded")
91
+ self._print_loading()
92
+
93
+ if filters is None:
94
+ filters = {}
95
+
96
+ search_criteria = [{k: v} for k, v in filters.items()]
97
+ print(search_criteria)
98
+
99
+ result, status_code = handle_get_data(
100
+ user_id=self.user_id,
101
+ db_id=self.db_id,
102
+ coll_id=self.coll_id,
103
+ search_criteria=search_criteria,
104
+ limit=limit,
105
+ container_url=self.container_url,
106
+ )
107
+ return result
108
+
109
+ def insert_one(self, record):
110
+ """
111
+ Insert a single record into the collection.
112
+
113
+ Args:
114
+ record (dict): Record to insert.
115
+
116
+ Returns:
117
+ tuple: (response message, HTTP status code)
118
+ """
119
+ if self._is_read_only():
120
+ return self._read_only_response()
121
+ return handle_insert_one(
122
+ user_id_cont=self.user_id,
123
+ database=self.db_id,
124
+ collection=self.coll_id,
125
+ record=record,
126
+ container_url=self.container_url,
127
+ )
128
+
129
+ def insert_many(self, data):
130
+ """
131
+ Insert multiple records into the collection.
132
+
133
+ Args:
134
+ data (list of dicts): Multiple records to insert.
135
+
136
+ Returns:
137
+ tuple: (response message, HTTP status code)
138
+ """
139
+ if self._is_read_only():
140
+ return self._read_only_response()
141
+ return handle_insert_many(
142
+ user_id_cont=self.user_id,
143
+ database=self.db_id,
144
+ collection=self.coll_id,
145
+ data=data,
146
+ container_url=self.container_url,
147
+ )
148
+
149
+ def delete_one(self, record_to_delete):
150
+ """
151
+ Delete a single record based on its _id or nested key.
152
+
153
+ Args:
154
+ record_to_delete (dict): Identification of the record.
155
+
156
+ Returns:
157
+ tuple: (response message, HTTP status code)
158
+ """
159
+ if self._is_read_only():
160
+ return self._read_only_response()
161
+ return handle_delete_one(
162
+ user_id=self.user_id,
163
+ db_id=self.db_id,
164
+ coll_id=self.coll_id,
165
+ data_to_delete=record_to_delete,
166
+ container_url=self.container_url,
167
+ )
168
+
169
+ def delete_many(self, records_to_delete_list, batch_size=10):
170
+ """
171
+ Delete multiple records in batches.
172
+
173
+ Args:
174
+ records_to_delete_list (list): List of record identifiers.
175
+ batch_size (int): How many to delete at once (for efficiency).
176
+
177
+ Returns:
178
+ tuple: (response message, HTTP status code)
179
+ """
180
+ if self._is_read_only():
181
+ return self._read_only_response()
182
+ return handle_delete_many(
183
+ user_id=self.user_id,
184
+ db_id=self.db_id,
185
+ coll_id=self.coll_id,
186
+ data_to_delete_list=records_to_delete_list,
187
+ batch_size=batch_size,
188
+ container_url=self.container_url,
189
+ )
190
+
191
+ def update_query(self, filter_query: dict, update_query: dict):
192
+ """
193
+ Update a record in-place by filtering and applying update logic.
194
+
195
+ Args:
196
+ filter_query (dict): Dict describing what record(s) to match.
197
+ update_query (dict): Dict describing update operators ($set, $push, $remove).
198
+
199
+ Returns:
200
+ tuple: (response message, HTTP status code)
201
+ """
202
+ if self._is_read_only():
203
+ return self._read_only_response()
204
+ return handle_update_query(
205
+ user_id=self.user_id,
206
+ db_id=self.db_id,
207
+ coll_id=self.coll_id,
208
+ filter_query=filter_query,
209
+ update_query=update_query,
210
+ container_url=self.container_url,
211
+ )
212
+
213
+ def ping(self):
214
+ """
215
+ Health check / ping to verify collection accessibility.
216
+
217
+ Returns:
218
+ tuple: (response message, HTTP status code)
219
+ """
220
+ return handle_connection_ping(
221
+ user_id=self.user_id,
222
+ db_id=self.db_id,
223
+ coll_id=self.coll_id,
224
+ container_url=self.container_url,
225
+ )
226
+
227
+ def __str__(self):
228
+ """Pretty print the collection metadata."""
229
+ return json.dumps(self.metadata, indent=4)
230
+
231
+ def __repr__(self):
232
+ """Simplified representation for debugging or introspection."""
233
+ return f"<Collection coll_id={self.coll_id}>"
@@ -0,0 +1,159 @@
1
+ from azure.storage.blob import ContainerClient
2
+ import uuid
3
+ import json
4
+ from erioon.functions import (
5
+ create_msgpack_file,
6
+ update_index_file_insert,
7
+ calculate_shard_number,
8
+ async_log
9
+ )
10
+
11
def get_index_data(user_id_cont, database, collection, container_url):
    """
    Download and parse the collection's index.json, which maps records to
    their shards.

    Args:
        user_id_cont: User identifier or context (unused here, kept for a
            uniform handler signature).
        database: Database name.
        collection: Collection name.
        container_url: Blob Storage container SAS URL.

    Returns:
        list: Shard mappings (list of dicts); empty list when the blob is
        missing, empty, or unreadable.
    """
    client = ContainerClient.from_container_url(container_url)
    blob = client.get_blob_client(blob=f"{database}/{collection}/index.json")

    try:
        raw = blob.download_blob().readall()
        return json.loads(raw) if raw else []
    except Exception:
        # Best-effort: any download/parse failure is treated as "no index".
        return []
def is_duplicate_id(user_id_cont, database, collection, _id, container_url):
    """
    Check whether *_id* already appears in any shard listed in index.json.

    Args:
        user_id_cont: User identifier.
        database: Database name.
        collection: Collection name.
        _id: Record ID to check.
        container_url: Blob Storage container SAS URL.

    Returns:
        bool: True if _id exists in any shard, else False.
    """
    shards = get_index_data(user_id_cont, database, collection, container_url)
    return any(
        _id in ids
        for shard in shards
        for ids in shard.values()
    )
def handle_insert_one(user_id_cont, database, collection, record, container_url):
    """
    Insert a single record into the collection.

    - If no '_id' is provided, a new UUID is generated.
    - If the provided '_id' is a duplicate, a new one is generated and the
      record is updated in place.
    - The record is written to a shard file and index.json is updated.
    - Success is logged only after the record and index are persisted
      (previously the success log fired before the write, so a failed write
      logged both SUCCESS and ERROR).

    Args:
        user_id_cont: User identifier.
        database: Database name.
        collection: Collection name.
        record: Dict representing the record to insert (mutated: _id may be set).
        container_url: Blob Storage container SAS URL.

    Returns:
        Tuple (response dict, status code) indicating success or failure.
    """
    try:
        if "_id" not in record or not record["_id"]:
            record["_id"] = str(uuid.uuid4())

        rec_id = record["_id"]

        if is_duplicate_id(user_id_cont, database, collection, rec_id, container_url):
            new_id = str(uuid.uuid4())
            record["_id"] = new_id
            rec_id = new_id
            msg = f"Record inserted successfully in {collection} with a new _id {rec_id} because the provided _id was already present."
        else:
            msg = f"Record inserted successfully in {collection} with _id {rec_id}"

        create_msgpack_file(user_id_cont, database, collection, record, container_url)

        shard_number = calculate_shard_number(user_id_cont, database, collection, container_url)
        update_index_file_insert(user_id_cont, database, collection, rec_id, shard_number, container_url)

        # Log success only once the record and index update actually landed.
        async_log(user_id_cont, database, collection, "POST", "SUCCESS", msg, 1, container_url)

        return {"status": "OK", "message": msg, "record": record}, 200

    except Exception as e:
        error_msg = f"An error occurred during insert in {collection}: {str(e)}"
        async_log(user_id_cont, database, collection, "POST", "ERROR", error_msg, 1, container_url)
        return {"status": "KO", "message": "Failed to insert record.", "error": str(e)}, 500
def handle_insert_many(user_id_cont, database, collection, data, container_url):
    """
    Insert multiple records in bulk.

    For each record in data["records"]:
      - Ensure a unique _id (generate a new UUID when missing or duplicate).
      - Write the record to the appropriate shard.
      - Update index.json with the _id-to-shard mapping.
    The batch is logged once on success; on any failure the error is logged
    with the actual batch size (previously hard-coded to 1) and a 500
    response is returned.

    Args:
        user_id_cont: User identifier.
        database: Database name.
        collection: Collection name.
        data: Dict with key "records" containing a list of record dicts.
        container_url: Blob Storage container SAS URL.

    Returns:
        Tuple (response dict, status code) with a summary of insert results.
    """
    insert_results = []
    records = data.get("records", [])
    count = len(records)

    try:
        for record in records:
            if "_id" not in record or not record["_id"]:
                record["_id"] = str(uuid.uuid4())

            rec_id = record["_id"]

            if is_duplicate_id(user_id_cont, database, collection, rec_id, container_url):
                new_id = str(uuid.uuid4())
                record["_id"] = new_id
                rec_id = new_id
                msg = f"Inserted with new _id {rec_id} (original _id was already present)."
            else:
                msg = f"Inserted with _id {rec_id}."

            create_msgpack_file(user_id_cont, database, collection, record, container_url)

            shard_number = calculate_shard_number(user_id_cont, database, collection, container_url)
            update_index_file_insert(
                user_id_cont, database, collection, rec_id, shard_number, container_url
            )

            insert_results.append({"_id": rec_id, "message": msg})

        async_log(user_id_cont, database, collection, "POST", "SUCCESS", insert_results, count, container_url)
        return {"success": "Records inserted successfully", "details": insert_results}, 200

    except Exception as e:
        general_error_msg = f"Unexpected error during bulk insert: {str(e)}"
        # Report the real batch size in the log (was hard-coded to 1).
        async_log(user_id_cont, database, collection, "POST", "ERROR", general_error_msg, count, container_url)
        return {"status": "KO", "message": general_error_msg}, 500