botrun-flow-lang 5.12.263__py3-none-any.whl → 6.2.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- botrun_flow_lang/api/auth_api.py +39 -39
- botrun_flow_lang/api/auth_utils.py +183 -183
- botrun_flow_lang/api/botrun_back_api.py +65 -65
- botrun_flow_lang/api/flow_api.py +3 -3
- botrun_flow_lang/api/hatch_api.py +508 -508
- botrun_flow_lang/api/langgraph_api.py +816 -811
- botrun_flow_lang/api/langgraph_constants.py +11 -0
- botrun_flow_lang/api/line_bot_api.py +1484 -1484
- botrun_flow_lang/api/model_api.py +300 -300
- botrun_flow_lang/api/rate_limit_api.py +32 -32
- botrun_flow_lang/api/routes.py +79 -79
- botrun_flow_lang/api/search_api.py +53 -53
- botrun_flow_lang/api/storage_api.py +395 -395
- botrun_flow_lang/api/subsidy_api.py +290 -290
- botrun_flow_lang/api/subsidy_api_system_prompt.txt +109 -109
- botrun_flow_lang/api/user_setting_api.py +70 -70
- botrun_flow_lang/api/version_api.py +31 -31
- botrun_flow_lang/api/youtube_api.py +26 -26
- botrun_flow_lang/constants.py +13 -13
- botrun_flow_lang/langgraph_agents/agents/agent_runner.py +178 -178
- botrun_flow_lang/langgraph_agents/agents/agent_tools/step_planner.py +77 -77
- botrun_flow_lang/langgraph_agents/agents/checkpointer/firestore_checkpointer.py +666 -666
- botrun_flow_lang/langgraph_agents/agents/gov_researcher/GOV_RESEARCHER_PRD.md +192 -192
- botrun_flow_lang/langgraph_agents/agents/gov_researcher/gemini_subsidy_graph.py +460 -460
- botrun_flow_lang/langgraph_agents/agents/gov_researcher/gov_researcher_2_graph.py +1002 -1002
- botrun_flow_lang/langgraph_agents/agents/gov_researcher/gov_researcher_graph.py +822 -822
- botrun_flow_lang/langgraph_agents/agents/langgraph_react_agent.py +730 -723
- botrun_flow_lang/langgraph_agents/agents/search_agent_graph.py +864 -864
- botrun_flow_lang/langgraph_agents/agents/tools/__init__.py +4 -4
- botrun_flow_lang/langgraph_agents/agents/tools/gemini_code_execution.py +376 -376
- botrun_flow_lang/langgraph_agents/agents/util/gemini_grounding.py +66 -66
- botrun_flow_lang/langgraph_agents/agents/util/html_util.py +316 -316
- botrun_flow_lang/langgraph_agents/agents/util/img_util.py +336 -294
- botrun_flow_lang/langgraph_agents/agents/util/local_files.py +419 -419
- botrun_flow_lang/langgraph_agents/agents/util/mermaid_util.py +86 -86
- botrun_flow_lang/langgraph_agents/agents/util/model_utils.py +143 -143
- botrun_flow_lang/langgraph_agents/agents/util/pdf_analyzer.py +562 -486
- botrun_flow_lang/langgraph_agents/agents/util/pdf_cache.py +250 -250
- botrun_flow_lang/langgraph_agents/agents/util/pdf_processor.py +204 -204
- botrun_flow_lang/langgraph_agents/agents/util/perplexity_search.py +464 -464
- botrun_flow_lang/langgraph_agents/agents/util/plotly_util.py +59 -59
- botrun_flow_lang/langgraph_agents/agents/util/tavily_search.py +199 -199
- botrun_flow_lang/langgraph_agents/agents/util/usage_metadata.py +34 -0
- botrun_flow_lang/langgraph_agents/agents/util/youtube_util.py +90 -90
- botrun_flow_lang/langgraph_agents/cache/langgraph_botrun_cache.py +197 -197
- botrun_flow_lang/llm_agent/llm_agent.py +19 -19
- botrun_flow_lang/llm_agent/llm_agent_util.py +83 -83
- botrun_flow_lang/log/.gitignore +2 -2
- botrun_flow_lang/main.py +61 -61
- botrun_flow_lang/main_fast.py +51 -51
- botrun_flow_lang/mcp_server/__init__.py +10 -10
- botrun_flow_lang/mcp_server/default_mcp.py +854 -744
- botrun_flow_lang/models/nodes/utils.py +205 -205
- botrun_flow_lang/models/token_usage.py +34 -34
- botrun_flow_lang/requirements.txt +21 -21
- botrun_flow_lang/services/base/firestore_base.py +30 -30
- botrun_flow_lang/services/hatch/hatch_factory.py +11 -11
- botrun_flow_lang/services/hatch/hatch_fs_store.py +419 -419
- botrun_flow_lang/services/storage/storage_cs_store.py +206 -206
- botrun_flow_lang/services/storage/storage_factory.py +12 -12
- botrun_flow_lang/services/storage/storage_store.py +65 -65
- botrun_flow_lang/services/user_setting/user_setting_factory.py +9 -9
- botrun_flow_lang/services/user_setting/user_setting_fs_store.py +66 -66
- botrun_flow_lang/static/docs/tools/index.html +926 -926
- botrun_flow_lang/tests/api_functional_tests.py +1525 -1525
- botrun_flow_lang/tests/api_stress_test.py +357 -357
- botrun_flow_lang/tests/shared_hatch_tests.py +333 -333
- botrun_flow_lang/tests/test_botrun_app.py +46 -46
- botrun_flow_lang/tests/test_html_util.py +31 -31
- botrun_flow_lang/tests/test_img_analyzer.py +190 -190
- botrun_flow_lang/tests/test_img_util.py +39 -39
- botrun_flow_lang/tests/test_local_files.py +114 -114
- botrun_flow_lang/tests/test_mermaid_util.py +103 -103
- botrun_flow_lang/tests/test_pdf_analyzer.py +104 -104
- botrun_flow_lang/tests/test_plotly_util.py +151 -151
- botrun_flow_lang/tests/test_run_workflow_engine.py +65 -65
- botrun_flow_lang/tools/generate_docs.py +133 -133
- botrun_flow_lang/tools/templates/tools.html +153 -153
- botrun_flow_lang/utils/__init__.py +7 -7
- botrun_flow_lang/utils/botrun_logger.py +344 -344
- botrun_flow_lang/utils/clients/rate_limit_client.py +209 -209
- botrun_flow_lang/utils/clients/token_verify_client.py +153 -153
- botrun_flow_lang/utils/google_drive_utils.py +654 -654
- botrun_flow_lang/utils/langchain_utils.py +324 -324
- botrun_flow_lang/utils/yaml_utils.py +9 -9
- {botrun_flow_lang-5.12.263.dist-info → botrun_flow_lang-6.2.21.dist-info}/METADATA +6 -6
- botrun_flow_lang-6.2.21.dist-info/RECORD +104 -0
- botrun_flow_lang-5.12.263.dist-info/RECORD +0 -102
- {botrun_flow_lang-5.12.263.dist-info → botrun_flow_lang-6.2.21.dist-info}/WHEEL +0 -0
|
@@ -1,419 +1,419 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
from typing import Union, List, Tuple
|
|
3
|
-
from datetime import datetime, timezone
|
|
4
|
-
from google.cloud.exceptions import GoogleCloudError
|
|
5
|
-
from botrun_flow_lang.constants import HATCH_SHARING_STORE_NAME, HATCH_STORE_NAME
|
|
6
|
-
from botrun_flow_lang.services.base.firestore_base import FirestoreBase
|
|
7
|
-
from botrun_hatch.models.hatch import Hatch
|
|
8
|
-
from botrun_hatch.models.hatch_sharing import HatchSharing
|
|
9
|
-
from google.cloud import firestore
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class HatchFsStore(FirestoreBase):
    """Firestore-backed store for Hatch documents and their sharing records.

    Persists ``Hatch`` models in an environment-scoped collection and keeps
    ``HatchSharing`` records in a companion ``*-sharing`` collection so that
    hatches can be shared between users.
    """

    def __init__(self, env_name: str):
        """Bind to the env-scoped hatch collection and its sharing collection."""
        super().__init__(f"{env_name}-{HATCH_STORE_NAME}")
        self.sharing_collection = self.db.collection(
            f"{env_name}-{HATCH_SHARING_STORE_NAME}"
        )

    async def get_hatch(self, item_id: str) -> Union[Hatch, None]:
        """Return the Hatch stored under ``item_id``, or None if missing."""
        doc_ref = self.collection.document(item_id)
        doc = doc_ref.get()
        if doc.exists:
            return Hatch(**doc.to_dict())
        # Kept message text; switched from print() to logging for consistency.
        logging.warning(f">============Getting hatch {item_id} not exists")
        return None

    async def set_hatch(self, item: Hatch):
        """Upsert ``item``, stamping ``updated_at`` with the current UTC time.

        Returns:
            Tuple[bool, Union[Hatch, None]]: ``(True, item)`` on success,
            ``(False, None)`` on any failure.
        """
        try:
            item.updated_at = datetime.now(timezone.utc).isoformat()
            doc_ref = self.collection.document(str(item.id))
            doc_ref.set(item.model_dump())
            return True, item
        except GoogleCloudError as e:
            logging.error(f"Error setting hatch {item.id}: {e}")
            return False, None
        except Exception as e:
            logging.error(f"Unexpected error setting hatch {item.id}: {e}")
            return False, None

    async def delete_hatch(self, item_id: str):
        """Delete the hatch document ``item_id``.

        Returns:
            bool: True on success, False on a Google Cloud error.
        """
        try:
            self.collection.document(item_id).delete()
            return True
        except GoogleCloudError as e:
            # Unified on logging (was print) to match set_hatch's handlers.
            logging.error(f"Error deleting hatch {item_id}: {e}")
            return False

    def _backfill_updated_at(self, user_id: str) -> None:
        """Backward-compat migration: stamp ``updated_at`` on docs missing it.

        Firestore cannot filter on "field does not exist", so every document
        owned by ``user_id`` is scanned and patched in batched writes
        (Firestore allows at most 500 writes per batch).
        """
        owner_query = self.collection.where(
            filter=firestore.FieldFilter("user_id", "==", user_id)
        )
        current_time = datetime.now(timezone.utc).isoformat()
        docs_to_update = []
        for doc in owner_query.stream():
            data = doc.to_dict()
            # "not data.get(...)" covers both a missing and an empty field.
            if not data.get("updated_at"):
                docs_to_update.append((doc.reference, data))
        if not docs_to_update:
            return
        batch_size = 500  # Firestore per-batch write limit
        for start in range(0, len(docs_to_update), batch_size):
            batch = self.db.batch()
            for doc_ref, data in docs_to_update[start : start + batch_size]:
                data["updated_at"] = current_time
                batch.set(doc_ref, data)
            batch.commit()
        logging.info(
            f"Auto-migrated {len(docs_to_update)} hatches with updated_at for user {user_id}"
        )

    async def get_hatches(
        self,
        user_id: str,
        offset: int = 0,
        limit: int = 20,
        sort_by: str = "updated_at",
        order: str = "desc",
    ) -> Tuple[List[Hatch], str]:
        """Return one page of the user's hatches.

        Args:
            user_id: Owner whose hatches are listed.
            offset: Pagination offset.
            limit: Maximum number of results.
            sort_by: Field to order by (default ``updated_at``).
            order: ``"desc"`` or anything else for ascending.

        Returns:
            Tuple[List[Hatch], str]: The page of hatches and an error message
            ("" when no error occurred).
        """
        try:
            if sort_by == "updated_at":
                # Docs lacking updated_at would be dropped by order_by on that
                # field, so patch them first (legacy-data migration).
                self._backfill_updated_at(user_id)

            query = self.collection.where(
                filter=firestore.FieldFilter("user_id", "==", user_id)
            )
            direction = (
                firestore.Query.DESCENDING
                if order == "desc"
                else firestore.Query.ASCENDING
            )
            query = query.order_by(sort_by, direction=direction)
            query = query.offset(offset).limit(limit)

            hatches = [Hatch(**doc.to_dict()) for doc in query.stream()]
            return hatches, ""
        except Exception as e:
            # The original had two byte-identical handlers for GoogleCloudError
            # and Exception; GoogleCloudError subclasses Exception, so a single
            # handler is equivalent. logging.exception includes the traceback
            # the original printed manually.
            logging.exception(f"Error getting hatches for user {user_id}: {e}")
            return [], f"Error getting hatches for user {user_id}: {e}"

    async def get_default_hatch(self, user_id: str) -> Union[Hatch, None]:
        """Return the user's default hatch, or None if none is flagged."""
        try:
            query = (
                self.collection.where(
                    filter=firestore.FieldFilter("user_id", "==", user_id)
                )
                .where(filter=firestore.FieldFilter("is_default", "==", True))
                .limit(1)
            )
            for doc in query.stream():
                return Hatch(**doc.to_dict())
            return None
        except GoogleCloudError as e:
            logging.error(f"Error getting default hatch for user {user_id}: {e}")
            return None

    async def set_default_hatch(self, user_id: str, hatch_id: str) -> Tuple[bool, str]:
        """Make ``hatch_id`` the user's default hatch, clearing any previous default.

        Returns:
            Tuple[bool, str]: Success status and message.
        """
        try:
            # Fetch the current default hatch (if any).
            current_default = await self.get_default_hatch(user_id)

            # Fetch the hatch that should become the default and verify ownership.
            new_default = await self.get_hatch(hatch_id)
            if not new_default or new_default.user_id != user_id:
                return (
                    False,
                    f"Hatch with id {hatch_id} not found or does not belong to user {user_id}",
                )

            # Clear the current default first (if it is a different hatch).
            if current_default and current_default.id != hatch_id:
                current_default.is_default = False
                success, _ = await self.set_hatch(current_default)
                if not success:
                    return (
                        False,
                        f"Failed to update current default hatch {current_default.id}",
                    )

            # Flag the new default hatch.
            new_default.is_default = True
            success, _ = await self.set_hatch(new_default)
            if not success:
                return False, f"Failed to set hatch {hatch_id} as default"

            return (
                True,
                f"Successfully set hatch {hatch_id} as default for user {user_id}",
            )
        except Exception as e:
            logging.error(f"Error setting default hatch: {e}")
            return False, f"An error occurred: {str(e)}"

    async def share_hatch(
        self, hatch_id: str, owner_id: str, target_user_id: str
    ) -> Tuple[bool, str]:
        """Share a hatch with another user.

        Args:
            hatch_id: The ID of the hatch to share
            owner_id: The ID of the user who owns the hatch
            target_user_id: The ID of the user to share the hatch with

        Returns:
            Tuple[bool, str]: Success status and message
        """
        try:
            # Verify hatch exists and belongs to owner.
            hatch = await self.get_hatch(hatch_id)
            if not hatch:
                return False, f"Hatch with id {hatch_id} not found"
            if hatch.user_id != owner_id:
                return (
                    False,
                    f"Hatch with id {hatch_id} does not belong to user {owner_id}",
                )

            # Sharing is idempotent: an existing record short-circuits with success.
            query = (
                self.sharing_collection.where(
                    filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
                )
                .where(
                    filter=firestore.FieldFilter("shared_with_id", "==", target_user_id)
                )
                .limit(1)
            )
            if list(query.stream()):
                return (
                    True,
                    f"Hatch {hatch_id} is already shared with user {target_user_id}",
                )

            # Create and persist the sharing record (auto-generated doc id).
            sharing = HatchSharing(
                hatch_id=hatch_id, owner_id=owner_id, shared_with_id=target_user_id
            )
            self.sharing_collection.document().set(sharing.model_dump())

            return (
                True,
                f"Successfully shared hatch {hatch_id} with user {target_user_id}",
            )
        except Exception as e:
            logging.exception(
                f"Error sharing hatch {hatch_id} with user {target_user_id}: {e}"
            )
            return False, f"Error sharing hatch: {str(e)}"

    async def unshare_hatch(
        self, hatch_id: str, owner_id: str, target_user_id: str
    ) -> Tuple[bool, str]:
        """Remove sharing of a hatch with a user.

        Args:
            hatch_id: The ID of the hatch to unshare
            owner_id: The ID of the user who owns the hatch
            target_user_id: The ID of the user to remove sharing from

        Returns:
            Tuple[bool, str]: Success status and message
        """
        try:
            # Verify hatch exists and belongs to owner.
            hatch = await self.get_hatch(hatch_id)
            if not hatch:
                return False, f"Hatch with id {hatch_id} not found"
            if hatch.user_id != owner_id:
                return (
                    False,
                    f"Hatch with id {hatch_id} does not belong to user {owner_id}",
                )

            # Delete every matching sharing record (duplicates are possible).
            query = self.sharing_collection.where(
                filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
            ).where(
                filter=firestore.FieldFilter("shared_with_id", "==", target_user_id)
            )
            deleted = False
            for doc in query.stream():
                doc.reference.delete()
                deleted = True

            if not deleted:
                return (
                    False,
                    f"Hatch {hatch_id} is not shared with user {target_user_id}",
                )
            return (
                True,
                f"Successfully unshared hatch {hatch_id} from user {target_user_id}",
            )
        except Exception as e:
            logging.exception(
                f"Error unsharing hatch {hatch_id} from user {target_user_id}: {e}"
            )
            return False, f"Error unsharing hatch: {str(e)}"

    async def get_shared_hatches(
        self, user_id: str, offset: int = 0, limit: int = 20
    ) -> Tuple[List[Hatch], str]:
        """Get all hatches shared with a user.

        Args:
            user_id: The ID of the user to get shared hatches for
            offset: Pagination offset
            limit: Maximum number of results to return

        Returns:
            Tuple[List[Hatch], str]: List of shared hatches and error message if any
        """
        try:
            # Page through the sharing records pointing at this user.
            query = (
                self.sharing_collection.where(
                    filter=firestore.FieldFilter("shared_with_id", "==", user_id)
                )
                .limit(limit)
                .offset(offset)
            )
            sharing_docs = list(query.stream())
            if not sharing_docs:
                return [], ""

            # Resolve each record to its hatch; skip dangling references.
            shared_hatches = []
            for doc in sharing_docs:
                hatch_id = doc.to_dict().get("hatch_id")
                if hatch_id:
                    hatch = await self.get_hatch(hatch_id)
                    if hatch:
                        shared_hatches.append(hatch)
            return shared_hatches, ""
        except Exception as e:
            logging.exception(f"Error getting shared hatches for user {user_id}: {e}")
            return [], f"Error getting shared hatches: {str(e)}"

    async def is_hatch_shared_with_user(
        self, hatch_id: str, user_id: str
    ) -> Tuple[bool, str]:
        """Check if a hatch is shared with a specific user.

        Args:
            hatch_id: The ID of the hatch to check
            user_id: The ID of the user to check sharing with

        Returns:
            Tuple[bool, str]: Whether the hatch is shared with the user and a message
        """
        try:
            query = (
                self.sharing_collection.where(
                    filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
                )
                .where(filter=firestore.FieldFilter("shared_with_id", "==", user_id))
                .limit(1)
            )
            if list(query.stream()):
                return True, f"Hatch {hatch_id} is shared with user {user_id}"
            return False, f"Hatch {hatch_id} is not shared with user {user_id}"
        except Exception as e:
            logging.exception(
                f"Error checking if hatch {hatch_id} is shared with user {user_id}: {e}"
            )
            return False, f"Error checking sharing status: {str(e)}"

    async def delete_all_hatch_sharing(self, hatch_id: str) -> Tuple[bool, str]:
        """Delete all sharing records for a hatch.

        Args:
            hatch_id: The ID of the hatch to remove all sharing for

        Returns:
            Tuple[bool, str]: Success status and message
        """
        try:
            query = self.sharing_collection.where(
                filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
            )
            deleted_count = 0
            for doc in query.stream():
                doc.reference.delete()
                deleted_count += 1
            return (
                True,
                f"Successfully deleted {deleted_count} sharing records for hatch {hatch_id}",
            )
        except Exception as e:
            logging.exception(
                f"Error deleting sharing records for hatch {hatch_id}: {e}"
            )
            return False, f"Error deleting sharing records: {str(e)}"
|
|
1
|
+
import logging
|
|
2
|
+
from typing import Union, List, Tuple
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
from google.cloud.exceptions import GoogleCloudError
|
|
5
|
+
from botrun_flow_lang.constants import HATCH_SHARING_STORE_NAME, HATCH_STORE_NAME
|
|
6
|
+
from botrun_flow_lang.services.base.firestore_base import FirestoreBase
|
|
7
|
+
from botrun_hatch.models.hatch import Hatch
|
|
8
|
+
from botrun_hatch.models.hatch_sharing import HatchSharing
|
|
9
|
+
from google.cloud import firestore
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class HatchFsStore(FirestoreBase):
|
|
13
|
+
def __init__(self, env_name: str):
|
|
14
|
+
super().__init__(f"{env_name}-{HATCH_STORE_NAME}")
|
|
15
|
+
self.sharing_collection = self.db.collection(
|
|
16
|
+
f"{env_name}-{HATCH_SHARING_STORE_NAME}"
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
async def get_hatch(self, item_id: str) -> Union[Hatch, None]:
|
|
20
|
+
doc_ref = self.collection.document(item_id)
|
|
21
|
+
doc = doc_ref.get()
|
|
22
|
+
if doc.exists:
|
|
23
|
+
data = doc.to_dict()
|
|
24
|
+
return Hatch(**data)
|
|
25
|
+
else:
|
|
26
|
+
print(f">============Getting hatch {item_id} not exists")
|
|
27
|
+
return None
|
|
28
|
+
|
|
29
|
+
async def set_hatch(self, item: Hatch):
|
|
30
|
+
try:
|
|
31
|
+
# Update updated_at timestamp with current UTC time
|
|
32
|
+
item.updated_at = datetime.now(timezone.utc).isoformat()
|
|
33
|
+
|
|
34
|
+
# Proceed with saving the hatch
|
|
35
|
+
doc_ref = self.collection.document(str(item.id))
|
|
36
|
+
doc_ref.set(item.model_dump())
|
|
37
|
+
return True, item
|
|
38
|
+
|
|
39
|
+
except GoogleCloudError as e:
|
|
40
|
+
logging.error(f"Error setting hatch {item.id}: {e}")
|
|
41
|
+
return False, None
|
|
42
|
+
except Exception as e:
|
|
43
|
+
logging.error(f"Unexpected error setting hatch {item.id}: {e}")
|
|
44
|
+
return False, None
|
|
45
|
+
|
|
46
|
+
async def delete_hatch(self, item_id: str):
|
|
47
|
+
try:
|
|
48
|
+
doc_ref = self.collection.document(item_id)
|
|
49
|
+
doc_ref.delete()
|
|
50
|
+
return True
|
|
51
|
+
except GoogleCloudError as e:
|
|
52
|
+
print(f"Error deleting hatch {item_id}: {e}")
|
|
53
|
+
return False
|
|
54
|
+
|
|
55
|
+
async def get_hatches(
|
|
56
|
+
self,
|
|
57
|
+
user_id: str,
|
|
58
|
+
offset: int = 0,
|
|
59
|
+
limit: int = 20,
|
|
60
|
+
sort_by: str = "updated_at",
|
|
61
|
+
order: str = "desc",
|
|
62
|
+
) -> Tuple[List[Hatch], str]:
|
|
63
|
+
try:
|
|
64
|
+
# 向下兼容:先找出没有 updated_at 字段的记录并批量更新
|
|
65
|
+
# 注意:Firestore 不支持直接筛选"字段不存在",必须遍历所有记录
|
|
66
|
+
if sort_by == "updated_at":
|
|
67
|
+
missing_query = self.collection.where(
|
|
68
|
+
filter=firestore.FieldFilter("user_id", "==", user_id)
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
# 收集需要更新的文档
|
|
72
|
+
docs_to_update = []
|
|
73
|
+
current_time = datetime.now(timezone.utc).isoformat()
|
|
74
|
+
|
|
75
|
+
for doc in missing_query.stream():
|
|
76
|
+
data = doc.to_dict()
|
|
77
|
+
# 如果没有 updated_at 字段或为空,收集起来
|
|
78
|
+
if "updated_at" not in data or not data.get("updated_at"):
|
|
79
|
+
docs_to_update.append((doc.reference, data))
|
|
80
|
+
|
|
81
|
+
# 使用批量写入更新(每批最多 500 个)
|
|
82
|
+
if docs_to_update:
|
|
83
|
+
batch_size = 500
|
|
84
|
+
for i in range(0, len(docs_to_update), batch_size):
|
|
85
|
+
batch = self.db.batch()
|
|
86
|
+
batch_docs = docs_to_update[i:i + batch_size]
|
|
87
|
+
|
|
88
|
+
for doc_ref, data in batch_docs:
|
|
89
|
+
data["updated_at"] = current_time
|
|
90
|
+
batch.set(doc_ref, data)
|
|
91
|
+
|
|
92
|
+
batch.commit()
|
|
93
|
+
|
|
94
|
+
logging.info(f"Auto-migrated {len(docs_to_update)} hatches with updated_at for user {user_id}")
|
|
95
|
+
|
|
96
|
+
# Build base query
|
|
97
|
+
query = self.collection.where(
|
|
98
|
+
filter=firestore.FieldFilter("user_id", "==", user_id)
|
|
99
|
+
)
|
|
100
|
+
|
|
101
|
+
# Add sorting
|
|
102
|
+
# Firestore direction: DESCENDING or ASCENDING
|
|
103
|
+
direction = (
|
|
104
|
+
firestore.Query.DESCENDING if order == "desc" else firestore.Query.ASCENDING
|
|
105
|
+
)
|
|
106
|
+
query = query.order_by(sort_by, direction=direction)
|
|
107
|
+
|
|
108
|
+
# Add pagination
|
|
109
|
+
query = query.offset(offset).limit(limit)
|
|
110
|
+
|
|
111
|
+
docs = query.stream()
|
|
112
|
+
hatches = [Hatch(**doc.to_dict()) for doc in docs]
|
|
113
|
+
return hatches, ""
|
|
114
|
+
except GoogleCloudError as e:
|
|
115
|
+
import traceback
|
|
116
|
+
|
|
117
|
+
traceback.print_exc()
|
|
118
|
+
print(f"Error getting hatches for user {user_id}: {e}")
|
|
119
|
+
return [], f"Error getting hatches for user {user_id}: {e}"
|
|
120
|
+
except Exception as e:
|
|
121
|
+
import traceback
|
|
122
|
+
|
|
123
|
+
traceback.print_exc()
|
|
124
|
+
print(f"Error getting hatches for user {user_id}: {e}")
|
|
125
|
+
return [], f"Error getting hatches for user {user_id}: {e}"
|
|
126
|
+
|
|
127
|
+
async def get_default_hatch(self, user_id: str) -> Union[Hatch, None]:
|
|
128
|
+
try:
|
|
129
|
+
query = (
|
|
130
|
+
self.collection.where(
|
|
131
|
+
filter=firestore.FieldFilter("user_id", "==", user_id)
|
|
132
|
+
)
|
|
133
|
+
.where(filter=firestore.FieldFilter("is_default", "==", True))
|
|
134
|
+
.limit(1)
|
|
135
|
+
)
|
|
136
|
+
docs = query.stream()
|
|
137
|
+
for doc in docs:
|
|
138
|
+
return Hatch(**doc.to_dict())
|
|
139
|
+
return None
|
|
140
|
+
except GoogleCloudError as e:
|
|
141
|
+
print(f"Error getting default hatch for user {user_id}: {e}")
|
|
142
|
+
return None
|
|
143
|
+
|
|
144
|
+
async def set_default_hatch(self, user_id: str, hatch_id: str) -> Tuple[bool, str]:
|
|
145
|
+
try:
|
|
146
|
+
# 获取当前的默认 hatch
|
|
147
|
+
current_default = await self.get_default_hatch(user_id)
|
|
148
|
+
|
|
149
|
+
# 获取要设置为默认的 hatch
|
|
150
|
+
new_default = await self.get_hatch(hatch_id)
|
|
151
|
+
if not new_default or new_default.user_id != user_id:
|
|
152
|
+
return (
|
|
153
|
+
False,
|
|
154
|
+
f"Hatch with id {hatch_id} not found or does not belong to user {user_id}",
|
|
155
|
+
)
|
|
156
|
+
|
|
157
|
+
# 更新当前默认 hatch(如果存在)
|
|
158
|
+
if current_default and current_default.id != hatch_id:
|
|
159
|
+
current_default.is_default = False
|
|
160
|
+
success, _ = await self.set_hatch(current_default)
|
|
161
|
+
if not success:
|
|
162
|
+
return (
|
|
163
|
+
False,
|
|
164
|
+
f"Failed to update current default hatch {current_default.id}",
|
|
165
|
+
)
|
|
166
|
+
|
|
167
|
+
# 设置新的默认 hatch
|
|
168
|
+
new_default.is_default = True
|
|
169
|
+
success, _ = await self.set_hatch(new_default)
|
|
170
|
+
if not success:
|
|
171
|
+
return False, f"Failed to set hatch {hatch_id} as default"
|
|
172
|
+
|
|
173
|
+
return (
|
|
174
|
+
True,
|
|
175
|
+
f"Successfully set hatch {hatch_id} as default for user {user_id}",
|
|
176
|
+
)
|
|
177
|
+
except Exception as e:
|
|
178
|
+
print(f"Error setting default hatch: {e}")
|
|
179
|
+
return False, f"An error occurred: {str(e)}"
|
|
180
|
+
|
|
181
|
+
async def share_hatch(
|
|
182
|
+
self, hatch_id: str, owner_id: str, target_user_id: str
|
|
183
|
+
) -> Tuple[bool, str]:
|
|
184
|
+
"""Share a hatch with another user.
|
|
185
|
+
|
|
186
|
+
Args:
|
|
187
|
+
hatch_id: The ID of the hatch to share
|
|
188
|
+
owner_id: The ID of the user who owns the hatch
|
|
189
|
+
target_user_id: The ID of the user to share the hatch with
|
|
190
|
+
|
|
191
|
+
Returns:
|
|
192
|
+
Tuple[bool, str]: Success status and message
|
|
193
|
+
"""
|
|
194
|
+
try:
|
|
195
|
+
# Verify hatch exists and belongs to owner
|
|
196
|
+
hatch = await self.get_hatch(hatch_id)
|
|
197
|
+
if not hatch:
|
|
198
|
+
return False, f"Hatch with id {hatch_id} not found"
|
|
199
|
+
|
|
200
|
+
if hatch.user_id != owner_id:
|
|
201
|
+
return (
|
|
202
|
+
False,
|
|
203
|
+
f"Hatch with id {hatch_id} does not belong to user {owner_id}",
|
|
204
|
+
)
|
|
205
|
+
|
|
206
|
+
# Check if sharing already exists
|
|
207
|
+
query = (
|
|
208
|
+
self.sharing_collection.where(
|
|
209
|
+
filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
|
|
210
|
+
)
|
|
211
|
+
.where(
|
|
212
|
+
filter=firestore.FieldFilter("shared_with_id", "==", target_user_id)
|
|
213
|
+
)
|
|
214
|
+
.limit(1)
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
docs = list(query.stream())
|
|
218
|
+
if docs:
|
|
219
|
+
return (
|
|
220
|
+
True,
|
|
221
|
+
f"Hatch {hatch_id} is already shared with user {target_user_id}",
|
|
222
|
+
)
|
|
223
|
+
|
|
224
|
+
# Create sharing record
|
|
225
|
+
sharing = HatchSharing(
|
|
226
|
+
hatch_id=hatch_id, owner_id=owner_id, shared_with_id=target_user_id
|
|
227
|
+
)
|
|
228
|
+
|
|
229
|
+
# Store in Firestore
|
|
230
|
+
doc_ref = self.sharing_collection.document()
|
|
231
|
+
doc_ref.set(sharing.model_dump())
|
|
232
|
+
|
|
233
|
+
return (
|
|
234
|
+
True,
|
|
235
|
+
f"Successfully shared hatch {hatch_id} with user {target_user_id}",
|
|
236
|
+
)
|
|
237
|
+
|
|
238
|
+
except Exception as e:
|
|
239
|
+
import traceback
|
|
240
|
+
|
|
241
|
+
traceback.print_exc()
|
|
242
|
+
print(f"Error sharing hatch {hatch_id} with user {target_user_id}: {e}")
|
|
243
|
+
return False, f"Error sharing hatch: {str(e)}"
|
|
244
|
+
|
|
245
|
+
async def unshare_hatch(
    self, hatch_id: str, owner_id: str, target_user_id: str
) -> Tuple[bool, str]:
    """Revoke a previously granted share of a hatch.

    Verifies ownership first, then deletes every sharing record that links
    the hatch to the target user.

    Args:
        hatch_id: ID of the hatch whose share is being revoked.
        owner_id: ID of the user who must own the hatch.
        target_user_id: ID of the user losing access.

    Returns:
        Tuple[bool, str]: (success flag, human-readable message).
    """
    try:
        # Guard: the hatch must exist and be owned by the caller.
        hatch = await self.get_hatch(hatch_id)
        if not hatch:
            return False, f"Hatch with id {hatch_id} not found"
        if hatch.user_id != owner_id:
            return (
                False,
                f"Hatch with id {hatch_id} does not belong to user {owner_id}",
            )

        # Locate every sharing record for this (hatch, user) pair.
        sharing_query = self.sharing_collection.where(
            filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
        ).where(
            filter=firestore.FieldFilter("shared_with_id", "==", target_user_id)
        )

        # Delete them all; remember whether anything was actually removed.
        removed_any = False
        for snapshot in sharing_query.stream():
            snapshot.reference.delete()
            removed_any = True

        if not removed_any:
            return (
                False,
                f"Hatch {hatch_id} is not shared with user {target_user_id}",
            )

        return (
            True,
            f"Successfully unshared hatch {hatch_id} from user {target_user_id}",
        )

    except Exception as e:
        import traceback

        traceback.print_exc()
        print(f"Error unsharing hatch {hatch_id} from user {target_user_id}: {e}")
        return False, f"Error unsharing hatch: {str(e)}"
|
|
300
|
+
|
|
301
|
+
async def get_shared_hatches(
    self, user_id: str, offset: int = 0, limit: int = 20
) -> Tuple[List[Hatch], str]:
    """List the hatches that other users have shared with *user_id*.

    Reads a page of sharing records, then resolves each record's hatch_id
    to the full hatch object (records pointing at missing hatches are
    silently dropped).

    Args:
        user_id: ID of the user whose shared hatches are requested.
        offset: Pagination offset into the sharing records.
        limit: Maximum number of sharing records to read.

    Returns:
        Tuple[List[Hatch], str]: (resolved hatches, error message or "").
    """
    try:
        # Page through sharing records addressed to this user.
        paged_query = (
            self.sharing_collection.where(
                filter=firestore.FieldFilter("shared_with_id", "==", user_id)
            )
            .limit(limit)
            .offset(offset)
        )

        records = list(paged_query.stream())
        if not records:
            # Nothing shared with this user.
            return [], ""

        # Resolve each record to its hatch, skipping dangling references.
        results: List[Hatch] = []
        for record in records:
            shared_id = record.to_dict().get("hatch_id")
            if not shared_id:
                continue
            found = await self.get_hatch(shared_id)
            if found:
                results.append(found)

        return results, ""

    except Exception as e:
        import traceback

        traceback.print_exc()
        print(f"Error getting shared hatches for user {user_id}: {e}")
        return [], f"Error getting shared hatches: {str(e)}"
|
|
349
|
+
|
|
350
|
+
async def is_hatch_shared_with_user(
    self, hatch_id: str, user_id: str
) -> Tuple[bool, str]:
    """Report whether *hatch_id* has been shared with *user_id*.

    Args:
        hatch_id: ID of the hatch to check.
        user_id: ID of the prospective recipient.

    Returns:
        Tuple[bool, str]: (shared flag, explanatory message).
    """
    try:
        # One matching record is enough to answer the question.
        lookup = (
            self.sharing_collection.where(
                filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
            )
            .where(filter=firestore.FieldFilter("shared_with_id", "==", user_id))
            .limit(1)
        )

        first_match = next(iter(lookup.stream()), None)
        if first_match is not None:
            return True, f"Hatch {hatch_id} is shared with user {user_id}"
        return False, f"Hatch {hatch_id} is not shared with user {user_id}"

    except Exception as e:
        import traceback

        traceback.print_exc()
        print(
            f"Error checking if hatch {hatch_id} is shared with user {user_id}: {e}"
        )
        return False, f"Error checking sharing status: {str(e)}"
|
|
387
|
+
|
|
388
|
+
async def delete_all_hatch_sharing(self, hatch_id: str) -> Tuple[bool, str]:
    """Remove every sharing record attached to a hatch.

    Typically used when the hatch itself is deleted, so no stale sharing
    records remain. Succeeds even when no records exist (deletes zero).

    Args:
        hatch_id: ID of the hatch whose sharing records are purged.

    Returns:
        Tuple[bool, str]: (success flag, message including deletion count).
    """
    try:
        # All sharing records that reference this hatch.
        match_query = self.sharing_collection.where(
            filter=firestore.FieldFilter("hatch_id", "==", hatch_id)
        )

        # Delete each record, counting as we go for the status message.
        deleted_count = 0
        for snapshot in match_query.stream():
            snapshot.reference.delete()
            deleted_count += 1

        return (
            True,
            f"Successfully deleted {deleted_count} sharing records for hatch {hatch_id}",
        )

    except Exception as e:
        import traceback

        traceback.print_exc()
        print(f"Error deleting sharing records for hatch {hatch_id}: {e}")
        return False, f"Error deleting sharing records: {str(e)}"
|