deepfos 1.1.60__py3-none-any.whl → 1.1.78__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. deepfos/_version.py +3 -3
  2. deepfos/api/V1_1/models/business_model.py +322 -322
  3. deepfos/api/V1_1/models/dimension.py +1075 -1075
  4. deepfos/api/V1_2/models/dimension.py +1119 -1116
  5. deepfos/api/account.py +1 -0
  6. deepfos/api/app.py +1 -0
  7. deepfos/api/base.py +70 -71
  8. deepfos/api/deep_pipeline.py +1 -1
  9. deepfos/api/deepconnector.py +3 -3
  10. deepfos/api/financial_model.py +12 -0
  11. deepfos/api/models/account.py +130 -130
  12. deepfos/api/models/accounting_engines.py +250 -250
  13. deepfos/api/models/app.py +355 -355
  14. deepfos/api/models/approval_process.py +231 -231
  15. deepfos/api/models/base.py +49 -209
  16. deepfos/api/models/business_model.py +239 -239
  17. deepfos/api/models/consolidation.py +196 -196
  18. deepfos/api/models/consolidation_process.py +31 -31
  19. deepfos/api/models/datatable_mysql.py +78 -78
  20. deepfos/api/models/deep_pipeline.py +20 -9
  21. deepfos/api/models/deepconnector.py +9 -8
  22. deepfos/api/models/deepfos_task.py +118 -118
  23. deepfos/api/models/deepmodel.py +120 -120
  24. deepfos/api/models/dimension.py +613 -610
  25. deepfos/api/models/financial_model.py +691 -663
  26. deepfos/api/models/journal_model.py +120 -120
  27. deepfos/api/models/journal_template.py +185 -185
  28. deepfos/api/models/memory_financial_model.py +131 -131
  29. deepfos/api/models/platform.py +16 -16
  30. deepfos/api/models/python.py +32 -32
  31. deepfos/api/models/reconciliation_engine.py +104 -104
  32. deepfos/api/models/reconciliation_report.py +29 -29
  33. deepfos/api/models/role_strategy.py +213 -213
  34. deepfos/api/models/smartlist.py +86 -86
  35. deepfos/api/models/space.py +312 -312
  36. deepfos/api/models/system.py +299 -297
  37. deepfos/api/models/variable.py +131 -131
  38. deepfos/api/models/workflow.py +290 -270
  39. deepfos/api/platform.py +3 -1
  40. deepfos/api/space.py +1 -0
  41. deepfos/api/system.py +1 -0
  42. deepfos/api/workflow.py +8 -0
  43. deepfos/cache.py +50 -4
  44. deepfos/element/bizmodel.py +2 -2
  45. deepfos/element/deep_pipeline.py +29 -16
  46. deepfos/element/deepconnector.py +36 -1
  47. deepfos/element/deepmodel.py +591 -332
  48. deepfos/element/dimension.py +30 -17
  49. deepfos/element/finmodel.py +542 -101
  50. deepfos/element/journal.py +20 -10
  51. deepfos/element/rolestrategy.py +4 -4
  52. deepfos/element/variable.py +23 -17
  53. deepfos/element/workflow.py +60 -3
  54. deepfos/exceptions/__init__.py +1 -1
  55. deepfos/lib/deepchart.py +14 -13
  56. deepfos/lib/deepux.py +11 -11
  57. deepfos/lib/discovery.py +3 -0
  58. deepfos/lib/filterparser.py +2 -2
  59. deepfos/lib/k8s.py +101 -0
  60. deepfos/lib/msg.py +34 -8
  61. deepfos/lib/serutils.py +34 -9
  62. deepfos/lib/sysutils.py +37 -18
  63. deepfos/lib/utils.py +62 -2
  64. deepfos/options.py +39 -8
  65. {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/METADATA +7 -7
  66. {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/RECORD +68 -67
  67. {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/WHEEL +0 -0
  68. {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/top_level.txt +0 -0
deepfos/lib/msg.py CHANGED
@@ -110,8 +110,9 @@ class AsyncMsgCenter:
110
110
  title_param: Dict[str, str] = None,
111
111
  content_param: Dict[str, str] = None,
112
112
  attachments: Dict[str, Union[str, bytes]] = None,
113
- cc_email: List[str] = None,
114
- api_idx: Literal[0, 1] = 0
113
+ cc_email: List = None,
114
+ api_idx: Literal[0, 1] = 0,
115
+ success_rule: Literal['partial', 'all'] = 'all'
115
116
  ) -> List:
116
117
  payload = {
117
118
  'receiver': receivers,
@@ -139,7 +140,11 @@ class AsyncMsgCenter:
139
140
 
140
141
  api = self.type_api_map[tpl_type][api_idx]
141
142
  resp = await api(payload)
142
- if resp.failure:
143
+ if resp.failure and (
144
+ success_rule == 'all' or (
145
+ success_rule == 'partial' and not resp.success
146
+ )
147
+ ):
143
148
  raise MsgCenterError(*resp.failure)
144
149
 
145
150
  return resp.success
@@ -153,6 +158,9 @@ class AsyncMsgCenter:
153
158
  title_param: Dict[str, str] = None,
154
159
  content_param: Dict[str, str] = None,
155
160
  attachments: Dict[str, Union[str, bytes]] = None,
161
+ success_rule: Literal['partial', 'all'] = 'all',
162
+ cc_users: List[str] = None,
163
+ cc_groups: List[str] = None,
156
164
  ) -> List:
157
165
  """推送指定消息模版的消息
158
166
 
@@ -164,6 +172,9 @@ class AsyncMsgCenter:
164
172
  title_param: 可选,标题变量
165
173
  content_param: 可选,内容变量
166
174
  attachments: 可选,站内消息或邮箱的消息附件,以 文件名: 文件(字符串/bytes) 的字典形式提供
175
+ success_rule: 可选,发送成功规则,'partial' 表示部分成功即可,'all' 表示所有收件人都必须成功
176
+ cc_users: 可选,抄送人userid列表
177
+ cc_groups: 可选,抄送人groupid列表
167
178
 
168
179
  .. admonition:: 示例
169
180
 
@@ -210,6 +221,11 @@ class AsyncMsgCenter:
210
221
  receivers.extend([{'id': r, 'type': 'USER'} for r in receiver_users])
211
222
  if receiver_groups:
212
223
  receivers.extend([{'id': r, 'type': 'GROUP'} for r in receiver_groups])
224
+ cc_email = []
225
+ if cc_users:
226
+ cc_email.extend([{'id': r, 'type': 'USER'} for r in cc_users])
227
+ if cc_groups:
228
+ cc_email.extend([{'id': r, 'type': 'GROUP'} for r in cc_groups])
213
229
 
214
230
  if not receivers:
215
231
  raise ValueError('需提供receiver_users和receiver_groups中的至少一项')
@@ -217,7 +233,8 @@ class AsyncMsgCenter:
217
233
  return await self._publish(
218
234
  template, sender=sender, receivers=receivers,
219
235
  title_param=title_param, content_param=content_param,
220
- attachments=attachments,
236
+ attachments=attachments, success_rule=success_rule,
237
+ cc_email=cc_email
221
238
  )
222
239
 
223
240
  async def send_mail(
@@ -229,6 +246,7 @@ class AsyncMsgCenter:
229
246
  content_param: Dict[str, str] = None,
230
247
  attachments: Dict[str, Union[str, bytes]] = None,
231
248
  cc_email: List[str] = None,
249
+ success_rule: Literal['partial', 'all'] = 'all'
232
250
  ) -> List:
233
251
  """发送指定消息模版的邮件
234
252
 
@@ -240,6 +258,7 @@ class AsyncMsgCenter:
240
258
  content_param: 可选,内容变量
241
259
  attachments: 可选,附件,以 文件名: 文件(字符串/bytes) 的字典形式提供
242
260
  cc_email: 可选,抄送人列表
261
+ success_rule: 可选,发送成功规则,'partial' 表示部分成功即可,'all' 表示所有收件人都必须成功
243
262
 
244
263
 
245
264
  .. admonition:: 示例
@@ -289,7 +308,7 @@ class AsyncMsgCenter:
289
308
  template, sender=sender, receivers=receivers,
290
309
  title_param=title_param, content_param=content_param,
291
310
  attachments=attachments, cc_email=cc_email,
292
- api_idx=1
311
+ api_idx=1, success_rule=success_rule,
293
312
  )
294
313
 
295
314
  async def send_sms(
@@ -298,7 +317,8 @@ class AsyncMsgCenter:
298
317
  receivers: List[str],
299
318
  sender: str = None,
300
319
  title_param: Dict[str, str] = None,
301
- content_param: Dict[str, str] = None
320
+ content_param: Dict[str, str] = None,
321
+ success_rule: Literal['partial', 'all'] = 'all'
302
322
  ) -> List:
303
323
  """发送指定消息模版的短信
304
324
 
@@ -308,6 +328,7 @@ class AsyncMsgCenter:
308
328
  sender: 可选,发送人userid,默认为当前用户id
309
329
  title_param: 可选,标题变量
310
330
  content_param: 可选,内容变量
331
+ success_rule: 可选,发送成功规则,'partial' 表示部分成功即可,'all' 表示所有收件人都必须成功
311
332
 
312
333
 
313
334
  .. admonition:: 示例
@@ -351,7 +372,7 @@ class AsyncMsgCenter:
351
372
  return await self._publish(
352
373
  template, sender=sender, receivers=receivers,
353
374
  title_param=title_param, content_param=content_param,
354
- api_idx=1
375
+ api_idx=1, success_rule=success_rule,
355
376
  )
356
377
 
357
378
 
@@ -368,6 +389,9 @@ class MsgCenter(AsyncMsgCenter, metaclass=SyncMeta):
368
389
  title_param: Dict[str, str] = None,
369
390
  content_param: Dict[str, str] = None,
370
391
  attachments: Dict[str, Union[str, bytes]] = None,
392
+ success_rule: Literal['partial', 'all'] = 'all',
393
+ cc_users: List[str] = None,
394
+ cc_groups: List[str] = None,
371
395
  ) -> List:
372
396
  ...
373
397
 
@@ -380,6 +404,7 @@ class MsgCenter(AsyncMsgCenter, metaclass=SyncMeta):
380
404
  content_param: Dict[str, str] = None,
381
405
  attachments: Dict[str, Union[str, bytes]] = None,
382
406
  cc_email: List[str] = None,
407
+ success_rule: Literal['partial', 'all'] = 'all'
383
408
  ) -> List:
384
409
  ...
385
410
 
@@ -389,6 +414,7 @@ class MsgCenter(AsyncMsgCenter, metaclass=SyncMeta):
389
414
  receivers: List[str],
390
415
  sender: str = None,
391
416
  title_param: Dict[str, str] = None,
392
- content_param: Dict[str, str] = None
417
+ content_param: Dict[str, str] = None,
418
+ success_rule: Literal['partial', 'all'] = 'all'
393
419
  ) -> List:
394
420
  ...
deepfos/lib/serutils.py CHANGED
@@ -3,9 +3,10 @@ from __future__ import annotations
3
3
  import datetime
4
4
  import decimal
5
5
  import functools
6
+ import json
6
7
  import uuid
7
8
  from dataclasses import dataclass
8
- from typing import Any
9
+ from typing import Any, Dict, Union, List
9
10
 
10
11
  import edgedb
11
12
  from edgedb.introspect import introspect_object as intro
@@ -24,12 +25,20 @@ def serialize(o, ctx: Context = Context()):
24
25
 
25
26
 
26
27
  @functools.singledispatch
27
- def deserialize(o):
28
+ def deserialize(o, field_info: Union[Dict, List[str], str] = None):
28
29
  raise TypeError(f'无法反序列化类型: {type(o)}')
29
30
 
30
31
 
31
32
  @deserialize.register
32
- def to_object(o: dict):
33
+ def to_object(o: dict, field_info: Union[Dict, List[str], str] = None):
34
+ if field_info == 'json':
35
+ return o
36
+ if (
37
+ isinstance(field_info, dict)
38
+ and len(field_info) == 1
39
+ and field_info.get(None) == 'json'
40
+ ):
41
+ return o
33
42
  _id = o.pop('id', None)
34
43
  ordered_attr = o.keys()
35
44
  obj_cls = datatypes.create_object_factory(
@@ -42,20 +51,34 @@ def to_object(o: dict):
42
51
  actual_id = uuid.UUID(_id)
43
52
  elif isinstance(_id, uuid.UUID):
44
53
  actual_id = _id
54
+ # NB: in case of incorrect field info
55
+ if not isinstance(field_info, dict):
56
+ field_info = {}
57
+
45
58
  return obj_cls(
46
59
  actual_id,
47
- *[deserialize(o[k]) for k in ordered_attr]
60
+ *[deserialize(o[k], field_info.get(k, {})) for k in ordered_attr]
48
61
  )
49
62
 
50
63
 
51
64
  @deserialize.register
52
- def to_set(o: list):
53
- return [deserialize(ele) for ele in o]
65
+ def to_set(o: list, field_info: Union[Dict, List[str], str] = None):
66
+ if isinstance(field_info, list):
67
+ field_info = {
68
+ f['name']: f['type'] if f['fields'] is None else f['fields']
69
+ for f in field_info
70
+ }
71
+ return [deserialize(ele, field_info) for ele in o]
54
72
 
55
73
 
56
74
  @deserialize.register
57
- def to_tuple(o: tuple):
58
- return tuple(deserialize(ele) for ele in o)
75
+ def to_tuple(o: tuple, field_info: Union[Dict, List[str], str] = None):
76
+ if isinstance(field_info, list):
77
+ field_info = {
78
+ f['name']: f['type'] if f['fields'] is None else f['fields']
79
+ for f in field_info
80
+ }
81
+ return tuple(deserialize(ele, field_info) for ele in o)
59
82
 
60
83
 
61
84
  @deserialize.register(int)
@@ -65,7 +88,7 @@ def to_tuple(o: tuple):
65
88
  @deserialize.register(bool)
66
89
  @deserialize.register(type(None))
67
90
  @deserialize.register(datetime.datetime)
68
- def to_scalar(o):
91
+ def to_scalar(o, field_info: Union[Dict, List[str], str] = None):
69
92
  return o
70
93
 
71
94
 
@@ -173,6 +196,8 @@ def _set(o, ctx: Context = Context()):
173
196
  @serialize.register(uuid.UUID)
174
197
  @serialize.register(decimal.Decimal)
175
198
  def _scalar(o, ctx: Context = Context()):
199
+ if ctx.frame_desc == 'std::json' and isinstance(o, str):
200
+ return json.loads(o)
176
201
  return o
177
202
 
178
203
 
deepfos/lib/sysutils.py CHANGED
@@ -104,6 +104,8 @@ def complete_cartesian_product(
104
104
  fix: Dict[str, Union[str, list]],
105
105
  df: pd.DataFrame = None,
106
106
  paths: Union[str, Dict[str, str]] = None,
107
+ folder_ids: Union[str, Dict[str, str]] = None,
108
+ col_dim_map: Dict[str, str] = None,
107
109
  ) -> pd.DataFrame:
108
110
  """
109
111
  构造完整的维度成员笛卡尔积
@@ -113,6 +115,8 @@ def complete_cartesian_product(
113
115
  df: 如果需要为现有DataFrame补全笛卡尔积,传入一个df。如果不传,则是生成fix中维度成员的笛卡尔积。
114
116
  paths: fix中维度的path,如果所有维度的目录相同,传同一个path,否则传字典,key为维度名,value为path。
115
117
  如果不传,则自动寻找维度对应的path。
118
+ folder_ids: 类似paths, 但值是folder_id
119
+ col_dim_map: data中的列名与实际维度名的映射关系,默认data中的列名与维度名相同
116
120
 
117
121
  Returns:
118
122
  维度成员笛卡尔积的DataFrame
@@ -147,33 +151,49 @@ def complete_cartesian_product(
147
151
  from deepfos.element.dimension import AsyncDimension
148
152
  from .asynchronous import evloop
149
153
 
150
- if isinstance(paths, dict):
151
- path_getter = paths.__getitem__
154
+ if paths is not None:
155
+ loc_key = "path"
156
+ if isinstance(paths, dict):
157
+ loc_getter = paths.get
158
+ else:
159
+ loc_getter = lambda _: paths
160
+ elif folder_ids is not None:
161
+ loc_key = "folder_id"
162
+
163
+ if isinstance(folder_ids, dict):
164
+ loc_getter = folder_ids.get
165
+ else:
166
+ loc_getter = lambda _: folder_ids
152
167
  else:
153
- path_getter = lambda _: paths
168
+ loc_key = "path"
169
+ loc_getter = lambda _: None
170
+ if col_dim_map is None:
171
+ col_dim_map = {}
154
172
 
155
173
  # 遍历fix,如果fix的值为str,则认为是维度表达式,将表达式转换为成员list
156
174
  mbrs = {}
157
175
  futures = []
158
176
 
159
- for dim, exp in fix.items():
177
+ for col, exp in fix.items():
160
178
  if isinstance(exp, str):
161
179
  if "(" not in exp:
162
- mbrs[dim] = exp.split(';')
180
+ mbrs[col] = exp.split(';')
163
181
  else:
164
- path = path_getter(dim)
165
- future = evloop.apply(AsyncDimension(element_name=dim, path=path).query(
182
+ dim = col_dim_map.get(col, col)
183
+ loc = loc_getter(dim)
184
+ future = evloop.apply(AsyncDimension(element_name=dim, **{loc_key: loc}).query(
166
185
  expression=exp, fields=['name'], as_model=False
167
186
  ))
168
187
 
169
- futures.append((dim, future))
188
+ futures.append((col, future))
170
189
  else:
171
190
  if not isinstance(exp, list):
172
191
  raise TypeError('fix参数的value只能为维度表达式(str)或维度成员(list)')
173
- mbrs[dim] = exp
192
+ mbrs[col] = exp
174
193
 
175
- for dim, future in futures:
176
- mbrs[dim] = [item['name'] for item in future.result()]
194
+ for col, future in futures:
195
+ dim_mbrs = future.result()
196
+ mbrs[col] = [item['name'] for item in dim_mbrs]
177
197
 
178
198
  if df is None:
179
199
  df = pd.DataFrame(columns=list(fix.keys()))
@@ -459,14 +479,14 @@ class TaskUtil:
459
479
  py_info: 任务实例对应的python元素信息,如未提供,仍可在 `run_job_contents` 中提供
460
480
 
461
481
  """
462
- api = TaskAPI(sync=True)
463
- _chunksize = 200
464
482
 
465
483
  def __init__(self, task_code, py_info: PyInfo = None):
466
484
  self.task_code = task_code
485
+ self.api = TaskAPI(sync=True)
467
486
  if self.meta is None:
468
487
  raise ValueError(f"No config for task_code: [{self.task_code}].")
469
488
  self.py_info = py_info
489
+ self._chunksize = 200
470
490
 
471
491
  @cached_property
472
492
  def meta(self):
@@ -559,18 +579,17 @@ class TaskUtil:
559
579
  if not_found_col:
560
580
  raise ValueError(f'Required columns:{sorted(not_found_col)} since they are compositeKeys.')
561
581
 
562
- @classmethod
563
- def _create_task_instance(cls, call_api, job_contents, payload):
564
- if job_contents.shape[0] <= cls._chunksize:
582
+ def _create_task_instance(self, call_api, job_contents, payload):
583
+ if job_contents.shape[0] <= self._chunksize:
565
584
  payload.jobContent = job_contents.to_dict(orient='records')
566
585
  payload.lastBatch = True
567
586
  call_api(payload)
568
587
  else:
569
- payload.jobContent = job_contents.iloc[0:cls._chunksize:].to_dict(orient='records')
588
+ payload.jobContent = job_contents.iloc[0:self._chunksize:].to_dict(orient='records')
570
589
  payload.batchId = call_api(payload).batchId
571
590
  payloads = []
572
591
 
573
- for batch_contents in split_dataframe(job_contents.iloc[cls._chunksize::], cls._chunksize):
592
+ for batch_contents in split_dataframe(job_contents.iloc[self._chunksize::], self._chunksize):
574
593
  payload.jobContent = batch_contents.to_dict(orient='records')
575
594
  payloads.append(payload)
576
595
 
deepfos/lib/utils.py CHANGED
@@ -7,13 +7,14 @@ import sys
7
7
  import time
8
8
  import weakref
9
9
  from collections import UserList, UserDict, defaultdict
10
+ from contextlib import contextmanager, nullcontext
10
11
  from enum import EnumMeta, Enum
11
12
  import random
12
13
  from typing import (
13
14
  Tuple, Optional, Dict,
14
15
  List, Union, Callable, Any,
15
16
  TypeVar, MutableMapping, Container,
16
- Iterator, Iterable, DefaultDict
17
+ Iterator, Iterable, DefaultDict,
17
18
  )
18
19
  from itertools import groupby, count
19
20
 
@@ -867,6 +868,38 @@ def dict_to_sql(
867
868
  return sql
868
869
 
869
870
 
871
+ class ChunkAlert:
872
+ def __call__(self, start: int, end: int, exc: Exception = None) -> None: ...
873
+
874
+
875
+ @contextmanager
876
+ def chunk_alert(
877
+ start: int, end: int,
878
+ before: ChunkAlert = None,
879
+ after: ChunkAlert = None,
880
+ ):
881
+ try:
882
+ if before is not None:
883
+ try:
884
+ before(start, end)
885
+ except Exception:
886
+ logger.warning('Error occurs while calling before_chunk.')
887
+ yield
888
+ except Exception as e:
889
+ if after is not None:
890
+ try:
891
+ after(start, end, e)
892
+ except Exception:
893
+ logger.warning('Error occurs while calling after_chunk.')
894
+ raise
895
+ else:
896
+ if after is not None:
897
+ try:
898
+ after(start, end)
899
+ except Exception:
900
+ logger.warning('Error occurs while calling after_chunk.')
901
+
902
+
870
903
  def split_dataframe(data: pd.DataFrame, chunksize: int = None):
871
904
  nrows = len(data)
872
905
  if chunksize is None or chunksize > nrows:
@@ -878,6 +911,33 @@ def split_dataframe(data: pd.DataFrame, chunksize: int = None):
878
911
  yield data.iloc[i: i + chunksize]
879
912
 
880
913
 
914
+ def split_dataframe_alert(
915
+ data: pd.DataFrame,
916
+ chunksize: int = None,
917
+ before_chunk: ChunkAlert = None,
918
+ after_chunk: ChunkAlert = None,
919
+ ):
920
+ no_alert = before_chunk is None and after_chunk is None
921
+
922
+ nrows = len(data)
923
+ if chunksize is None or chunksize > nrows:
924
+ if no_alert:
925
+ yield data, nullcontext()
926
+ else:
927
+ yield data, chunk_alert(0, nrows, before_chunk, after_chunk)
928
+ elif chunksize <= 0:
929
+ raise ValueError("chunksize must be greater than 0.")
930
+ else:
931
+ for i in range(0, nrows, chunksize):
932
+ if no_alert:
933
+ yield data.iloc[i: i + chunksize], nullcontext()
934
+ else:
935
+ yield (
936
+ data.iloc[i: i + chunksize],
937
+ chunk_alert(i, min(i + chunksize, nrows), before_chunk, after_chunk)
938
+ )
939
+
940
+
881
941
  def find_str(
882
942
  target: str,
883
943
  candidates: Iterable[str],
@@ -916,7 +976,7 @@ def to_version_tuple(ver: Union[float, str], max_split: int = 1):
916
976
  if isinstance(ver, float):
917
977
  ver = str(ver)
918
978
  version_parts = ver.replace('.', '_').split('_', max_split)
919
- version_list = [int(part) for part in version_parts]
979
+ version_list = [int(part) if part.isdigit() else 0 for part in version_parts]
920
980
  return tuple(version_list)
921
981
 
922
982
 
deepfos/options.py CHANGED
@@ -1,5 +1,6 @@
1
1
  import configparser
2
2
  import sys
3
+ import threading
3
4
  import warnings
4
5
  from enum import Enum
5
6
  from functools import partial
@@ -114,7 +115,8 @@ def _ensure_discovery_server_is_set(value):
114
115
  impl = OPTION.discovery.implementation
115
116
  server_opt = {
116
117
  'eureka': OPTION.server.eureka,
117
- 'nacos': OPTION.nacos.server
118
+ 'nacos': OPTION.nacos.server,
119
+ 'k8s': OPTION.k8s.namespace,
118
120
  }[impl]
119
121
 
120
122
  if server_opt is None:
@@ -276,6 +278,10 @@ class _Option(Generic[T_Opt]):
276
278
  self.unset_action = _Action.PASS
277
279
  self.unset = False
278
280
 
281
+ def _quick_set(self, instance, value):
282
+ setattr(instance, self._var_name, value)
283
+ self.unset = False
284
+
279
285
  def __str__(self):
280
286
  return self._display_name
281
287
 
@@ -366,7 +372,7 @@ class _Category:
366
372
  setattr(self, '_name', getattr(other, '_name'))
367
373
  for attr, option in self._options.items():
368
374
  try:
369
- option.__set__(self, getattr(other, attr))
375
+ option._quick_set(self, getattr(other, attr)) # noqa
370
376
  except BaseOptionError:
371
377
  continue
372
378
 
@@ -374,9 +380,9 @@ class _Category:
374
380
  class _Server(_Category):
375
381
  __id__ = 'server_url'
376
382
 
377
- base = _Option('http://', val_type=str)
383
+ base = _Option('http://web-gateway', val_type=str)
378
384
  app = _Option('http://app-server', val_type=str)
379
- account = _Option('http://account-server', val_type=str)
385
+ account = _Option('http://seepln-account', val_type=str)
380
386
  system = _Option('http://system-server', val_type=str)
381
387
  space = _Option('http://space-server', val_type=str)
382
388
  platform_file = _Option('http://platform-file-server', val_type=str)
@@ -479,16 +485,18 @@ class _Boost(_Category):
479
485
  class _ServiceDiscovery(_Category):
480
486
  __id__ = 'service_discovery'
481
487
 
482
- #: 是否使用服务发现功能
488
+ #: 是否使用服务发现功能(仅影响DynamicAPI)
483
489
  enabled = _Option(False, val_type=bool, on_set=_ensure_discovery_server_is_set)
484
490
  #: 服务注册发现使用的实现
485
- implementation = _Option('eureka', val_type=str, val_choices=('eureka', 'nacos'))
491
+ implementation = _Option('eureka', val_type=str, val_choices=('eureka', 'nacos', 'k8s'))
486
492
  #: 服务注册发现使用的缓存策略
487
493
  cache_strategy = _Option(
488
494
  'ranked',
489
495
  val_type=str,
490
496
  val_choices=('ranked', 'roundrobin', 'random')
491
497
  )
498
+ #: 是否完全使用服务发现获取请求地址(包括Root API)
499
+ take_over = _Option(False, val_type=bool)
492
500
 
493
501
  def __get__(self, instance, owner) -> '_ServiceDiscovery':
494
502
  """defined to help ide"""
@@ -508,6 +516,16 @@ class _Nacos(_Category):
508
516
  return super().__get__(instance, owner)
509
517
 
510
518
 
519
+ class _Kubernets(_Category):
520
+ """Nacos 相关配置"""
521
+
522
+ namespace = _Option(None, val_type=str)
523
+
524
+ def __get__(self, instance, owner) -> '_Kubernets':
525
+ """defined to help ide"""
526
+ return super().__get__(instance, owner)
527
+
528
+
511
529
  # -----------------------------------------------------------------------------
512
530
  # Options
513
531
  class _GlobalOptions:
@@ -520,6 +538,7 @@ class _GlobalOptions:
520
538
  discovery = _ServiceDiscovery()
521
539
  nacos = _Nacos()
522
540
  edgedb = _Edgedb()
541
+ k8s = _Kubernets()
523
542
 
524
543
  def load_file(self, filepath):
525
544
  parser = configparser.ConfigParser()
@@ -643,19 +662,31 @@ class _OptionCTX:
643
662
  boost: _Boost
644
663
  discovery: _ServiceDiscovery
645
664
  nacos: _Nacos
665
+ edgedb: _Edgedb
646
666
 
647
667
  def show_options(self, category=None):
648
668
  ...
649
669
 
650
670
  def __init__(self):
651
- self._token = _option_ctx.set(_GlobalOptions())
671
+ self._create_at_main = threading.current_thread() is threading.main_thread()
672
+ self._primary_opt = _GlobalOptions()
673
+ self._token = _option_ctx.set(self._primary_opt)
652
674
 
653
675
  def create_local(self):
654
676
  self._token = _option_ctx.set(_GlobalOptions())
655
677
 
656
678
  @property
657
679
  def _option(self) -> _GlobalOptions:
658
- return _option_ctx.get()
680
+ try:
681
+ return _option_ctx.get()
682
+ except LookupError:
683
+ if (
684
+ not self._create_at_main
685
+ and threading.current_thread() is threading.main_thread()
686
+ ):
687
+ _option_ctx.set(self._primary_opt.__copy__())
688
+ return _option_ctx.get()
689
+ raise
659
690
 
660
691
  def __getattr__(self, item):
661
692
  return getattr(self._option, item)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: deepfos
3
- Version: 1.1.60
3
+ Version: 1.1.78
4
4
  Summary: Collecions of useful and handy tools for deepfos platform
5
5
  Home-page: http://py.deepfos.com
6
6
  Author: deepfos-python-team
@@ -13,15 +13,15 @@ Classifier: Programming Language :: Python :: 3.9
13
13
  Classifier: Programming Language :: Python :: 3.10
14
14
  Requires-Python: >=3.8.3
15
15
  Requires-Dist: pandas
16
- Requires-Dist: requests (==2.25.1)
16
+ Requires-Dist: requests (~=2.26)
17
17
  Requires-Dist: pymysql (==1.1.1)
18
- Requires-Dist: aiohttp (==3.9.0)
19
- Requires-Dist: pydantic (<2.0.0,>=1.10.0)
20
- Requires-Dist: cachetools (==4.2.2)
18
+ Requires-Dist: aiohttp (~=3.9.5)
19
+ Requires-Dist: pydantic (!=2.0.0,!=2.0.1,!=2.1.0,>=1.10.0)
20
+ Requires-Dist: cachetools (<6,>=5.0.0)
21
21
  Requires-Dist: loguru (==0.5.3)
22
22
  Requires-Dist: PyPika (==0.48.8)
23
- Requires-Dist: aiomysql (==0.2.0)
24
- Requires-Dist: clickhouse-driver (==0.2.2)
23
+ Requires-Dist: aiomysql (~=0.2.0)
24
+ Requires-Dist: clickhouse-driver (~=0.2.9)
25
25
  Requires-Dist: aioredis (==2.0.1)
26
26
  Requires-Dist: redis (==4.5.3)
27
27
  Requires-Dist: nest-asyncio (==1.5.4)