internal-1.1.35-py3-none-any.whl → internal-1.1.45-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
internal/common_enum/operator_type.py CHANGED
@@ -7,3 +7,4 @@ class OperatorTypeEnum(str, Enum):
     CUSTOMER = "customer"
     PROVIDER = "provider"
     CLIENT = "client"
+    ADMIN = "admin"
internal/http/requests.py CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import json
 import random
 
 import httpx
@@ -21,9 +22,16 @@ async def invoke_request(timeout: httpx.Timeout, method: str, url: str, app: Fas
 
        response = await client.request(method, url, **kwargs)
 
-       app.state.logger.info(
-           f"RESPONSE | {kwargs.get('headers').get(CORRELATION_ID_HEADER_KEY_NAME)} | {method} | {url} | {response.status_code} | {response.text}"
-       )
+       try:
+           response_json = json.loads(response.text)
+           response_text = json.dumps(response_json, ensure_ascii=False)
+           app.state.logger.info(
+               f"RESPONSE | {kwargs.get('headers').get(CORRELATION_ID_HEADER_KEY_NAME)} | {method} | {url} | {response.status_code} | {response_text}"
+           )
+       except json.decoder.JSONDecodeError:
+           app.state.logger.info(
+               f"RESPONSE | {kwargs.get('headers').get(CORRELATION_ID_HEADER_KEY_NAME)} | {method} | {url} | {response.status_code} | {response.text}"
+           )
 
        return response
    except httpx.TimeoutException as exc:
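The new branch parses the response body and re-serializes it with ensure_ascii=False, so non-ASCII JSON is logged as readable characters instead of \uXXXX escapes, while non-JSON bodies keep the old logging path. A standalone sketch of that round trip (the sample payload is invented for illustration):

    import json

    raw = '{"name": "\\u738b\\u5c0f\\u660e"}'  # JSON with ASCII-escaped characters, as it may arrive in response.text
    print(json.dumps(json.loads(raw), ensure_ascii=False))  # -> {"name": "王小明"}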
internal/http/responses.py CHANGED
@@ -2,6 +2,7 @@ import json
 import arrow
 
 from datetime import datetime, date
+from pydantic import BaseModel
 
 import httpx
 
@@ -16,23 +17,29 @@ async def async_response(data=None, message=None, code=None, page_no=None, total
                          time_zone="UTC", time_format=ARR_HUMAN_READ_FMT, date_format=ARR_DATE_FMT,
                          status_code=status.HTTP_200_OK):
     def _serialize(data):
-        if issubclass(type(data), Document):
+        if issubclass(type(data), Document) or issubclass(type(data), BaseModel):
             link_field_list = []
             datetime_field_list = []
             date_field_list = []
+            replace_dict = {}
             for field_name in data.__annotations__:
                 field_type = getattr(data, field_name)
+                if field_name in ["contact", "pick_up", "car", "customer"]:
+                    if field_type:
+                        replace_dict[field_name] = _serialize(field_type)
+
                 if isinstance(field_type, Link):
                     link_field_list.append(field_name)
 
-                if field_name.endswith('_date') or field_name == "birthday":
+                if field_name.endswith('_date') or field_name == "birthday" or field_name.endswith('_birthday'):
                     date_field_list.append(field_name)
                 elif isinstance(field_type, datetime):
                     datetime_field_list.append(field_name)
                 elif isinstance(field_type, date):
                     date_field_list.append(field_name)
 
-            data = json.loads(data.model_dump_json(exclude={"password", "metadata", "otp_code_universal"}, by_alias=False))
+            data = json.loads(
+                data.model_dump_json(exclude={"password", "metadata", "otp_code_universal"}, by_alias=False))
             if link_field_list:
                 for field_name in link_field_list:
                     if isinstance(data[field_name], dict) and "id" in data[field_name].keys():
@@ -44,6 +51,34 @@ async def async_response(data=None, message=None, code=None, page_no=None, total
                 if data[field_name]:
                     data[field_name] = arrow.get(data[field_name]).to(time_zone).format(date_format)
 
+            for key, value in replace_dict.items():
+                data[key] = value
+
+            if "create_time" in data.keys() and data.get("create_time"):
+                data["create_time"] = arrow.get(data["create_time"]).to(time_zone).format(ARR_HUMAN_READ_FMT)
+
+            if "update_time" in data.keys() and data.get("update_time"):
+                data["update_time"] = arrow.get(data["update_time"]).to(time_zone).format(ARR_HUMAN_READ_FMT)
+
+        elif isinstance(data, dict):
+            datetime_field_list = []
+            date_field_list = []
+            for field_name, field_type in data.items():
+                if field_name.endswith('_date') or field_name == "birthday" or field_name.endswith('_birthday'):
+                    date_field_list.append(field_name)
+                elif isinstance(field_type, datetime):
+                    datetime_field_list.append(field_name)
+                elif isinstance(field_type, date):
+                    date_field_list.append(field_name)
+
+            data = json.loads(json.dumps(data, default=jsonable_encoder))
+            for field_name in datetime_field_list:
+                if data[field_name]:
+                    data[field_name] = arrow.get(data[field_name]).to(time_zone).format(time_format)
+            for field_name in date_field_list:
+                if data[field_name]:
+                    data[field_name] = arrow.get(data[field_name]).to(time_zone).format(date_format)
+
            if "create_time" in data.keys() and data.get("create_time"):
                data["create_time"] = arrow.get(data["create_time"]).to(time_zone).format(ARR_HUMAN_READ_FMT)
 
internal/middleware/log_request.py CHANGED
@@ -1,3 +1,4 @@
+import json
 import logging
 import time
 
@@ -30,18 +31,60 @@ class LogRequestMiddleware(BaseHTTPMiddleware):
         params = {k: v[0] if len(v) == 1 else v for k, v in temp.items()}
 
         body = await request.body()
+
+        # Decode the body into a string (unless the request is multipart)
         if is_multipart:
-            self.logger.info(f"[Request id: {request_id}] \nURL: {method} {url} \nParams: {params} \nBody: 因上傳檔案不顯示body \nHeaders: {headers} \nstart processing...")
+            body_str = "因上傳檔案不顯示body"
         else:
-            self.logger.info(f"[Request id: {request_id}] \nURL: {method} {url} \nParams: {params} \nBody: {body} \nHeaders: {headers} \nstart processing...")
+            try:
+                # Try decoding as UTF-8 first
+                body_str = body.decode('utf-8')
+            except UnicodeDecodeError:
+                # Fall back to the raw bytes representation if decoding fails
+                body_str = str(body)
+
+        request_info = {
+            "request_id": request_id,
+            "method": method,
+            "url": url,
+            "params": params,
+            "body": body_str,
+            "content_type": content_type,
+            "user_agent": headers.get("user-agent", "")
+        }
+        self.logger.info(f"Request started: {json.dumps(request_info, ensure_ascii=False)}")
 
         # Record the request processing time
         start_time = time.time()
-        response = await call_next(request)
-        process_time = time.time() - start_time
+        try:
+            response = await call_next(request)
+            process_time = time.time() - start_time
+            status_code = response.status_code
 
-        if is_multipart:
-            self.logger.info(f"[Request id: {request_id}] \nURL: {method} {url} \nParams: {params} \nBody: 因上傳檔案不顯示body \nCompleted in {process_time:.4f} seconds")
-        else:
-            self.logger.info(f"[Request id: {request_id}] \nURL: {method} {url} \nParams: {params} \nBody: {body} \nCompleted in {process_time:.4f} seconds")
-        return response
+            # Log the successful response
+            response_info = {
+                "request_id": request_id,
+                "method": method,
+                "url": url,
+                "status_code": status_code,
+                "process_time": round(process_time, 4)
+            }
+
+            self.logger.info(f"Request completed: {json.dumps(response_info, ensure_ascii=False)}")
+
+            return response
+
+        except Exception as e:
+            process_time = time.time() - start_time
+
+            # Log the exception
+            error_info = {
+                "request_id": request_id,
+                "method": method,
+                "url": url,
+                "error": str(e),
+                "process_time": round(process_time, 4)
+            }
+
+            self.logger.error(f"Request failed: {json.dumps(error_info, ensure_ascii=False)}")
+            raise
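With this change the middleware emits one JSON object per log line instead of the old multi-line message. A standalone sketch of the resulting "Request started" record (all sample values are invented):

    import json

    request_info = {
        "request_id": "demo-123",
        "method": "POST",
        "url": "/api/v1/orders",
        "params": {},
        "body": '{"plate_no": "ABC-1234"}',
        "content_type": "application/json",
        "user_agent": "pytest",
    }
    print(f"Request started: {json.dumps(request_info, ensure_ascii=False)}")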
internal/model/base_model.py CHANGED
@@ -1,8 +1,6 @@
 from datetime import datetime
 
 from typing import List, Tuple, Union, Dict, Type, Optional, Any, Literal, Union, Set, Mapping
-import typing_extensions
-from pydantic_core import PydanticUndefined
 from typing_extensions import Self, TypeAlias, Unpack
 
 import arrow
@@ -27,8 +25,8 @@ class InternalBaseDocument(Document):
     @classmethod
     async def get_pagination_list(cls, app: FastAPI, query: list = None, sort: List[Tuple] = None,
                                   page_size: int = DEF_PAGE_SIZE, page_no: int = DEF_PAGE_NO,
-                                  ignore_cache: bool = False,
-                                  fetch_links: bool = False):
+                                  ignore_cache: bool = False, fetch_links: bool = False,
+                                  exclude_field_list: List[str] = None):
         if not query:
             final_query = []
         else:
@@ -46,21 +44,59 @@ class InternalBaseDocument(Document):
             else:
                 print(f"order type value error: temp_sort:{temp_sort}")
                 continue
-        final_sort.append((cls.id, pymongo.ASCENDING))
 
-        total_num = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
-            *final_sort).count()
+        if not any(s[0] == str(cls.id) for s in sort):
+            final_sort.append((cls.id, pymongo.ASCENDING))
+
+        if exclude_field_list:
+            # When fields need to be excluded, query through Motor directly
+            collection = cls.get_motor_collection()
+            projection = {field: 0 for field in exclude_field_list}
+
+            # Build the query conditions
+            mongo_query = {}
+            for q in final_query:
+                if hasattr(q, 'query'):
+                    mongo_query.update(q.query)
+
+            # Count the total number of documents
+            total_num = await collection.count_documents(mongo_query)
+            total_pages = (total_num + page_size - 1) // page_size
+
+            if total_pages == 0:
+                page_no = 1
+                page_data = []
+            else:
+                page_no = max(1, min(page_no, total_pages))
+
+                # Run the paginated query
+                cursor = collection.find(mongo_query, projection).sort(final_sort).skip(
+                    (page_no - 1) * page_size).limit(page_size)
+                documents = await cursor.to_list(None)
+
+                # Convert to Pydantic models
+                page_data = []
+                for doc in documents:
+                    try:
+                        page_data.append(cls.model_validate(doc))
+                    except Exception as e:
+                        print(f"模型驗證失敗: {e}")
+                        continue
+        else:
+            # Without excluded fields, use Beanie's query methods
+            total_num = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
+                *final_sort).count()
 
-        total_pages = (total_num + page_size - 1) // page_size
+            total_pages = (total_num + page_size - 1) // page_size
 
-        if total_pages == 0:
-            page_no = 1
-            page_data = []
-        else:
-            page_no = max(1, min(page_no, total_pages))
+            if total_pages == 0:
+                page_no = 1
+                page_data = []
+            else:
+                page_no = max(1, min(page_no, total_pages))
 
-            page_data = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
-                *final_sort).limit(page_size).skip((page_no - 1) * page_size).to_list()
+                page_data = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
+                    *final_sort).limit(page_size).skip((page_no - 1) * page_size).to_list()
 
         return page_no, page_size, total_num, page_data
 
@@ -89,7 +125,7 @@ class InternalBaseDocument(Document):
 
     @classmethod
     async def get_list(cls, app: FastAPI, query: list = None, sort: List[Tuple] = None, ignore_cache: bool = False,
-                       fetch_links: bool = False):
+                       fetch_links: bool = False, exclude_field_list: List[str] = None):
         if not query:
             final_query = []
         else:
@@ -107,10 +143,37 @@ class InternalBaseDocument(Document):
             else:
                 print(f"order type value error: temp_sort:{temp_sort}")
                 continue
-        final_sort.append((cls.id, pymongo.ASCENDING))
 
-        data = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
-            *final_sort).to_list()
+        if not any(s[0] == str(cls.id) for s in sort):
+            final_sort.append((cls.id, pymongo.ASCENDING))
+
+        if exclude_field_list:
+            # When fields need to be excluded, query through Motor directly
+            collection = cls.get_motor_collection()
+            projection = {field: 0 for field in exclude_field_list}
+
+            # Build the query conditions
+            mongo_query = {}
+            for q in final_query:
+                if hasattr(q, 'query'):
+                    mongo_query.update(q.query)
+
+            # Run the query
+            cursor = collection.find(mongo_query, projection).sort(final_sort)
+            documents = await cursor.to_list(None)
+
+            # Convert to Pydantic models
+            data = []
+            for doc in documents:
+                try:
+                    data.append(cls.model_validate(doc))
+                except Exception as e:
+                    print(f"模型驗證失敗: {e}")
+                    continue
+        else:
+            # Without excluded fields, use Beanie's query methods
+            data = await cls.find(*final_query, ignore_cache=ignore_cache, fetch_links=fetch_links).sort(
+                *final_sort).to_list()
 
         return data
 
  return data
116
179
 
internal/utils.py CHANGED
@@ -1,11 +1,12 @@
 # -*- coding: utf-8 -*-
 import json
+import hashlib
 from datetime import datetime, timezone
 
 import arrow
 
 from .base_config import BaseConfig
-from .const import STR_EMPTY, ARR_EXPORT_DATETIME_FMT, STR_DASH, REDIS_LPR_DATA_LIST_PREFIX
+from .const import STR_EMPTY, ARR_EXPORT_DATETIME_FMT, STR_DASH, REDIS_LPR_DATA_LIST_PREFIX, STR_SPACE
 
 
 def is_today(time, system_time_zone):
@@ -48,7 +49,7 @@ def update_dict_with_cast(curr_settings: BaseConfig, new_conf: dict):
 
 
 def sanitize_plate_no(plate_no):
-    return plate_no.replace(STR_DASH, STR_EMPTY).upper()
+    return plate_no.replace(STR_SPACE, STR_EMPTY).replace(STR_DASH, STR_EMPTY).upper()
 
 
 def get_current_utc() -> datetime:
@@ -105,6 +106,7 @@ def get_dealer_by_organization_id(organization_id: str) -> str:
 
     return organization_id
 
+
 def extract_title(name):
     if '小姐' in name:
         return '小姐'
@@ -115,6 +117,7 @@ def extract_title(name):
     else:
         return None
 
+
 def extract_name(name):
     """Extract the real name from a name string"""
     # Check whether the name contains '小姐' or '先生' and extract the name
@@ -125,4 +128,28 @@ def extract_name(name):
     elif '先生' in name:
         return name.split('先生')[0].strip()
     else:
-        return name.strip()
+        return name.strip()
+
+
+def hash_login_password(passwd):
+    prefix = '___'
+    postfix = '_______'
+    data = str(passwd)
+    sha_256 = hashlib.sha256()
+    sha_256.update(prefix.encode())
+    sha_256.update(data.encode())
+    sha_256.update(postfix.encode())
+
+    return sha_256.hexdigest()
+
+
+def hash_secret_key(passwd):
+    prefix = '___'
+    postfix = '_______'
+    data = str(passwd)
+    sha_256 = hashlib.sha256()
+    sha_256.update(prefix.encode())
+    sha_256.update(data.encode())
+    sha_256.update(postfix.encode())
+
+    return sha_256.hexdigest()
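Because consecutive update() calls hash the concatenated bytes, the two helpers above are equivalent to a single sha256 over prefix + password + postfix. An illustrative one-liner (not part of the package):

    import hashlib

    # Same digest that hash_login_password('secret') produces via incremental update() calls
    print(hashlib.sha256(('___' + 'secret' + '_______').encode()).hexdigest())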
internal/validator_utils.py CHANGED
@@ -10,8 +10,12 @@ def verify_and_sanitize_plate_no(value: str, is_require: bool = False):
         raise PlateNoFormatException()
 
     if value:
-        if not re.match(r'^[A-Za-z0-9]{6,7}$', value):
+        # Strip spaces and common separators before validating
+        cleaned = sanitize_plate_no(value)
+
+        if not re.match(r'^[外使領試臨軍A-Za-z0-9]{5,7}$', cleaned):
             raise PlateNoFormatException()
+
         return sanitize_plate_no(value)
 
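The widened pattern accepts 5 to 7 characters, including the special prefixes 外, 使, 領, 試, 臨 and 軍, after spaces and dashes are stripped. A standalone sketch of that path, assuming the package's STR_SPACE, STR_DASH and STR_EMPTY constants are " ", "-" and "":

    import re

    STR_SPACE, STR_DASH, STR_EMPTY = " ", "-", ""  # assumed constant values

    def sanitize_plate_no(plate_no):
        return plate_no.replace(STR_SPACE, STR_EMPTY).replace(STR_DASH, STR_EMPTY).upper()

    for raw in ["ABC-1234", "abc 123", "軍A-1234"]:
        cleaned = sanitize_plate_no(raw)
        print(raw, cleaned, bool(re.match(r'^[外使領試臨軍A-Za-z0-9]{5,7}$', cleaned)))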
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: internal
-Version: 1.1.35
+Version: 1.1.45
 Summary:
 Author: Ray
 Author-email: ray@cruisys.com
@@ -9,7 +9,7 @@ internal/common_enum/device_code.py,sha256=lDTqRmP8zl6-k62HCsRehNAKCTXxH6WltZnvi
 internal/common_enum/event_code.py,sha256=XpxbgpP8wkykEngma4cGgv_NefuSU6Juqy33x_vsWio,2119
 internal/common_enum/lpr_direction.py,sha256=glCGSkcXHUB6_p2jO_7IdEHycyFVhu0vHDDDXbFJ174,113
 internal/common_enum/notify_type.py,sha256=p01d9ODiJ9vDpFBJ55bhu7WOyO7Te3slToOhvFKxQbk,188
-internal/common_enum/operator_type.py,sha256=XfHwGDsuOG6-ajitDoRuLFLT1LhHKoXXzFRW4e5vmJ8,173
+internal/common_enum/operator_type.py,sha256=RiAY5eUAWQRODG9XasYqsfZFRglCZv4Y0UbCSqBXKEk,193
 internal/common_enum/order_type.py,sha256=XwAl5JaZgFLahBQhBKG4VuMJ39Si9KZEhcSS0-DKKzQ,90
 internal/common_enum/point_type.py,sha256=nmrHw_6OJIadgWwqCa63IVnc4T5uf69vQojWPbAwCrE,120
 internal/common_enum/websocket_channel.py,sha256=BIhdE2z_xF3lgStS8GiaS5M8BXm4_g3vHQaaN5qNJTQ,283
@@ -24,17 +24,17 @@ internal/ext/amazon/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 internal/ext/amazon/aws/__init__.py,sha256=2YFjb-rHG1JaZGZiZffYDesgTAJjDshOqQbswOYzhP8,834
 internal/ext/amazon/aws/const.py,sha256=l4WMg5bKWujwOKABBkCO2zclNg3abnYOfbhD7DG8GsA,109
 internal/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-internal/http/requests.py,sha256=cgqDl9k4XsfvXW-EPLid-kPtHPWUyF8athu3GWQWAKo,8474
-internal/http/responses.py,sha256=75vJGS_aA9a5ff0HI6N03KM0CErIsAOtD06x7qr6yoQ,3079
+internal/http/requests.py,sha256=jLnOXEQOMEOpZrevVGFKlQ_MCkhjfudRqkuC-HWeHx0,8913
+internal/http/responses.py,sha256=CueHdh9JvUZ8MEuLgLefuB-yv4ikJZIDzA-dMWtYZAk,4872
 internal/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 internal/interface/base_interface.py,sha256=3YaVjIgLi_pZpLk5SEIk8WVkuICM8qPavT8rB0MdB5U,1536
 internal/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-internal/middleware/log_request.py,sha256=OFwWnGfzXnllcQsBHolAWVsahXKoEhw3OQ1YWOm7RHM,2087
+internal/middleware/log_request.py,sha256=ZtCyfrF3IyKTF6Uj8L66CutdZi3srVmppqO_EXT5Tuw,3035
 internal/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-internal/model/base_model.py,sha256=hxleV8fYNvFgUoYmCv_inEP3kA4tD4HhCBCNFVK8SZg,5509
+internal/model/base_model.py,sha256=-YlVPZGuG926PksyQw5SgKjkdC80q_eQTL3EnzVSfIs,8020
 internal/model/operate.py,sha256=QSM6yXYXpJMwrqkUGEWZLrEBaUgqHwVHY_Fi4S42hKc,3190
-internal/utils.py,sha256=wK1QumW1AaWE1ga2-WcDH2rtXRr2hSLwXzy-iI5dTzY,3962
-internal/validator_utils.py,sha256=CqjaVFoAu5MqvBG_AkTP-r7AliWawtUWB851USj4moI,1519
-internal-1.1.35.dist-info/METADATA,sha256=3Smb8SVFmJITUWASgmak62MkOMqiIfxtvVqDT6n1iPI,939
-internal-1.1.35.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
-internal-1.1.35.dist-info/RECORD,,
+internal/utils.py,sha256=6iIM1EPLeYYj5LEdgqDADGH-52q9YM6lOxaaxs6z1Gg,4559
+internal/validator_utils.py,sha256=iRI8aJLz0wz-j8p-T1BOGOF8VE2zwNh424uApbc8IZs,1640
+internal-1.1.45.dist-info/METADATA,sha256=-g_Uw-lP4eo9_jIYZVg_nSJdxhd_L8J5AQ-g8kZjtws,939
+internal-1.1.45.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+internal-1.1.45.dist-info/RECORD,,