fastapi-scaff 0.1.0__py3-none-any.whl → 0.5.5__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- fastapi_scaff/__init__.py +1 -1
- fastapi_scaff/__main__.py +292 -41
- fastapi_scaff/_api_tpl.json +15 -12
- fastapi_scaff/_project_tpl.json +69 -39
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.5.5.dist-info}/METADATA +44 -12
- fastapi_scaff-0.5.5.dist-info/RECORD +10 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.5.5.dist-info}/top_level.txt +0 -1
- fastapi_scaff-0.1.0.dist-info/RECORD +0 -11
- tests/__init__.py +0 -3
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.5.5.dist-info}/WHEEL +0 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.5.5.dist-info}/entry_points.txt +0 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.5.5.dist-info}/licenses/LICENSE +0 -0
fastapi_scaff/__init__.py
CHANGED
fastapi_scaff/__main__.py
CHANGED
```diff
@@ -2,7 +2,7 @@
 @author axiner
 @version v1.0.0
 @created 2024/07/29 22:22
-@abstract
+@abstract
 @description
 @history
 """
@@ -13,7 +13,7 @@ import re
 import sys
 from pathlib import Path
 
-from
+from fastapi_scaff import __version__
 
 here = Path(__file__).absolute().parent
 prog = "fastapi-scaff"
@@ -24,10 +24,8 @@ def main():
 prog=prog,
 description="fastapi脚手架,一键生成项目或api,让开发变得更简单",
 epilog="examples: \n"
-" `new`: %(prog)s new <myproj
+" `new`: %(prog)s new <myproj>\n"
 " `add`: %(prog)s add <myapi>\n"
-" `add`: %(prog)s add <myapi> -v <vn>\n"
-" `add`: %(prog)s add <myapi> -s <subdir>\n"
 "",
 formatter_class=argparse.RawDescriptionHelpFormatter
 )
@@ -43,6 +41,13 @@ def main():
 "name",
 type=str,
 help="项目或api名称(多个api可逗号分隔)")
+parser.add_argument(
+"-e",
+"--edition",
+default="standard",
+choices=["standard", "light", "tiny", "single"],
+metavar="",
+help="`new`时可指定项目结构版本(默认标准版)")
 parser.add_argument(
 "-d",
 "--db",
@@ -53,15 +58,15 @@ def main():
 parser.add_argument(
 "-v",
 "--vn",
-type=str,
 default="v1",
+type=str,
 metavar="",
 help="`add`时可指定版本(默认v1)")
 parser.add_argument(
 "-s",
 "--subdir",
-type=str,
 default="",
+type=str,
 metavar="",
 help="`add`时可指定子目录(默认空)")
 parser.add_argument(
@@ -70,7 +75,11 @@ def main():
 default="asm",
 choices=["a", "as", "asm"],
 metavar="",
-help="`add`时可指定目标(默认asm
+help="`add`时可指定目标(默认asm)")
+parser.add_argument(
+"--celery",
+action='store_true',
+help="`new`|`add`时可指定是否集成celery(默认不集成)")
 args = parser.parse_args()
 cmd = CMD(args)
 if args.command == "new":
```
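The 0.5.5 CLI gains two options: `-e/--edition` selects one of four project layouts (standard, light, tiny, single) when running `new`, and `--celery` toggles Celery integration for both `new` and `add`. A minimal sketch of the resulting argument surface is below; the `command` positional and its choices are assumptions about the unchanged part of the parser, and the `-d/--db`, `-v/--vn`, `-s/--subdir` and a/as/asm target options are omitted.

```python
# Hedged sketch of the 0.5.5 CLI surface; only the options visible in this hunk are
# modeled, and the `command` positional is an assumption about the unchanged code.
import argparse

parser = argparse.ArgumentParser(prog="fastapi-scaff")
parser.add_argument("command", choices=["new", "add"])   # assumed positional
parser.add_argument("name")                              # project or api name(s)
parser.add_argument("-e", "--edition", default="standard",
                    choices=["standard", "light", "tiny", "single"])
parser.add_argument("--celery", action="store_true")

args = parser.parse_args(["new", "myproj", "-e", "tiny", "--celery"])
print(args.edition, args.celery)  # -> tiny True
```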
```diff
@@ -122,23 +131,17 @@ class CMD:
 with open(here.joinpath("_project_tpl.json"), "r") as f:
 project = json.loads(f.read())
 for k, v in project.items():
+k, v = self._edition_handler(k, v, edition=self.args.edition, celery=self.args.celery)
+if not k:
+continue
 tplpath = name.joinpath(k)
 tplpath.parent.mkdir(parents=True, exist_ok=True)
 with open(tplpath, "w+", encoding="utf-8") as f:
 # rpl
-if re.search(r"README\.md$", k):
-v = v.replace(f"# {prog}", f"# {prog} ( => yourProj)")
-if re.search(r"requirements\.txt$", k):
-_default = self._db_requirements_map("default")
-_user = self._db_requirements_map(self.args.db) or _default
-v = v.replace(
-_default,
-'\n'.join(_user)
-)
 if _env := re.search(r"app_(.*?).yaml$", k):
 _rpl_name = f"/app_{_env.group(1)}"
 _default = self._db_yaml_map("default")
-_user = self._db_yaml_map(self.args.db)
+_user = self._db_yaml_map(self.args.db)
 v = v.replace(
 _default["db_url"].replace("/app_dev", _rpl_name),
 _user["db_url"].replace("/app_dev", _rpl_name)
```
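`new()` now routes every template through `_edition_handler` before writing it, while the per-environment YAML rewrite keeps its old shape: the default sqlite URLs (keyed on `/app_dev`) are swapped for the selected database's URL template, re-pointed at the environment captured from the file name. A standalone illustration, using the URL strings from `_db_yaml_map` later in this diff:

```python
# Standalone illustration of the app_<env>.yaml rewrite above (not the scaffolder
# itself); the URL templates are the ones defined in _db_yaml_map in this release.
import re

yaml_text = "db_url: sqlite:///app_test.sqlite\n"
default = {"db_url": "db_url: sqlite:///app_dev.sqlite"}
user = {"db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4"}

env = re.search(r"app_(.*?).yaml$", "config/app_test.yaml").group(1)
rpl_name = f"/app_{env}"
print(yaml_text.replace(
    default["db_url"].replace("/app_dev", rpl_name),
    user["db_url"].replace("/app_dev", rpl_name),
))  # -> db_url: mysql+pymysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4
```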
```diff
@@ -146,28 +149,215 @@ class CMD:
 _default["db_async_url"].replace("/app_dev", _rpl_name),
 _user["db_async_url"].replace("/app_dev", _rpl_name)
 )
+elif k == "build.sh":
+v = v.replace("fastapi-scaff", self.args.name.replace("_", "-"))
+elif k.startswith("docker-compose."):
+v = v.replace("fastapi-scaff", self.args.name.replace("_", "-"))
+elif k == "README.md":
+v = v.replace(f"# {prog}", f"# {prog} ( => yourProj)")
+elif k == "requirements.txt":
+_default = self._db_requirements_map("default")
+_user = self._db_requirements_map(self.args.db)
+v = re.sub(rf'^{_default}.*\n?', '\n'.join(_user) + '\n', v, flags=re.MULTILINE)
+elif k == "config/nginx.conf":
+v = v.replace("server backend:", f"server {self.args.name.replace('_', '-')}-prod_backend:")
 # < rpl
 f.write(v)
 sys.stdout.write("Done. Now run:\n"
 f"> 1. cd {name}\n"
-f"> 2. modify config, eg: db\n"
+f"> 2. modify config{', eg: db' if self.args.edition != 'single' else ''}\n"
 f"> 3. pip install -r requirements.txt\n"
 f"> 4. python runserver.py\n"
 f"----- More see README.md -----\n")
 
+@staticmethod
+def _edition_handler(k: str, v: str, edition: str, celery: bool):
+if "tiny" in k:
+if edition == "tiny":
+k = k.replace("tiny_", "")
+return k, v
+return None, None
+elif "single" in k:
+if edition == "single":
+k = k.replace("single_", "")
+return k, v
+return None, None
+
+if not celery:
+if k.startswith("app_celery/") or k in [
+"app/api/default/aping.py",
+"runcbeat.py",
+"runcworker.py",
+]:
+return None, None
+elif k.startswith("config/app_"):
+v = v.replace("""# #
+celery_broker_url: redis://:<password>@<host>:<port>/<db>
+celery_backend_url: redis://:<password>@<host>:<port>/<db>
+celery_timezone: Asia/Shanghai
+celery_enable_utc: true
+celery_task_serializer: json
+celery_result_serializer: json
+celery_accept_content: [ json ]
+celery_task_ignore_result: false
+celery_result_expire: 86400
+celery_task_track_started: true
+celery_worker_concurrency: 8
+celery_worker_prefetch_multiplier: 2
+celery_worker_max_tasks_per_child: 100
+celery_broker_connection_retry_on_startup: true
+celery_task_reject_on_worker_lost: true
+""", "")
+elif k == "requirements.txt":
+v = re.sub(r'^celery==.*\n?', '', v, flags=re.MULTILINE)
+
+if edition == "standard":
+return k, v
+
+if edition == "light":
+filter_list = [
+"app/api/v1/user.py",
+"app/initializer/_redis.py",
+"app/initializer/_snow.py",
+"app/models/",
+"app/schemas/",
+"app/services/user.py",
+"docs/",
+"tests/",
+]
+elif edition == "tiny":
+filter_list = [
+"app/api/v1/user.py",
+"app/initializer/",
+"app/middleware/",
+"app/models/",
+"app/schemas/",
+"app/services/",
+"docs/",
+"tests/",
+]
+else:
+filter_list = [
+"app/",
+"docs/",
+"tests/",
+]
+if re.match(r"^({filter_k})".format(filter_k="|".join(filter_list)), k) is not None:
+return None, None
+if k == "app/api/responses.py":
+if edition == "tiny":
+v = v.replace("""from app.initializer.context import request_id_var""",
+"""from app.initializer import request_id_var""")
+elif k == "app/api/status.py":
+v = v.replace("""USER_OR_PASSWORD_ERROR = (10002, '用户名或密码错误')
+""", "")
+elif k == "app/initializer/__init__.py":
+v = v.replace("""from toollib.guid import SnowFlake
+from toollib.rediser import RedisClient
+""", "").replace("""from app.initializer._redis import init_redis_client
+from app.initializer._snow import init_snow_client
+""", "").replace("""'redis_client',
+'snow_client',
+""", "").replace("""@cached_property
+def redis_client(self) -> RedisClient:
+return init_redis_client(
+host=self.config.redis_host,
+port=self.config.redis_port,
+db=self.config.redis_db,
+password=self.config.redis_password,
+max_connections=self.config.redis_max_connections,
+)
+
+@cached_property
+def snow_client(self) -> SnowFlake:
+return init_snow_client(
+redis_client=self.redis_client,
+datacenter_id=self.config.snow_datacenter_id,
+)
+
+""", "")
+elif k == "app/initializer/_conf.py":
+v = v.replace("""snow_datacenter_id: int = None
+""", "").replace("""redis_host: str = None
+redis_port: int = None
+redis_db: int = None
+redis_password: str = None
+redis_max_connections: int = None
+""", "")
+elif k == "app/initializer/_db.py":
+v = v.replace("""_MODELS_MOD_DIR = APP_DIR.joinpath("models")
+_MODELS_MOD_BASE = "app.models"
+""", """_MODELS_MOD_DIR = APP_DIR.joinpath("services")
+_MODELS_MOD_BASE = "app.services"
+""")
+elif k == "app/services/__init__.py":
+v = v.replace("""\"\"\"
+业务逻辑
+\"\"\"""", """\"\"\"
+业务逻辑
+\"\"\"
+from sqlalchemy.orm import DeclarativeBase
+
+
+class DeclBase(DeclarativeBase):
+pass
+
+
+# DeclBase 使用示例(官方文档:https://docs.sqlalchemy.org/en/latest/orm/quickstart.html#declare-models)
+\"\"\"
+from sqlalchemy import Column, String
+
+from app.services import DeclBase
+
+
+class User(DeclBase):
+__tablename__ = "user"
+
+id = Column(String(20), primary_key=True, comment="主键")
+name = Column(String(50), nullable=False, comment="名称")
+\"\"\"""")
+elif k == "config/.env":
+v = v.replace("""# 雪花算法数据中心id(取值:0-31,在分布式部署时需确保每个节点的取值不同)
+snow_datacenter_id=0
+""", "")
+elif k.startswith("config/app_"):
+v = v.replace("""redis_host:
+redis_port:
+redis_db:
+redis_password:
+redis_max_connections:
+""", "")
+if edition == "single":
+v = v.replace("""# #
+db_url: sqlite:///app_dev.sqlite
+db_async_url: sqlite+aiosqlite:///app_dev.sqlite
+""", "").replace("""# #
+db_url: sqlite:///app_test.sqlite
+db_async_url: sqlite+aiosqlite:///app_test.sqlite
+""", "").replace("""# #
+db_url: sqlite:///app_prod.sqlite
+db_async_url: sqlite+aiosqlite:///app_prod.sqlite
+""", "")
+elif k == "requirements.txt":
+if not celery:
+v = re.sub(r'^redis==.*\n?', '', v, flags=re.MULTILINE)
+if edition == "single":
+v = re.sub(r'^(PyJWT==|bcrypt==|SQLAlchemy==|aiosqlite==).*\n?', '', v, flags=re.MULTILINE)
+return k, v
+
 @staticmethod
 def _db_requirements_map(name: str):
 return {
-"default": "aiosqlite==
+"default": "aiosqlite==",
 "sqlite": [
 "aiosqlite==0.21.0",
 ],
 "mysql": [
-"PyMySQL==1.1.
-"aiomysql==0.2
+"PyMySQL==1.1.2",
+"aiomysql==0.3.2",
 ],
 "postgresql": [
-"psycopg2-binary==2.9.
+"psycopg2-binary==2.9.11",
 "asyncpg==0.30.0",
 ],
 }.get(name)
```
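Most of the edition and Celery handling above comes down to line-oriented rewrites of requirements.txt: anchored `re.sub(..., flags=re.MULTILINE)` calls drop pins an edition does not need, and `new()` swaps the default `aiosqlite==` line for the pins returned by `_db_requirements_map`. A self-contained sketch of the pattern follows; the fastapi and celery pins are placeholders, the MySQL pins are the ones this release maps.

```python
# Self-contained sketch of the requirements.txt rewriting pattern; "fastapi==0.115.0"
# and "celery==5.4.0" are placeholder pins, the MySQL pins come from _db_requirements_map.
import re

requirements = "fastapi==0.115.0\naiosqlite==0.21.0\ncelery==5.4.0\n"

# --celery not given: strip the celery pin
requirements = re.sub(r'^celery==.*\n?', '', requirements, flags=re.MULTILINE)

# -d mysql: replace the aiosqlite default with the MySQL drivers
requirements = re.sub(r'^aiosqlite==.*\n?',
                      '\n'.join(["PyMySQL==1.1.2", "aiomysql==0.3.2"]) + '\n',
                      requirements, flags=re.MULTILINE)

print(requirements)  # fastapi==0.115.0 / PyMySQL==1.1.2 / aiomysql==0.3.2
```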
```diff
@@ -184,16 +374,18 @@ class CMD:
 "db_async_url": "db_async_url: sqlite+aiosqlite:///app_dev.sqlite",
 },
 "mysql": {
-"db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port
-"db_async_url": "db_async_url: mysql+aiomysql://<username>:<password>@<host>:<port
+"db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4",
+"db_async_url": "db_async_url: mysql+aiomysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4",
 },
 "postgresql": {
-"db_url": "db_url: postgresql://<username>:<password>@<host>:<port
-"db_async_url": "db_async_url: postgresql+asyncpg://<username>:<password>@<host>:<port
+"db_url": "db_url: postgresql://<username>:<password>@<host>:<port>/<database>",
+"db_async_url": "db_async_url: postgresql+asyncpg://<username>:<password>@<host>:<port>/<database>",
 },
 }.get(name)
 
 def add(self):
+if self.args.celery:
+return self._add_celery_handler(self.args.name.split(","))
 vn = self.args.vn
 subdir = self.args.subdir
 target = self.args.target
```
```diff
@@ -201,22 +393,35 @@ class CMD:
 work_dir = Path.cwd()
 with open(here.joinpath("_api_tpl.json"), "r", encoding="utf-8") as f:
 api_tpl_dict = json.loads(f.read())
-if target
+if target != "a":
+if not any([
+work_dir.joinpath("app/schemas").is_dir(),
+work_dir.joinpath("app/models").is_dir(),
+]):
+target = "light"
+if not work_dir.joinpath("app/services").is_dir():
+target = "tiny"
+if target in ["a", "tiny"]:
+tpl_mods = [
+"app/api",
+]
+elif target == "light":
 tpl_mods = [
 "app/api",
+"app/services",
 ]
 elif target == "as":
 tpl_mods = [
 "app/api",
-"app/
-"app/
+"app/services",
+"app/schemas",
 ]
 else:
 tpl_mods = [
 "app/api",
-"app/
-"app/
-"app/
+"app/services",
+"app/schemas",
+"app/models",
 ]
 for mod in tpl_mods:
 if not work_dir.joinpath(mod).is_dir():
```
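`add` no longer assumes the full standard layout: before picking which template modules to write, it inspects the project on disk and narrows the requested target, falling back to a `light` flow when `app/schemas` and `app/models` are absent and to a `tiny` flow when `app/services` is missing too. Condensed as a function below (the real code inlines this in `CMD.add()`):

```python
# Condensed sketch of the target-downgrade logic in CMD.add(); the real code inlines
# this rather than defining a helper.
from pathlib import Path

def resolve_target(work_dir: Path, target: str) -> str:
    if target != "a":
        if not any([(work_dir / "app/schemas").is_dir(),
                    (work_dir / "app/models").is_dir()]):
            target = "light"
        if not (work_dir / "app/services").is_dir():
            target = "tiny"
    return target

mods_by_target = {
    "a": ["app/api"],
    "tiny": ["app/api"],
    "light": ["app/api", "app/services"],
    "as": ["app/api", "app/services", "app/schemas"],
    "asm": ["app/api", "app/services", "app/schemas", "app/models"],
}
print(mods_by_target[resolve_target(Path.cwd(), "asm")])
```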
```diff
@@ -227,11 +432,14 @@ class CMD:
 flags = {
 # - 键:目标是否存在: 0-不存在,1-存在
 # - 值:创建是否关联: 0-不关联,1-关联
-# - 创建a时,如果se存在为0
-# - 创建se时,如果sc存在为0
+# - 创建a时,如果se存在为0,不存在为1
+# - 创建se时,如果sc存在为0,不存在为1
 # - 创建sc时,全为1
 # - 创建m时,全为1
-#
+# - light:
+# - 创建a时,如果se存在为0,不存在为1
+# - 创建se时,如果a存在为0,不存在为1
+# a|tiny (a)
 "0": [1],
 "1": [1],
 # as (a, se, sc)
@@ -259,7 +467,12 @@ class CMD:
 "1100": [0, 1, 1, 1],
 "1101": [0, 1, 1, 1],
 "1110": [0, 0, 1, 1],
-"1111": [0, 0, 1, 1]
+"1111": [0, 0, 1, 1],
+# light (a, se)
+"00": [1, 1],
+"01": [0, 1],
+"10": [1, 0],
+"11": [0, 0],
 }
 e_flag = [
 1 if (Path(work_dir, mod, vn if mod.endswith("api") else "", subdir, f"{name}.py")).is_file() else 0
```
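The `flags` table is indexed by an existence bitmask: for each selected module, `e_flag` records whether `<name>.py` is already on disk (the `is_file()` check above), the digits are joined into a key such as `"1100"`, and the looked-up list drives which of the api/service/schema/model files actually get written. 0.5.5 adds the two-digit keys for the new light flow. A worked example of the lookup (the checked paths and sample name are illustrative):

```python
# Worked example of the existence-bitmask lookup; the two-digit table is the new
# "light (a, se)" block added above. The checked paths are illustrative.
from pathlib import Path

flags = {
    # light (a, se): key = "<api exists><service exists>"
    "00": [1, 1],
    "01": [0, 1],
    "10": [1, 0],
    "11": [0, 0],
}
e_flag = [
    1 if Path("app/api/v1/demo.py").is_file() else 0,
    1 if Path("app/services/demo.py").is_file() else 0,
]
print(flags["".join(str(b) for b in e_flag)])  # e.g. [1, 1] in an empty directory
```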
```diff
@@ -296,7 +509,6 @@ class CMD:
 f.write("""\"\"\"\n{subdir}\n\"\"\"\n\n_prefix = "/{subdir}"\n""".format(
 subdir=subdir,
 ))
-
 # file
 curr_mod_file = curr_mod_dir.joinpath(name + ".py")
 curr_mod_file_rel = curr_mod_file.relative_to(work_dir)
@@ -309,11 +521,11 @@ class CMD:
 k = prefix + mod.replace("/", "_") + ".py"
 if subdir:
 v = api_tpl_dict.get(k, "").replace(
-"from app.
+"from app.schemas.tpl import (", f"from app.schemas.{subdir}.tpl import ("
 ).replace(
-"from app.
+"from app.services.tpl import (", f"from app.services.{subdir}.tpl import ("
 ).replace(
-"from app.
+"from app.models.tpl import (", f"from app.models.{subdir}.tpl import ("
 ).replace(
 "tpl", name).replace(
 "Tpl", "".join([i[0].upper() + i[1:] if i else "_" for i in name.split("_")]))
```
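With `-s/--subdir`, the template imports are repointed at the sub-package before the generic placeholder rename: `tpl` becomes the requested api name and `Tpl` its CamelCase form, built by the comprehension on the last line. Pulled out as a tiny function:

```python
# The placeholder rename used above, extracted for illustration; "user_order" is a
# made-up api name, and the template line comes from _api_tpl.json.
def camel(name: str) -> str:
    return "".join([i[0].upper() + i[1:] if i else "_" for i in name.split("_")])

template = "from app.services.tpl import (\n    TplDetailSvc,\n)\n"
name = "user_order"
print(template.replace("tpl", name).replace("Tpl", camel(name)))
# from app.services.user_order import (
#     UserOrderDetailSvc,
# )
```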
```diff
@@ -323,6 +535,45 @@ class CMD:
 "Tpl", "".join([i[0].upper() + i[1:] if i else "_" for i in name.split("_")]))
 f.write(v)
 
+@staticmethod
+def _add_celery_handler(names: list):
+work_dir = Path.cwd()
+with open(here.joinpath("_project_tpl.json"), "r", encoding="utf-8") as f:
+project_tpl_dict = json.loads(f.read())
+sys.stdout.write(f"Adding celery:\n")
+f = False
+for name in names:
+if name == "celery":
+sys.stdout.write(f"[{name}] Can't be `celery`\n")
+continue
+f = True
+celery_dir = work_dir.joinpath(name)
+if celery_dir.is_dir():
+sys.stdout.write(f"[{name}] Existed\n")
+continue
+sys.stdout.write(f"[{name}] Writing\n")
+celery_dir.mkdir(parents=True, exist_ok=True)
+for k, v in project_tpl_dict.items():
+if k.startswith("app_celery/"):
+tplpath = celery_dir.joinpath(k.replace("app_celery/", ""))
+tplpath.parent.mkdir(parents=True, exist_ok=True)
+with open(tplpath, "w+", encoding="utf-8") as f:
+v = v.replace("app_celery", name).replace("app-celery", name.replace("_", "-"))
+f.write(v)
+if not f: return
+for ext in ["runcbeat.py", "runcworker.py", "app/api/default/aping.py"]:
+if ext == "app/api/default/aping.py" and not (work_dir / "app/api/default").is_dir():
+continue
+path = work_dir / ext
+if path.is_file():
+sys.stdout.write(f"[{ext}] Existed\n")
+else:
+sys.stdout.write(f"[{ext}] Writing\n")
+with open(path, "w+", encoding="utf-8") as f:
+v = project_tpl_dict[ext]
+v = v.replace("app_celery", names[0])
+f.write(v)
+
 
 if __name__ == "__main__":
 main()
```
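`add --celery <name>` is handled by the new `_add_celery_handler`: every `_project_tpl.json` key under `app_celery/` is re-rooted into a directory named after the requested package, the `app_celery`/`app-celery` placeholders inside each template are renamed, and `runcbeat.py`, `runcworker.py` and `app/api/default/aping.py` are dropped into the project root when missing. A minimal sketch of the re-rooting step (the two template entries are stand-ins for the real JSON):

```python
# Minimal sketch of the app_celery/ re-rooting done by _add_celery_handler; the
# two-entry dict is a stand-in for _project_tpl.json and nothing is written to disk.
from pathlib import Path

project_tpl = {
    "app_celery/__init__.py": "# app_celery package\n",
    "app_celery/tasks/demo.py": "from app_celery import app\n",
}
name = "my_worker"
for k, v in project_tpl.items():
    if k.startswith("app_celery/"):
        target = Path(name) / k.replace("app_celery/", "")
        body = v.replace("app_celery", name).replace("app-celery", name.replace("_", "-"))
        print(target, "->", body.strip())
```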
fastapi_scaff/_api_tpl.json
CHANGED
```diff
@@ -1,14 +1,17 @@
 {
-"asm_app_api.py": "
-"
-"
-"
-"as_app_api.py": "
-"
-"
-"a_app_api.py": "
-"
-"
-"
-"
+"asm_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\nfrom app.services.tpl import (\n    TplDetailSvc,\n)\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(\n        model=TplDetailSvc,\n    ),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        tpl_svc = TplDetailSvc(id=tpl_id)\n        data = await tpl_svc.detail()\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n",
+"asm_app_models.py": "from sqlalchemy import Column, String\n\nfrom app.models import DeclBase\nfrom app.initializer import g\n\n\nclass Tpl(DeclBase):\n    __tablename__ = \"tpl\"\n\n    id = Column(String(20), primary_key=True, default=g.snow_client.gen_uid, comment=\"\u4e3b\u952e\")\n    name = Column(String(50), nullable=False, comment=\"\u540d\u79f0\")\n",
+"asm_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n    id: str = Field(...)\n    # #\n    name: str = None\n\n    @classmethod\n    def response_fields(cls):\n        return filter_fields(\n            cls,\n            exclude=[]\n        )\n",
+"asm_app_services.py": "from app.schemas.tpl import (\n    TplDetail,\n)\n\n\nclass TplDetailSvc(TplDetail):\n    model_config = {\n        \"json_schema_extra\": {\n            \"title\": \"TplDetail\"\n        }\n    }\n\n    async def detail(self):\n        # TODO: \u4e1a\u52a1\u903b\u8f91\n        pass\n",
+"as_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\nfrom app.services.tpl import (\n    TplDetailSvc,\n)\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(\n        model=TplDetailSvc,\n    ),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        tpl_svc = TplDetailSvc(id=tpl_id)\n        data = await tpl_svc.detail()\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n",
+"as_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n    id: str = Field(...)\n    # #\n    name: str = None\n\n    @classmethod\n    def response_fields(cls):\n        return filter_fields(\n            cls,\n            exclude=[]\n        )\n",
+"as_app_services.py": "from app.schemas.tpl import (\n    TplDetail,\n)\n\n\nclass TplDetailSvc(TplDetail):\n    model_config = {\n        \"json_schema_extra\": {\n            \"title\": \"TplDetail\"\n        }\n    }\n\n    async def detail(self):\n        # TODO: \u4e1a\u52a1\u903b\u8f91\n        pass\n",
+"a_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        data = {}  # TODO: \u6570\u636e\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n",
+"light_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\nfrom app.services.tpl import (\n    TplDetailSvc,\n)\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(\n        model=TplDetailSvc,\n    ),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        tpl_svc = TplDetailSvc(id=tpl_id)\n        data = await tpl_svc.detail()\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n",
+"light_app_services.py": "from pydantic import BaseModel\n\n\nclass TplDetailSvc(BaseModel):\n    model_config = {\n        \"json_schema_extra\": {\n            \"title\": \"TplDetail\"\n        }\n    }\n\n    async def detail(self):\n        # TODO: \u4e1a\u52a1\u903b\u8f91\n        pass\n",
+"only_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        data = {}  # TODO: \u6570\u636e\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n",
+"only_app_models.py": "from sqlalchemy import Column, String\n\nfrom app.models import DeclBase\nfrom app.initializer import g\n\n\nclass Tpl(DeclBase):\n    __tablename__ = \"tpl\"\n\n    id = Column(String(20), primary_key=True, default=g.snow_client.gen_uid, comment=\"\u4e3b\u952e\")\n    name = Column(String(50), nullable=False, comment=\"\u540d\u79f0\")\n",
+"only_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n    id: str = Field(...)\n    # #\n    name: str = None\n\n    @classmethod\n    def response_fields(cls):\n        return filter_fields(\n            cls,\n            exclude=[]\n        )\n",
+"only_app_services.py": "from pydantic import BaseModel\n\n\nclass TplDetailSvc(BaseModel):\n    model_config = {\n        \"json_schema_extra\": {\n            \"title\": \"TplDetail\"\n        }\n    }\n\n    async def detail(self):\n        # TODO: \u4e1a\u52a1\u903b\u8f91\n        pass\n",
+"tiny_app_api.py": "from fastapi import APIRouter\nfrom loguru import logger\n\nfrom app.api.responses import Responses, response_docs\nfrom app.api.status import Status\n\nrouter = APIRouter()\n\n\n@router.get(\n    path=\"/tpl/{tpl_id}\",\n    summary=\"tplDetail\",\n    responses=response_docs(),\n)\nasync def detail(\n    tpl_id: str,\n    # TODO: \u8ba4\u8bc1\n):\n    try:\n        data = {}  # TODO: \u6570\u636e\n        if not data:\n            return Responses.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n    except Exception as e:\n        msg = \"tplDetail\u64cd\u4f5c\u5f02\u5e38\"\n        logger.exception(msg)\n        return Responses.failure(msg=msg, error=e)\n    return Responses.success(data=data)\n"
 }
```
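Each value in `_api_tpl.json` is a complete Python module stored as a JSON-escaped string; the scaffolder loads the file with `json.loads` and writes the decoded source after the `tpl`/`Tpl` renames. Decoding a shortened stand-in for one entry:

```python
# The template values are JSON-escaped Python source; json.loads restores the
# newlines and \uXXXX escapes. The body below is a shortened stand-in, not the
# full entry from the diff.
import json

raw = '{"light_app_services.py": "from pydantic import BaseModel\\n\\n\\nclass TplDetailSvc(BaseModel):\\n    pass\\n"}'
print(json.loads(raw)["light_app_services.py"])
```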