fastapi_scaff-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fastapi-scaff might be problematic.

@@ -0,0 +1,10 @@
+ """
+ @author axiner
+ @version v1.0.0
+ @created 2024/07/29 22:22
+ @abstract
+ @description
+ @history
+ """
+
+ __version__ = "0.0.1"
@@ -0,0 +1,304 @@
+ """
+ @author axiner
+ @version v1.0.0
+ @created 2024/07/29 22:22
+ @abstract main
+ @description
+ @history
+ """
+ import argparse
+ import json
+ import os
+ import re
+ import sys
+ from pathlib import Path
+
+ from . import __version__
+
+ here = Path(__file__).absolute().parent
+
+ prog = "fastapi-scaff"
+
+
+ def main():
+     parser = argparse.ArgumentParser(
+         prog=prog,
+         description="fastapi脚手架,一键生成项目或api,让开发变得更简单",
+         epilog="examples: \n"
+                "  `new`: %(prog)s new myproj -d postgresql\n"
+                "  `add`: %(prog)s add myapi",
+         formatter_class=argparse.RawDescriptionHelpFormatter
+     )
+     parser.add_argument(
+         "--version",
+         action="version",
+         version=f"%(prog)s {__version__}")
+     parser.add_argument(
+         "command",
+         choices=["new", "add"],
+         help="创建项目或添加api")
+     parser.add_argument(
+         "name",
+         type=str,
+         help="项目或api名称(多个api可逗号分隔)")
+     parser.add_argument(
+         "-d",
+         "--db",
+         default="sqlite",
+         choices=["sqlite", "mysql", "postgresql"],
+         metavar="",
+         help="`new`时可指定项目数据库(默认sqlite)")
+     parser.add_argument(
+         "-v",
+         "--vn",
+         type=str,
+         default="v1",
+         metavar="",
+         help="`add`时可指定版本(默认v1)")
+     parser.add_argument(
+         "-s",
+         "--subdir",
+         type=str,
+         default="",
+         metavar="",
+         help="`add`时可指定子目录(默认空)")
+     parser.add_argument(
+         "-t",
+         "--target",
+         default="abd",
+         choices=["a", "ab", "abd"],
+         metavar="",
+         help="`add`时可指定目标(默认abd, a:api,b:business,d:datatype)")
+     args = parser.parse_args()
+     cmd = CMD(args)
+     if args.command == "new":
+         cmd.new()
+     else:
+         cmd.add()
+
+
+ class CMD:
+
+     def __init__(self, args: argparse.Namespace):
+         args.name = args.name.replace(" ", "")
+         if not args.name:
+             sys.stderr.write(f"{prog}: name cannot be empty\n")
+             sys.exit(1)
+         if args.command == "new":
+             pattern = r"^[A-Za-z][A-Za-z0-9_-]{0,64}$"
+             if not re.search(pattern, args.name):
+                 sys.stderr.write(f"{prog}: '{args.name}' only support regex: {pattern}\n")
+                 sys.exit(1)
+         else:
+             pattern = r"^[A-Za-z][A-Za-z0-9_]{0,64}$"
+             args.name = args.name.replace("，", ",").strip(",")
+             for t in args.name.split(","):
+                 if not re.search(pattern, t):
+                     sys.stderr.write(f"{prog}: '{t}' only support regex: {pattern}\n")
+                     sys.exit(1)
+             args.vn = args.vn.replace(" ", "")
+             if not args.vn:
+                 sys.stderr.write(f"{prog}: vn cannot be empty\n")
+                 sys.exit(1)
+             if not re.search(pattern, args.vn):
+                 sys.stderr.write(f"{prog}: '{args.vn}' only support regex: {pattern}\n")
+                 sys.exit(1)
+             args.subdir = args.subdir.replace(" ", "")
+             if args.subdir:
+                 if not re.search(pattern, args.subdir):
+                     sys.stderr.write(f"{prog}: '{args.subdir}' only support regex: {pattern}\n")
+                     sys.exit(1)
+         self.args = args
+
+     def new(self):
+         sys.stdout.write("Starting new project...\n")
+         name = Path(self.args.name)
+         if name.is_dir() and any(name.iterdir()):
+             sys.stderr.write(f"{prog}: '{name}' exists\n")
+             sys.exit(1)
+         name.mkdir(parents=True, exist_ok=True)
+         with open(here.joinpath("_project_tpl.json"), "r") as f:
+             project = json.loads(f.read())
+         for k, v in project.items():
+             tplpath = name.joinpath(k)
+             tplpath.parent.mkdir(parents=True, exist_ok=True)
+             with open(tplpath, "w+", encoding="utf-8") as f:
+                 # rpl
+                 if re.search(r"README\.md$", k):
+                     v = v.replace(f"# {prog}", f"# {prog} ( => yourProj)")
+                 if re.search(r"requirements\.txt$", k):
+                     _default = self._db_requirements_map("default")
+                     _user = self._db_requirements_map(self.args.db) or _default
+                     v = v.replace(
+                         _default,
+                         '\n'.join(_user)
+                     )
+                 if _env := re.search(r"app_(.*?).yaml$", k):
+                     _rpl_name = f"/app_{_env.group(1)}"
+                     _default = self._db_yaml_map("default")
+                     _user = self._db_yaml_map(self.args.db) or _default
+                     v = v.replace(
+                         _default["db_url"].replace("/app_dev", _rpl_name),
+                         _user["db_url"].replace("/app_dev", _rpl_name)
+                     ).replace(
+                         _default["db_async_url"].replace("/app_dev", _rpl_name),
+                         _user["db_async_url"].replace("/app_dev", _rpl_name)
+                     )
+                 # < rpl
+                 f.write(v)
+         sys.stdout.write("Done. Now run:\n"
+                          f"> 1. cd {name}\n"
+                          f"> 2. modify config, eg: db\n"
+                          f"> 3. pip install -r requirements.txt\n"
+                          f"> 4. python runserver.py\n"
+                          f"> ----- more see README.md -----\n")
+
+     @staticmethod
+     def _db_requirements_map(name: str):
+         return {
+             "default": "aiosqlite==0.21.0",
+             "sqlite": [
+                 "aiosqlite==0.21.0",
+             ],
+             "mysql": [
+                 "PyMySQL==1.1.1",
+                 "aiomysql==0.2.0",
+             ],
+             "postgresql": [
+                 "psycopg2-binary==2.9.10",
+                 "asyncpg==0.30.0",
+             ],
+         }.get(name)
+
+     @staticmethod
+     def _db_yaml_map(name: str):
+         return {
+             "default": {
+                 "db_url": "db_url: sqlite:///app_dev.sqlite",
+                 "db_async_url": "db_async_url: sqlite+aiosqlite:///app_dev.sqlite",
+             },
+             "sqlite": {
+                 "db_url": "db_url: sqlite:///app_dev.sqlite",
+                 "db_async_url": "db_async_url: sqlite+aiosqlite:///app_dev.sqlite",
+             },
+             "mysql": {
+                 "db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port>/app_dev?charset=utf8mb4",
+                 "db_async_url": "db_async_url: mysql+aiomysql://<username>:<password>@<host>:<port>/app_dev?charset=utf8mb4",
+             },
+             "postgresql": {
+                 "db_url": "db_url: postgresql://<username>:<password>@<host>:<port>/app_dev",
+                 "db_async_url": "db_async_url: postgresql+asyncpg://<username>:<password>@<host>:<port>/app_dev",
+             },
+         }.get(name)
+
+     def add(self):
+         vn = self.args.vn
+         subdir = self.args.subdir
+         target = self.args.target
+
+         work_dir = Path.cwd()
+         with open(here.joinpath("_api_tpl.json"), "r", encoding="utf-8") as f:
+             api_tpl_dict = json.loads(f.read())
+         if target == "a":
+             tpl_mods = [
+                 "app/api",
+             ]
+         elif target == "ab":
+             tpl_mods = [
+                 "app/api",
+                 "app/business",
+             ]
+         else:
+             tpl_mods = [
+                 "app/api",
+                 "app/business",
+                 "app/datatype",
+             ]
+         for mod in tpl_mods:
+             if not work_dir.joinpath(mod).is_dir():
+                 sys.stderr.write(f"[error] not exists: {mod.replace('/', os.sep)}")
+                 sys.exit(1)
+         for name in self.args.name.split(","):
+             sys.stdout.write(f"Adding api:\n")
+             flags = {
+                 # a
+                 "0": [0],
+                 "1": [0],
+                 # ab
+                 "00": [0, 0],
+                 "10": [0, 1],
+                 "01": [1, 0],
+                 "11": [0, 0],
+                 # abd
+                 "000": [0, 0, 0],
+                 "100": [0, 0, 0],
+                 "010": [1, 0, 0],
+                 "001": [0, 1, 0],
+                 "110": [0, 0, 0],
+                 "101": [0, 1, 0],
+                 "011": [1, 0, 0],
+                 "111": [0, 0, 0],
+             }
+             e_flag = [
+                 1 if (Path(work_dir, mod, vn if mod.endswith("api") else "", subdir, f"{name}.py")).is_file() else 0
+                 for mod in tpl_mods
+             ]
+             p_flag = flags["".join(map(str, e_flag))]
+             for i, mod in enumerate(tpl_mods):
+                 # dir
+                 curr_mod_dir = work_dir.joinpath(mod)
+                 if mod.endswith("api"):
+                     # vn dir
+                     curr_mod_dir = curr_mod_dir.joinpath(vn)
+                     if not curr_mod_dir.is_dir():
+                         curr_mod_dir_rel = curr_mod_dir.relative_to(work_dir)
+                         is_create = input(f"{curr_mod_dir_rel} not exists, create? [y/n]: ")
+                         if is_create.lower() == "y" or is_create == "":
+                             try:
+                                 curr_mod_dir.mkdir(parents=True, exist_ok=True)
+                                 with open(curr_mod_dir.joinpath("__init__.py"), "w+", encoding="utf-8") as f:
+                                     f.write("""\"\"\"\napi-{vn}\n\"\"\"\n\n_prefix = "/api/{vn}"\n""".format(
+                                         vn=vn,
+                                     ))
+                             except Exception as e:
+                                 sys.stderr.write(f"[error] create {curr_mod_dir_rel} failed: {e}\n")
+                                 sys.exit(1)
+                         else:
+                             sys.exit(1)
+                 if subdir:
+                     curr_mod_dir = curr_mod_dir.joinpath(subdir)
+                     curr_mod_dir.mkdir(parents=True, exist_ok=True)
+                     with open(curr_mod_dir.joinpath("__init__.py"), "w+", encoding="utf-8") as f:
+                         f.write("")
+                         if mod.endswith("api"):
+                             f.write("""\"\"\"\n{subdir}\n\"\"\"\n\n_prefix = "/{subdir}"\n""".format(
+                                 subdir=subdir,
+                             ))
+
+                 # file
+                 curr_mod_file = curr_mod_dir.joinpath(name + ".py")
+                 curr_mod_file_rel = curr_mod_file.relative_to(work_dir)
+                 if e_flag[i]:
+                     sys.stdout.write(f"[{name}] Existed {curr_mod_file_rel}\n")
+                 else:
+                     with open(curr_mod_file, "w+", encoding="utf-8") as f:
+                         sys.stdout.write(f"[{name}] Writing {curr_mod_file_rel}\n")
+                         prefix = "only_" if p_flag[i] else f"{target}_"
+                         k = prefix + mod.replace("/", "_") + ".py"
+                         if subdir:
+                             v = api_tpl_dict.get(k, "").replace(
+                                 "from app.business.tpl import (", f"from app.business.{subdir}.tpl import ("
+                             ).replace(
+                                 "from app.datatype.tpl import (", f"from app.datatype.{subdir}.tpl import ("
+                             ).replace(
+                                 "tpl", name).replace(
+                                 "Tpl", "".join([i[0].upper() + i[1:] if i else "_" for i in name.split("_")]))
+                         else:
+                             v = api_tpl_dict.get(k, "").replace(
+                                 "tpl", name).replace(
+                                 "Tpl", "".join([i[0].upper() + i[1:] if i else "_" for i in name.split("_")]))
+                         f.write(v)
+
+
+ if __name__ == "__main__":
+     main()
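
The class-name prefix that `add` substitutes for the `Tpl` placeholder is built by splitting the api name on underscores and capitalizing each part (empty parts collapse to an underscore). A minimal standalone sketch of that conversion, using the hypothetical names `user_profile` and `order`:

    def to_class_prefix(name: str) -> str:
        # Same expression CMD.add() applies when rewriting the "Tpl" placeholder in a template.
        return "".join([p[0].upper() + p[1:] if p else "_" for p in name.split("_")])

    assert to_class_prefix("user_profile") == "UserProfile"
    assert to_class_prefix("order") == "Order"

The lowercase `tpl` placeholder is replaced with the name itself, so `fastapi-scaff add user_profile` would, under these rules, turn the template's `TplDetailBiz` into `UserProfileDetailBiz`. The epilog above gives the intended invocations: `fastapi-scaff new myproj -d postgresql` and `fastapi-scaff add myapi`.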
@@ -0,0 +1,10 @@
+ {
+ "abd_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\n\nfrom app.api.response import Response, response_docs\nfrom app.business.tpl import (\n TplDetailBiz,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\ntpl_router = APIRouter()\n\n\n@tpl_router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailBiz,\n ),\n)\nasync def detail(\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n tpl_biz = TplDetailBiz(id=tpl_id)\n data = await tpl_biz.detail()\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e)\n return Response.success(data=data)\n",
+ "abd_app_business.py": "from app.datatype.tpl import (\n TplDetailMdl,\n)\n\n\nclass TplDetailBiz(TplDetailMdl):\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n",
+ "abd_app_datatype.py": "from pydantic import BaseModel, Field\nfrom sqlalchemy import Column, String\n\nfrom app.datatype import DeclBase, filter_fields\nfrom app.initializer import g\n\n\nclass Tpl(DeclBase):\n __tablename__ = \"tpl\"\n\n id = Column(String(20), primary_key=True, default=g.snow_cli.gen_uid, comment=\"\u4e3b\u952e\")\n name = Column(String(50), nullable=False, comment=\"\u540d\u79f0\")\n\n\nclass TplDetailMdl(BaseModel):\n id: str = Field(...)\n # #\n name: str = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n",
+ "ab_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\n\nfrom app.api.response import Response, response_docs\nfrom app.business.tpl import (\n TplDetailBiz,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\ntpl_router = APIRouter()\n\n\n@tpl_router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailBiz,\n ),\n)\nasync def detail(\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n tpl_biz = TplDetailBiz(id=tpl_id)\n data = await tpl_biz.detail()\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e)\n return Response.success(data=data)\n",
+ "ab_app_business.py": "from pydantic import BaseModel, Field\n\n\nclass TplDetailBiz(BaseModel):\n id: str = Field(...)\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n",
+ "a_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\ntpl_router = APIRouter()\n\n\n@tpl_router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e)\n return Response.success(data=data)\n",
+ "only_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\ntpl_router = APIRouter()\n\n\n@tpl_router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e)\n return Response.success(data=data)\n",
+ "only_app_business.py": "from pydantic import BaseModel, Field\n\n\nclass TplDetailBiz(BaseModel):\n id: str = Field(...)\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n"
+ }
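
`CMD.add()` selects a key from this mapping as `prefix + mod.replace("/", "_") + ".py"`, where the prefix is the chosen `--target` (`a`, `ab`, `abd`) unless the flag table marks a slot for the trimmed `only_` variant because a sibling module already exists. A small sketch of that lookup, assuming target `abd` and no pre-existing files (so `p_flag` is all zeros):

    target = "abd"
    tpl_mods = ["app/api", "app/business", "app/datatype"]
    p_flag = [0, 0, 0]  # from the flags table: nothing exists yet, use the full templates

    for i, mod in enumerate(tpl_mods):
        prefix = "only_" if p_flag[i] else f"{target}_"
        print(prefix + mod.replace("/", "_") + ".py")
    # -> abd_app_api.py, abd_app_business.py, abd_app_datatype.py

Note that the flag table never routes the datatype slot to `only_`, which is consistent with there being no `only_app_datatype.py` key in this template file.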
@@ -0,0 +1,46 @@
+ {
+ ".gitignore": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n# Usually these files are written by a python script from a template\n# before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n# For a library or package, you might want to ignore these files since the code is\n# intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n# However, in case of collaboration, if having platform-specific dependencies or dependencies\n# having no cross-platform support, pipenv may install dependencies that don't work, or not\n# install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n#.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n\n# Append\n.idea\n.vscode\n*.sqlite\n",
+ "LICENSE": "Copyright (c) 2024 axiner\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
+ "README.md": "# fastapi-scaff\n\n## What is this?\n\n- by: axiner\n- fastapi-scaff\n- This is a fastapi scaff.\n - new project\n - add api\n - about project:\n - auto init project (conf, db, log...)\n - auto register router\n - auto register middleware\n - ...\n - more documents: [\u8bf7\u70b9\u51fb\u94fe\u63a5](https://blog.csdn.net/atpuxiner/article/details/144291336?fromshare=blogdetail&sharetype=blogdetail&sharerId=144291336&sharerefer=PC&sharesource=atpuxiner&sharefrom=from_link)\n\n## Project structure\n\n- ABD: ABD\u6a21\u5f0f\n - A api\n - B business\n - D datatype\n- \u8c03\u7528\u8fc7\u7a0b: main.py(initializer) -> (middleware) - router - api - business - (datatype)\n- \u7ed3\u6784\u5982\u4e0b: (\u547d\u540d\u7ecf\u8fc7\u591a\u6b21\u4fee\u6539\u6572\u5b9a\uff0c\u7b80\u6d01\u6613\u61c2\uff0cABD\u76ee\u5f55\u8d34\u5408\u907f\u514d\u6742\u4e71\u65e0\u7ae0)\n ```\n \u2514\u2500\u2500 fastapi-scaff\n \u251c\u2500\u2500 app (\u5e94\u7528)\n \u2502 \u251c\u2500\u2500 api \u251c\u2500\u2500 (api)\n \u2502 \u2502 \u2514\u2500\u2500 v1 \u2502 \u2514\u2500\u2500 (v1)\n \u2502 \u251c\u2500\u2500 business \u251c\u2500\u2500 (\u4e1a\u52a1)\n \u2502 \u251c\u2500\u2500 datatype \u251c\u2500\u2500 (\u6570\u636e\u7c7b\u578b)\n \u2502 \u251c\u2500\u2500 initializer \u251c\u2500\u2500 (\u521d\u59cb\u5316)\n \u2502 \u2502 \u251c\u2500\u2500 conf \u2502 \u251c\u2500\u2500 (\u914d\u7f6e)\n \u2502 \u2502 \u251c\u2500\u2500 db \u2502 \u251c\u2500\u2500 (\u6570\u636e\u5e93)\n \u2502 \u2502 \u251c\u2500\u2500 log \u2502 \u251c\u2500\u2500 (\u65e5\u5fd7)\n \u2502 \u2502 \u2514\u2500\u2500 ... \u2502 \u2514\u2500\u2500 (...)\n \u2502 \u251c\u2500\u2500 middleware \u251c\u2500\u2500 (\u4e2d\u95f4\u4ef6)\n \u2502 \u251c\u2500\u2500 router \u251c\u2500\u2500 (\u8def\u7531)\n \u2502 \u251c\u2500\u2500 utils \u251c\u2500\u2500 (utils)\n \u2502 \u2514\u2500\u2500 main.py \u2514\u2500\u2500 (main.py)\n \u251c\u2500\u2500 config (\u914d\u7f6e\u76ee\u5f55)\n \u251c\u2500\u2500 deploy (\u90e8\u7f72\u76ee\u5f55)\n \u251c\u2500\u2500 docs (\u6587\u6863\u76ee\u5f55)\n \u251c\u2500\u2500 log (\u65e5\u5fd7\u76ee\u5f55)\n \u251c\u2500\u2500 .gitignore\n \u251c\u2500\u2500 LICENSE\n \u251c\u2500\u2500 README.md\n \u251c\u2500\u2500 requirements.txt\n \u2514\u2500\u2500 runserver.py\n ```\n\n## Installation\n\nThis package can be installed using pip (Python>=3.11):\n> pip install fastapi-scaff\n\n## Scaff usage\n\n- 1\uff09help document\n - `fastapi-scaff -h`\n- 2\uff09new project\n - `fastapi-scaff new <myproj>`\n- 3\uff09add api\n - `cd to project root dir`\n - `fastapi-scaff add <myapi>`\n\n## Project run\n\n- 1\uff09cd to project root dir\n- 2\uff09modify the configuration, such as for the database\n- 3\uff09`pip install -r requirements.txt`\n- 4\uff09`python runserver.py`\n - more parameters see:\n - about uvicorn: [click here](https://www.uvicorn.org/)\n - about gunicorn: [click here](https://docs.gunicorn.org/en/stable/)\n\n## License\n\nThis project is released under the MIT License (MIT). See [LICENSE](LICENSE)\n",
+ "requirements.txt": "# -*- coding: utf-8 -*-\n# Python>=3.11\nfastapi==0.115.12\nuvicorn==0.34.0\ntoollib==1.7.3\npython-dotenv==1.1.0\nPyYAML==6.0.2\nloguru==0.7.3\nSQLAlchemy==2.0.40\naiosqlite==0.21.0\nredis==5.2.1\nPyJWT==2.10.1\nbcrypt==4.3.0\n",
+ "runserver.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract runserver\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n@description\n@history\n\"\"\"\nimport argparse\nimport subprocess\nimport sys\n\nimport uvicorn\n\n\ndef run_by_unicorn(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n):\n log_config = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"default\": {\n \"()\": \"uvicorn.logging.DefaultFormatter\",\n \"fmt\": \"%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(message)s\",\n \"use_colors\": None\n },\n \"access\": {\n \"()\": \"uvicorn.logging.AccessFormatter\",\n \"fmt\": \"%(asctime)s %(levelname)s %(client_addr)s - \\\"%(request_line)s\\\" %(status_code)s\"\n }\n },\n \"handlers\": {\n \"default\": {\n \"formatter\": \"default\",\n \"class\": \"logging.StreamHandler\",\n \"stream\": \"ext://sys.stderr\"\n },\n \"access\": {\n \"formatter\": \"access\",\n \"class\": \"logging.StreamHandler\",\n \"stream\": \"ext://sys.stdout\"\n }\n },\n \"loggers\": {\n \"uvicorn\": {\n \"handlers\": [\n \"default\"\n ],\n \"level\": \"INFO\",\n \"propagate\": False\n },\n \"uvicorn.error\": {\n \"level\": \"INFO\"\n },\n \"uvicorn.access\": {\n \"handlers\": [\n \"access\"\n ],\n \"level\": \"INFO\",\n \"propagate\": False\n }\n }\n }\n uvicorn.run(\n app=\"app.main:app\",\n host=host,\n port=port,\n workers=workers,\n log_level=log_level,\n log_config=log_config,\n reload=is_reload,\n )\n\n\ndef run_by_gunicorn(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n):\n cmd = (\n \"gunicorn app.main:app \"\n \"--worker-class=uvicorn.workers.UvicornWorker \"\n \"--bind={host}:{port} \"\n \"--workers={workers} \"\n \"--log-level={log_level} \"\n \"--access-logfile=- \"\n \"--error-logfile=- \"\n .format(\n host=host,\n port=port,\n workers=workers,\n log_level=log_level,\n )\n )\n if is_reload:\n cmd += f\" --reload\"\n subprocess.run(cmd, shell=True)\n\n\ndef main(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n is_gunicorn: bool,\n):\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--host\", type=str, metavar=\"\", help=\"host\")\n parser.add_argument(\"--port\", type=int, metavar=\"\", help=\"port\")\n parser.add_argument(\"--workers\", type=int, metavar=\"\", help=\"\u8fdb\u7a0b\u6570\")\n parser.add_argument(\"--log-level\", type=str, metavar=\"\", help=\"\u65e5\u5fd7\u7b49\u7ea7\")\n parser.add_argument(\"--is-reload\", action=\"store_true\", help=\"\u662f\u5426reload\")\n parser.add_argument(\"--is-gunicorn\", action=\"store_true\", help=\"\u662f\u5426gunicorn\")\n args = parser.parse_args()\n kwargs = {\n \"host\": args.host or host,\n \"port\": args.port or port,\n \"workers\": args.workers or workers,\n \"log_level\": args.log_level or log_level,\n \"is_reload\": args.is_reload or is_reload,\n }\n if (args.is_gunicorn or is_gunicorn) and not sys.platform.lower().startswith(\"win\"):\n try:\n import gunicorn # noqa\n except ImportError:\n sys.stderr.write(\"gunicorn\u672a\u627e\u5230\uff0c\u6b63\u5728\u5c1d\u8bd5\u81ea\u52a8\u5b89\u88c5...\\n\")\n try:\n subprocess.run(\n [\"pip\", \"install\", \"gunicorn\"],\n check=True,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n sys.stderr.write(\"gunicorn\u5b89\u88c5\u6210\u529f\\n\")\n except subprocess.CalledProcessError as e:\n sys.stderr.write(f\"gunicorn\u5b89\u88c5\u5931\u8d25: {e.stderr.decode().strip()}\\n\")\n raise\n 
run_by_gunicorn(**kwargs)\n else:\n run_by_unicorn(**kwargs)\n\n\nif __name__ == '__main__':\n main(\n host=\"0.0.0.0\",\n port=8000,\n workers=3,\n log_level=\"debug\",\n is_reload=False, # \u9002\u7528\u4e8edev\n is_gunicorn=False, # \u4e0d\u652f\u6301win\n )\n",
+ "app/main.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract main\n@description\n@history\n\"\"\"\nfrom contextlib import asynccontextmanager\nfrom fastapi import FastAPI\n\nfrom app import (\n router,\n middleware,\n)\nfrom app.initializer import g\n\ng.setup()\n# #\nopenapi_url = \"/openapi.json\"\ndocs_url = \"/docs\"\nredoc_url = \"/redoc\"\nif g.config.is_disable_docs is True:\n openapi_url, docs_url, redoc_url = None, None, None\n\n\n@asynccontextmanager\nasync def lifespan(app_: FastAPI):\n g.logger.info(f\"Application using config file '{g.config.yamlname}'\")\n g.logger.info(f\"Application name '{g.config.appname}'\")\n g.logger.info(f\"Application version '{g.config.appversion}'\")\n # #\n g.logger.info(\"Application server running\")\n yield\n g.logger.info(\"Application server shutdown\")\n\n\napp = FastAPI(\n title=g.config.appname,\n version=g.config.appversion,\n debug=g.config.debug,\n openapi_url=openapi_url,\n docs_url=docs_url,\n redoc_url=redoc_url,\n lifespan=lifespan,\n)\n# #\nrouter.register_routers(app)\nmiddleware.register_middlewares(app)\n",
+ "app/__init__.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract app\n@description\n@history\n\"\"\"\nfrom pathlib import Path\n\nAPP_DIR = Path(__file__).absolute().parent\n",
+ "app/api/exception.py": "from typing import Any\n\nfrom app.api.status import Status\n\n\nclass CustomException(Exception):\n\n def __init__(\n self,\n msg: str = None,\n code: int = None,\n data: Any = None,\n status: Status = Status.FAILURE,\n ):\n self.msg = msg or status.msg\n self.code = code or status.code\n self.data = data\n self.status = status\n\n def __str__(self) -> str:\n return f\"{self.code} {self.msg}\"\n\n def __repr__(self) -> str:\n return f\"<{self.__class__.__name__}: ({self.code!r}, {self.msg!r})>\"\n",
+ "app/api/response.py": "from typing import Mapping, get_type_hints\n\nfrom starlette.background import BackgroundTask\nfrom starlette.responses import JSONResponse, StreamingResponse, ContentStream\nfrom toollib.utils import now2timestamp, map_jsontype\n\nfrom app.api.status import Status\n\n\nclass Response:\n\n @staticmethod\n def success(\n data: dict | list | str | None = None,\n msg: str = None,\n code: int = None,\n status: Status = Status.SUCCESS,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> JSONResponse:\n return JSONResponse(\n content={\n \"time\": now2timestamp(),\n \"msg\": msg or status.msg,\n \"code\": code or status.code,\n \"data\": data,\n },\n status_code=status_code,\n headers=headers,\n media_type=media_type,\n background=background,\n )\n\n @staticmethod\n def failure(\n msg: str = None,\n code: int = None,\n error: str | Exception | None = None,\n data: dict | list | str | None = None,\n status: Status = Status.FAILURE,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> JSONResponse:\n return JSONResponse(\n content={\n \"time\": now2timestamp(),\n \"msg\": msg or status.msg,\n \"code\": code or status.code,\n \"error\": str(error) if error else None,\n \"data\": data,\n },\n status_code=status_code,\n headers=headers,\n media_type=media_type,\n background=background,\n )\n\n @staticmethod\n def stream(\n content: ContentStream,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> StreamingResponse:\n return StreamingResponse(\n content=content,\n status_code=status_code,\n headers=headers,\n media_type=media_type,\n background=background,\n )\n\n\ndef response_docs(\n model=None, # \u6a21\u578b(BaseModel): \u81ea\u52a8\u4ece\u6a21\u578b\u4e2d\u89e3\u6790\u5b57\u6bb5\u4e0e\u7c7b\u578b\n data: dict | str = None, # \u6570\u636e(dict/str): \u76f4\u63a5\u7ed9\u5b9a\u5b57\u6bb5\u4e0e\u7c7b\u578b/\u7c7b\u578b\n is_listwrap: bool = False,\n listwrap_key: str = None,\n listwrap_key_extra: dict = None,\n docs_extra: dict = None,\n):\n \"\"\"\u54cd\u5e94\u6587\u6863\"\"\"\n\n def _data_from_model(model_, default: str = \"\u672a\u77e5\") -> dict:\n \"\"\"\u6570\u636e\u6a21\u677f\"\"\"\n data_ = {}\n if hasattr(model_, \"response_fields\"):\n all_fields = set(model_.response_fields())\n else:\n all_fields = set(model_.model_fields.keys())\n type_hints = get_type_hints(model_)\n for field_name in all_fields:\n try:\n t = type_hints.get(field_name)\n t = str(t).replace(\"<class '\", \"\").replace(\"'>\", \"\") if t else default\n except Exception:\n t = default\n data_[field_name] = t\n return data_\n\n final_data = {}\n if model:\n final_data = _data_from_model(model)\n if data:\n if isinstance(data, dict):\n final_data.update(data)\n else:\n final_data = data\n if is_listwrap:\n final_data = [final_data] if not isinstance(final_data, list) else final_data\n if listwrap_key:\n final_data = {listwrap_key: final_data}\n if listwrap_key_extra:\n final_data.update(listwrap_key_extra)\n\n def _format_value(value):\n if isinstance(value, str):\n _value = value.split(\"|\")\n if len(_value) > 1:\n return \" | \".join([map_jsontype(_v.strip(), is_keep_integer=True) for _v in _value])\n return map_jsontype(value, is_keep_integer=True)\n elif isinstance(value, dict):\n return {k: 
_format_value(v) for k, v in value.items()}\n elif isinstance(value, (list, tuple)):\n return [_format_value(item) for item in value]\n else:\n return str(value)\n\n format_data = _format_value(final_data)\n\n docs = {\n 200: {\n \"description\": \"\u64cd\u4f5c\u6210\u529f\u3010code\u4e3a0 & http\u72b6\u6001\u7801200\u3011\",\n \"content\": {\n \"application/json\": {\n \"example\": {\n \"time\": \"integer\",\n \"msg\": \"string\",\n \"code\": \"integer\",\n \"data\": format_data\n }\n }\n }\n },\n 422: {\n \"description\": \"\u64cd\u4f5c\u5931\u8d25\u3010code\u975e0 & http\u72b6\u6001\u7801200\u3011\",\n \"content\": {\n \"application/json\": {\n \"example\": {\n \"time\": \"integer\",\n \"msg\": \"string\",\n \"code\": \"integer\",\n \"error\": \"string\",\n \"data\": \"object | array | ...\",\n }\n }\n }\n },\n }\n if docs_extra:\n docs.update(docs_extra)\n return docs\n",
+ "app/api/status.py": "from enum import Enum\n\n\nclass Status(Enum):\n SUCCESS = (0, '\u64cd\u4f5c\u6210\u529f')\n FAILURE = (1, '\u64cd\u4f5c\u5931\u8d25')\n\n PARAMS_ERROR = (400, '\u53c2\u6570\u9519\u8bef')\n UNAUTHORIZED_ERROR = (401, '\u8ba4\u8bc1\u5931\u8d25')\n # \u5efa\u8bae\uff1a\u4e1a\u52a1\u6a21\u5757\u9519\u8bef\u7801\u4ece10000\u5f00\u59cb\n RECORD_NOT_EXIST_ERROR = (10000, '\u8bb0\u5f55\u4e0d\u5b58\u5728')\n RECORD_EXISTS_ERROR = (10001, '\u8bb0\u5f55\u5df2\u5b58\u5728')\n\n @property\n def code(self):\n return self.value[0]\n\n @property\n def msg(self):\n return self.value[1]\n\n @classmethod\n def collect_status(cls):\n text = \"\"\n for s in cls:\n text += f\"{s.code} {s.msg}\\n\"\n return text\n",
+ "app/api/__init__.py": "\"\"\"\napi\n\"\"\"\n",
+ "app/api/default/ping.py": "from fastapi import APIRouter\n\nping_router = APIRouter()\n\n\n@ping_router.get(\n path=\"/ping\",\n summary=\"ping\",\n)\ndef ping():\n return \"pong\"\n",
+ "app/api/default/__init__.py": "\"\"\"\napi-default\n\"\"\"\n\n_prefix = \"/api\"\n",
+ "app/api/v1/user.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.business.user import (\n UserDetailBiz,\n UserListBiz,\n UserCreateBiz,\n UserUpdateBiz,\n UserDeleteBiz,\n UserLoginBiz,\n UserTokenBiz,\n)\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nuser_router = APIRouter()\n_active = True # \u6fc0\u6d3b\u72b6\u6001\uff08\u9ed8\u8ba4\u6fc0\u6d3b\uff09\n_tag = \"user\" # \u6807\u7b7e\uff08\u9ed8\u8ba4\u6a21\u5757\u540d\u6216\u5b50\u76ee\u5f55\u540d\uff09\n\n\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n\n\n@user_router.get(\n path=\"/user/{user_id}\",\n summary=\"userDetail\",\n responses=response_docs(\n model=UserDetailBiz,\n ),\n)\nasync def detail(\n user_id: str,\n current_user: JWTUser = Depends(get_current_user), # \u8ba4\u8bc1\n):\n try:\n user_biz = UserDetailBiz(id=user_id)\n data = await user_biz.detail()\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userDetail\u5931\u8d25\", error=e)\n return Response.success(data=data)\n\n\n@user_router.get(\n path=\"/user\",\n summary=\"userList\",\n responses=response_docs(\n model=UserListBiz,\n is_listwrap=True,\n listwrap_key=\"items\",\n listwrap_key_extra={\n \"total\": \"int\",\n },\n ),\n)\nasync def lst(\n page: int = 1,\n size: int = 10,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n user_biz = UserListBiz(page=page, size=size)\n data, total = await user_biz.lst()\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userList\u5931\u8d25\", error=e)\n return Response.success(data={\"items\": data, \"total\": total})\n\n\n@user_router.post(\n path=\"/user\",\n summary=\"userCreate\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def create(\n user_biz: UserCreateBiz,\n):\n try:\n user_id = await user_biz.create()\n if not user_id:\n return Response.failure(msg=\"\u7528\u6237\u5df2\u5b58\u5728\", status=Status.RECORD_EXISTS_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userCreate\u5931\u8d25\", error=e)\n return Response.success(data={\"id\": user_id})\n\n\n@user_router.put(\n path=\"/user/{user_id}\",\n summary=\"userUpdate\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def update(\n user_id: str,\n user_biz: UserUpdateBiz,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n updated_ids = await user_biz.update(user_id)\n if not updated_ids:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userUpdate\u5931\u8d25\", error=e)\n return Response.success(data={\"id\": user_id})\n\n\n@user_router.delete(\n path=\"/user/{user_id}\",\n summary=\"userDelete\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def delete(\n 
user_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n user_biz = UserDeleteBiz()\n deleted_ids = await user_biz.delete(user_id)\n if not deleted_ids:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userDelete\u5931\u8d25\", error=e)\n return Response.success(data={\"id\": user_id})\n\n\n@user_router.post(\n path=\"/user/login\",\n summary=\"userLogin\",\n responses=response_docs(data={\n \"token\": \"str\",\n }),\n)\nasync def login(\n user_biz: UserLoginBiz,\n):\n try:\n data = await user_biz.login()\n if not data:\n return Response.failure(msg=\"\u7528\u6237\u540d\u6216\u5bc6\u7801\u9519\u8bef\", status=Status.UNAUTHORIZED_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userLogin\u5931\u8d25\", error=e)\n return Response.success(data={\"token\": data})\n\n\n@user_router.post(\n path=\"/user/token\",\n summary=\"userToken\",\n responses=response_docs(data={\n \"token\": \"str\",\n }),\n)\nasync def token(\n user_biz: UserTokenBiz,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = await user_biz.token()\n if not data:\n return Response.failure(msg=\"\u672a\u5339\u914d\u5230\u8bb0\u5f55\", status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userToken\u5931\u8d25\", error=e)\n return Response.success(data={\"token\": data})\n",
+ "app/api/v1/__init__.py": "\"\"\"\napi-v1\n\"\"\"\n\n_prefix = \"/api/v1\"\n",
+ "app/aplugin/__init__.py": "\"\"\"\n\u63d2\u4ef6\n\"\"\"\n",
+ "app/atask/__init__.py": "\"\"\"\n\u5f02\u6b65\u4efb\u52a1\n eg: celery\n\"\"\"\n",
+ "app/business/user.py": "from app.datatype.user import (\n User,\n UserDetailMdl,\n UserListMdl,\n UserCreateMdl,\n UserUpdateMdl,\n UserDeleteMdl,\n UserLoginMdl,\n UserTokenMdl,\n)\nfrom app.initializer import g\nfrom app.utils import auth, db_async\n\n\nclass UserDetailBiz(UserDetailMdl):\n\n async def detail(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n fields=self.response_fields(),\n filter_by={\"id\": self.id},\n )\n return data\n\n\nclass UserListBiz(UserListMdl):\n\n async def lst(self):\n async with g.db_async_session() as session:\n data = await db_async.query_all(\n session=session,\n model=User,\n fields=self.response_fields(),\n page=self.page,\n size=self.size,\n )\n total = await db_async.query_total(session, User)\n return data, total\n\n\nclass UserCreateBiz(UserCreateMdl):\n\n async def create(self):\n async with g.db_async_session() as session:\n return await db_async.create(\n session=session,\n model=User,\n data={\n \"name\": self.name,\n \"phone\": self.phone,\n \"age\": self.age,\n \"gender\": self.gender,\n \"password\": auth.hash_password(self.password),\n \"jwt_key\": auth.gen_jwt_key(),\n },\n filter_by={\"phone\": self.phone},\n )\n\n\nclass UserUpdateBiz(UserUpdateMdl):\n\n async def update(self, user_id: str):\n async with g.db_async_session() as session:\n return await db_async.update(\n session=session,\n model=User,\n data=self.model_dump(),\n filter_by={\"id\": user_id},\n )\n\n\nclass UserDeleteBiz(UserDeleteMdl):\n\n @staticmethod\n async def delete(user_id: str):\n async with g.db_async_session() as session:\n return await db_async.delete(\n session=session,\n model=User,\n filter_by={\"id\": user_id},\n )\n\n\nclass UserLoginBiz(UserLoginMdl):\n\n async def login(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n filter_by={\"phone\": self.phone},\n )\n if not data or not auth.verify_password(self.password, data.get(\"password\")):\n return None\n new_jwt_key = auth.gen_jwt_key()\n token = auth.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=24 * 60 * 30,\n )\n # \u66f4\u65b0jwt_key\n await db_async.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"phone\": self.phone},\n )\n return token\n\n\nclass UserTokenBiz(UserTokenMdl):\n\n async def token(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n filter_by={\"id\": self.id},\n )\n if not data:\n return None\n new_jwt_key = auth.gen_jwt_key()\n token = auth.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=self.exp_minutes,\n )\n # \u66f4\u65b0jwt_key\n await db_async.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"id\": self.id},\n )\n return token\n",
+ "app/business/__init__.py": "\"\"\"\n\u4e1a\u52a1\n\"\"\"\n",
+ "app/datatype/user.py": "import re\nfrom typing import Literal\n\nfrom pydantic import BaseModel, Field, field_validator\nfrom sqlalchemy import Column, BigInteger, Integer, String\nfrom toollib.utils import now2timestamp\n\nfrom app.datatype import DeclBase, filter_fields\nfrom app.initializer import g\n\n\nclass User(DeclBase):\n __tablename__ = \"user\"\n\n id = Column(String(20), primary_key=True, default=g.snow_cli.gen_uid, comment=\"\u4e3b\u952e\")\n phone = Column(String(15), unique=True, index=True, nullable=False, comment=\"\u624b\u673a\u53f7\")\n password = Column(String(128), nullable=True, comment=\"\u5bc6\u7801\")\n jwt_key = Column(String(128), nullable=True, comment=\"jwtKey\")\n name = Column(String(50), nullable=True, comment=\"\u540d\u79f0\")\n age = Column(Integer, nullable=True, comment=\"\u5e74\u9f84\")\n gender = Column(Integer, nullable=True, comment=\"\u6027\u522b\")\n created_at = Column(BigInteger, default=now2timestamp, comment=\"\u521b\u5efa\u65f6\u95f4\")\n updated_at = Column(BigInteger, default=now2timestamp, onupdate=now2timestamp, comment=\"\u66f4\u65b0\u65f6\u95f4\")\n\n\nclass UserDetailMdl(BaseModel):\n id: str = Field(...)\n # #\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n created_at: int = None\n updated_at: int = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n\n\nclass UserListMdl(BaseModel):\n page: int = Field(1, ge=1)\n size: int = Field(10, ge=1)\n # #\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n created_at: int = None\n updated_at: int = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[\n \"page\",\n \"size\",\n ]\n )\n\n\nclass UserCreateMdl(BaseModel):\n phone: str = Field(..., pattern=r\"^1[3-9]\\d{9}$\")\n password: str = Field(...)\n name: str | None = Field(None)\n age: int | None = Field(None, ge=0, le=200)\n gender: Literal[1, 2] | None = Field(None)\n\n @field_validator(\"password\")\n def validate_password(cls, v):\n if not re.match(r\"^(?=.*[A-Za-z])(?=.*\\d)\\S{6,20}$\", v):\n raise ValueError(\"\u5bc6\u7801\u5fc5\u987b\u5305\u542b\u81f3\u5c11\u4e00\u4e2a\u5b57\u6bcd\u548c\u4e00\u4e2a\u6570\u5b57\uff0c\u957f\u5ea6\u4e3a6-20\u4f4d\u7684\u975e\u7a7a\u767d\u5b57\u7b26\u7ec4\u5408\")\n return v\n\n @field_validator(\"name\")\n def validate_name(cls, v, info):\n if not v and (phone := info.data.get(\"phone\")):\n return f\"\u7528\u6237{phone[-4:]}\"\n if v and not re.match(r\"^[\\u4e00-\\u9fffA-Za-z0-9_\\-.]{1,50}$\", v):\n raise ValueError(\"\u540d\u79f0\u4ec5\u96501-50\u4f4d\u7684\u4e2d\u6587\u3001\u82f1\u6587\u3001\u6570\u5b57\u3001_-.\u7ec4\u5408\")\n return v\n\n\nclass UserUpdateMdl(BaseModel):\n name: str | None = Field(None)\n age: int | None = Field(None, ge=0, le=200)\n gender: Literal[1, 2] | None = Field(None)\n\n @field_validator(\"name\")\n def validate_name(cls, v):\n if v and not re.match(r\"^[\\u4e00-\\u9fffA-Za-z0-9_\\-.]{1,50}$\", v):\n raise ValueError(\"\u540d\u79f0\u4ec5\u96501-50\u4f4d\u7684\u4e2d\u6587\u3001\u82f1\u6587\u3001\u6570\u5b57\u3001_-.\u7ec4\u5408\")\n return v\n\n\nclass UserDeleteMdl(BaseModel):\n pass\n\n\nclass UserLoginMdl(BaseModel):\n phone: str = Field(...)\n password: str = Field(...)\n\n\nclass UserTokenMdl(BaseModel):\n id: str = Field(...)\n exp_minutes: int = Field(24 * 60 * 30, ge=1)\n",
+ "app/datatype/__init__.py": "\"\"\"\n\u6570\u636e\u7c7b\u578b\n\"\"\"\nfrom sqlalchemy.orm import declarative_base\n\nDeclBase = declarative_base()\n\n\ndef filter_fields(\n model,\n exclude: list = None,\n):\n if exclude:\n return list(set(model.model_fields.keys()) - set(exclude))\n return list(model.model_fields.keys())\n",
+ "app/initializer/_conf.py": "import os\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\nfrom toollib.utils import get_cls_attrs, parse_variable\n\nfrom app import APP_DIR\n\n_CONFIG_DIR = APP_DIR.parent.joinpath(\"config\")\n\nload_dotenv(dotenv_path=os.environ.setdefault(\n key=\"envpath\",\n value=str(_CONFIG_DIR.joinpath(\".env\")))\n)\n# #\nappyaml = Path(\n os.environ.get(\"appyaml\") or\n _CONFIG_DIR.joinpath(f\"app_{os.environ.setdefault(key='appenv', value='dev')}.yaml\")\n)\nif not appyaml.is_file():\n raise RuntimeError(f\"\u914d\u7f6e\u6587\u4ef6\u4e0d\u5b58\u5728\uff1a{appyaml}\")\n\n\nclass EnvConfig:\n \"\"\"env\u914d\u7f6e\"\"\"\n snow_datacenter_id: int = None\n\n def setattr_from_env(self):\n cls_attrs = get_cls_attrs(EnvConfig)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n setattr(self, k, v)\n\n\nclass Config(EnvConfig):\n \"\"\"\u914d\u7f6e\"\"\"\n yamlname: str = appyaml.name\n #\n appname: str = \"xApp\"\n appversion: str = \"1.0.0\"\n debug: bool = True\n log_dir: str = \"./log\"\n is_disable_docs: bool = True\n # #\n redis_host: str = None\n redis_port: int = None\n redis_db: int = None\n redis_password: str = None\n redis_max_connections: int = None\n db_url: str = None\n db_async_url: str = None\n\n def setup(self):\n self.setattr_from_env()\n self.setattr_from_yaml()\n return self\n\n def setattr_from_yaml(self):\n cls_attrs = get_cls_attrs(Config)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n v = parse_variable(k=k, v_type=v_type, v_from=self.load_yaml(), default=v)\n setattr(self, k, v)\n\n @staticmethod\n def load_yaml() -> dict:\n with open(appyaml, mode=\"r\", encoding=\"utf-8\") as file:\n return yaml.safe_load(file)\n\n\ndef init_config() -> Config:\n return Config().setup()\n",
+ "app/initializer/_db.py": "import asyncio\nimport importlib\n\nfrom sqlalchemy import create_engine, exc\nfrom sqlalchemy.ext.asyncio import create_async_engine, AsyncSession\nfrom sqlalchemy.orm import sessionmaker, scoped_session\n\nfrom app import APP_DIR\n\n_DATATYPE_MOD_DIR = APP_DIR.joinpath(\"datatype\")\n_DATATYPE_MOD_BASE = \"app.datatype\"\n\n_is_tables_created = False\n\n\ndef init_db_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> scoped_session:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n engine = create_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_session = sessionmaker(engine, expire_on_commit=False)\n\n def create_tables():\n from app.datatype import DeclBase\n _import_tables()\n try:\n DeclBase.metadata.create_all(engine)\n except (\n exc.OperationalError,\n exc.IntegrityError,\n exc.ProgrammingError,\n ) as e:\n if \"already exists\" not in str(e):\n raise\n\n global _is_tables_created\n if is_create_tables and not _is_tables_created:\n create_tables()\n _is_tables_created = True\n\n return scoped_session(db_session)\n\n\ndef init_db_async_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> sessionmaker:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n async_engine = create_async_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False) # noqa\n\n async def create_tables():\n from app.datatype import DeclBase\n _import_tables()\n async with async_engine.begin() as conn:\n try:\n await conn.run_sync(DeclBase.metadata.create_all)\n except (\n exc.OperationalError,\n exc.IntegrityError,\n exc.ProgrammingError,\n ) as e:\n if \"already exists\" not in str(e):\n raise\n\n global _is_tables_created\n if is_create_tables and not _is_tables_created:\n try:\n loop = asyncio.get_running_loop()\n except RuntimeError:\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n task = loop.create_task(create_tables())\n task.add_done_callback(lambda t: t.result() if not t.cancelled() else None)\n if not loop.is_running():\n loop.run_until_complete(task)\n _is_tables_created = True\n return db_async_session\n\n\ndef _import_tables():\n \"\"\"\u5bfc\u5165\u8868\"\"\"\n for f in _DATATYPE_MOD_DIR.glob(\"*.py\"):\n if not f.name.startswith(\"__\"):\n _ = importlib.import_module(f\"{_DATATYPE_MOD_BASE}.{f.stem}\")\n",
+ "app/initializer/_log.py": "import os\nimport sys\nfrom pathlib import Path\n\nfrom loguru import logger\nfrom loguru._logger import Logger # noqa\n\n_LOG_CONSOLE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {file}:{line} {message}\"\n_LOG_FILE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {file}:{line} {message}\"\n_LOG_FILE_PREFIX = \"app\"\n_LOG_ROTATION = \"100 MB\"\n_LOG_RETENTION = \"15 days\"\n_LOG_COMPRESSION = None\n_LOG_ENQUEUE = True\n_LOG_BACKTRACE = False\n_LOG_DIAGNOSE = False\n_LOG_PID = False\n\n\ndef init_logger(\n debug: bool,\n log_dir: str = None,\n) -> Logger:\n logger.remove(None)\n _lever = \"DEBUG\" if debug else \"INFO\"\n logger.add(\n sys.stdout,\n format=_LOG_CONSOLE_FORMAT,\n level=_lever,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n )\n if log_dir:\n _log_dir = Path(log_dir)\n _log_access_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-access.log\")\n _log_error_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-error.log\")\n if _LOG_PID:\n _log_access_file = str(_log_access_file).replace(\".log\", f\".{os.getpid()}.log\")\n _log_error_file = str(_log_error_file).replace(\".log\", f\".{os.getpid()}.log\")\n logger.add(\n _log_access_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=_lever,\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n )\n logger.add(\n _log_error_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=\"ERROR\",\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n )\n return logger\n",
+ "app/initializer/_redis.py": "from toollib.rediser import RedisCli\n\n\ndef init_redis_cli(\n host: str,\n port: int,\n db: int,\n password: str = None,\n max_connections: int = None,\n **kwargs,\n) -> RedisCli:\n if not host:\n return RedisCli()\n return RedisCli(\n host=host,\n port=port,\n db=db,\n password=password,\n max_connections=max_connections,\n **kwargs,\n )\n",
+ "app/initializer/_snow.py": "import os\n\nfrom loguru import logger\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import localip\n\n_CACHE_KEY_SNOW_WORKER_ID_INCR = \"config:snow_worker_id_incr\"\n_CACHE_KEY_SNOW_DATACENTER_ID_INCR = \"config:snow_datacenter_id_incr\"\n_CACHE_EXPIRE_SNOW = 120\n\n\ndef init_snow_cli(\n redis_cli: RedisCli,\n datacenter_id: int = None,\n to_str: bool = True,\n) -> SnowFlake: # \u5efa\u8bae\uff1a\u91c7\u7528\u670d\u52a1\u7684\u65b9\u5f0f\u8c03\u7528api\u83b7\u53d6\n if datacenter_id is None:\n datacenter_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_DATACENTER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if datacenter_id is None:\n local_ip = localip()\n if local_ip:\n ip_parts = list(map(int, local_ip.split('.')))\n ip_int = (ip_parts[0] << 24) + (ip_parts[1] << 16) + (ip_parts[2] << 8) + ip_parts[3]\n datacenter_id = ip_int % 32\n worker_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_WORKER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if worker_id is None:\n worker_id = os.getpid() % 32\n return SnowFlake(worker_id=worker_id, datacenter_id=datacenter_id, to_str=to_str)\n\n\ndef _snow_incr(redis_cli, cache_key: str, cache_expire: int):\n incr = None\n try:\n with redis_cli.connection() as r:\n resp = r.ping()\n if resp:\n lua_script = \"\"\"\n if redis.call('exists', KEYS[1]) == 1 then\n redis.call('expire', KEYS[1], ARGV[1])\n return redis.call('incr', KEYS[1])\n else\n redis.call('set', KEYS[1], 0)\n redis.call('expire', KEYS[1], ARGV[1])\n return 0\n end\n \"\"\"\n incr = r.eval(lua_script, 1, cache_key, cache_expire)\n except Exception as e:\n logger.warning(f\"snow\u521d\u59cb\u5316id\u5c06\u91c7\u7528\u672c\u5730\u65b9\u5f0f\uff0c\u7531\u4e8e\uff08{e}\uff09\")\n return incr\n",
+ "app/initializer/__init__.py": "\"\"\"\n\u521d\u59cb\u5316\n\"\"\"\nfrom loguru._logger import Logger # noqa\nfrom sqlalchemy.orm import sessionmaker, scoped_session\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import Singleton\n\nfrom app.initializer._conf import init_config\nfrom app.initializer._db import init_db_session, init_db_async_session\nfrom app.initializer._log import init_logger\nfrom app.initializer._redis import init_redis_cli\nfrom app.initializer._snow import init_snow_cli\n\n\nclass G(metaclass=Singleton):\n \"\"\"\n \u5168\u5c40\u53d8\u91cf\n \"\"\"\n config = None\n logger: Logger = None\n redis_cli: RedisCli = None\n snow_cli: SnowFlake = None\n db_session: scoped_session = None\n db_async_session: sessionmaker = None\n\n def __getattribute__(self, name):\n try:\n value = super().__getattribute__(name)\n except AttributeError:\n value = None\n if value is None:\n getter_name = f\"_get_{name}\"\n getter_method = getattr(self.__class__, getter_name, None)\n if callable(getter_method):\n value = getter_method()\n setattr(self, name, value)\n return value\n\n @classmethod\n def _get_config(cls):\n if not cls.config:\n cls.config = init_config()\n return cls.config\n\n @classmethod\n def _get_logger(cls):\n if not cls.logger:\n cls.logger = init_logger(\n debug=cls.config.debug,\n log_dir=cls.config.log_dir,\n )\n return cls.logger\n\n @classmethod\n def _get_redis_cli(cls):\n if not cls.redis_cli:\n cls.redis_cli = init_redis_cli(\n host=cls.config.redis_host,\n port=cls.config.redis_port,\n db=cls.config.redis_db,\n password=cls.config.redis_password,\n max_connections=cls.config.redis_max_connections,\n )\n return cls.redis_cli\n\n @classmethod\n def _get_snow_cli(cls):\n if not cls.snow_cli:\n cls.snow_cli = init_snow_cli(\n redis_cli=cls.redis_cli,\n datacenter_id=cls.config.snow_datacenter_id,\n )\n return cls.snow_cli\n\n @classmethod\n def _get_db_session(cls):\n if not cls.db_session:\n cls.db_session = init_db_session(\n db_url=cls.config.db_url,\n db_echo=cls.config.debug,\n )\n return cls.db_session\n\n @classmethod\n def _get_db_async_session(cls):\n if not cls.db_async_session:\n cls.db_async_session = init_db_async_session(\n db_url=cls.config.db_async_url,\n db_echo=cls.config.debug,\n )\n return cls.db_async_session\n\n @classmethod\n def setup(cls):\n \"\"\"\n \u521d\u59cb\u5316\n \"\"\"\n cls._get_config()\n cls._get_logger()\n cls._get_redis_cli()\n cls._get_snow_cli()\n # cls._get_db_session()\n cls._get_db_async_session()\n\n\ng = G()\n",
+ "app/middleware/auth.py": "from fastapi import Depends\nfrom fastapi.security import HTTPBearer, HTTPAuthorizationCredentials\nfrom typing import Optional\n\nfrom fastapi.security.utils import get_authorization_scheme_param\nfrom pydantic import BaseModel\nfrom starlette.requests import Request\n\nfrom app.api.exception import CustomException\nfrom app.api.status import Status\nfrom app.datatype.user import User\nfrom app.initializer import g\nfrom app.utils import db_async\nfrom app.utils.auth import verify_jwt\n\n\nclass JWTUser(BaseModel):\n # \u5b57\u6bb5\u4e0eUser\u5bf9\u9f50\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n\n\nclass JWTAuthorizationCredentials(HTTPAuthorizationCredentials):\n user: JWTUser\n\n\nclass JWTBearer(HTTPBearer):\n\n async def __call__(\n self, request: Request\n ) -> Optional[JWTAuthorizationCredentials]:\n authorization = request.headers.get(\"Authorization\")\n scheme, credentials = get_authorization_scheme_param(authorization)\n if not (authorization and scheme and credentials):\n if self.auto_error:\n raise CustomException(\n msg=\"Not authenticated\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n if scheme.lower() != \"bearer\":\n if self.auto_error:\n raise CustomException(\n msg=\"Invalid authentication credentials\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n user = await self.verify_credentials(credentials)\n return JWTAuthorizationCredentials(scheme=scheme, credentials=credentials, user=user)\n\n async def verify_credentials(self, credentials: str) -> JWTUser:\n playload = await self._verify_jwt(credentials)\n if playload is None:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # \u5efa\u8bae\uff1ajwt_key\u8fdb\u884credis\u7f13\u5b58\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n fields=[\"jwt_key\"],\n filter_by={\"id\": playload.get(\"id\")}\n )\n if not data:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # <<< \u5efa\u8bae\n await self._verify_jwt(credentials, jwt_key=data.get(\"jwt_key\"))\n return JWTUser(\n id=playload.get(\"id\"),\n phone=playload.get(\"phone\"),\n name=playload.get(\"name\"),\n age=playload.get(\"age\"),\n gender=playload.get(\"gender\"),\n )\n\n @staticmethod\n async def _verify_jwt(token: str, jwt_key: str = None) -> dict:\n try:\n return verify_jwt(token=token, jwt_key=jwt_key)\n except Exception as e:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR, msg=str(e))\n\n\ndef get_current_user(\n credentials: Optional[JWTAuthorizationCredentials] = Depends(JWTBearer(auto_error=True))\n) -> JWTUser:\n if not credentials:\n return JWTUser()\n return credentials.user\n",
30
+ "app/middleware/cors.py": "from fastapi.middleware.cors import CORSMiddleware\n\n\nclass Cors:\n middleware_class = CORSMiddleware\n allow_origins = [\n \"http://localhost:8000\",\n # \u53ef\u8ffd\u52a0\u6240\u9700\n ]\n allow_credentials = True\n allow_methods = [\"*\"]\n allow_headers = [\"*\"]\n",
31
+ "app/middleware/exception.py": "import traceback\n\nfrom fastapi.exceptions import RequestValidationError\nfrom starlette.exceptions import HTTPException\nfrom starlette.requests import Request\nfrom starlette.responses import JSONResponse\n\nfrom app.api.exception import CustomException\nfrom app.api.response import Response\nfrom app.api.status import Status\nfrom app.initializer import g\n\n\nclass ExceptionHandler:\n\n @staticmethod\n async def custom_exception_handler(\n request: Request,\n exc: CustomException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.code} {exc.msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(msg=exc.msg, code=exc.code, data=exc.data)\n\n @staticmethod\n async def http_exception_handler(\n request: Request,\n exc: HTTPException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.status_code} {exc.detail}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(msg=exc.detail, code=exc.status_code)\n\n @staticmethod\n async def validation_exception_handler(\n request: Request,\n exc: RequestValidationError,\n is_display_all: bool = False,\n is_traceback: bool = False,\n ) -> JSONResponse:\n if is_display_all:\n msg = \", \".join([f\"'{item['loc'][1] if len(item['loc']) > 1 else item['loc'][0]}' {item['msg'].lower()}\" for item in exc.errors()]) # noqa: E501\n else:\n _first_error = exc.errors()[0]\n msg = f\"'{_first_error['loc'][1] if len(_first_error['loc']) > 1 else _first_error['loc'][0]}' {_first_error['msg'].lower()}\" # noqa: E501\n lmsg = f'- \"{request.method} {request.url.path}\" {Status.PARAMS_ERROR.code} {msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=msg,\n status=Status.PARAMS_ERROR,\n )\n",
32
+ "app/middleware/__init__.py": "\"\"\"\n\u4e2d\u95f4\u4ef6\n\"\"\"\nfrom fastapi import FastAPI\nfrom fastapi.exceptions import RequestValidationError\nfrom starlette.exceptions import HTTPException\n\nfrom app.api.exception import CustomException\nfrom app.middleware.cors import Cors\nfrom app.middleware.exception import ExceptionHandler\n\n\ndef register_middlewares(app: FastAPI):\n \"\"\"\u6ce8\u518c\u4e2d\u95f4\u4ef6\"\"\"\n app.add_middleware(\n middleware_class=Cors.middleware_class,\n allow_origins=Cors.allow_origins,\n allow_credentials=Cors.allow_credentials,\n allow_methods=Cors.allow_methods,\n allow_headers=Cors.allow_headers,\n )\n\n app.add_exception_handler(CustomException, ExceptionHandler.custom_exception_handler) # type: ignore\n app.add_exception_handler(HTTPException, ExceptionHandler.http_exception_handler) # type: ignore\n app.add_exception_handler(RequestValidationError, ExceptionHandler.validation_exception_handler) # type: ignore\n",
33
+ "app/router/__init__.py": "\"\"\"\n\u8def\u7531\n\"\"\"\nimport importlib\nimport sys\nfrom pathlib import Path\n\nfrom fastapi import FastAPI\nfrom loguru import logger\n\nfrom app import APP_DIR\n\n_API_MOD_DIR = APP_DIR.joinpath(\"api\")\n_API_MOD_BASE = \"app.api\"\n\n\ndef register_routers(\n app: FastAPI,\n mod_dir: Path = _API_MOD_DIR,\n mod_base: str = _API_MOD_BASE,\n prefix: str = \"\",\n obj_suffix: str = \"_router\",\n depth: int = 0,\n max_depth: int = 2\n):\n \"\"\"\n \u6ce8\u518c\u8def\u7531\n \u8981\u6c42\uff1a\n \u8def\u7531\u6a21\u5757\uff1a\u975e'__'\u5f00\u5934\u7684\u6a21\u5757\n \u8def\u7531\u5bf9\u8c61\uff1a{\u6a21\u5757\u540d\u79f0}{\u8def\u7531\u5bf9\u8c61\u540e\u7f00}\n :param app: FastAPI\u5e94\u7528\n :param mod_dir: api\u6a21\u5757\u76ee\u5f55\n :param mod_base: api\u6a21\u5757\u57fa\u7840\n :param prefix: url\u524d\u7f00\n :param obj_suffix: \u8def\u7531\u5bf9\u8c61\u540e\u7f00\n :param depth: \u5f53\u524d\u9012\u5f52\u6df1\u5ea6\n :param max_depth: \u6700\u5927\u9012\u5f52\u6df1\u5ea6\n \"\"\"\n if depth > max_depth:\n return\n for item in mod_dir.iterdir():\n if item.name.startswith(\"__\") or item.name == \"__pycache__\":\n continue\n if item.is_dir():\n new_mod_dir = item\n new_mod_base = f\"{mod_base}.{item.name}\"\n new_prefix = prefix\n try:\n mod = importlib.import_module(new_mod_base)\n _prefix = getattr(mod, \"_prefix\", None)\n if _prefix:\n new_prefix = f\"{new_prefix}/{_prefix}\"\n except ImportError:\n logger.error(f\"Register router failed to import module: {new_mod_base}\")\n continue\n register_routers(\n app=app,\n mod_dir=new_mod_dir,\n mod_base=new_mod_base,\n prefix=new_prefix,\n obj_suffix=obj_suffix,\n depth=depth + 1,\n max_depth=max_depth\n )\n elif item.is_file() and item.suffix == \".py\" and depth > 0:\n mod_name = item.stem\n final_mod = f\"{mod_base}.{mod_name}\"\n try:\n mod = importlib.import_module(final_mod)\n if not getattr(mod, \"_active\", True):\n logger.info(f\"Register router skipping inactive module: {final_mod}\")\n sys.modules.pop(final_mod)\n continue\n router_name = f\"{mod_name}{obj_suffix}\"\n if router := getattr(mod, router_name, None):\n tag = getattr(mod, \"_tag\", None)\n if not tag:\n tag = item.parent.stem if depth > 1 else mod_name\n app.include_router(\n router=router,\n prefix=prefix.replace(\"//\", \"/\").rstrip(\"/\"),\n tags=[tag]\n )\n except ImportError:\n logger.error(f\"Register router failed to import module: {final_mod}\")\n continue\n",
34
+ "app/tests/__init__.py": "\"\"\"\n\u6d4b\u8bd5\n\"\"\"\n",
35
+ "app/utils/auth.py": "import secrets\nfrom datetime import datetime, timedelta\n\nimport bcrypt\nimport jwt\n\n_ALGORITHM = \"HS256\"\n\n\ndef gen_jwt(payload: dict, jwt_key: str, exp_minutes: int = 24 * 60 * 30):\n payload.update({\"exp\": datetime.utcnow() + timedelta(minutes=exp_minutes)})\n encoded_jwt = jwt.encode(payload=payload, key=jwt_key, algorithm=_ALGORITHM)\n return encoded_jwt\n\n\ndef verify_jwt(token: str, jwt_key: str = None) -> dict:\n if not jwt_key:\n return jwt.decode(jwt=token, options={\"verify_signature\": False})\n return jwt.decode(jwt=token, key=jwt_key, algorithms=[_ALGORITHM])\n\n\ndef gen_jwt_key():\n return secrets.token_hex(16)\n\n\ndef hash_password(password: str) -> str:\n salt = bcrypt.gensalt()\n hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)\n return hashed_password.decode('utf-8')\n\n\ndef verify_password(password: str, hashed_password: str) -> bool:\n return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))\n",
36
+ "app/utils/db_async.py": "from sqlalchemy import (\n select,\n func,\n update as update_,\n delete as delete_,\n)\n\n\nasync def format_all(\n rows,\n fields: list[str],\n) -> list[dict]:\n if not rows:\n return list()\n return [dict(zip(fields, row)) for row in rows]\n\n\nasync def format_one(\n row,\n fields: list[str],\n) -> dict:\n if not row:\n return dict()\n return dict(zip(fields, row))\n\n\nasync def model_dict(\n model,\n fields: list[str] = None,\n) -> dict:\n if not model:\n return dict()\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n return {field: getattr(model, field) for field in fields}\n\n\nasync def query_one(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n) -> dict:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return await format_one(result.fetchone(), fields)\n\n\nasync def query_all(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n page: int = None,\n size: int = None,\n) -> list[dict]:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n if page and size:\n query = query.offset((page - 1) * size).limit(size)\n result = await session.execute(query)\n return await format_all(result.fetchall(), fields)\n\n\nasync def query_total(\n session,\n model,\n filter_by: dict = None,\n) -> int:\n query = select(func.count()).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return result.scalar()\n\n\nasync def create(\n session,\n model,\n data: dict,\n filter_by: dict = None,\n) -> int:\n try:\n if filter_by:\n result = await query_one(session, model, filter_by=filter_by)\n if result:\n return 0\n stmt = model(**data)\n session.add(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return stmt.id\n\n\nasync def update(\n session,\n model,\n data: dict,\n filter_by: dict | None,\n is_exclude_none: bool = True,\n) -> list:\n try:\n if is_exclude_none:\n data = {k: v for k, v in data.items() if v is not None}\n stmt = update_(model).values(**data)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n updated_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n updated_ids = result.scalars().all()\n if updated_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return updated_ids\n\n\nasync def delete(\n session,\n model,\n filter_by: dict | None,\n) -> list:\n try:\n stmt = delete_(model)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n deleted_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n deleted_ids = result.scalars().all()\n if deleted_ids:\n await session.execute(stmt)\n 
await session.commit()\n except Exception:\n await session.rollback()\n raise\n return deleted_ids\n",
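A minimal usage sketch for these helpers, reusing the `User` datatype and the async session factory from the earlier templates in this file; the concrete column names (`phone`, `name`, `age`) are assumptions based on the JWTUser fields above:

```python
# Sketch only; column names are assumed, helper signatures come from the template above.
from app.datatype.user import User
from app.initializer import g
from app.utils import db_async


async def crud_demo() -> dict:
    async with g.db_async_session() as session:
        await db_async.create(
            session,
            User,
            data={"phone": "10000000000", "name": "alice"},
            filter_by={"phone": "10000000000"},  # skip the insert if the row already exists
        )
        row = await db_async.query_one(session, User, filter_by={"name": "alice"})
        await db_async.update(session, User, data={"age": 30}, filter_by={"id": row["id"]})
        total = await db_async.query_total(session, User)
        await db_async.delete(session, User, filter_by={"id": row["id"]})
        return {"created": row, "total": total}
```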
37
+ "app/utils/z7z8.py": "",
38
+ "app/utils/__init__.py": "\"\"\"\nutils\n- \u67427\u67428\u7684\u53ef\u4ee5\u5199\u5728 z7z8.py\n\"\"\"\n",
39
+ "config/.env": "# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# \u5e94\u7528\u73af\u5883\uff08\u5b9a\u4f4dyaml\u914d\u7f6e\uff09\nappenv=dev\n# \u5e94\u7528\u914d\u7f6e\uff08\u6307\u5b9ayaml\u914d\u7f6e\uff0c\u4f18\u4e8e`appenv`\u5b9a\u4f4d\uff09\nappyaml=\n# \u96ea\u82b1\u7b97\u6cd5\u6570\u636e\u4e2d\u5fc3id\uff08\u53d6\u503c\uff1a0-31\uff0c\u5728\u5206\u5e03\u5f0f\u90e8\u7f72\u65f6\u9700\u786e\u4fdd\u6bcf\u4e2a\u8282\u70b9\u7684\u53d6\u503c\u4e0d\u540c\uff09\nsnow_datacenter_id=0",
40
+ "config/app_dev.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\nappname: xApp-dev\nappversion: 1.0.0\ndebug: true\nlog_dir: ./log\nis_disable_docs: false\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_dev.sqlite\ndb_async_url: sqlite+aiosqlite:///app_dev.sqlite\n",
41
+ "config/app_prod.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\nappname: xApp-prod\nappversion: 1.0.0\ndebug: false\nlog_dir: ./log\nis_disable_docs: true\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_prod.sqlite\ndb_async_url: sqlite+aiosqlite:///app_prod.sqlite\n",
42
+ "config/app_test.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\nappname: xApp-test\nappversion: 1.0.0\ndebug: true\nlog_dir: ./log\nis_disable_docs: false\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_test.sqlite\ndb_async_url: sqlite+aiosqlite:///app_test.sqlite\n",
43
+ "deploy/.gitkeep": "",
44
+ "docs/.gitkeep": "",
45
+ "log/.gitkeep": ""
46
+ }
@@ -0,0 +1,92 @@
1
+ Metadata-Version: 2.4
2
+ Name: fastapi-scaff
3
+ Version: 0.0.1
4
+ Summary: This is a fastapi scaff.
5
+ Author-email: axiner <atpuxiner@163.com>
6
+ Project-URL: Homepage, https://github.com/atpuxiner/fastapi-scaff
7
+ Classifier: Programming Language :: Python :: 3.11
8
+ Classifier: Operating System :: OS Independent
9
+ Requires-Python: >=3.11
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Dynamic: license-file
13
+
14
+ # fastapi-scaff
15
+
16
+ ## What is this?
17
+
18
+ - by: axiner
19
+ - fastapi-scaff
20
+ - This is a fastapi scaff.
21
+ - new project
22
+ - add api
23
+ - about project:
24
+ - auto init project (conf, db, log...)
25
+ - auto register router
26
+ - auto register middleware
27
+ - ...
28
+ - more documentation: [click here](https://blog.csdn.net/atpuxiner/article/details/144291336?fromshare=blogdetail&sharetype=blogdetail&sharerId=144291336&sharerefer=PC&sharesource=atpuxiner&sharefrom=from_link)
29
+
30
+ ## Project structure
31
+
32
+ - ABD: the ABD pattern
33
+ - A api
34
+ - B business
35
+ - D datatype
36
+ - Call flow: main.py (initializer) -> (middleware) -> router -> api -> business -> (datatype); see the wiring sketch after the tree below
37
+ - Structure (the naming was settled after several rounds of revision to keep it concise and easy to follow; the ABD directories sit next to each other to avoid clutter):
38
+ ```
39
+ └── fastapi-scaff
40
+ ├── app (application)
41
+ │ ├── api ├── (api)
42
+ │ │ └── v1 │ └── (v1)
43
+ │ ├── business ├── (business)
44
+ │ ├── datatype ├── (data types)
45
+ │ ├── initializer ├── (initialization)
46
+ │ │ ├── conf │ ├── (config)
47
+ │ │ ├── db │ ├── (database)
48
+ │ │ ├── log │ ├── (logging)
49
+ │ │ └── ... │ └── (...)
50
+ │ ├── middleware ├── (middleware)
51
+ │ ├── router ├── (routing)
52
+ │ ├── utils ├── (utils)
53
+ │ └── main.py └── (main.py)
54
+ ├── config (config directory)
55
+ ├── deploy (deployment directory)
56
+ ├── docs (docs directory)
57
+ ├── log (log directory)
58
+ ├── .gitignore
59
+ ├── LICENSE
60
+ ├── README.md
61
+ ├── requirements.txt
62
+ └── runserver.py
63
+ ```
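To make the call flow above concrete, a hypothetical minimal wiring is sketched below; the scaffold generates its own `main.py`, so treat this only as an illustration of the order initializer -> middleware -> router:

```python
# Hypothetical wiring sketch; the generated project ships its own main.py.
from fastapi import FastAPI

from app.initializer import G
from app.middleware import register_middlewares
from app.router import register_routers

G.setup()                   # config / logger / redis / snowflake / async DB session
app = FastAPI()
register_middlewares(app)   # CORS + exception handlers
register_routers(app)       # auto-discovers <module>_router objects under app/api
```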
64
+
65
+ ## Installation
66
+
67
+ This package can be installed using pip (Python>=3.11):
68
+ > pip install fastapi-scaff
69
+
70
+ ## Scaff usage
71
+
72
+ - 1) help document
73
+ - `fastapi-scaff -h`
74
+ - 2) new project
75
+ - `fastapi-scaff new <myproj>`
76
+ - 3) add api
77
+ - cd to the project root dir
78
+ - `fastapi-scaff add <myapi>`
79
+
80
+ ## Project run
81
+
82
+ - 1) cd to the project root dir
83
+ - 2) adjust the configuration as needed (e.g. the database settings)
84
+ - 3) `pip install -r requirements.txt`
85
+ - 4) `python runserver.py`
86
+ - for more parameters, see:
87
+ - about uvicorn: [click here](https://www.uvicorn.org/)
88
+ - about gunicorn: [click here](https://docs.gunicorn.org/en/stable/)
89
+
90
+ ## License
91
+
92
+ This project is released under the MIT License. See [LICENSE](LICENSE).
@@ -0,0 +1,10 @@
1
+ fastapi_scaff/__init__.py,sha256=FPXqnjzjXjySWwbd2puvZ-1kluvz--bnp944RtPbjsk,120
2
+ fastapi_scaff/__main__.py,sha256=Q9juuExsTZN1ApBjflJv1RTBDkL0715AMEwP-FwuGEc,12140
3
+ fastapi_scaff/_api_tpl.json,sha256=qUS4OWcsHmpRDZCQSSRjj-faMgHiiohBAgr98XbLNgQ,5169
4
+ fastapi_scaff/_project_tpl.json,sha256=0tr9-N0pRd2-_e5_oY0XzJ3dsD_FmqBbbk0XJ0a_Cck,67804
5
+ fastapi_scaff-0.0.1.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
6
+ fastapi_scaff-0.0.1.dist-info/METADATA,sha256=ZqRMhr4qOqRoTV4-3o9op2WfyFMwLdXHp0qAXEqYkqE,3339
7
+ fastapi_scaff-0.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
8
+ fastapi_scaff-0.0.1.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
9
+ fastapi_scaff-0.0.1.dist-info/top_level.txt,sha256=LeyfUxMRhdbRHcYoH37ftfdspyZ8V3Uut2YBaTCzq2k,14
10
+ fastapi_scaff-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ fastapi-scaff = fastapi_scaff.__main__:main
@@ -0,0 +1,19 @@
1
+ Copyright (c) 2024 axiner
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy
4
+ of this software and associated documentation files (the "Software"), to deal
5
+ in the Software without restriction, including without limitation the rights
6
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7
+ copies of the Software, and to permit persons to whom the Software is
8
+ furnished to do so, subject to the following conditions:
9
+
10
+ The above copyright notice and this permission notice shall be included in all
11
+ copies or substantial portions of the Software.
12
+
13
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ fastapi_scaff