fastapi-scaff 0.2.4-py3-none-any.whl → 0.2.5-py3-none-any.whl

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of fastapi-scaff might be problematic; consult the package registry's security advisory for more details.

fastapi_scaff/__init__.py CHANGED
@@ -7,4 +7,4 @@
7
7
  @history
8
8
  """
9
9
 
10
- __version__ = "0.2.4"
10
+ __version__ = "0.2.5"
fastapi_scaff/__main__.py CHANGED
@@ -182,12 +182,12 @@ class CMD:
182
182
  "db_async_url": "db_async_url: sqlite+aiosqlite:///app_dev.sqlite",
183
183
  },
184
184
  "mysql": {
185
- "db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port>/app_dev?charset=utf8mb4",
186
- "db_async_url": "db_async_url: mysql+aiomysql://<username>:<password>@<host>:<port>/app_dev?charset=utf8mb4",
185
+ "db_url": "db_url: mysql+pymysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4",
186
+ "db_async_url": "db_async_url: mysql+aiomysql://<username>:<password>@<host>:<port>/<database>?charset=utf8mb4",
187
187
  },
188
188
  "postgresql": {
189
- "db_url": "db_url: postgresql://<username>:<password>@<host>:<port>/app_dev",
190
- "db_async_url": "db_async_url: postgresql+asyncpg://<username>:<password>@<host>:<port>/app_dev",
189
+ "db_url": "db_url: postgresql://<username>:<password>@<host>:<port>/<database>",
190
+ "db_async_url": "db_async_url: postgresql+asyncpg://<username>:<password>@<host>:<port>/<database>",
191
191
  },
192
192
  }.get(name)
193
193
 
@@ -1,13 +1,13 @@
1
1
  {
2
- "asm_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.services.tpl import (\n TplDetailSvc,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailSvc,\n ),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n tpl_svc = TplDetailSvc(id=tpl_id)\n data = await tpl_svc.detail()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
2
+ "asm_app_api.py": "import traceback\n\nfrom fastapi import APIRouter\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.services.tpl import (\n TplDetailSvc,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailSvc,\n ),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n):\n try:\n tpl_svc = TplDetailSvc(id=tpl_id)\n data = await tpl_svc.detail()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
3
3
  "asm_app_models.py": "from sqlalchemy import Column, String\n\nfrom app.models import DeclBase\nfrom app.initializer import g\n\n\nclass Tpl(DeclBase):\n __tablename__ = \"tpl\"\n\n id = Column(String(20), primary_key=True, default=g.snow_cli.gen_uid, comment=\"\u4e3b\u952e\")\n name = Column(String(50), nullable=False, comment=\"\u540d\u79f0\")\n",
4
4
  "asm_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n id: str = Field(...)\n # #\n name: str = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n",
5
5
  "asm_app_services.py": "from app.schemas.tpl import (\n TplDetail,\n)\n\n\nclass TplDetailSvc(TplDetail):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"TplDetail\"\n }\n }\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n",
6
- "as_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.services.tpl import (\n TplDetailSvc,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailSvc,\n ),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n tpl_svc = TplDetailSvc(id=tpl_id)\n data = await tpl_svc.detail()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
6
+ "as_app_api.py": "import traceback\n\nfrom fastapi import APIRouter\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.services.tpl import (\n TplDetailSvc,\n)\nfrom app.api.status import Status\nfrom app.initializer import g\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(\n model=TplDetailSvc,\n ),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n):\n try:\n tpl_svc = TplDetailSvc(id=tpl_id)\n data = await tpl_svc.detail()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
7
7
  "as_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n id: str = Field(...)\n # #\n name: str = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n",
8
8
  "as_app_services.py": "from app.schemas.tpl import (\n TplDetail,\n)\n\n\nclass TplDetailSvc(TplDetail):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"TplDetail\"\n }\n }\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n",
9
- "a_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
10
- "only_app_api.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
9
+ "a_app_api.py": "import traceback\n\nfrom fastapi import APIRouter\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
10
+ "only_app_api.py": "import traceback\n\nfrom fastapi import APIRouter\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.initializer import g\n\nrouter = APIRouter()\n\n\n@router.get(\n path=\"/tpl/{tpl_id}\",\n summary=\"tplDetail\",\n responses=response_docs(),\n)\nasync def detail(\n request: Request,\n tpl_id: str,\n):\n try:\n data = {} # TODO: \u5f85\u5904\u7406\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"tplDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n",
11
11
  "only_app_models.py": "from sqlalchemy import Column, String\n\nfrom app.models import DeclBase\nfrom app.initializer import g\n\n\nclass Tpl(DeclBase):\n __tablename__ = \"tpl\"\n\n id = Column(String(20), primary_key=True, default=g.snow_cli.gen_uid, comment=\"\u4e3b\u952e\")\n name = Column(String(50), nullable=False, comment=\"\u540d\u79f0\")\n",
12
12
  "only_app_schemas.py": "from pydantic import BaseModel, Field\n\nfrom app.schemas import filter_fields\n\n\nclass TplDetail(BaseModel):\n id: str = Field(...)\n # #\n name: str = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n",
13
13
  "only_app_services.py": "from pydantic import BaseModel\n\n\nclass TplDetailSvc(BaseModel):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"TplDetail\"\n }\n }\n\n async def detail(self):\n # TODO: \u4e1a\u52a1\u903b\u8f91\n pass\n"
@@ -6,7 +6,7 @@
6
6
  "runcbeat.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2025/09/20 10:10\n@abstract runcbeat\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n@description\n@history\n\"\"\"\nimport argparse\nimport subprocess\n\n\ndef main(\n loglevel: str = \"info\",\n scheduler: str = None,\n pidfile: str = None,\n max_interval: int = 5,\n):\n parser = argparse.ArgumentParser(description=\"CeleryBeat\u542f\u52a8\u5668\")\n parser.add_argument(\"-l\", \"--loglevel\", type=str, default=\"info\", metavar=\"\", help=\"\u65e5\u5fd7\u7b49\u7ea7\")\n parser.add_argument(\"-S\", \"--scheduler\", type=str, default=None, metavar=\"\", help=\"\u8c03\u5ea6\u5668\u7c7b\u578b\")\n parser.add_argument(\"--pidfile\", type=str, default=None, metavar=\"\", help=\"pid\u6587\u4ef6\")\n parser.add_argument(\"--max-interval\", type=int, default=5, metavar=\"\", help=\"\u68c0\u6d4b\u4efb\u52a1\u95f4\u9694\")\n args = parser.parse_args()\n loglevel = args.loglevel or loglevel\n scheduler = args.scheduler or scheduler\n pidfile = args.pidfile or pidfile\n max_interval = args.max_interval or max_interval\n command = [\n \"celery\",\n \"-A\",\n \"app_celery.consumer\",\n \"beat\",\n f\"--loglevel={loglevel}\",\n f\"--max-interval={max_interval}\",\n ]\n if scheduler:\n command.extend([\"--scheduler\", scheduler])\n if pidfile:\n command.extend([\"--pidfile\", pidfile])\n subprocess.run(command, check=True)\n\n\nif __name__ == '__main__':\n main()\n",
7
7
  "runcworker.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2025/09/20 10:10\n@abstract runcworker\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n@description\n@history\n\"\"\"\nimport argparse\nimport platform\nimport subprocess\nfrom os import cpu_count\n\n\ndef main(\n name: str, # `app_celery/consumer/workers`\u4e0b\u7684\u6a21\u5757\u540d\n loglevel: str = \"info\",\n concurrency: int = None,\n pool: str = None,\n):\n parser = argparse.ArgumentParser(description=\"CeleryWorker\u542f\u52a8\u5668\")\n parser.add_argument(\"-n\", \"--name\", type=str, metavar=\"\", help=\"\u540d\u79f0\")\n parser.add_argument(\"-l\", \"--loglevel\", type=str, default=\"info\", metavar=\"\", help=\"\u65e5\u5fd7\u7b49\u7ea7\")\n parser.add_argument(\"-c\", \"--concurrency\", type=int, default=None, metavar=\"\", help=\"\u5e76\u53d1\u6570\")\n parser.add_argument(\"-P\", \"--pool\", type=str, default=None, metavar=\"\", help=\"\u5e76\u53d1\u6a21\u578b\")\n args = parser.parse_args()\n name = args.name or name\n loglevel = args.loglevel or loglevel\n concurrency = args.concurrency or concurrency\n pool = args.pool or pool\n if pool is None:\n if platform.system().lower().startswith(\"win\"):\n pool = 'gevent'\n if not concurrency:\n concurrency = 100\n else:\n pool = 'prefork'\n if not concurrency:\n concurrency = cpu_count()\n command = [\n \"celery\",\n \"-A\",\n f\"app_celery.consumer.workers.{name}\",\n \"worker\",\n f\"--loglevel={loglevel}\",\n f\"--concurrency={concurrency}\",\n f\"--pool={pool}\",\n ]\n subprocess.run(\n command,\n check=True,\n )\n\n\nif __name__ == '__main__':\n main(\n name=\"ping\",\n )\n",
8
8
  "runserver.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract runserver\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n@description\n@history\n\"\"\"\nimport argparse\nimport subprocess\nimport sys\n\nimport uvicorn\n\n\ndef run_by_unicorn(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n):\n log_config = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"default\": {\n \"()\": \"uvicorn.logging.DefaultFormatter\",\n \"fmt\": \"%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(message)s\",\n \"use_colors\": None\n },\n \"access\": {\n \"()\": \"uvicorn.logging.AccessFormatter\",\n \"fmt\": \"%(asctime)s %(levelname)s %(client_addr)s - \\\"%(request_line)s\\\" %(status_code)s\"\n }\n },\n \"handlers\": {\n \"default\": {\n \"formatter\": \"default\",\n \"class\": \"logging.StreamHandler\",\n \"stream\": \"ext://sys.stderr\"\n },\n \"access\": {\n \"formatter\": \"access\",\n \"class\": \"logging.StreamHandler\",\n \"stream\": \"ext://sys.stdout\"\n }\n },\n \"loggers\": {\n \"uvicorn\": {\n \"handlers\": [\n \"default\"\n ],\n \"level\": \"INFO\",\n \"propagate\": False\n },\n \"uvicorn.error\": {\n \"level\": \"INFO\"\n },\n \"uvicorn.access\": {\n \"handlers\": [\n \"access\"\n ],\n \"level\": \"INFO\",\n \"propagate\": False\n }\n }\n }\n uvicorn.run(\n app=\"app.main:app\",\n host=host,\n port=port,\n workers=workers,\n log_level=log_level,\n log_config=log_config,\n reload=is_reload,\n )\n\n\ndef run_by_gunicorn(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n):\n cmd = (\n \"gunicorn app.main:app \"\n \"--worker-class=uvicorn.workers.UvicornWorker \"\n \"--bind={host}:{port} \"\n \"--workers={workers} \"\n \"--log-level={log_level} \"\n \"--access-logfile=- \"\n \"--error-logfile=- \"\n .format(\n host=host,\n port=port,\n workers=workers,\n log_level=log_level,\n )\n )\n if is_reload:\n cmd += f\" 
--reload\"\n subprocess.run(cmd, shell=True)\n\n\ndef main(\n host: str,\n port: int,\n workers: int,\n log_level: str,\n is_reload: bool,\n is_gunicorn: bool,\n):\n parser = argparse.ArgumentParser(description=\"App\u542f\u52a8\u5668\")\n parser.add_argument(\"--host\", type=str, metavar=\"\", help=\"host\")\n parser.add_argument(\"--port\", type=int, metavar=\"\", help=\"port\")\n parser.add_argument(\"--workers\", type=int, metavar=\"\", help=\"\u8fdb\u7a0b\u6570\")\n parser.add_argument(\"--log-level\", type=str, metavar=\"\", help=\"\u65e5\u5fd7\u7b49\u7ea7\")\n parser.add_argument(\"--is-reload\", action=\"store_true\", help=\"\u662f\u5426reload\")\n parser.add_argument(\"--is-gunicorn\", action=\"store_true\", help=\"\u662f\u5426gunicorn\")\n args = parser.parse_args()\n kwargs = {\n \"host\": args.host or host,\n \"port\": args.port or port,\n \"workers\": args.workers or workers,\n \"log_level\": args.log_level or log_level,\n \"is_reload\": args.is_reload or is_reload,\n }\n if (args.is_gunicorn or is_gunicorn) and not sys.platform.lower().startswith(\"win\"):\n try:\n import gunicorn # noqa\n except ImportError:\n sys.stderr.write(\"gunicorn\u672a\u627e\u5230\uff0c\u6b63\u5728\u5c1d\u8bd5\u81ea\u52a8\u5b89\u88c5...\\n\")\n try:\n subprocess.run(\n [\"pip\", \"install\", \"gunicorn\"],\n check=True,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n sys.stderr.write(\"gunicorn\u5b89\u88c5\u6210\u529f\\n\")\n except subprocess.CalledProcessError as e:\n sys.stderr.write(f\"gunicorn\u5b89\u88c5\u5931\u8d25: {e.stderr.decode().strip()}\\n\")\n raise\n run_by_gunicorn(**kwargs)\n else:\n run_by_unicorn(**kwargs)\n\n\nif __name__ == '__main__':\n main(\n host=\"0.0.0.0\",\n port=8000,\n workers=3,\n log_level=\"debug\",\n is_reload=False, # \u9002\u7528\u4e8edev\n is_gunicorn=False, # \u4e0d\u652f\u6301win\n )\n",
9
- "app/main.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract main\n@description\n@history\n\"\"\"\nfrom contextlib import asynccontextmanager\nfrom fastapi import FastAPI\nfrom fastapi.responses import ORJSONResponse\n\nfrom app import (\n api,\n middleware,\n)\nfrom app.initializer import g\n\ng.setup()\n# #\nopenapi_url = \"/openapi.json\"\ndocs_url = \"/docs\"\nredoc_url = \"/redoc\"\nif g.config.app_disable_docs is True:\n openapi_url, docs_url, redoc_url = None, None, None\n\n\n@asynccontextmanager\nasync def lifespan(app_: FastAPI):\n g.logger.info(f\"Application using config file '{g.config.yaml_name}'\")\n g.logger.info(f\"Application title '{g.config.app_title}'\")\n g.logger.info(f\"Application version '{g.config.app_version}'\")\n # #\n g.logger.info(\"Application server running\")\n yield\n g.logger.info(\"Application server shutdown\")\n\n\napp = FastAPI(\n title=g.config.app_title,\n summary=g.config.app_summary,\n description=g.config.app_description,\n version=g.config.app_version,\n debug=g.config.app_debug,\n openapi_url=openapi_url,\n docs_url=docs_url,\n redoc_url=redoc_url,\n lifespan=lifespan,\n default_response_class=ORJSONResponse,\n)\n# #\napi.register_routers(app)\nmiddleware.register_middlewares(app)\n",
9
+ "app/main.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract main\n@description\n@history\n\"\"\"\nfrom contextlib import asynccontextmanager\nfrom fastapi import FastAPI\nfrom fastapi.responses import ORJSONResponse\n\nfrom app import (\n api,\n middleware,\n)\nfrom app.initializer import g\n\ng.setup()\n# #\nopenapi_url = \"/openapi.json\"\ndocs_url = \"/docs\"\nredoc_url = \"/redoc\"\nif g.config.app_disable_docs is True:\n openapi_url, docs_url, redoc_url = None, None, None\n\n\n@asynccontextmanager\nasync def lifespan(app_: FastAPI):\n g.logger.info(f\"Application env '{g.config.app_env}'\")\n g.logger.info(f\"Application yaml '{g.config.app_yaml}'\")\n g.logger.info(f\"Application title '{g.config.app_title}'\")\n g.logger.info(f\"Application version '{g.config.app_version}'\")\n # #\n g.logger.info(\"Application server running\")\n yield\n g.logger.info(\"Application server shutdown\")\n\n\napp = FastAPI(\n title=g.config.app_title,\n summary=g.config.app_summary,\n description=g.config.app_description,\n version=g.config.app_version,\n debug=g.config.app_debug,\n openapi_url=openapi_url,\n docs_url=docs_url,\n redoc_url=redoc_url,\n lifespan=lifespan,\n default_response_class=ORJSONResponse,\n)\n# #\napi.register_routers(app)\nmiddleware.register_middlewares(app)\n",
10
10
  "app/__init__.py": "\"\"\"\n@author axiner\n@version v1.0.0\n@created 2024/07/29 22:22\n@abstract app\n@description\n@history\n\"\"\"\nfrom pathlib import Path\n\nAPP_DIR = Path(__file__).absolute().parent\n",
11
11
  "app/api/exception.py": "from typing import Any\n\nfrom app.api.status import Status\n\n\nclass CustomException(Exception):\n\n def __init__(\n self,\n msg: str = None,\n code: int = None,\n data: Any = None,\n status: Status = Status.FAILURE,\n ):\n self.msg = msg or status.msg\n self.code = code or status.code\n self.data = data\n self.status = status\n\n def __str__(self) -> str:\n return f\"{self.code} {self.msg}\"\n\n def __repr__(self) -> str:\n return f\"<{self.__class__.__name__}: ({self.code!r}, {self.msg!r})>\"\n",
12
12
  "app/api/response.py": "import json\nfrom typing import Mapping, get_type_hints, Any\n\nfrom fastapi.encoders import jsonable_encoder\nfrom starlette.background import BackgroundTask\nfrom starlette.requests import Request\nfrom starlette.responses import JSONResponse, StreamingResponse, ContentStream\nfrom toollib.utils import map_jsontype\n\nfrom app.api.status import Status\n\n\nclass Response:\n\n @staticmethod\n def success(\n data: dict | list | str | None = None,\n msg: str = None,\n code: int = None,\n status: Status = Status.SUCCESS,\n is_encode_data: bool = False,\n request: Request = None,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> JSONResponse:\n content = {\n \"msg\": msg or status.msg,\n \"code\": code or status.code,\n \"data\": Response.encode_data(data) if is_encode_data else data,\n }\n if request:\n if request_id := getattr(request.state, 'request_id', None):\n content[\"request_id\"] = request_id\n return JSONResponse(\n content=content,\n status_code=status_code,\n headers=headers,\n media_type=media_type,\n background=background,\n )\n\n @staticmethod\n def failure(\n msg: str = None,\n code: int = None,\n error: str | Exception | None = None,\n data: dict | list | str | None = None,\n status: Status = Status.FAILURE,\n is_encode_data: bool = False,\n request: Request = None,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> JSONResponse:\n content = {\n \"msg\": msg or status.msg,\n \"code\": code or status.code,\n \"error\": str(error) if error else None,\n \"data\": Response.encode_data(data) if is_encode_data else data,\n }\n if request:\n if request_id := getattr(request.state, 'request_id', None):\n content[\"request_id\"] = request_id\n return JSONResponse(\n content=content,\n status_code=status_code,\n headers=headers,\n 
media_type=media_type,\n background=background,\n )\n\n @staticmethod\n def encode_data(data: Any) -> Any:\n if data is None or isinstance(data, (str, int, float, bool)):\n return data\n if isinstance(data, (dict, list)):\n try:\n json.dumps(data)\n return data\n except (TypeError, OverflowError):\n pass\n return jsonable_encoder(data)\n\n @staticmethod\n def stream(\n content: ContentStream,\n status_code: int = 200,\n headers: Mapping[str, str] | None = None,\n media_type: str | None = None,\n background: BackgroundTask | None = None,\n ) -> StreamingResponse:\n return StreamingResponse(\n content=content,\n status_code=status_code,\n headers=headers,\n media_type=media_type,\n background=background,\n )\n\n\ndef response_docs(\n model=None, # \u6a21\u578b(BaseModel): \u81ea\u52a8\u4ece\u6a21\u578b\u4e2d\u89e3\u6790\u5b57\u6bb5\u4e0e\u7c7b\u578b\n data: dict | str = None, # \u6570\u636e(dict/str): \u76f4\u63a5\u7ed9\u5b9a\u5b57\u6bb5\u4e0e\u7c7b\u578b/\u7c7b\u578b\n is_listwrap: bool = False,\n listwrap_key: str = None,\n listwrap_key_extra: dict = None,\n docs_extra: dict = None,\n):\n \"\"\"\u54cd\u5e94\u6587\u6863\"\"\"\n\n def _data_from_model(model_, default: str = \"\u672a\u77e5\") -> dict:\n \"\"\"\u6570\u636e\u6a21\u677f\"\"\"\n data_ = {}\n if hasattr(model_, \"response_fields\"):\n all_fields = set(model_.response_fields())\n else:\n all_fields = set(model_.model_fields.keys())\n type_hints = get_type_hints(model_)\n for field_name in all_fields:\n try:\n t = type_hints.get(field_name)\n t = str(t).replace(\"<class '\", \"\").replace(\"'>\", \"\") if t else default\n except Exception:\n t = default\n data_[field_name] = t\n return data_\n\n final_data = {}\n if model:\n final_data = _data_from_model(model)\n if data:\n if isinstance(data, dict):\n final_data.update(data)\n else:\n final_data = data\n if is_listwrap:\n final_data = [final_data] if not isinstance(final_data, list) else final_data\n if listwrap_key:\n final_data = {listwrap_key: 
final_data}\n if listwrap_key_extra:\n final_data.update(listwrap_key_extra)\n\n def _format_value(value):\n if isinstance(value, str):\n _value = value.split(\"|\")\n if len(_value) > 1:\n return \" | \".join([map_jsontype(_v.strip(), is_keep_integer=True) for _v in _value])\n return map_jsontype(value, is_keep_integer=True)\n elif isinstance(value, dict):\n return {k: _format_value(v) for k, v in value.items()}\n elif isinstance(value, (list, tuple)):\n return [_format_value(item) for item in value]\n else:\n return str(value)\n\n format_data = _format_value(final_data)\n\n docs = {\n 200: {\n \"description\": \"\u64cd\u4f5c\u6210\u529f\u3010code\u4e3a0 & http\u72b6\u6001\u7801200\u3011\",\n \"content\": {\n \"application/json\": {\n \"example\": {\n \"msg\": \"string\",\n \"code\": \"integer\",\n \"data\": format_data,\n \"request_id\": \"string\",\n }\n }\n }\n },\n 422: {\n \"description\": \"\u64cd\u4f5c\u5931\u8d25\u3010code\u975e0 & http\u72b6\u6001\u7801200\u3011\",\n \"content\": {\n \"application/json\": {\n \"example\": {\n \"msg\": \"string\",\n \"code\": \"integer\",\n \"error\": \"string\",\n \"data\": \"object | array | ...\",\n \"request_id\": \"string\",\n }\n }\n }\n },\n }\n if docs_extra:\n docs.update(docs_extra)\n return docs\n",
@@ -17,15 +17,14 @@
17
17
  "app/api/default/__init__.py": "\"\"\"\napi-default\n\"\"\"\n\n_prefix = \"/api\"\n",
18
18
  "app/api/v1/user.py": "import traceback\n\nfrom fastapi import APIRouter, Depends\nfrom starlette.requests import Request\n\nfrom app.api.response import Response, response_docs\nfrom app.api.status import Status\nfrom app.services.user import (\n UserDetailSvc,\n UserListSvc,\n UserCreateSvc,\n UserUpdateSvc,\n UserDeleteSvc,\n UserLoginSvc,\n UserTokenSvc,\n)\nfrom app.initializer import g\nfrom app.middleware.auth import JWTUser, get_current_user\n\nrouter = APIRouter()\n_active = True # \u6fc0\u6d3b\u72b6\u6001\uff08\u9ed8\u8ba4\u6fc0\u6d3b\uff09\n_tag = \"user\" # \u6807\u7b7e\uff08\u9ed8\u8ba4\u6a21\u5757\u540d\uff09\n\n\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n# \u6ce8\u610f\uff1a`user`\u4ec5\u4e3a\u6a21\u5757\u793a\u4f8b\uff0c\u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\n\n\n@router.get(\n path=\"/user/{user_id}\",\n summary=\"userDetail\",\n responses=response_docs(\n model=UserDetailSvc,\n ),\n)\nasync def detail(\n request: Request,\n user_id: str,\n current_user: JWTUser = Depends(get_current_user), # \u8ba4\u8bc1\n):\n try:\n user_svc = UserDetailSvc(id=user_id)\n data = await user_svc.detail()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userDetail\u5931\u8d25\", error=e, request=request)\n return Response.success(data=data, request=request)\n\n\n@router.get(\n path=\"/user\",\n summary=\"userList\",\n responses=response_docs(\n model=UserListSvc,\n is_listwrap=True,\n listwrap_key=\"items\",\n listwrap_key_extra={\n \"total\": \"int\",\n },\n ),\n)\nasync def lst(\n request: Request,\n page: int = 1,\n size: int = 10,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n user_svc = 
UserListSvc(page=page, size=size)\n data, total = await user_svc.lst()\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userList\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"items\": data, \"total\": total}, request=request)\n\n\n@router.post(\n path=\"/user\",\n summary=\"userCreate\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def create(\n request: Request,\n user_svc: UserCreateSvc,\n):\n try:\n user_id = await user_svc.create()\n if not user_id:\n return Response.failure(status=Status.RECORD_EXISTS_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userCreate\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"id\": user_id}, request=request)\n\n\n@router.put(\n path=\"/user/{user_id}\",\n summary=\"userUpdate\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def update(\n request: Request,\n user_id: str,\n user_svc: UserUpdateSvc,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n updated_ids = await user_svc.update(user_id)\n if not updated_ids:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userUpdate\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"id\": user_id}, request=request)\n\n\n@router.delete(\n path=\"/user/{user_id}\",\n summary=\"userDelete\",\n responses=response_docs(data={\n \"id\": \"str\",\n }),\n)\nasync def delete(\n request: Request,\n user_id: str,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n user_svc = UserDeleteSvc()\n deleted_ids = await user_svc.delete(user_id)\n if not deleted_ids:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n 
g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userDelete\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"id\": user_id}, request=request)\n\n\n@router.post(\n path=\"/user/login\",\n summary=\"userLogin\",\n responses=response_docs(data={\n \"token\": \"str\",\n }),\n)\nasync def login(\n request: Request,\n user_svc: UserLoginSvc,\n):\n try:\n data = await user_svc.login()\n if not data:\n return Response.failure(status=Status.USER_OR_PASSWORD_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userLogin\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"token\": data}, request=request)\n\n\n@router.post(\n path=\"/user/token\",\n summary=\"userToken\",\n responses=response_docs(data={\n \"token\": \"str\",\n }),\n)\nasync def token(\n request: Request,\n user_svc: UserTokenSvc,\n current_user: JWTUser = Depends(get_current_user),\n):\n try:\n data = await user_svc.token()\n if not data:\n return Response.failure(status=Status.RECORD_NOT_EXIST_ERROR, request=request)\n except Exception as e:\n g.logger.error(traceback.format_exc())\n return Response.failure(msg=\"userToken\u5931\u8d25\", error=e, request=request)\n return Response.success(data={\"token\": data}, request=request)\n",
19
19
  "app/api/v1/__init__.py": "\"\"\"\napi-v1\n\"\"\"\n\n_prefix = \"/api/v1\"\n",
20
- "app/aplugin/__init__.py": "\"\"\"\n\u63d2\u4ef6\n\"\"\"\n",
21
20
  "app/initializer/context.py": "from contextvars import ContextVar\n\nrequest_id_ctx_var: ContextVar[str] = ContextVar(\"request_id\", default=\"N/A\")\n",
22
- "app/initializer/_conf.py": "import os\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\nfrom toollib.utils import get_cls_attrs, parse_variable\n\nfrom app import APP_DIR\n\n_CONFIG_DIR = APP_DIR.parent.joinpath(\"config\")\n\nload_dotenv(dotenv_path=os.environ.setdefault(\n key=\"env_path\",\n value=str(_CONFIG_DIR.joinpath(\".env\")))\n)\n# #\napp_yaml = Path(\n os.environ.get(\"app_yaml\") or\n _CONFIG_DIR.joinpath(f\"app_{os.environ.setdefault(key='app_env', value='dev')}.yaml\")\n)\nif not app_yaml.is_file():\n raise RuntimeError(f\"\u914d\u7f6e\u6587\u4ef6\u4e0d\u5b58\u5728\uff1a{app_yaml}\")\n\n\nclass EnvConfig:\n \"\"\"env\u914d\u7f6e\"\"\"\n snow_datacenter_id: int = None\n\n def setattr_from_env(self):\n cls_attrs = get_cls_attrs(EnvConfig)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n setattr(self, k, v)\n\n\nclass Config(EnvConfig):\n \"\"\"\u914d\u7f6e\"\"\"\n _yaml_conf: dict = None\n yaml_name: str = app_yaml.name\n #\n app_title: str = \"xApp\"\n app_summary: str = \"xxApp\"\n app_description: str = \"xxxApp\"\n app_version: str = \"1.0.0\"\n app_debug: bool = True\n app_log_dir: str = \"./logs\"\n app_disable_docs: bool = True\n app_allow_origins: list = [\"*\"]\n # #\n redis_host: str = None\n redis_port: int = None\n redis_db: int = None\n redis_password: str = None\n redis_max_connections: int = None\n db_url: str = None\n db_async_url: str = None\n\n def setup(self):\n self.setattr_from_env()\n self.setattr_from_env_or_yaml()\n return self\n\n def setattr_from_env_or_yaml(self):\n cls_attrs = get_cls_attrs(Config)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n if k in os.environ: # \u4f18\u5148\u73af\u5883\u53d8\u91cf\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n else:\n v = parse_variable(k=k, v_type=v_type, v_from=self.load_yaml(), default=v)\n 
setattr(self, k, v)\n\n def load_yaml(self, reload: bool = False) -> dict:\n if self._yaml_conf and not reload:\n return self._yaml_conf\n with open(app_yaml, mode=\"r\", encoding=\"utf-8\") as file:\n self._yaml_conf = yaml.load(file, Loader=yaml.FullLoader)\n return self._yaml_conf\n\n\ndef init_config() -> Config:\n return Config().setup()\n",
21
+ "app/initializer/_conf.py": "import os\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\nfrom toollib.utils import get_cls_attrs, parse_variable\n\nfrom app import APP_DIR\n\n_CONFIG_DIR = APP_DIR.parent.joinpath(\"config\")\n\nload_dotenv(dotenv_path=os.environ.setdefault(\n key=\"env_path\",\n value=str(_CONFIG_DIR.joinpath(\".env\")))\n)\n# #\napp_yaml = Path(\n os.environ.get(\"app_yaml\") or\n _CONFIG_DIR.joinpath(f\"app_{os.environ.setdefault(key='app_env', value='dev')}.yaml\")\n)\nif not app_yaml.is_file():\n raise RuntimeError(f\"\u914d\u7f6e\u6587\u4ef6\u4e0d\u5b58\u5728\uff1a{app_yaml}\")\n\n\nclass Config:\n \"\"\"\u914d\u7f6e\"\"\"\n _yaml_conf: dict = None\n # from env\n app_env: str = \"dev\"\n app_yaml: str = app_yaml.name\n snow_datacenter_id: int = None\n # from yaml\n app_title: str = \"xApp\"\n app_summary: str = \"xxApp\"\n app_description: str = \"xxxApp\"\n app_version: str = \"1.0.0\"\n app_debug: bool = True\n app_log_dir: str = \"./logs\"\n app_disable_docs: bool = True\n app_allow_origins: list = [\"*\"]\n # #\n redis_host: str = None\n redis_port: int = None\n redis_db: int = None\n redis_password: str = None\n redis_max_connections: int = None\n db_url: str = None\n db_async_url: str = None\n\n def setup(self):\n self.setattr_from_env_or_yaml()\n return self\n\n def setattr_from_env_or_yaml(self):\n cls_attrs = get_cls_attrs(Config)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n if k in os.environ: # \u4f18\u5148\u73af\u5883\u53d8\u91cf\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n else:\n v = parse_variable(k=k, v_type=v_type, v_from=self.load_yaml(), default=v)\n setattr(self, k, v)\n\n def load_yaml(self, reload: bool = False) -> dict:\n if self._yaml_conf and not reload:\n return self._yaml_conf\n with open(app_yaml, mode=\"r\", encoding=\"utf-8\") as file:\n self._yaml_conf = yaml.load(file, Loader=yaml.FullLoader)\n return 
self._yaml_conf\n\n\ndef init_config() -> Config:\n return Config().setup()\n",
23
22
  "app/initializer/_db.py": "import asyncio\nimport importlib\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.ext.asyncio import create_async_engine, AsyncSession\nfrom sqlalchemy.orm import sessionmaker, scoped_session\n\nfrom app import APP_DIR\n\n_DSCHEMA_MOD_DIR = APP_DIR.joinpath(\"models\")\n_DSCHEMA_MOD_BASE = \"app.models\"\n_TABLES_CREATED = False\n\n\ndef init_db_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> scoped_session:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n engine = create_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_session = sessionmaker(engine, expire_on_commit=False)\n\n def create_tables():\n from app.models import DeclBase\n _import_tables()\n try:\n DeclBase.metadata.create_all(engine)\n except Exception as e:\n if \"already exists\" not in str(e):\n raise\n\n global _TABLES_CREATED\n if is_create_tables and not _TABLES_CREATED:\n create_tables()\n _TABLES_CREATED = True\n\n return scoped_session(db_session)\n\n\ndef init_db_async_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> sessionmaker:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n async_engine = create_async_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False) # noqa\n\n async def create_tables():\n from app.models import DeclBase\n _import_tables()\n async with async_engine.begin() as 
conn:\n try:\n await conn.run_sync(DeclBase.metadata.create_all)\n except Exception as e:\n if \"already exists\" not in str(e):\n raise\n\n global _TABLES_CREATED\n if is_create_tables and not _TABLES_CREATED:\n try:\n loop = asyncio.get_running_loop()\n except RuntimeError:\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n task = loop.create_task(create_tables())\n task.add_done_callback(lambda t: t.result() if not t.cancelled() else None)\n if not loop.is_running():\n loop.run_until_complete(task)\n _TABLES_CREATED = True\n return db_async_session\n\n\ndef _import_tables():\n \"\"\"\u5bfc\u5165\u8868\"\"\"\n for f in _DSCHEMA_MOD_DIR.glob(\"*.py\"):\n if not f.name.startswith(\"__\"):\n _ = importlib.import_module(f\"{_DSCHEMA_MOD_BASE}.{f.stem}\")\n",
24
23
  "app/initializer/_log.py": "import os\nimport sys\nfrom pathlib import Path\n\nfrom loguru import logger\nfrom loguru._logger import Logger # noqa\n\nfrom app.initializer.context import request_id_ctx_var\n\n_LOG_CONSOLE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_PREFIX = \"app\"\n_LOG_ROTATION = \"100 MB\"\n_LOG_RETENTION = \"15 days\"\n_LOG_COMPRESSION = None\n_LOG_ENQUEUE = True\n_LOG_BACKTRACE = False\n_LOG_DIAGNOSE = False\n_LOG_CATCH = False\n_LOG_PID = False\n\n\ndef init_logger(\n debug: bool,\n log_dir: str = None,\n) -> Logger:\n logger.remove(None)\n _lever = \"DEBUG\" if debug else \"INFO\"\n\n def _filter(record: dict) -> bool:\n record[\"extra\"][\"request_id\"] = request_id_ctx_var.get()\n return True\n\n logger.add(\n sys.stdout,\n format=_LOG_CONSOLE_FORMAT,\n level=_lever,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n filter=_filter,\n )\n if log_dir:\n _log_dir = Path(log_dir)\n _log_access_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-access.log\")\n _log_error_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-error.log\")\n if _LOG_PID:\n _log_access_file = str(_log_access_file).replace(\".log\", f\".{os.getpid()}.log\")\n _log_error_file = str(_log_error_file).replace(\".log\", f\".{os.getpid()}.log\")\n logger.add(\n _log_access_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=_lever,\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n logger.add(\n _log_error_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=\"ERROR\",\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n 
diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n return logger\n",
25
24
  "app/initializer/_redis.py": "from toollib.rediser import RedisCli\n\n\ndef init_redis_cli(\n host: str,\n port: int,\n db: int,\n password: str = None,\n max_connections: int = None,\n **kwargs,\n) -> RedisCli:\n if not host:\n return RedisCli()\n return RedisCli(\n host=host,\n port=port,\n db=db,\n password=password,\n max_connections=max_connections,\n **kwargs,\n )\n",
26
25
  "app/initializer/_snow.py": "import os\n\nfrom loguru import logger\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import localip\n\n_CACHE_KEY_SNOW_WORKER_ID_INCR = \"config:snow_worker_id_incr\"\n_CACHE_KEY_SNOW_DATACENTER_ID_INCR = \"config:snow_datacenter_id_incr\"\n_CACHE_EXPIRE_SNOW = 120\n\n\ndef init_snow_cli(\n redis_cli: RedisCli,\n datacenter_id: int = None,\n to_str: bool = True,\n) -> SnowFlake: # \u5efa\u8bae\uff1a\u91c7\u7528\u670d\u52a1\u7684\u65b9\u5f0f\u8c03\u7528api\u83b7\u53d6\n if datacenter_id is None:\n datacenter_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_DATACENTER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if datacenter_id is None:\n local_ip = localip()\n if local_ip:\n ip_parts = list(map(int, local_ip.split('.')))\n ip_int = (ip_parts[0] << 24) + (ip_parts[1] << 16) + (ip_parts[2] << 8) + ip_parts[3]\n datacenter_id = ip_int % 32\n worker_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_WORKER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if worker_id is None:\n worker_id = os.getpid() % 32\n return SnowFlake(worker_id=worker_id, datacenter_id=datacenter_id, to_str=to_str)\n\n\ndef _snow_incr(redis_cli, cache_key: str, cache_expire: int):\n incr = None\n try:\n with redis_cli.connection() as r:\n resp = r.ping()\n if resp:\n lua_script = \"\"\"\n if redis.call('exists', KEYS[1]) == 1 then\n redis.call('expire', KEYS[1], ARGV[1])\n return redis.call('incr', KEYS[1])\n else\n redis.call('set', KEYS[1], 0)\n redis.call('expire', KEYS[1], ARGV[1])\n return 0\n end\n \"\"\"\n incr = r.eval(lua_script, 1, cache_key, cache_expire)\n except Exception as e:\n logger.warning(f\"snow\u521d\u59cb\u5316id\u5c06\u91c7\u7528\u672c\u5730\u65b9\u5f0f\uff0c\u7531\u4e8e\uff08{e}\uff09\")\n return incr\n",
27
26
  "app/initializer/__init__.py": "\"\"\"\n\u521d\u59cb\u5316\n\"\"\"\nfrom loguru._logger import Logger # noqa\nfrom sqlalchemy.orm import sessionmaker, scoped_session\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import Singleton\n\nfrom app.initializer._conf import init_config\nfrom app.initializer._db import init_db_session, init_db_async_session\nfrom app.initializer._log import init_logger\nfrom app.initializer._redis import init_redis_cli\nfrom app.initializer._snow import init_snow_cli\n\n\nclass G(metaclass=Singleton):\n \"\"\"\n \u5168\u5c40\u53d8\u91cf\n \"\"\"\n config = None\n logger: Logger = None\n redis_cli: RedisCli = None\n snow_cli: SnowFlake = None\n db_session: scoped_session = None\n db_async_session: sessionmaker = None\n\n def __getattribute__(self, name):\n try:\n value = super().__getattribute__(name)\n except AttributeError:\n value = None\n if value is None:\n getter_name = f\"_get_{name}\"\n getter_method = getattr(self.__class__, getter_name, None)\n if callable(getter_method):\n value = getter_method()\n setattr(self, name, value)\n return value\n\n @classmethod\n def _get_config(cls):\n if not cls.config:\n cls.config = init_config()\n return cls.config\n\n @classmethod\n def _get_logger(cls):\n if not cls.logger:\n cls.logger = init_logger(\n debug=cls.config.app_debug,\n log_dir=cls.config.app_log_dir,\n )\n return cls.logger\n\n @classmethod\n def _get_redis_cli(cls):\n if not cls.redis_cli:\n cls.redis_cli = init_redis_cli(\n host=cls.config.redis_host,\n port=cls.config.redis_port,\n db=cls.config.redis_db,\n password=cls.config.redis_password,\n max_connections=cls.config.redis_max_connections,\n )\n return cls.redis_cli\n\n @classmethod\n def _get_snow_cli(cls):\n if not cls.snow_cli:\n cls.snow_cli = init_snow_cli(\n redis_cli=cls.redis_cli,\n datacenter_id=cls.config.snow_datacenter_id,\n )\n return cls.snow_cli\n\n @classmethod\n def _get_db_session(cls):\n if not 
cls.db_session:\n cls.db_session = init_db_session(\n db_url=cls.config.db_url,\n db_echo=cls.config.app_debug,\n )\n return cls.db_session\n\n @classmethod\n def _get_db_async_session(cls):\n if not cls.db_async_session:\n cls.db_async_session = init_db_async_session(\n db_url=cls.config.db_async_url,\n db_echo=cls.config.app_debug,\n )\n return cls.db_async_session\n\n @classmethod\n def setup(cls):\n \"\"\"\n \u521d\u59cb\u5316\n \"\"\"\n cls._get_config()\n cls._get_logger()\n cls._get_redis_cli()\n cls._get_snow_cli()\n # cls._get_db_session()\n cls._get_db_async_session()\n\n\ng = G()\n",
28
- "app/middleware/auth.py": "from fastapi import Depends\nfrom fastapi.security import HTTPBearer, HTTPAuthorizationCredentials\nfrom typing import Optional\n\nfrom fastapi.security.utils import get_authorization_scheme_param\nfrom pydantic import BaseModel\nfrom starlette.requests import Request\n\nfrom app.api.exception import CustomException\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.models.user import User\nfrom app.utils import db_async\nfrom app.utils.auth import verify_jwt\n\n\nclass JWTUser(BaseModel):\n # \u5b57\u6bb5\u4e0eUser\u5bf9\u9f50\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n\n\nclass JWTAuthorizationCredentials(HTTPAuthorizationCredentials):\n user: JWTUser\n\n\nclass JWTBearer(HTTPBearer):\n\n async def __call__(\n self, request: Request\n ) -> Optional[JWTAuthorizationCredentials]:\n authorization = request.headers.get(\"Authorization\")\n scheme, credentials = get_authorization_scheme_param(authorization)\n if not (authorization and scheme and credentials):\n if self.auto_error:\n raise CustomException(\n msg=\"Not authenticated\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n if scheme.lower() != \"bearer\":\n if self.auto_error:\n raise CustomException(\n msg=\"Invalid authentication credentials\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n user = await self.verify_credentials(credentials)\n return JWTAuthorizationCredentials(scheme=scheme, credentials=credentials, user=user)\n\n async def verify_credentials(self, credentials: str) -> JWTUser:\n playload = await self._verify_jwt(credentials)\n if playload is None:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # \u5efa\u8bae\uff1ajwt_key\u8fdb\u884credis\u7f13\u5b58\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n fields=[\"jwt_key\"],\n filter_by={\"id\": playload.get(\"id\")}\n )\n if not 
data:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # <<< \u5efa\u8bae\n await self._verify_jwt(credentials, jwt_key=data.get(\"jwt_key\"))\n return JWTUser(\n id=playload.get(\"id\"),\n phone=playload.get(\"phone\"),\n name=playload.get(\"name\"),\n age=playload.get(\"age\"),\n gender=playload.get(\"gender\"),\n )\n\n @staticmethod\n async def _verify_jwt(token: str, jwt_key: str = None) -> dict:\n try:\n return verify_jwt(token=token, jwt_key=jwt_key)\n except Exception as e:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR, msg=str(e))\n\n\ndef get_current_user(\n credentials: Optional[JWTAuthorizationCredentials] = Depends(JWTBearer(auto_error=True))\n) -> JWTUser:\n if not credentials:\n return JWTUser()\n return credentials.user\n",
27
+ "app/middleware/auth.py": "from fastapi import Depends\nfrom fastapi.security import HTTPBearer, HTTPAuthorizationCredentials\nfrom typing import Optional\n\nfrom fastapi.security.utils import get_authorization_scheme_param\nfrom pydantic import BaseModel\nfrom starlette.requests import Request\n\nfrom app.api.exception import CustomException\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.models.user import User\nfrom app.utils import db_async_util\nfrom app.utils.auth_util import verify_jwt\n\n\nclass JWTUser(BaseModel):\n # \u5b57\u6bb5\u4e0eUser\u5bf9\u9f50\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n\n\nclass JWTAuthorizationCredentials(HTTPAuthorizationCredentials):\n user: JWTUser\n\n\nclass JWTBearer(HTTPBearer):\n\n async def __call__(\n self, request: Request\n ) -> Optional[JWTAuthorizationCredentials]:\n authorization = request.headers.get(\"Authorization\")\n scheme, credentials = get_authorization_scheme_param(authorization)\n if not (authorization and scheme and credentials):\n if self.auto_error:\n raise CustomException(\n msg=\"Not authenticated\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n if scheme.lower() != \"bearer\":\n if self.auto_error:\n raise CustomException(\n msg=\"Invalid authentication credentials\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n user = await self.verify_credentials(credentials)\n return JWTAuthorizationCredentials(scheme=scheme, credentials=credentials, user=user)\n\n async def verify_credentials(self, credentials: str) -> JWTUser:\n playload = await self._verify_jwt(credentials)\n if playload is None:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # \u5efa\u8bae\uff1ajwt_key\u8fdb\u884credis\u7f13\u5b58\n async with g.db_async_session() as session:\n data = await db_async_util.query_one(\n session=session,\n model=User,\n fields=[\"jwt_key\"],\n filter_by={\"id\": 
playload.get(\"id\")}\n )\n if not data:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # <<< \u5efa\u8bae\n await self._verify_jwt(credentials, jwt_key=data.get(\"jwt_key\"))\n return JWTUser(\n id=playload.get(\"id\"),\n phone=playload.get(\"phone\"),\n name=playload.get(\"name\"),\n age=playload.get(\"age\"),\n gender=playload.get(\"gender\"),\n )\n\n @staticmethod\n async def _verify_jwt(token: str, jwt_key: str = None) -> dict:\n try:\n return verify_jwt(token=token, jwt_key=jwt_key)\n except Exception as e:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR, msg=str(e))\n\n\ndef get_current_user(\n credentials: Optional[JWTAuthorizationCredentials] = Depends(JWTBearer(auto_error=True))\n) -> JWTUser:\n if not credentials:\n return JWTUser()\n return credentials.user\n",
29
28
  "app/middleware/cors.py": "from fastapi.middleware.cors import CORSMiddleware\n\nfrom app.initializer import g\n\n\nclass Cors:\n middleware_class = CORSMiddleware\n allow_origins = g.config.app_allow_origins\n allow_credentials = True\n allow_methods = [\"*\"]\n allow_headers = [\"*\"]\n",
30
29
  "app/middleware/exception.py": "import traceback\n\nfrom fastapi.exceptions import RequestValidationError\nfrom starlette.exceptions import HTTPException\nfrom starlette.requests import Request\nfrom starlette.responses import JSONResponse\n\nfrom app.api.exception import CustomException\nfrom app.api.response import Response\nfrom app.api.status import Status\nfrom app.initializer import g\n\n\nclass ExceptionHandler:\n\n @staticmethod\n async def custom_exception_handler(\n request: Request,\n exc: CustomException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.code} {exc.msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=exc.msg,\n code=exc.code,\n data=exc.data,\n request=request,\n )\n\n @staticmethod\n async def http_exception_handler(\n request: Request,\n exc: HTTPException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.status_code} {exc.detail}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=exc.detail,\n code=exc.status_code,\n request=request,\n )\n\n @staticmethod\n async def validation_exception_handler(\n request: Request,\n exc: RequestValidationError,\n is_display_all: bool = False,\n is_traceback: bool = False,\n ) -> JSONResponse:\n if is_display_all:\n msg = \", \".join([f\"'{item['loc'][1] if len(item['loc']) > 1 else item['loc'][0]}' {item['msg'].lower()}\" for item in exc.errors()]) # noqa: E501\n else:\n _first_error = exc.errors()[0]\n msg = f\"'{_first_error['loc'][1] if len(_first_error['loc']) > 1 else _first_error['loc'][0]}' {_first_error['msg'].lower()}\" # noqa: E501\n lmsg = f'- \"{request.method} {request.url.path}\" {Status.PARAMS_ERROR.code} {msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=msg,\n 
status=Status.PARAMS_ERROR,\n request=request,\n )\n",
31
30
  "app/middleware/headers.py": "import uuid\nfrom starlette.middleware.base import BaseHTTPMiddleware\nfrom starlette.requests import Request\n\nfrom app.initializer.context import request_id_ctx_var\n\n\nclass HeadersMiddleware(BaseHTTPMiddleware):\n \"\"\"\u5934\u5904\u7406\u4e2d\u95f4\u4ef6\"\"\"\n _HEADERS = {\n # \u53ef\u6dfb\u52a0\u76f8\u5173\u5934\n }\n\n async def dispatch(self, request: Request, call_next):\n request_id = self._get_or_create_request_id(request)\n request.state.request_id = request_id\n ctx_token = request_id_ctx_var.set(request_id)\n try:\n response = await call_next(request)\n response.headers[\"X-Request-ID\"] = request_id\n for key, value in self._HEADERS.items():\n if key not in response.headers:\n response.headers[key] = value\n return response\n finally:\n request_id_ctx_var.reset(ctx_token)\n\n @staticmethod\n def _get_or_create_request_id(request: Request) -> str:\n request_id = request.headers.get(\"X-Request-ID\")\n if not request_id:\n request_id = f\"req-{uuid.uuid4()}\"\n return request_id\n",
@@ -34,13 +33,13 @@
34
33
  "app/models/__init__.py": "\"\"\"\n\u6570\u636e\u6a21\u578b\n\"\"\"\nfrom sqlalchemy.orm import declarative_base\n\nDeclBase = declarative_base()\n",
35
34
  "app/schemas/user.py": "import re\nfrom typing import Literal\n\nfrom pydantic import BaseModel, Field, field_validator\n\nfrom app.schemas import filter_fields\n\n\nclass UserDetail(BaseModel):\n id: str = Field(...)\n # #\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n created_at: int = None\n updated_at: int = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[]\n )\n\n\nclass UserList(BaseModel):\n page: int = Field(1, ge=1)\n size: int = Field(10, ge=1)\n # #\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n created_at: int = None\n updated_at: int = None\n\n @classmethod\n def response_fields(cls):\n return filter_fields(\n cls,\n exclude=[\n \"page\",\n \"size\",\n ]\n )\n\n\nclass UserCreate(BaseModel):\n phone: str = Field(..., pattern=r\"^1[3-9]\\d{9}$\")\n password: str = Field(...)\n name: str | None = Field(None)\n age: int | None = Field(None, ge=0, le=200)\n gender: Literal[1, 2] | None = Field(None)\n\n @field_validator(\"password\")\n def validate_password(cls, v):\n if not re.match(r\"^(?=.*[A-Za-z])(?=.*\\d)\\S{6,20}$\", v):\n raise ValueError(\"\u5bc6\u7801\u5fc5\u987b\u5305\u542b\u81f3\u5c11\u4e00\u4e2a\u5b57\u6bcd\u548c\u4e00\u4e2a\u6570\u5b57\uff0c\u957f\u5ea6\u4e3a6-20\u4f4d\u7684\u975e\u7a7a\u767d\u5b57\u7b26\u7ec4\u5408\")\n return v\n\n @field_validator(\"name\")\n def validate_name(cls, v, info):\n if not v and (phone := info.data.get(\"phone\")):\n return f\"\u7528\u6237{phone[-4:]}\"\n if v and not re.match(r\"^[\\u4e00-\\u9fffA-Za-z0-9_\\-.]{1,50}$\", v):\n raise ValueError(\"\u540d\u79f0\u4ec5\u96501-50\u4f4d\u7684\u4e2d\u6587\u3001\u82f1\u6587\u3001\u6570\u5b57\u3001_-.\u7ec4\u5408\")\n return v\n\n\nclass UserUpdate(BaseModel):\n name: str | None = Field(None)\n age: int | None = Field(None, ge=0, le=200)\n gender: Literal[1, 2] | None = Field(None)\n\n @field_validator(\"name\")\n def validate_name(cls, v):\n 
if v and not re.match(r\"^[\\u4e00-\\u9fffA-Za-z0-9_\\-.]{1,50}$\", v):\n raise ValueError(\"\u540d\u79f0\u4ec5\u96501-50\u4f4d\u7684\u4e2d\u6587\u3001\u82f1\u6587\u3001\u6570\u5b57\u3001_-.\u7ec4\u5408\")\n return v\n\n\nclass UserDelete(BaseModel):\n pass\n\n\nclass UserLogin(BaseModel):\n phone: str = Field(...)\n password: str = Field(...)\n\n\nclass UserToken(BaseModel):\n id: str = Field(...)\n exp_minutes: int = Field(24 * 60 * 30, ge=1)\n",
36
35
  "app/schemas/__init__.py": "\"\"\"\n\u6570\u636e\u7ed3\u6784\n\"\"\"\n\n\ndef filter_fields(\n model,\n exclude: list = None,\n):\n if exclude:\n return list(set(model.model_fields.keys()) - set(exclude))\n return list(model.model_fields.keys())\n",
37
- "app/services/user.py": "from app.models.user import User\nfrom app.schemas.user import (\n UserDetail,\n UserList,\n UserCreate,\n UserUpdate,\n UserDelete,\n UserLogin,\n UserToken,\n)\nfrom app.initializer import g\nfrom app.utils import auth, db_async\n\n\nclass UserDetailSvc(UserDetail):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserDetail\"\n }\n }\n\n async def detail(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n fields=self.response_fields(),\n filter_by={\"id\": self.id},\n )\n return data\n\n\nclass UserListSvc(UserList):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserList\"\n }\n }\n\n async def lst(self):\n async with g.db_async_session() as session:\n data = await db_async.query_all(\n session=session,\n model=User,\n fields=self.response_fields(),\n page=self.page,\n size=self.size,\n )\n total = await db_async.query_total(session, User)\n return data, total\n\n\nclass UserCreateSvc(UserCreate):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserCreate\"\n }\n }\n\n async def create(self):\n async with g.db_async_session() as session:\n return await db_async.create(\n session=session,\n model=User,\n data={\n \"name\": self.name,\n \"phone\": self.phone,\n \"age\": self.age,\n \"gender\": self.gender,\n \"password\": auth.hash_password(self.password),\n \"jwt_key\": auth.gen_jwt_key(),\n },\n filter_by={\"phone\": self.phone},\n )\n\n\nclass UserUpdateSvc(UserUpdate):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserUpdate\"\n }\n }\n\n async def update(self, user_id: str):\n async with g.db_async_session() as session:\n return await db_async.update(\n session=session,\n model=User,\n data=self.model_dump(),\n filter_by={\"id\": user_id},\n )\n\n\nclass UserDeleteSvc(UserDelete):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserDelete\"\n }\n }\n\n @staticmethod\n async def delete(user_id: 
str):\n async with g.db_async_session() as session:\n return await db_async.delete(\n session=session,\n model=User,\n filter_by={\"id\": user_id},\n )\n\n\nclass UserLoginSvc(UserLogin):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserLogin\"\n }\n }\n\n async def login(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n filter_by={\"phone\": self.phone},\n )\n if not data or not auth.verify_password(self.password, data.get(\"password\")):\n return None\n new_jwt_key = auth.gen_jwt_key()\n token = auth.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=24 * 60 * 30,\n )\n # \u66f4\u65b0jwt_key\n await db_async.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"phone\": self.phone},\n )\n return token\n\n\nclass UserTokenSvc(UserToken):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserToken\"\n }\n }\n\n async def token(self):\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n filter_by={\"id\": self.id},\n )\n if not data:\n return None\n new_jwt_key = auth.gen_jwt_key()\n token = auth.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=self.exp_minutes,\n )\n # \u66f4\u65b0jwt_key\n await db_async.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"id\": self.id},\n )\n return token\n",
36
+ "app/services/user.py": "from app.models.user import User\nfrom app.schemas.user import (\n UserDetail,\n UserList,\n UserCreate,\n UserUpdate,\n UserDelete,\n UserLogin,\n UserToken,\n)\nfrom app.initializer import g\nfrom app.utils import auth_util, db_async_util\n\n\nclass UserDetailSvc(UserDetail):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserDetail\"\n }\n }\n\n async def detail(self):\n async with g.db_async_session() as session:\n data = await db_async_util.query_one(\n session=session,\n model=User,\n fields=self.response_fields(),\n filter_by={\"id\": self.id},\n )\n return data\n\n\nclass UserListSvc(UserList):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserList\"\n }\n }\n\n async def lst(self):\n async with g.db_async_session() as session:\n data = await db_async_util.query_all(\n session=session,\n model=User,\n fields=self.response_fields(),\n page=self.page,\n size=self.size,\n )\n total = await db_async_util.query_total(session, User)\n return data, total\n\n\nclass UserCreateSvc(UserCreate):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserCreate\"\n }\n }\n\n async def create(self):\n async with g.db_async_session() as session:\n return await db_async_util.create(\n session=session,\n model=User,\n data={\n \"name\": self.name,\n \"phone\": self.phone,\n \"age\": self.age,\n \"gender\": self.gender,\n \"password\": auth_util.hash_password(self.password),\n \"jwt_key\": auth_util.gen_jwt_key(),\n },\n filter_by={\"phone\": self.phone},\n )\n\n\nclass UserUpdateSvc(UserUpdate):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserUpdate\"\n }\n }\n\n async def update(self, user_id: str):\n async with g.db_async_session() as session:\n return await db_async_util.update(\n session=session,\n model=User,\n data=self.model_dump(),\n filter_by={\"id\": user_id},\n )\n\n\nclass UserDeleteSvc(UserDelete):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserDelete\"\n }\n }\n\n 
@staticmethod\n async def delete(user_id: str):\n async with g.db_async_session() as session:\n return await db_async_util.delete(\n session=session,\n model=User,\n filter_by={\"id\": user_id},\n )\n\n\nclass UserLoginSvc(UserLogin):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserLogin\"\n }\n }\n\n async def login(self):\n async with g.db_async_session() as session:\n data = await db_async_util.query_one(\n session=session,\n model=User,\n filter_by={\"phone\": self.phone},\n )\n if not data or not auth_util.verify_password(self.password, data.get(\"password\")):\n return None\n new_jwt_key = auth_util.gen_jwt_key()\n token = auth_util.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=24 * 60 * 30,\n )\n # \u66f4\u65b0jwt_key\n await db_async_util.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"phone\": self.phone},\n )\n return token\n\n\nclass UserTokenSvc(UserToken):\n model_config = {\n \"json_schema_extra\": {\n \"title\": \"UserToken\"\n }\n }\n\n async def token(self):\n async with g.db_async_session() as session:\n data = await db_async_util.query_one(\n session=session,\n model=User,\n filter_by={\"id\": self.id},\n )\n if not data:\n return None\n new_jwt_key = auth_util.gen_jwt_key()\n token = auth_util.gen_jwt(\n payload={\n \"id\": data.get(\"id\"),\n \"phone\": data.get(\"phone\"),\n \"name\": data.get(\"name\"),\n \"age\": data.get(\"age\"),\n \"gender\": data.get(\"gender\"),\n },\n jwt_key=new_jwt_key,\n exp_minutes=self.exp_minutes,\n )\n # \u66f4\u65b0jwt_key\n await db_async_util.update(\n session=session,\n model=User,\n data={\"jwt_key\": new_jwt_key},\n filter_by={\"id\": self.id},\n )\n return token\n",
38
37
  "app/services/__init__.py": "\"\"\"\n\u4e1a\u52a1\u903b\u8f91\n\"\"\"\n",
39
- "app/utils/auth.py": "import secrets\nfrom datetime import datetime, timedelta\n\nimport bcrypt\nimport jwt\n\n_ALGORITHM = \"HS256\"\n\n\ndef gen_jwt(payload: dict, jwt_key: str, exp_minutes: int = 24 * 60 * 30):\n payload.update({\"exp\": datetime.utcnow() + timedelta(minutes=exp_minutes)})\n encoded_jwt = jwt.encode(payload=payload, key=jwt_key, algorithm=_ALGORITHM)\n return encoded_jwt\n\n\ndef verify_jwt(token: str, jwt_key: str = None) -> dict:\n if not jwt_key:\n return jwt.decode(jwt=token, options={\"verify_signature\": False})\n return jwt.decode(jwt=token, key=jwt_key, algorithms=[_ALGORITHM])\n\n\ndef gen_jwt_key():\n return secrets.token_hex(16)\n\n\ndef hash_password(password: str) -> str:\n salt = bcrypt.gensalt()\n hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)\n return hashed_password.decode('utf-8')\n\n\ndef verify_password(password: str, hashed_password: str) -> bool:\n return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))\n",
40
- "app/utils/db_async.py": "from sqlalchemy import (\n select,\n func,\n update as update_,\n delete as delete_,\n)\n\n\ndef format_all(\n rows,\n fields: list[str],\n) -> list[dict]:\n if not rows:\n return list()\n return [dict(zip(fields, row)) for row in rows]\n\n\ndef format_one(\n row,\n fields: list[str],\n) -> dict:\n if not row:\n return {}\n return dict(zip(fields, row))\n\n\ndef model_dict(\n model,\n fields: list[str] = None,\n) -> dict:\n if not model:\n return {}\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n return {field: getattr(model, field) for field in fields}\n\n\nasync def query_one(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n) -> dict:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return format_one(result.fetchone(), fields)\n\n\nasync def query_all(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n page: int = None,\n size: int = None,\n) -> list[dict]:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n if page and size:\n query = query.offset((page - 1) * size).limit(size)\n result = await session.execute(query)\n return format_all(result.fetchall(), fields)\n\n\nasync def query_total(\n session,\n model,\n filter_by: dict = None,\n) -> int:\n query = select(func.count()).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return result.scalar()\n\n\nasync def create(\n session,\n model,\n data: dict,\n filter_by: dict = None,\n) -> int:\n try:\n if filter_by:\n result 
= await query_one(session, model, filter_by=filter_by)\n if result:\n return 0\n stmt = model(**data)\n session.add(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return stmt.id\n\n\nasync def update(\n session,\n model,\n data: dict,\n filter_by: dict | None,\n is_exclude_none: bool = True,\n) -> list:\n try:\n if is_exclude_none:\n data = {k: v for k, v in data.items() if v is not None}\n stmt = update_(model).values(**data)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n updated_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n updated_ids = result.scalars().all()\n if updated_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return updated_ids\n\n\nasync def delete(\n session,\n model,\n filter_by: dict | None,\n) -> list:\n try:\n stmt = delete_(model)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n deleted_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n deleted_ids = result.scalars().all()\n if deleted_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return deleted_ids\n",
38
+ "app/utils/auth_util.py": "import secrets\nfrom datetime import datetime, timedelta\n\nimport bcrypt\nimport jwt\n\n_ALGORITHM = \"HS256\"\n\n\ndef gen_jwt(payload: dict, jwt_key: str, exp_minutes: int = 24 * 60 * 30):\n payload.update({\"exp\": datetime.utcnow() + timedelta(minutes=exp_minutes)})\n encoded_jwt = jwt.encode(payload=payload, key=jwt_key, algorithm=_ALGORITHM)\n return encoded_jwt\n\n\ndef verify_jwt(token: str, jwt_key: str = None) -> dict:\n if not jwt_key:\n return jwt.decode(jwt=token, options={\"verify_signature\": False})\n return jwt.decode(jwt=token, key=jwt_key, algorithms=[_ALGORITHM])\n\n\ndef gen_jwt_key():\n return secrets.token_hex(16)\n\n\ndef hash_password(password: str) -> str:\n salt = bcrypt.gensalt()\n hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)\n return hashed_password.decode('utf-8')\n\n\ndef verify_password(password: str, hashed_password: str) -> bool:\n return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))\n",
39
+ "app/utils/db_async_util.py": "from sqlalchemy import (\n select,\n func,\n update as update_,\n delete as delete_,\n)\n\n\ndef format_all(\n rows,\n fields: list[str],\n) -> list[dict]:\n if not rows:\n return list()\n return [dict(zip(fields, row)) for row in rows]\n\n\ndef format_one(\n row,\n fields: list[str],\n) -> dict:\n if not row:\n return {}\n return dict(zip(fields, row))\n\n\ndef model_dict(\n model,\n fields: list[str] = None,\n) -> dict:\n if not model:\n return {}\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n return {field: getattr(model, field) for field in fields}\n\n\nasync def query_one(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n) -> dict:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return format_one(result.fetchone(), fields)\n\n\nasync def query_all(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n page: int = None,\n size: int = None,\n) -> list[dict]:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n if page and size:\n query = query.offset((page - 1) * size).limit(size)\n result = await session.execute(query)\n return format_all(result.fetchall(), fields)\n\n\nasync def query_total(\n session,\n model,\n filter_by: dict = None,\n) -> int:\n query = select(func.count()).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return result.scalar()\n\n\nasync def create(\n session,\n model,\n data: dict,\n filter_by: dict = None,\n) -> int:\n try:\n if filter_by:\n 
result = await query_one(session, model, filter_by=filter_by)\n if result:\n return 0\n stmt = model(**data)\n session.add(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return stmt.id\n\n\nasync def update(\n session,\n model,\n data: dict,\n filter_by: dict | None,\n is_exclude_none: bool = True,\n) -> list:\n try:\n if is_exclude_none:\n data = {k: v for k, v in data.items() if v is not None}\n stmt = update_(model).values(**data)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n updated_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n updated_ids = result.scalars().all()\n if updated_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return updated_ids\n\n\nasync def delete(\n session,\n model,\n filter_by: dict | None,\n) -> list:\n try:\n stmt = delete_(model)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n deleted_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n deleted_ids = result.scalars().all()\n if deleted_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return deleted_ids\n",
41
40
  "app/utils/__init__.py": "\"\"\"\nutils\n\"\"\"\n",
42
41
  "app_celery/conf.py": "import os\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\nfrom toollib.utils import get_cls_attrs, parse_variable\n\nfrom app import APP_DIR\n\n_CONFIG_DIR = APP_DIR.parent.joinpath(\"config\")\n\nload_dotenv(dotenv_path=os.environ.setdefault(\n key=\"env_path\",\n value=str(_CONFIG_DIR.joinpath(\".env\")))\n)\n# #\napp_yaml = Path(\n os.environ.get(\"app_yaml\") or\n _CONFIG_DIR.joinpath(f\"app_{os.environ.setdefault(key='app_env', value='dev')}.yaml\")\n)\nif not app_yaml.is_file():\n raise RuntimeError(f\"\u914d\u7f6e\u6587\u4ef6\u4e0d\u5b58\u5728\uff1a{app_yaml}\")\n\n\nclass Config:\n \"\"\"\u914d\u7f6e\"\"\"\n _yaml_conf: dict = None\n yaml_name: str = app_yaml.name\n #\n celery_broker_url: str\n celery_backend_url: str\n celery_timezone: str = \"Asia/Shanghai\"\n celery_enable_utc: bool = True\n celery_task_serializer: str = \"json\"\n celery_result_serializer: str = \"json\"\n celery_accept_content: list = [\"json\"]\n celery_task_ignore_result: bool = False\n celery_result_expire: int = 86400\n celery_task_track_started: bool = True\n celery_worker_concurrency: int = 8\n celery_worker_prefetch_multiplier: int = 2\n celery_worker_max_tasks_per_child: int = 100\n celery_broker_connection_retry_on_startup: bool = True\n celery_task_reject_on_worker_lost: bool = True\n\n def setup(self):\n self.setattr_from_env_or_yaml()\n return self\n\n def setattr_from_env_or_yaml(self):\n cls_attrs = get_cls_attrs(Config)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n if k in os.environ: # \u4f18\u5148\u73af\u5883\u53d8\u91cf\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n else:\n v = parse_variable(k=k, v_type=v_type, v_from=self.load_yaml(), default=v)\n setattr(self, k, v)\n\n def load_yaml(self, reload: bool = False) -> dict:\n if self._yaml_conf and not reload:\n return self._yaml_conf\n with open(app_yaml, mode=\"r\", encoding=\"utf-8\") as file:\n 
self._yaml_conf = yaml.load(file, Loader=yaml.FullLoader)\n return self._yaml_conf\n\n\nconfig = Config().setup()\n",
43
- "app_celery/README.md": "# app-celery\n\n## \u7b80\u4ecb\n\n### producer\uff1a\u751f\u4ea7\u8005\uff08\u53d1\u5e03\u4efb\u52a1\uff09\n\n- register\uff1a\u6ce8\u518c\u4e2d\u5fc3\n - \u5c06`consumer`\u7684`tasks`\u6ce8\u518c\u5230`producer`\u7684`register`\u4e2d\n- publisher\uff1a\u53d1\u5e03\u8005\n - \u9879\u76ee\u4e2d\u901a\u8fc7\u53d1\u5e03\u8005\u6765\u53d1\u5e03\u4efb\u52a1\uff1a\u53ef\u53c2\u8003`app/api/default/aping.py`\uff08\u8fd9\u91cc\u53ea\u662f\u7b80\u5355\u793a\u4f8b\uff0c\u5b9e\u9645\u4e0a\u5e94\u8be5\u5728`services`\u5c42\u8c03\u7528\uff09\n\n### consumer\uff1a\u6d88\u8d39\u8005\uff08\u6267\u884c\u4efb\u52a1\uff09\n\n- tasks: \u4efb\u52a1\n - \u5b9a\u65f6\u4efb\u52a1\uff08beat_xxx\uff09\n - 1\u3002\u521b\u5efa\u5b9a\u65f6\u4efb\u52a1\n - 2\u3002\u53d1\u5e03\u5b9a\u65f6\u4efb\u52a1\uff08\u901a\u8fc7celery\u5185\u90e8\u7684`beat`\u8c03\u7528\uff09\n - \u8fdb\u5165`app_celery`\u7236\u7ea7\u76ee\u5f55\uff0c\u5373\u5de5\u4f5c\u76ee\u5f55\n - \u542f\u52a8\u547d\u4ee4\uff1a\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n - \u65b9\u5f0f1\u3002\u76f4\u63a5\u6267\u884c\u811a\u672c: `python runcbeat.py`\n - \u65b9\u5f0f2\u3002\u4f7f\u7528\u547d\u4ee4\u884c\uff1a`celery -A app_celery.consumer beat --loglevel=info --max-interval=5`\n - 3\u3002\u542f\u52a8\u6d88\u8d39\u8005worker\n - \u5f02\u6b65\u4efb\u52a1\uff08xxx)\n - 1\u3002\u521b\u5efa\u5f02\u6b65\u4efb\u52a1\uff0c\u5e76\u6ce8\u518c\u5230`producer`\u7684`register`\uff0c\u6839\u636e\u6ce8\u518c\u7684\u89c4\u5219\u8fdb\u884c`\u4efb\u52a1\u8c03\u7528`\u548c`worker\u542f\u52a8`\n - 2\u3002\u53d1\u5e03\u5f02\u6b65\u4efb\u52a1\uff08\u901a\u8fc7\u751f\u4ea7\u8005\u7684`publisher`\u8c03\u7528\uff09\n - \u53ef\u53c2\u8003`app/api/default/aping.py`\uff08\u8fd9\u91cc\u53ea\u662f\u7b80\u5355\u793a\u4f8b\uff0c\u5b9e\u9645\u4e0a\u5e94\u8be5\u5728`services`\u5c42\u8c03\u7528\uff09\n - 3\u3002\u542f\u52a8\u6d88\u8d39\u8005worker\n- workers: \u5de5\u4f5c\u8005\n - 
1\u3002\u521b\u5efaworker\u670d\u52a1\uff0c\u5b9a\u4e49\u961f\u5217\u7b49\u5c5e\u6027\uff08\u4e3a\u65b9\u4fbf\u6269\u5c55\u5efa\u8bae\u4e00\u7c7b\u4efb\u52a1\u4e00\u4e2a\u670d\u52a1\uff09\n - 2\u3002\u542f\u52a8worker\u670d\u52a1\uff1a\n - 1\u3002\u8fdb\u5165`app_celery`\u7236\u7ea7\u76ee\u5f55\uff0c\u5373\u5de5\u4f5c\u76ee\u5f55\n - 2\u3002\u542f\u52a8\u547d\u4ee4\uff1a\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n - \u65b9\u5f0f1\u3002\u76f4\u63a5\u6267\u884c\u811a\u672c: `python runcworker.py -n ping`\n - \u65b9\u5f0f2\u3002\u4f7f\u7528\u547d\u4ee4\u884c\uff1a`celery -A app_celery.consumer.workers.ping worker --loglevel=info --concurrency=5`\n\n### \u6ce8\u610f\uff1a\n\n- \u6700\u597d\u4e0e`app`\u89e3\u8026\uff0c\u5373\uff1a\n - \u53ea\u6709`app`\u5355\u5411\u8c03\u7528`app_celery`\n - \u4f46`app_celery`\u4e0d\u8c03\u7528`app`",
42
+ "app_celery/README.md": "# app-celery\n\n## \u7b80\u4ecb\n\n### producer\uff1a\u751f\u4ea7\u8005\uff08\u53d1\u5e03\u4efb\u52a1\uff09\n\n- register\uff1a\u6ce8\u518c\u4e2d\u5fc3\n - \u5c06`consumer`\u7684`tasks`\u6ce8\u518c\u5230`producer`\u7684`register`\u4e2d\n- publisher\uff1a\u53d1\u5e03\u8005\n - \u9879\u76ee\u4e2d\u901a\u8fc7\u53d1\u5e03\u8005\u6765\u53d1\u5e03\u4efb\u52a1\uff1a\u53ef\u53c2\u8003`app/api/default/aping.py`\uff08\u8fd9\u91cc\u53ea\u662f\u7b80\u5355\u793a\u4f8b\uff0c\u5b9e\u9645\u4e0a\u5e94\u8be5\u5728`services`\u5c42\u8c03\u7528\uff09\n\n### consumer\uff1a\u6d88\u8d39\u8005\uff08\u6267\u884c\u4efb\u52a1\uff09\n\n- tasks: \u4efb\u52a1\n - \u5b9a\u65f6\u4efb\u52a1\uff08beat_xxx\uff09\n - 1\u3002\u521b\u5efa\u5b9a\u65f6\u4efb\u52a1\n - 2\u3002\u53d1\u5e03\u5b9a\u65f6\u4efb\u52a1\uff08\u901a\u8fc7celery\u5185\u90e8\u7684`beat`\u8c03\u7528\uff09\n - \u8fdb\u5165`app_celery`\u7236\u7ea7\u76ee\u5f55\uff0c\u5373\u5de5\u4f5c\u76ee\u5f55\n - \u542f\u52a8\u547d\u4ee4\uff1a\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n - \u65b9\u5f0f1\u3002\u76f4\u63a5\u6267\u884c\u811a\u672c: `python runcbeat.py`\n - \u65b9\u5f0f2\u3002\u4f7f\u7528\u547d\u4ee4\u884c\uff1a`celery -A app_celery.consumer beat --loglevel=info --max-interval=5`\n - 3\u3002\u542f\u52a8\u6d88\u8d39\u8005worker\n - \u5f02\u6b65\u4efb\u52a1\uff08xxx)\n - 1\u3002\u521b\u5efa\u5f02\u6b65\u4efb\u52a1\uff0c\u5e76\u6ce8\u518c\u5230`producer`\u7684`register`\uff0c\u6839\u636e\u6ce8\u518c\u7684\u89c4\u5219\u8fdb\u884c`\u4efb\u52a1\u8c03\u7528`\u548c`worker\u542f\u52a8`\n - 2\u3002\u53d1\u5e03\u5f02\u6b65\u4efb\u52a1\uff08\u901a\u8fc7\u751f\u4ea7\u8005\u7684`publisher`\u8c03\u7528\uff09\n - \u53ef\u53c2\u8003`app/api/default/aping.py`\uff08\u8fd9\u91cc\u53ea\u662f\u7b80\u5355\u793a\u4f8b\uff0c\u5b9e\u9645\u4e0a\u5e94\u8be5\u5728`services`\u5c42\u8c03\u7528\uff09\n - 3\u3002\u542f\u52a8\u6d88\u8d39\u8005worker\n- workers: \u5de5\u4f5c\u8005\n - 
1\u3002\u521b\u5efaworker\u670d\u52a1\uff0c\u5b9a\u4e49\u961f\u5217\u7b49\u5c5e\u6027\uff08\u4e3a\u65b9\u4fbf\u6269\u5c55\u5efa\u8bae\u4e00\u7c7b\u4efb\u52a1\u4e00\u4e2a\u670d\u52a1\uff09\n - 2\u3002\u542f\u52a8worker\u670d\u52a1\uff1a\n - 1\u3002\u8fdb\u5165`app_celery`\u7236\u7ea7\u76ee\u5f55\uff0c\u5373\u5de5\u4f5c\u76ee\u5f55\n - 2\u3002\u542f\u52a8\u547d\u4ee4\uff1a\uff08\u66f4\u591a\u53c2\u6570\u8bf7\u81ea\u884c\u6307\u5b9a\uff09\n - \u65b9\u5f0f1\u3002\u76f4\u63a5\u6267\u884c\u811a\u672c: `python runcworker.py -n ping`\n - \u65b9\u5f0f2\u3002\u4f7f\u7528\u547d\u4ee4\u884c\uff1a`celery -A app_celery.consumer.workers.ping worker --loglevel=info --concurrency=5`\n\n### \u6ce8\u610f\uff1a\n\n- \u6700\u597d\u4e0e`app`\u89e3\u8026\uff0c\u5373\uff1a\n - \u53ea\u6709`app`\u5355\u5411\u8c03\u7528`app_celery`\n - \u4f46`app_celery`\u4e0d\u8c03\u7528`app`",
44
43
  "app_celery/requirements.txt": "# -*- coding: utf-8 -*-\n# Python>=3.11\ntoollib==1.7.8\npython-dotenv==1.1.1\nPyYAML==6.0.2\npydantic==2.11.9\ncelery==5.5.3\nredis==6.4.0\ngevent==25.9.1",
45
44
  "app_celery/__init__.py": "\"\"\"\n@author axiner\n@version v0.0.1\n@created 2025/09/20 10:10\n@abstract app-celery\n@description\n@history\n\"\"\"\nfrom celery import Celery\n\nfrom app_celery.conf import config\n\n\ndef make_celery(include: list = None, configs: dict = None):\n app = Celery(\n main=\"app_celery\",\n broker=config.celery_broker_url,\n backend=config.celery_backend_url,\n include=include,\n )\n app.conf.update(\n timezone=config.celery_timezone,\n enable_utc=config.celery_enable_utc,\n task_serializer=config.celery_task_serializer,\n result_serializer=config.celery_result_serializer,\n accept_content=config.celery_accept_content,\n celery_task_ignore_result=config.celery_task_ignore_result,\n celery_result_expire=config.celery_result_expire,\n celery_task_track_started=config.celery_task_track_started,\n worker_concurrency=config.celery_worker_concurrency,\n worker_prefetch_multiplier=config.celery_worker_prefetch_multiplier,\n worker_max_tasks_per_child=config.celery_worker_max_tasks_per_child,\n broker_connection_retry_on_startup=config.celery_broker_connection_retry_on_startup,\n task_reject_on_worker_lost=config.celery_task_reject_on_worker_lost,\n )\n if configs:\n app.conf.update(configs)\n return app\n",
46
45
  "app_celery/consumer/__init__.py": "\"\"\"\n\u6d88\u8d39\u8005\n\"\"\"\nimport re\nfrom pathlib import Path\n\nfrom app_celery import make_celery\n\n\ndef autodiscover_task_modules(\n task_name: str = \"tasks\",\n task_module: str = \"app_celery.consumer.tasks\",\n) -> list:\n \"\"\"\n \u81ea\u52a8\u53d1\u73b0\u4efb\u52a1\u6a21\u5757\n - \u53ef\u5728\u6a21\u5757\u4e2d\u52a0\u5165`_active = False`\u6765\u53d6\u6d88\u6fc0\u6d3b\n \"\"\"\n task_modules = []\n active_pat = re.compile(r\"^_active\\s*=\\s*False\\s*(?:#.*)?$\", re.MULTILINE)\n for p in Path(__file__).parent.joinpath(task_name).rglob(\"*.py\"):\n if p.stem == \"__init__\":\n continue\n if active_pat.search(p.read_text(encoding=\"utf-8\")):\n continue\n task_modules.append(f\"{task_module}.{p.stem}\")\n return task_modules\n\n\ncelery_app = make_celery(\n include=autodiscover_task_modules()\n)\n",
@@ -54,10 +53,10 @@
54
53
  "app_celery/producer/registry.py": "from pydantic import BaseModel\n\n\nclass TaskParams(BaseModel):\n name: str\n queue: str\n options: dict = {}\n\n\nAllTasks: dict[str, TaskParams] = { # label: TaskParams\n \"ping\": TaskParams(\n name=\"app_celery.consumer.tasks.ping.ping\",\n queue=\"ping\"\n ),\n}\n",
55
54
  "app_celery/producer/tests.py": "import unittest\n\nfrom app_celery.producer.publisher import publish\n\n\nclass TestPublisher(unittest.TestCase):\n\n def test_publish_ping(self):\n publish(\"ping\")\n",
56
55
  "app_celery/producer/__init__.py": "\"\"\"\n\u751f\u4ea7\u8005\n\"\"\"\nfrom app_celery import make_celery\n\ncelery_app = make_celery()\n",
57
- "config/.env": "# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# \u5e94\u7528\u73af\u5883\uff08\u5b9a\u4f4dyaml\u914d\u7f6e\uff09\napp_env=dev\n# \u5e94\u7528\u914d\u7f6e\uff08\u6307\u5b9ayaml\u914d\u7f6e\uff0c\u4f18\u4e8e`app_env`\u5b9a\u4f4d\uff09\napp_yaml=\n# -----EnvConfig-----\n# \u96ea\u82b1\u7b97\u6cd5\u6570\u636e\u4e2d\u5fc3id\uff08\u53d6\u503c\uff1a0-31\uff0c\u5728\u5206\u5e03\u5f0f\u90e8\u7f72\u65f6\u9700\u786e\u4fdd\u6bcf\u4e2a\u8282\u70b9\u7684\u53d6\u503c\u4e0d\u540c\uff09\nsnow_datacenter_id=0",
58
- "config/app_dev.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-dev\napp_summary: xxApp-dev\napp_description: xxxApp-dev\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_dev.sqlite\ndb_async_url: sqlite+aiosqlite:///app_dev.sqlite\n# #\ncelery_broker_url: redis://:Mimmm%40129@192.168.22.21:6379/1\ncelery_backend_url: redis://:Mimmm%40129@192.168.22.21:6379/2\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
59
- "config/app_prod.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-prod\napp_summary: xxApp-prod\napp_description: xxxApp-prod\napp_version: 1.0.0\napp_debug: false\napp_log_dir: ./logs\napp_disable_docs: true\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_prod.sqlite\ndb_async_url: sqlite+aiosqlite:///app_prod.sqlite\n# #\ncelery_broker_url: redis://:Mimmm%40129@192.168.22.21:6379/1\ncelery_backend_url: redis://:Mimmm%40129@192.168.22.21:6379/2\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
60
- "config/app_test.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-test\napp_summary: xxApp-test\napp_description: xxxApp-test\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_test.sqlite\ndb_async_url: sqlite+aiosqlite:///app_test.sqlite\n# #\ncelery_broker_url: redis://:Mimmm%40129@192.168.22.21:6379/1\ncelery_backend_url: redis://:Mimmm%40129@192.168.22.21:6379/2\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
56
+ "config/.env": "# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# \u5e94\u7528\u73af\u5883\uff08\u5b9a\u4f4dyaml\u914d\u7f6e\uff09\napp_env=dev\n# \u5e94\u7528\u914d\u7f6e\uff08\u6307\u5b9ayaml\u914d\u7f6e\uff0c\u4f18\u4e8e`app_env`\u5b9a\u4f4d\uff09\napp_yaml=\n# -----Config-----\n# \u96ea\u82b1\u7b97\u6cd5\u6570\u636e\u4e2d\u5fc3id\uff08\u53d6\u503c\uff1a0-31\uff0c\u5728\u5206\u5e03\u5f0f\u90e8\u7f72\u65f6\u9700\u786e\u4fdd\u6bcf\u4e2a\u8282\u70b9\u7684\u53d6\u503c\u4e0d\u540c\uff09\nsnow_datacenter_id=0",
57
+ "config/app_dev.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-dev\napp_summary: xxApp-dev\napp_description: xxxApp-dev\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_dev.sqlite\ndb_async_url: sqlite+aiosqlite:///app_dev.sqlite\n# #\ncelery_broker_url: redis://:<password>@<host>:<port>/<db>\ncelery_backend_url: redis://:<password>@<host>:<port>/<db>\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
58
+ "config/app_prod.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-prod\napp_summary: xxApp-prod\napp_description: xxxApp-prod\napp_version: 1.0.0\napp_debug: false\napp_log_dir: ./logs\napp_disable_docs: true\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_prod.sqlite\ndb_async_url: sqlite+aiosqlite:///app_prod.sqlite\n# #\ncelery_broker_url: redis://:<password>@<host>:<port>/<db>\ncelery_backend_url: redis://:<password>@<host>:<port>/<db>\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
59
+ "config/app_test.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-test\napp_summary: xxApp-test\napp_description: xxxApp-test\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_test.sqlite\ndb_async_url: sqlite+aiosqlite:///app_test.sqlite\n# #\ncelery_broker_url: redis://:<password>@<host>:<port>/<db>\ncelery_backend_url: redis://:<password>@<host>:<port>/<db>\ncelery_timezone: Asia/Shanghai\ncelery_enable_utc: true\ncelery_task_serializer: json\ncelery_result_serializer: json\ncelery_accept_content: [ json ]\ncelery_task_ignore_result: false\ncelery_result_expire: 86400\ncelery_task_track_started: true\ncelery_worker_concurrency: 8\ncelery_worker_prefetch_multiplier: 2\ncelery_worker_max_tasks_per_child: 100\ncelery_broker_connection_retry_on_startup: true\ncelery_task_reject_on_worker_lost: true\n",
61
60
  "deploy/.gitkeep": "",
62
61
  "docs/.gitkeep": "",
63
62
  "logs/.gitkeep": "",
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fastapi-scaff
3
- Version: 0.2.4
3
+ Version: 0.2.5
4
4
  Summary: This is a fastapi scaff.
5
5
  Author-email: axiner <atpuxiner@163.com>
6
6
  Project-URL: Homepage, https://github.com/atpuxiner/fastapi-scaff
@@ -0,0 +1,10 @@
1
+ fastapi_scaff/__init__.py,sha256=W7z5byxYIKVKFPyboxla4uy3KVU8nuiLDYErY1ZkTvY,120
2
+ fastapi_scaff/__main__.py,sha256=qSwkBd0yLJMLa5fP5HENF7RmKhvs-4JKxSSGuKu7uDA,13232
3
+ fastapi_scaff/_api_tpl.json,sha256=p-obrtySBAFPzJeYEHfP49g3Xbx0yNZDMUcizuxkx6k,6283
4
+ fastapi_scaff/_project_tpl.json,sha256=rHAfqHWV3fiIe-bJvFhwZOH-LuMU238eKVIpPlGxjKE,89544
5
+ fastapi_scaff-0.2.5.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
6
+ fastapi_scaff-0.2.5.dist-info/METADATA,sha256=ps3eL2-A9VEugXqoPfrlwxlZki5LpTzM7laxUBG9P0w,3503
7
+ fastapi_scaff-0.2.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
8
+ fastapi_scaff-0.2.5.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
9
+ fastapi_scaff-0.2.5.dist-info/top_level.txt,sha256=LeyfUxMRhdbRHcYoH37ftfdspyZ8V3Uut2YBaTCzq2k,14
10
+ fastapi_scaff-0.2.5.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- fastapi_scaff/__init__.py,sha256=6pCpB2QWhGMY5b9DyZddQ5Dhwpd3N62kXT1AxMx1IOs,120
2
- fastapi_scaff/__main__.py,sha256=T5ODMU9Ge5jDz0F1EXjbvKhNZPMWWy2gmln8RoXwtPY,13220
3
- fastapi_scaff/_api_tpl.json,sha256=8oQC2cb9yD1azuOpxvTeY98hxq0U7lXJB0cd_D6jCe4,6795
4
- fastapi_scaff/_project_tpl.json,sha256=FEoe8HQdEnVCyIh6x03dwwGBQKSaJ3ATCJyrDXVXPhA,89822
5
- fastapi_scaff-0.2.4.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
6
- fastapi_scaff-0.2.4.dist-info/METADATA,sha256=0XBiA5nXk3t6ewkBARRUgKGrS98fGRQS5tRCOZf_J6I,3503
7
- fastapi_scaff-0.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
8
- fastapi_scaff-0.2.4.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
9
- fastapi_scaff-0.2.4.dist-info/top_level.txt,sha256=LeyfUxMRhdbRHcYoH37ftfdspyZ8V3Uut2YBaTCzq2k,14
10
- fastapi_scaff-0.2.4.dist-info/RECORD,,