fastapi-scaff 0.1.0-py3-none-any.whl → 0.1.1-py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release. This version of fastapi-scaff might be problematic.
- fastapi_scaff/__init__.py +1 -1
- fastapi_scaff/__main__.py +1 -3
- fastapi_scaff/_project_tpl.json +2 -2
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/METADATA +1 -1
- fastapi_scaff-0.1.1.dist-info/RECORD +11 -0
- fastapi_scaff-0.1.0.dist-info/RECORD +0 -11
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/WHEEL +0 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/entry_points.txt +0 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/licenses/LICENSE +0 -0
- {fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/top_level.txt +0 -0
fastapi_scaff/__init__.py
CHANGED
fastapi_scaff/__main__.py
CHANGED
@@ -24,10 +24,8 @@ def main():
         prog=prog,
         description="fastapi脚手架,一键生成项目或api,让开发变得更简单",
         epilog="examples: \n"
-               " `new`: %(prog)s new <myproj
+               " `new`: %(prog)s new <myproj>\n"
                " `add`: %(prog)s add <myapi>\n"
-               " `add`: %(prog)s add <myapi> -v <vn>\n"
-               " `add`: %(prog)s add <myapi> -s <subdir>\n"
                "",
         formatter_class=argparse.RawDescriptionHelpFormatter
     )
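For context, here is a minimal, self-contained sketch of the parser configuration that the 0.1.1 hunk above produces. The `prog` value and the omission of the subcommand wiring are assumptions made for illustration; the arguments themselves are taken from the diff.

```python
import argparse

# Sketch of the 0.1.1 ArgumentParser from fastapi_scaff/__main__.py (see hunk above).
# "fastapi-scaff" is an assumed prog name; the real value is set elsewhere in
# __main__.py, and the new/add subcommands are omitted here.
parser = argparse.ArgumentParser(
    prog="fastapi-scaff",
    description="fastapi脚手架,一键生成项目或api,让开发变得更简单",
    epilog="examples: \n"
           " `new`: %(prog)s new <myproj>\n"
           " `add`: %(prog)s add <myapi>\n"
           "",
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.print_help()  # RawDescriptionHelpFormatter keeps the epilog's line breaks verbatim
```

In other words, 0.1.1 updates the `new` example line and drops the `-v <vn>` and `-s <subdir>` examples from the help epilog.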
fastapi_scaff/_project_tpl.json
CHANGED
@@ -22,7 +22,7 @@
"app/initializer/_log.py": "import os\nimport sys\nfrom pathlib import Path\n\nfrom loguru import logger\nfrom loguru._logger import Logger # noqa\n\nfrom app.initializer.context import request_id_ctx_var\n\n_LOG_CONSOLE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_PREFIX = \"app\"\n_LOG_ROTATION = \"100 MB\"\n_LOG_RETENTION = \"15 days\"\n_LOG_COMPRESSION = None\n_LOG_ENQUEUE = True\n_LOG_BACKTRACE = False\n_LOG_DIAGNOSE = False\n_LOG_CATCH = False\n_LOG_PID = False\n\n\ndef init_logger(\n debug: bool,\n log_dir: str = None,\n) -> Logger:\n logger.remove(None)\n _lever = \"DEBUG\" if debug else \"INFO\"\n\n def _filter(record: dict) -> bool:\n record[\"extra\"][\"request_id\"] = request_id_ctx_var.get()\n return True\n\n logger.add(\n sys.stdout,\n format=_LOG_CONSOLE_FORMAT,\n level=_lever,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n filter=_filter,\n )\n if log_dir:\n _log_dir = Path(log_dir)\n _log_access_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-access.log\")\n _log_error_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-error.log\")\n if _LOG_PID:\n _log_access_file = str(_log_access_file).replace(\".log\", f\".{os.getpid()}.log\")\n _log_error_file = str(_log_error_file).replace(\".log\", f\".{os.getpid()}.log\")\n logger.add(\n _log_access_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=_lever,\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n logger.add(\n _log_error_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=\"ERROR\",\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n return logger\n",
"app/initializer/_redis.py": "from toollib.rediser import RedisCli\n\n\ndef init_redis_cli(\n host: str,\n port: int,\n db: int,\n password: str = None,\n max_connections: int = None,\n **kwargs,\n) -> RedisCli:\n if not host:\n return RedisCli()\n return RedisCli(\n host=host,\n port=port,\n db=db,\n password=password,\n max_connections=max_connections,\n **kwargs,\n )\n",
"app/initializer/_snow.py": "import os\n\nfrom loguru import logger\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import localip\n\n_CACHE_KEY_SNOW_WORKER_ID_INCR = \"config:snow_worker_id_incr\"\n_CACHE_KEY_SNOW_DATACENTER_ID_INCR = \"config:snow_datacenter_id_incr\"\n_CACHE_EXPIRE_SNOW = 120\n\n\ndef init_snow_cli(\n redis_cli: RedisCli,\n datacenter_id: int = None,\n to_str: bool = True,\n) -> SnowFlake: # \u5efa\u8bae\uff1a\u91c7\u7528\u670d\u52a1\u7684\u65b9\u5f0f\u8c03\u7528api\u83b7\u53d6\n if datacenter_id is None:\n datacenter_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_DATACENTER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if datacenter_id is None:\n local_ip = localip()\n if local_ip:\n ip_parts = list(map(int, local_ip.split('.')))\n ip_int = (ip_parts[0] << 24) + (ip_parts[1] << 16) + (ip_parts[2] << 8) + ip_parts[3]\n datacenter_id = ip_int % 32\n worker_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_WORKER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if worker_id is None:\n worker_id = os.getpid() % 32\n return SnowFlake(worker_id=worker_id, datacenter_id=datacenter_id, to_str=to_str)\n\n\ndef _snow_incr(redis_cli, cache_key: str, cache_expire: int):\n incr = None\n try:\n with redis_cli.connection() as r:\n resp = r.ping()\n if resp:\n lua_script = \"\"\"\n if redis.call('exists', KEYS[1]) == 1 then\n redis.call('expire', KEYS[1], ARGV[1])\n return redis.call('incr', KEYS[1])\n else\n redis.call('set', KEYS[1], 0)\n redis.call('expire', KEYS[1], ARGV[1])\n return 0\n end\n \"\"\"\n incr = r.eval(lua_script, 1, cache_key, cache_expire)\n except Exception as e:\n logger.warning(f\"snow\u521d\u59cb\u5316id\u5c06\u91c7\u7528\u672c\u5730\u65b9\u5f0f\uff0c\u7531\u4e8e\uff08{e}\uff09\")\n return incr\n",
-
"app/initializer/__init__.py": "\"\"\"\n\u521d\u59cb\u5316\n\"\"\"\nfrom loguru._logger import Logger # noqa\nfrom sqlalchemy.orm import sessionmaker, scoped_session\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import Singleton\n\nfrom app.initializer._conf import init_config\nfrom app.initializer._db import init_db_session, init_db_async_session\nfrom app.initializer._log import init_logger\nfrom app.initializer._redis import init_redis_cli\nfrom app.initializer._snow import init_snow_cli\n\n\nclass G(metaclass=Singleton):\n \"\"\"\n \u5168\u5c40\u53d8\u91cf\n \"\"\"\n config = None\n logger: Logger = None\n redis_cli: RedisCli = None\n snow_cli: SnowFlake = None\n db_session: scoped_session = None\n db_async_session: sessionmaker = None\n\n def __getattribute__(self, name):\n try:\n value = super().__getattribute__(name)\n except AttributeError:\n value = None\n if value is None:\n getter_name = f\"_get_{name}\"\n getter_method = getattr(self.__class__, getter_name, None)\n if callable(getter_method):\n value = getter_method()\n setattr(self, name, value)\n return value\n\n @classmethod\n def _get_config(cls):\n if not cls.config:\n cls.config = init_config()\n return cls.config\n\n @classmethod\n def _get_logger(cls):\n if not cls.logger:\n cls.logger = init_logger(\n debug=cls.config.app_debug,\n log_dir=cls.config.app_log_dir,\n )\n return cls.logger\n\n @classmethod\n def _get_redis_cli(cls):\n if not cls.redis_cli:\n cls.redis_cli = init_redis_cli(\n host=cls.config.redis_host,\n port=cls.config.redis_port,\n db=cls.config.redis_db,\n password=cls.config.redis_password,\n max_connections=cls.config.redis_max_connections,\n )\n return cls.redis_cli\n\n @classmethod\n def _get_snow_cli(cls):\n if not cls.snow_cli:\n cls.snow_cli = init_snow_cli(\n redis_cli=cls.redis_cli,\n datacenter_id=cls.config.snow_datacenter_id,\n )\n return cls.snow_cli\n\n @classmethod\n def _get_db_session(cls):\n if not cls.db_session:\n cls.db_session = init_db_session(\n db_url=cls.config.db_url,\n db_echo=cls.config.
+
"app/initializer/__init__.py": "\"\"\"\n\u521d\u59cb\u5316\n\"\"\"\nfrom loguru._logger import Logger # noqa\nfrom sqlalchemy.orm import sessionmaker, scoped_session\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import Singleton\n\nfrom app.initializer._conf import init_config\nfrom app.initializer._db import init_db_session, init_db_async_session\nfrom app.initializer._log import init_logger\nfrom app.initializer._redis import init_redis_cli\nfrom app.initializer._snow import init_snow_cli\n\n\nclass G(metaclass=Singleton):\n \"\"\"\n \u5168\u5c40\u53d8\u91cf\n \"\"\"\n config = None\n logger: Logger = None\n redis_cli: RedisCli = None\n snow_cli: SnowFlake = None\n db_session: scoped_session = None\n db_async_session: sessionmaker = None\n\n def __getattribute__(self, name):\n try:\n value = super().__getattribute__(name)\n except AttributeError:\n value = None\n if value is None:\n getter_name = f\"_get_{name}\"\n getter_method = getattr(self.__class__, getter_name, None)\n if callable(getter_method):\n value = getter_method()\n setattr(self, name, value)\n return value\n\n @classmethod\n def _get_config(cls):\n if not cls.config:\n cls.config = init_config()\n return cls.config\n\n @classmethod\n def _get_logger(cls):\n if not cls.logger:\n cls.logger = init_logger(\n debug=cls.config.app_debug,\n log_dir=cls.config.app_log_dir,\n )\n return cls.logger\n\n @classmethod\n def _get_redis_cli(cls):\n if not cls.redis_cli:\n cls.redis_cli = init_redis_cli(\n host=cls.config.redis_host,\n port=cls.config.redis_port,\n db=cls.config.redis_db,\n password=cls.config.redis_password,\n max_connections=cls.config.redis_max_connections,\n )\n return cls.redis_cli\n\n @classmethod\n def _get_snow_cli(cls):\n if not cls.snow_cli:\n cls.snow_cli = init_snow_cli(\n redis_cli=cls.redis_cli,\n datacenter_id=cls.config.snow_datacenter_id,\n )\n return cls.snow_cli\n\n @classmethod\n def _get_db_session(cls):\n if not cls.db_session:\n cls.db_session = init_db_session(\n db_url=cls.config.db_url,\n db_echo=cls.config.app_debug,\n )\n return cls.db_session\n\n @classmethod\n def _get_db_async_session(cls):\n if not cls.db_async_session:\n cls.db_async_session = init_db_async_session(\n db_url=cls.config.db_async_url,\n db_echo=cls.config.app_debug,\n )\n return cls.db_async_session\n\n @classmethod\n def setup(cls):\n \"\"\"\n \u521d\u59cb\u5316\n \"\"\"\n cls._get_config()\n cls._get_logger()\n cls._get_redis_cli()\n cls._get_snow_cli()\n # cls._get_db_session()\n cls._get_db_async_session()\n\n\ng = G()\n",
"app/middleware/auth.py": "from fastapi import Depends\nfrom fastapi.security import HTTPBearer, HTTPAuthorizationCredentials\nfrom typing import Optional\n\nfrom fastapi.security.utils import get_authorization_scheme_param\nfrom pydantic import BaseModel\nfrom starlette.requests import Request\n\nfrom app.api.exception import CustomException\nfrom app.api.status import Status\nfrom app.initializer import g\nfrom app.model.user import User\nfrom app.utils import db_async\nfrom app.utils.auth import verify_jwt\n\n\nclass JWTUser(BaseModel):\n # \u5b57\u6bb5\u4e0eUser\u5bf9\u9f50\n id: str = None\n phone: str = None\n name: str = None\n age: int = None\n gender: int = None\n\n\nclass JWTAuthorizationCredentials(HTTPAuthorizationCredentials):\n user: JWTUser\n\n\nclass JWTBearer(HTTPBearer):\n\n async def __call__(\n self, request: Request\n ) -> Optional[JWTAuthorizationCredentials]:\n authorization = request.headers.get(\"Authorization\")\n scheme, credentials = get_authorization_scheme_param(authorization)\n if not (authorization and scheme and credentials):\n if self.auto_error:\n raise CustomException(\n msg=\"Not authenticated\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n if scheme.lower() != \"bearer\":\n if self.auto_error:\n raise CustomException(\n msg=\"Invalid authentication credentials\",\n status=Status.UNAUTHORIZED_ERROR,\n )\n else:\n return None\n user = await self.verify_credentials(credentials)\n return JWTAuthorizationCredentials(scheme=scheme, credentials=credentials, user=user)\n\n async def verify_credentials(self, credentials: str) -> JWTUser:\n playload = await self._verify_jwt(credentials)\n if playload is None:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # \u5efa\u8bae\uff1ajwt_key\u8fdb\u884credis\u7f13\u5b58\n async with g.db_async_session() as session:\n data = await db_async.query_one(\n session=session,\n model=User,\n fields=[\"jwt_key\"],\n filter_by={\"id\": playload.get(\"id\")}\n )\n if not data:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR)\n # <<< \u5efa\u8bae\n await self._verify_jwt(credentials, jwt_key=data.get(\"jwt_key\"))\n return JWTUser(\n id=playload.get(\"id\"),\n phone=playload.get(\"phone\"),\n name=playload.get(\"name\"),\n age=playload.get(\"age\"),\n gender=playload.get(\"gender\"),\n )\n\n @staticmethod\n async def _verify_jwt(token: str, jwt_key: str = None) -> dict:\n try:\n return verify_jwt(token=token, jwt_key=jwt_key)\n except Exception as e:\n raise CustomException(status=Status.UNAUTHORIZED_ERROR, msg=str(e))\n\n\ndef get_current_user(\n credentials: Optional[JWTAuthorizationCredentials] = Depends(JWTBearer(auto_error=True))\n) -> JWTUser:\n if not credentials:\n return JWTUser()\n return credentials.user\n",
"app/middleware/cors.py": "from fastapi.middleware.cors import CORSMiddleware\n\nfrom app.initializer import g\n\n\nclass Cors:\n middleware_class = CORSMiddleware\n allow_origins = g.config.app_allow_origins\n allow_credentials = True\n allow_methods = [\"*\"]\n allow_headers = [\"*\"]\n",
"app/middleware/exception.py": "import traceback\n\nfrom fastapi.exceptions import RequestValidationError\nfrom starlette.exceptions import HTTPException\nfrom starlette.requests import Request\nfrom starlette.responses import JSONResponse\n\nfrom app.api.exception import CustomException\nfrom app.api.response import Response\nfrom app.api.status import Status\nfrom app.initializer import g\n\n\nclass ExceptionHandler:\n\n @staticmethod\n async def custom_exception_handler(\n request: Request,\n exc: CustomException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.code} {exc.msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=exc.msg,\n code=exc.code,\n data=exc.data,\n request=request,\n )\n\n @staticmethod\n async def http_exception_handler(\n request: Request,\n exc: HTTPException,\n is_traceback: bool = False,\n ) -> JSONResponse:\n lmsg = f'- \"{request.method} {request.url.path}\" {exc.status_code} {exc.detail}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=exc.detail,\n code=exc.status_code,\n request=request,\n )\n\n @staticmethod\n async def validation_exception_handler(\n request: Request,\n exc: RequestValidationError,\n is_display_all: bool = False,\n is_traceback: bool = False,\n ) -> JSONResponse:\n if is_display_all:\n msg = \", \".join([f\"'{item['loc'][1] if len(item['loc']) > 1 else item['loc'][0]}' {item['msg'].lower()}\" for item in exc.errors()]) # noqa: E501\n else:\n _first_error = exc.errors()[0]\n msg = f\"'{_first_error['loc'][1] if len(_first_error['loc']) > 1 else _first_error['loc'][0]}' {_first_error['msg'].lower()}\" # noqa: E501\n lmsg = f'- \"{request.method} {request.url.path}\" {Status.PARAMS_ERROR.code} {msg}'\n if is_traceback:\n lmsg = traceback.format_exc()\n g.logger.error(lmsg)\n return Response.failure(\n msg=msg,\n status=Status.PARAMS_ERROR,\n request=request,\n )\n",
@@ -37,7 +37,7 @@
"app/utils/auth.py": "import secrets\nfrom datetime import datetime, timedelta\n\nimport bcrypt\nimport jwt\n\n_ALGORITHM = \"HS256\"\n\n\ndef gen_jwt(payload: dict, jwt_key: str, exp_minutes: int = 24 * 60 * 30):\n payload.update({\"exp\": datetime.utcnow() + timedelta(minutes=exp_minutes)})\n encoded_jwt = jwt.encode(payload=payload, key=jwt_key, algorithm=_ALGORITHM)\n return encoded_jwt\n\n\ndef verify_jwt(token: str, jwt_key: str = None) -> dict:\n if not jwt_key:\n return jwt.decode(jwt=token, options={\"verify_signature\": False})\n return jwt.decode(jwt=token, key=jwt_key, algorithms=[_ALGORITHM])\n\n\ndef gen_jwt_key():\n return secrets.token_hex(16)\n\n\ndef hash_password(password: str) -> str:\n salt = bcrypt.gensalt()\n hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)\n return hashed_password.decode('utf-8')\n\n\ndef verify_password(password: str, hashed_password: str) -> bool:\n return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))\n",
"app/utils/db_async.py": "from sqlalchemy import (\n select,\n func,\n update as update_,\n delete as delete_,\n)\n\n\ndef format_all(\n rows,\n fields: list[str],\n) -> list[dict]:\n if not rows:\n return list()\n return [dict(zip(fields, row)) for row in rows]\n\n\ndef format_one(\n row,\n fields: list[str],\n) -> dict:\n if not row:\n return dict()\n return dict(zip(fields, row))\n\n\ndef model_dict(\n model,\n fields: list[str] = None,\n) -> dict:\n if not model:\n return dict()\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n return {field: getattr(model, field) for field in fields}\n\n\nasync def query_one(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n) -> dict:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return format_one(result.fetchone(), fields)\n\n\nasync def query_all(\n session,\n model,\n fields: list[str] = None,\n filter_by: dict = None,\n page: int = None,\n size: int = None,\n) -> list[dict]:\n if not fields:\n fields = [field.name for field in model.__table__.columns]\n query = select(*[getattr(model, field) for field in fields if hasattr(model, field)]).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n if page and size:\n query = query.offset((page - 1) * size).limit(size)\n result = await session.execute(query)\n return format_all(result.fetchall(), fields)\n\n\nasync def query_total(\n session,\n model,\n filter_by: dict = None,\n) -> int:\n query = select(func.count()).select_from(model)\n if filter_by:\n query = query.filter_by(**filter_by)\n result = await session.execute(query)\n return result.scalar()\n\n\nasync def create(\n session,\n model,\n data: dict,\n filter_by: dict = None,\n) -> int:\n try:\n if filter_by:\n result = await query_one(session, model, filter_by=filter_by)\n if result:\n return 0\n stmt = model(**data)\n session.add(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return stmt.id\n\n\nasync def update(\n session,\n model,\n data: dict,\n filter_by: dict | None,\n is_exclude_none: bool = True,\n) -> list:\n try:\n if is_exclude_none:\n data = {k: v for k, v in data.items() if v is not None}\n stmt = update_(model).values(**data)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n updated_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n updated_ids = result.scalars().all()\n if updated_ids:\n await session.execute(stmt)\n await session.commit()\n except Exception:\n await session.rollback()\n raise\n return updated_ids\n\n\nasync def delete(\n session,\n model,\n filter_by: dict | None,\n) -> list:\n try:\n stmt = delete_(model)\n if filter_by:\n stmt = stmt.filter_by(**filter_by)\n if session.bind.dialect.name == \"postgresql\":\n stmt = stmt.returning(model.id)\n result = await session.execute(stmt)\n deleted_ids = [row[0] for row in result]\n else:\n query_stmt = select(model.id).filter_by(**filter_by)\n result = await session.execute(query_stmt)\n deleted_ids = result.scalars().all()\n if deleted_ids:\n await session.execute(stmt)\n await session.commit()\n except 
Exception:\n await session.rollback()\n raise\n return deleted_ids\n",
"app/utils/__init__.py": "\"\"\"\nutils\n\"\"\"\n",
-
"config/.env": "# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# \u5e94\u7528\u73af\u5883\uff08\u5b9a\u4f4dyaml\u914d\u7f6e\uff09\napp_env=dev\n# \u5e94\u7528\u914d\u7f6e\uff08\u6307\u5b9ayaml\u914d\u7f6e\uff0c\u4f18\u4e8e`app_env`\u5b9a\u4f4d\uff09\napp_yaml=\n# \u96ea\u82b1\u7b97\u6cd5\u6570\u636e\u4e2d\u5fc3id\uff08\u53d6\u503c\uff1a0-31\uff0c\u5728\u5206\u5e03\u5f0f\u90e8\u7f72\u65f6\u9700\u786e\u4fdd\u6bcf\u4e2a\u8282\u70b9\u7684\u53d6\u503c\u4e0d\u540c\uff09\nsnow_datacenter_id=0",
+
"config/.env": "# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# -----\u5747\u53ef\u76f4\u63a5\u8bbe\u7f6e\u73af\u5883\u53d8\u91cf-----\n# \u5e94\u7528\u73af\u5883\uff08\u5b9a\u4f4dyaml\u914d\u7f6e\uff09\napp_env=dev\n# \u5e94\u7528\u914d\u7f6e\uff08\u6307\u5b9ayaml\u914d\u7f6e\uff0c\u4f18\u4e8e`app_env`\u5b9a\u4f4d\uff09\napp_yaml=\n# -----EnvConfig-----\n# \u96ea\u82b1\u7b97\u6cd5\u6570\u636e\u4e2d\u5fc3id\uff08\u53d6\u503c\uff1a0-31\uff0c\u5728\u5206\u5e03\u5f0f\u90e8\u7f72\u65f6\u9700\u786e\u4fdd\u6bcf\u4e2a\u8282\u70b9\u7684\u53d6\u503c\u4e0d\u540c\uff09\nsnow_datacenter_id=0",
"config/app_dev.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-dev\napp_summary: xxApp-dev\napp_description: xxxApp-dev\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_dev.sqlite\ndb_async_url: sqlite+aiosqlite:///app_dev.sqlite\n",
"config/app_prod.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-prod\napp_summary: xxApp-prod\napp_description: xxxApp-prod\napp_version: 1.0.0\napp_debug: false\napp_log_dir: ./logs\napp_disable_docs: true\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_prod.sqlite\ndb_async_url: sqlite+aiosqlite:///app_prod.sqlite\n",
"config/app_test.yaml": "# \u8bf7\u6839\u636e\u81ea\u8eab\u9700\u6c42\u4fee\u6539\napp_title: xApp-test\napp_summary: xxApp-test\napp_description: xxxApp-test\napp_version: 1.0.0\napp_debug: true\napp_log_dir: ./logs\napp_disable_docs: false\napp_allow_origins:\n - '*'\n# #\nredis_host:\nredis_port:\nredis_db:\nredis_password:\nredis_max_connections:\ndb_url: sqlite:///app_test.sqlite\ndb_async_url: sqlite+aiosqlite:///app_test.sqlite\n",
fastapi_scaff-0.1.1.dist-info/RECORD
ADDED
@@ -0,0 +1,11 @@
+fastapi_scaff/__init__.py,sha256=DBOEOfdc5GhHpvXIoFI-1U8DoUVzfgQzZxPP8RGqvsE,120
+fastapi_scaff/__main__.py,sha256=g3EYKEbGwnYJDLA884TN3Qy3izD5HpcPv-ikS7oOabM,13244
+fastapi_scaff/_api_tpl.json,sha256=Xxqz5UmJYOr0EedQLm45tZlxJuq7hlRf7r-j9Mq9cJc,6810
+fastapi_scaff/_project_tpl.json,sha256=wBiJ8xxi_Pt1AYgnw4N0VxSwDJ93FaQpnuyjcHp1MUM,72681
+fastapi_scaff-0.1.1.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
+tests/__init__.py,sha256=hL1sZ5kdrEFj8BqVmf98RgCrZAfXWNUhc122nk4hwuw,18
+fastapi_scaff-0.1.1.dist-info/METADATA,sha256=FMb8GfsTExE2oMKww1HiUeYihv_-aslvk-RoWWvZQ_A,3376
+fastapi_scaff-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fastapi_scaff-0.1.1.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
+fastapi_scaff-0.1.1.dist-info/top_level.txt,sha256=GYgW8daqMJBN-Gv-z0JiFM2XWuSyBoEgXwqJW1wi0hg,20
+fastapi_scaff-0.1.1.dist-info/RECORD,,
fastapi_scaff-0.1.0.dist-info/RECORD
REMOVED
@@ -1,11 +0,0 @@
-fastapi_scaff/__init__.py,sha256=n40GtUaNM5XhBd7sIO5zzcmLNztvk7C4ngCT1xMKp1Q,120
-fastapi_scaff/__main__.py,sha256=F9nXaL5P4pzj_I-Hkal204GVsJEqaYUPMlvgSf_nYbw,13378
-fastapi_scaff/_api_tpl.json,sha256=Xxqz5UmJYOr0EedQLm45tZlxJuq7hlRf7r-j9Mq9cJc,6810
-fastapi_scaff/_project_tpl.json,sha256=N65CzEn7836oRy_u4zwVaMBr6rbHwA8zqBLaGMe-SJM,72654
-fastapi_scaff-0.1.0.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
-tests/__init__.py,sha256=hL1sZ5kdrEFj8BqVmf98RgCrZAfXWNUhc122nk4hwuw,18
-fastapi_scaff-0.1.0.dist-info/METADATA,sha256=4wekLntgtRbUPVz7fBKB_v3yBvUAiOZd56jD312zFDU,3376
-fastapi_scaff-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fastapi_scaff-0.1.0.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
-fastapi_scaff-0.1.0.dist-info/top_level.txt,sha256=GYgW8daqMJBN-Gv-z0JiFM2XWuSyBoEgXwqJW1wi0hg,20
-fastapi_scaff-0.1.0.dist-info/RECORD,,
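Each RECORD line has the form `path,sha256=<digest>,<size>`, where the digest is the unpadded URL-safe base64 encoding of the file's SHA-256 hash. A small sketch of how such a line could be recomputed when auditing a wheel (the path in the usage comment is hypothetical):

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build a RECORD-style line: path,sha256=<unpadded urlsafe b64 digest>,<size in bytes>."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode('ascii')},{len(data)}"


# Usage against an extracted wheel (hypothetical local path):
# print(record_entry("fastapi_scaff/_project_tpl.json"))
```

The size column alone already shows the shape of this release: `__main__.py` shrank from 13378 to 13244 bytes and `_project_tpl.json` grew from 72654 to 72681 bytes, while `_api_tpl.json`, `tests/__init__.py`, the license, and the remaining dist-info files kept identical hashes.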
{fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/WHEEL
File without changes
{fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/entry_points.txt
File without changes
{fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/licenses/LICENSE
File without changes
{fastapi_scaff-0.1.0.dist-info → fastapi_scaff-0.1.1.dist-info}/top_level.txt
File without changes