fastapi-scaff 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of fastapi-scaff might be problematic.
- fastapi_scaff/__init__.py +1 -1
- fastapi_scaff/__main__.py +1 -1
- fastapi_scaff/_project_tpl.json +1 -1
- {fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/METADATA +1 -1
- fastapi_scaff-0.1.5.dist-info/RECORD +11 -0
- fastapi_scaff-0.1.3.dist-info/RECORD +0 -11
- {fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/WHEEL +0 -0
- {fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/entry_points.txt +0 -0
- {fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/top_level.txt +0 -0
fastapi_scaff/__init__.py
CHANGED
fastapi_scaff/__main__.py
CHANGED
fastapi_scaff/_project_tpl.json
CHANGED
@@ -18,7 +18,7 @@
 "app/atask/__init__.py": "\"\"\"\n\u5f02\u6b65\u4efb\u52a1\n\"\"\"\n",
 "app/initializer/context.py": "from contextvars import ContextVar\n\nrequest_id_ctx_var: ContextVar[str] = ContextVar(\"request_id\", default=\"N/A\")\n",
 "app/initializer/_conf.py": "import os\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\nfrom toollib.utils import get_cls_attrs, parse_variable\n\nfrom app import APP_DIR\n\n_CONFIG_DIR = APP_DIR.parent.joinpath(\"config\")\n\nload_dotenv(dotenv_path=os.environ.setdefault(\n key=\"env_path\",\n value=str(_CONFIG_DIR.joinpath(\".env\")))\n)\n# #\napp_yaml = Path(\n os.environ.get(\"app_yaml\") or\n _CONFIG_DIR.joinpath(f\"app_{os.environ.setdefault(key='app_env', value='dev')}.yaml\")\n)\nif not app_yaml.is_file():\n raise RuntimeError(f\"\u914d\u7f6e\u6587\u4ef6\u4e0d\u5b58\u5728\uff1a{app_yaml}\")\n\n\nclass EnvConfig:\n \"\"\"env\u914d\u7f6e\"\"\"\n snow_datacenter_id: int = None\n\n def setattr_from_env(self):\n cls_attrs = get_cls_attrs(EnvConfig)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n v = parse_variable(k=k, v_type=v_type, v_from=os.environ, default=v)\n setattr(self, k, v)\n\n\nclass Config(EnvConfig):\n \"\"\"\u914d\u7f6e\"\"\"\n _yaml_conf: dict = None\n yaml_name: str = app_yaml.name\n #\n app_title: str = \"xApp\"\n app_summary: str = \"xxApp\"\n app_description: str = \"xxxApp\"\n app_version: str = \"1.0.0\"\n app_debug: bool = True\n app_log_dir: str = \"./logs\"\n app_disable_docs: bool = True\n app_allow_origins: list = [\"*\"]\n # #\n redis_host: str = None\n redis_port: int = None\n redis_db: int = None\n redis_password: str = None\n redis_max_connections: int = None\n db_url: str = None\n db_async_url: str = None\n\n def setup(self):\n self.setattr_from_env()\n self.setattr_from_yaml()\n return self\n\n def setattr_from_yaml(self):\n cls_attrs = get_cls_attrs(Config)\n for k, item in cls_attrs.items():\n v_type, v = item\n if callable(v_type):\n v = parse_variable(k=k, v_type=v_type, v_from=self.load_yaml(), default=v)\n setattr(self, k, v)\n\n def load_yaml(self, reload: bool = False) -> dict:\n if self._yaml_conf and not reload:\n return self._yaml_conf\n with open(app_yaml, mode=\"r\", encoding=\"utf-8\") as file:\n self._yaml_conf = yaml.load(file, Loader=yaml.FullLoader)\n return self._yaml_conf\n\n\ndef init_config() -> Config:\n return Config().setup()\n",
-"app/initializer/_db.py": "import asyncio\nimport importlib\n\nfrom sqlalchemy import create_engine
+"app/initializer/_db.py": "import asyncio\nimport importlib\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.ext.asyncio import create_async_engine, AsyncSession\nfrom sqlalchemy.orm import sessionmaker, scoped_session\n\nfrom app import APP_DIR\n\n_DSCHEMA_MOD_DIR = APP_DIR.joinpath(\"models\")\n_DSCHEMA_MOD_BASE = \"app.models\"\n_TABLES_CREATED = False\n\n\ndef init_db_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> scoped_session:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n engine = create_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_session = sessionmaker(engine, expire_on_commit=False)\n\n def create_tables():\n from app.models import DeclBase\n _import_tables()\n try:\n DeclBase.metadata.create_all(engine)\n except Exception as e:\n if \"already exists\" not in str(e):\n raise\n\n global _TABLES_CREATED\n if is_create_tables and not _TABLES_CREATED:\n create_tables()\n _TABLES_CREATED = True\n\n return scoped_session(db_session)\n\n\ndef init_db_async_session(\n db_url: str,\n db_echo: bool,\n db_pool_size: int = 10,\n db_max_overflow: int = 5,\n db_pool_recycle: int = 3600,\n is_create_tables: bool = True,\n) -> sessionmaker:\n db_echo = db_echo or False\n kwargs = {\n \"pool_size\": db_pool_size,\n \"max_overflow\": db_max_overflow,\n \"pool_recycle\": db_pool_recycle,\n }\n if db_url.startswith(\"sqlite\"):\n kwargs = {}\n async_engine = create_async_engine(\n url=db_url,\n echo=db_echo,\n echo_pool=db_echo,\n **kwargs,\n )\n db_async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False) # noqa\n\n async def create_tables():\n from app.models import DeclBase\n _import_tables()\n async with async_engine.begin() as conn:\n try:\n await conn.run_sync(DeclBase.metadata.create_all)\n except Exception as e:\n if \"already exists\" not in str(e):\n raise\n\n global _TABLES_CREATED\n if is_create_tables and not _TABLES_CREATED:\n try:\n loop = asyncio.get_running_loop()\n except RuntimeError:\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n task = loop.create_task(create_tables())\n task.add_done_callback(lambda t: t.result() if not t.cancelled() else None)\n if not loop.is_running():\n loop.run_until_complete(task)\n _TABLES_CREATED = True\n return db_async_session\n\n\ndef _import_tables():\n \"\"\"\u5bfc\u5165\u8868\"\"\"\n for f in _DSCHEMA_MOD_DIR.glob(\"*.py\"):\n if not f.name.startswith(\"__\"):\n _ = importlib.import_module(f\"{_DSCHEMA_MOD_BASE}.{f.stem}\")\n",
 "app/initializer/_log.py": "import os\nimport sys\nfrom pathlib import Path\n\nfrom loguru import logger\nfrom loguru._logger import Logger # noqa\n\nfrom app.initializer.context import request_id_ctx_var\n\n_LOG_CONSOLE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_FORMAT = \"{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}\"\n_LOG_FILE_PREFIX = \"app\"\n_LOG_ROTATION = \"100 MB\"\n_LOG_RETENTION = \"15 days\"\n_LOG_COMPRESSION = None\n_LOG_ENQUEUE = True\n_LOG_BACKTRACE = False\n_LOG_DIAGNOSE = False\n_LOG_CATCH = False\n_LOG_PID = False\n\n\ndef init_logger(\n debug: bool,\n log_dir: str = None,\n) -> Logger:\n logger.remove(None)\n _lever = \"DEBUG\" if debug else \"INFO\"\n\n def _filter(record: dict) -> bool:\n record[\"extra\"][\"request_id\"] = request_id_ctx_var.get()\n return True\n\n logger.add(\n sys.stdout,\n format=_LOG_CONSOLE_FORMAT,\n level=_lever,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n filter=_filter,\n )\n if log_dir:\n _log_dir = Path(log_dir)\n _log_access_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-access.log\")\n _log_error_file = _log_dir.joinpath(f\"{_LOG_FILE_PREFIX}-error.log\")\n if _LOG_PID:\n _log_access_file = str(_log_access_file).replace(\".log\", f\".{os.getpid()}.log\")\n _log_error_file = str(_log_error_file).replace(\".log\", f\".{os.getpid()}.log\")\n logger.add(\n _log_access_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=_lever,\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n logger.add(\n _log_error_file,\n encoding=\"utf-8\",\n format=_LOG_FILE_FORMAT,\n level=\"ERROR\",\n rotation=_LOG_ROTATION,\n retention=_LOG_RETENTION,\n compression=_LOG_COMPRESSION,\n enqueue=_LOG_ENQUEUE,\n backtrace=_LOG_BACKTRACE,\n diagnose=_LOG_DIAGNOSE,\n catch=_LOG_CATCH,\n )\n return logger\n",
 "app/initializer/_redis.py": "from toollib.rediser import RedisCli\n\n\ndef init_redis_cli(\n host: str,\n port: int,\n db: int,\n password: str = None,\n max_connections: int = None,\n **kwargs,\n) -> RedisCli:\n if not host:\n return RedisCli()\n return RedisCli(\n host=host,\n port=port,\n db=db,\n password=password,\n max_connections=max_connections,\n **kwargs,\n )\n",
 "app/initializer/_snow.py": "import os\n\nfrom loguru import logger\nfrom toollib.guid import SnowFlake\nfrom toollib.rediser import RedisCli\nfrom toollib.utils import localip\n\n_CACHE_KEY_SNOW_WORKER_ID_INCR = \"config:snow_worker_id_incr\"\n_CACHE_KEY_SNOW_DATACENTER_ID_INCR = \"config:snow_datacenter_id_incr\"\n_CACHE_EXPIRE_SNOW = 120\n\n\ndef init_snow_cli(\n redis_cli: RedisCli,\n datacenter_id: int = None,\n to_str: bool = True,\n) -> SnowFlake: # \u5efa\u8bae\uff1a\u91c7\u7528\u670d\u52a1\u7684\u65b9\u5f0f\u8c03\u7528api\u83b7\u53d6\n if datacenter_id is None:\n datacenter_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_DATACENTER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if datacenter_id is None:\n local_ip = localip()\n if local_ip:\n ip_parts = list(map(int, local_ip.split('.')))\n ip_int = (ip_parts[0] << 24) + (ip_parts[1] << 16) + (ip_parts[2] << 8) + ip_parts[3]\n datacenter_id = ip_int % 32\n worker_id = _snow_incr(redis_cli, _CACHE_KEY_SNOW_WORKER_ID_INCR, _CACHE_EXPIRE_SNOW)\n if worker_id is None:\n worker_id = os.getpid() % 32\n return SnowFlake(worker_id=worker_id, datacenter_id=datacenter_id, to_str=to_str)\n\n\ndef _snow_incr(redis_cli, cache_key: str, cache_expire: int):\n incr = None\n try:\n with redis_cli.connection() as r:\n resp = r.ping()\n if resp:\n lua_script = \"\"\"\n if redis.call('exists', KEYS[1]) == 1 then\n redis.call('expire', KEYS[1], ARGV[1])\n return redis.call('incr', KEYS[1])\n else\n redis.call('set', KEYS[1], 0)\n redis.call('expire', KEYS[1], ARGV[1])\n return 0\n end\n \"\"\"\n incr = r.eval(lua_script, 1, cache_key, cache_expire)\n except Exception as e:\n logger.warning(f\"snow\u521d\u59cb\u5316id\u5c06\u91c7\u7528\u672c\u5730\u65b9\u5f0f\uff0c\u7531\u4e8e\uff08{e}\uff09\")\n return incr\n",
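For context, the unchanged `_conf.py` template layers configuration: annotated class attributes act as schema and defaults, overridden from the environment and from `config/app_{app_env}.yaml` via toollib's `get_cls_attrs`/`parse_variable`. A PyYAML-only sketch of the YAML-over-defaults step (illustrative, not the package's code; the template additionally reads `os.environ` and coerces types):

```python
# Illustrative re-implementation of the Config idea in the _conf.py template:
# annotated class attributes are the schema and defaults, a YAML file supplies
# overrides. The env layer and type coercion of the real template are omitted.
import yaml

class Config:
    app_title: str = "xApp"
    app_debug: bool = True
    app_log_dir: str = "./logs"
    db_url: str = None

    def setup(self, yaml_path: str) -> "Config":
        with open(yaml_path, encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        # only keys declared on the class are accepted; unknown keys are ignored
        for name in self.__class__.__annotations__:
            if name in data:
                setattr(self, name, data[name])
        return self

# assuming a config/app_dev.yaml exists, mirroring the template's layout:
# cfg = Config().setup("config/app_dev.yaml")
```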
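Likewise, the `context.py` and `_log.py` templates implement per-request log correlation: a `ContextVar` holds the current request id and a loguru filter injects it into every record so the format string can reference it. A condensed, runnable version of that pattern:

```python
# Condensed version of the request-id correlation pattern from the context.py
# and _log.py templates: a ContextVar carries the current request id and a
# loguru filter copies it into every record for the format string.
import sys
from contextvars import ContextVar

from loguru import logger

request_id_ctx_var: ContextVar[str] = ContextVar("request_id", default="N/A")

def _filter(record: dict) -> bool:
    record["extra"]["request_id"] = request_id_ctx_var.get()
    return True

logger.remove()  # drop the default handler before adding the custom one
logger.add(
    sys.stdout,
    format="{time:YYYY-MM-DD HH:mm:ss.SSS} {level} {extra[request_id]} {file}:{line} {message}",
    filter=_filter,
)

request_id_ctx_var.set("req-123")  # in the app, middleware sets this per request
logger.info("hello")               # logs: ... INFO req-123 ... hello
```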
fastapi_scaff-0.1.5.dist-info/RECORD
ADDED
@@ -0,0 +1,11 @@
+fastapi_scaff/__init__.py,sha256=5x8O4ld0gY4d39YakhLMHZnDZYq2LICaYTlFQgmAjJs,120
+fastapi_scaff/__main__.py,sha256=T5ODMU9Ge5jDz0F1EXjbvKhNZPMWWy2gmln8RoXwtPY,13220
+fastapi_scaff/_api_tpl.json,sha256=8oQC2cb9yD1azuOpxvTeY98hxq0U7lXJB0cd_D6jCe4,6795
+fastapi_scaff/_project_tpl.json,sha256=jKMx01UDKR30zxyJqfpgmL-3Xydwf93hFJnOPLQVNJo,72303
+fastapi_scaff-0.1.5.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
+tests/__init__.py,sha256=hL1sZ5kdrEFj8BqVmf98RgCrZAfXWNUhc122nk4hwuw,18
+fastapi_scaff-0.1.5.dist-info/METADATA,sha256=u750o4exU3WIn05oW9F-Jvl-ILVQ8TCu1idOVXH75TU,3382
+fastapi_scaff-0.1.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fastapi_scaff-0.1.5.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
+fastapi_scaff-0.1.5.dist-info/top_level.txt,sha256=GYgW8daqMJBN-Gv-z0JiFM2XWuSyBoEgXwqJW1wi0hg,20
+fastapi_scaff-0.1.5.dist-info/RECORD,,
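Each RECORD row above has the form `path,sha256=<digest>,size`, where the digest is an unpadded urlsafe-base64 SHA-256 per the wheel binary-distribution spec. A small sketch for checking an installed file against its row (the example row is truncated):

```python
# Verify a file against a wheel RECORD row: the hash field is an unpadded
# urlsafe-base64 SHA-256 digest, the last field is the size in bytes.
import base64
import hashlib
from pathlib import Path

def record_hash(path: str) -> str:
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

def matches_record_row(row: str) -> bool:
    path, hash_field, size = row.rsplit(",", 2)
    algo, _, expected = hash_field.partition("=")
    return (
        algo == "sha256"
        and Path(path).stat().st_size == int(size)
        and record_hash(path) == expected
    )

# e.g. matches_record_row("fastapi_scaff/__init__.py,sha256=5x8O4ld0gY4d...,120")
```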
fastapi_scaff-0.1.3.dist-info/RECORD
DELETED
@@ -1,11 +0,0 @@
-fastapi_scaff/__init__.py,sha256=lW8qQhGSxIvcUNRJJWJxCiVr3GEMSlraAdGov5SBB8E,120
-fastapi_scaff/__main__.py,sha256=YffqXFeXTUQonKkvl9-WRyeT3rt-pFPY4dlU47mFOdY,13219
-fastapi_scaff/_api_tpl.json,sha256=8oQC2cb9yD1azuOpxvTeY98hxq0U7lXJB0cd_D6jCe4,6795
-fastapi_scaff/_project_tpl.json,sha256=fzz827kN124BFKlgwVsX1yO5kvBewel8OKmAmu1nr94,72558
-fastapi_scaff-0.1.3.dist-info/licenses/LICENSE,sha256=A5H6q7zd1QrL3iVs1KLsBOG0ImV-t9PpPspM4x-4Ea8,1069
-tests/__init__.py,sha256=hL1sZ5kdrEFj8BqVmf98RgCrZAfXWNUhc122nk4hwuw,18
-fastapi_scaff-0.1.3.dist-info/METADATA,sha256=k0yA-3sKWy1sKoeJ35TqDEp7zcXVqoMss4tHWxerJNU,3382
-fastapi_scaff-0.1.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fastapi_scaff-0.1.3.dist-info/entry_points.txt,sha256=kzs28nmpRWVCmWmZav3X7u7YOIOEir3sCkLnvQKTJbY,62
-fastapi_scaff-0.1.3.dist-info/top_level.txt,sha256=GYgW8daqMJBN-Gv-z0JiFM2XWuSyBoEgXwqJW1wi0hg,20
-fastapi_scaff-0.1.3.dist-info/RECORD,,
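One last note on the templates: the `_snow.py` file shown in the `_project_tpl.json` hunk above allocates snowflake worker/datacenter ids by atomically incrementing a Redis counter with a Lua script, falling back to local IP or PID when Redis is unreachable. A sketch of that allocation with plain redis-py instead of toollib's `RedisCli` (the Redis URL is an assumption, and the modulus on the Redis path is added for brevity; the template applies `% 32` only in the PID fallback):

```python
# Sketch of _snow.py's id allocation using plain redis-py instead of
# toollib.rediser.RedisCli. Key name and TTL match the template.
import os

import redis

_LUA = """
if redis.call('exists', KEYS[1]) == 1 then
    redis.call('expire', KEYS[1], ARGV[1])
    return redis.call('incr', KEYS[1])
else
    redis.call('set', KEYS[1], 0)
    redis.call('expire', KEYS[1], ARGV[1])
    return 0
end
"""

def alloc_worker_id(url: str = "redis://localhost:6379/0", expire: int = 120) -> int:
    """Atomically take the next worker id; fall back to the PID if Redis is down."""
    try:
        r = redis.Redis.from_url(url)
        return int(r.eval(_LUA, 1, "config:snow_worker_id_incr", expire)) % 32
    except redis.RedisError:
        return os.getpid() % 32

print(alloc_worker_id())
```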
{fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/WHEEL
File without changes
{fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/entry_points.txt
File without changes
{fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/licenses/LICENSE
File without changes
{fastapi_scaff-0.1.3.dist-info → fastapi_scaff-0.1.5.dist-info}/top_level.txt
File without changes