py2docfx 0.1.11rc1996319__py3-none-any.whl → 0.1.12.dev2002521__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/__main__.py +24 -15
- py2docfx/convert_prepare/environment.py +13 -10
- py2docfx/convert_prepare/generate_document.py +6 -6
- py2docfx/convert_prepare/get_source.py +7 -7
- py2docfx/convert_prepare/git.py +10 -13
- py2docfx/convert_prepare/install_package.py +2 -2
- py2docfx/convert_prepare/pack.py +7 -10
- py2docfx/convert_prepare/package_info.py +3 -3
- py2docfx/convert_prepare/pip_utils.py +12 -14
- py2docfx/convert_prepare/post_process/merge_toc.py +3 -2
- py2docfx/convert_prepare/sphinx_caller.py +34 -12
- py2docfx/convert_prepare/tests/test_environment.py +0 -3
- py2docfx/convert_prepare/tests/test_generate_document.py +4 -2
- py2docfx/convert_prepare/tests/test_get_source.py +22 -14
- py2docfx/convert_prepare/tests/test_pack.py +6 -3
- py2docfx/convert_prepare/tests/test_params.py +0 -1
- py2docfx/convert_prepare/tests/test_sphinx_caller.py +10 -8
- py2docfx/convert_prepare/tests/test_subpackage.py +1 -0
- py2docfx/docfx_yaml/build_finished.py +1 -1
- py2docfx/docfx_yaml/logger.py +56 -55
- py2docfx/venv/venv1/Lib/site-packages/cachetools/__init__.py +7 -128
- py2docfx/venv/venv1/Lib/site-packages/cachetools/_decorators.py +152 -0
- {py2docfx-0.1.11rc1996319.dist-info → py2docfx-0.1.12.dev2002521.dist-info}/METADATA +1 -1
- {py2docfx-0.1.11rc1996319.dist-info → py2docfx-0.1.12.dev2002521.dist-info}/RECORD +26 -25
- {py2docfx-0.1.11rc1996319.dist-info → py2docfx-0.1.12.dev2002521.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.11rc1996319.dist-info → py2docfx-0.1.12.dev2002521.dist-info}/top_level.txt +0 -0
py2docfx/convert_prepare/tests/test_get_source.py
CHANGED
@@ -6,6 +6,7 @@ import shutil
 import stat
 import pytest
 import sys
+
 from py2docfx.convert_prepare import git
 from py2docfx.convert_prepare import get_source
 from py2docfx.convert_prepare.package_info import PackageInfo
@@ -61,7 +62,8 @@ def test_update_package_info(init_package_info):
     assert package.name == "mock_package"
     assert package.version == "1.2.0"
 
-def test_get_source_git_clone(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_git_clone(init_package_info):
     """
     Test the git clone of get_source
     """
@@ -76,10 +78,11 @@ def test_get_source_git_clone(init_package_info):
     package.branch = "main"
     package.folder = None
     package.url = "https://github.com/Azure/azure-iot-hub-python"
-    get_source.get_source(sys.executable, package, 0)
+    await get_source.get_source(sys.executable, package, 0)
     assert git.status("source_repo/0") is True
 
-def test_get_source_dist_file_zip(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_dist_file_zip(init_package_info):
     """
     Test the zip dist file download of get_source
     """
@@ -92,12 +95,13 @@ def test_get_source_dist_file_zip(init_package_info):
     package.extra_index_url = None
     package.prefer_source_distribution = True
     package.location = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip"
-    get_source.get_source(sys.executable, package, 1)
+    await get_source.get_source(sys.executable, package, 1)
     assert os.path.exists("dist_temp/1/azure-common-1.1.28")
     assert package.path.source_folder == os.path.join("dist_temp", "1", "azure-common-1.1.28")
     assert os.path.exists("dist_temp/1/azure-common-1.1.28.zip") is False
 
-def test_get_source_dist_file_whl(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_dist_file_whl(init_package_info):
     """
     Test the whl dist file download of get_source
     """
@@ -110,12 +114,13 @@ def test_get_source_dist_file_whl(init_package_info):
     package.extra_index_url = None
     package.prefer_source_distribution = True
     package.location = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl"
-    get_source.get_source(sys.executable, package, 2)
+    await get_source.get_source(sys.executable, package, 2)
     assert os.path.exists("dist_temp/2/azure_common-1.1.28")
     assert package.path.source_folder == os.path.join("dist_temp", "2", "azure_common-1.1.28")
     assert os.path.exists("dist_temp/2/azure_common-1.1.28-py2.py3-none-any.whl") is False
 
-def test_get_source_dist_file_tar(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_dist_file_tar(init_package_info):
     """
     Test the tar dist file download of get_source
     """
@@ -128,12 +133,13 @@ def test_get_source_dist_file_tar(init_package_info):
     package.extra_index_url = None
     package.prefer_source_distribution = True
     package.location = "https://files.pythonhosted.org/packages/fa/19/43a9eb812b4d6071fdc2c55640318f7eb5a1be8dbd3b6f9d96a1996e1bb6/azure-core-1.29.4.tar.gz"
-    get_source.get_source(sys.executable, package, 3)
+    await get_source.get_source(sys.executable, package, 3)
     assert os.path.exists("dist_temp/3/azure-core-1.29.4")
     assert package.path.source_folder == os.path.join("dist_temp", "3", "azure-core-1.29.4")
     assert os.path.exists("dist_temp/3/azure-core-1.29.4.tar.gz") is False
 
-def test_get_source_pip_whl(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_pip_whl(init_package_info):
     """
     Test the pip install of get_source with prefer_source_distribution = False
     """
@@ -145,12 +151,13 @@ def test_get_source_pip_whl(init_package_info):
     package.build_in_subpackage = False
     package.extra_index_url = None
     package.prefer_source_distribution = False
-    get_source.get_source(sys.executable, package, 4)
+    await get_source.get_source(sys.executable, package, 4)
     assert os.path.exists("dist_temp/4/azure_common-1.1.28")
     assert package.path.source_folder == os.path.join("dist_temp", "4", "azure_common-1.1.28")
     assert os.path.exists("dist_temp/4/azure_common-1.1.28-py2.py3-none-any.whl") is False
 
-def test_get_source_pip_zip(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_pip_zip(init_package_info):
     """
     Test the pip install of get_source with prefer_source_distribution = True
     """
@@ -162,12 +169,13 @@ def test_get_source_pip_zip(init_package_info):
     package.build_in_subpackage = False
     package.extra_index_url = None
     package.prefer_source_distribution = True
-    get_source.get_source(sys.executable, package, 5)
+    await get_source.get_source(sys.executable, package, 5)
     assert os.path.exists("dist_temp/5/azure-common-1.1.28")
     assert package.path.source_folder == os.path.join("dist_temp", "5", "azure-common-1.1.28")
     assert os.path.exists("dist_temp/5/azure-common-1.1.28.zip") is False
 
-def test_get_source_zip_file_at_position_0(init_package_info):
+@pytest.mark.asyncio
+async def test_get_source_zip_file_at_position_0(init_package_info):
     """
     Test the pip install of packages with zip or tar file at position 0 in the dirctory list
     """
@@ -179,7 +187,7 @@ def test_get_source_zip_file_at_position_0(init_package_info):
     package.build_in_subpackage = False
     package.extra_index_url = None
     package.prefer_source_distribution = True
-    get_source.get_source(sys.executable, package, 6)
+    await get_source.get_source(sys.executable, package, 6)
     assert os.path.exists("dist_temp/6/azure_template-0.1.0b3942895")
     assert package.path.source_folder == os.path.join("dist_temp", "6", "azure_template-0.1.0b3942895")
     assert os.path.exists("dist_temp/6/azure_template-0.1.0b3942895.tar.gz") is False
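Every get_source test above follows the same conversion: the test function becomes a coroutine and the now-async `get_source.get_source` call is awaited. A minimal sketch of the pattern, assuming the pytest-asyncio plugin (which supplies `@pytest.mark.asyncio`) is installed; the stub coroutine is hypothetical, standing in for the real `get_source`:

```python
import pytest

async def get_source_stub(executable, package, index):
    # Hypothetical stand-in for get_source.get_source, which is now a coroutine.
    return index

@pytest.mark.asyncio  # provided by the pytest-asyncio plugin (assumed installed)
async def test_get_source_stub():
    # The marker runs the coroutine test on an event loop, so awaits work inside it.
    assert await get_source_stub("python", None, 0) == 0
```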
py2docfx/convert_prepare/tests/test_pack.py
CHANGED
@@ -5,6 +5,8 @@ import subprocess
 import shutil
 import glob
 from os import path
+import pytest
+
 from py2docfx.convert_prepare.pack import unpack_compressed, unpack_wheel
 
 SRC_DIR = path.abspath("convert_prepare/tests/data/pack/")
@@ -81,8 +83,8 @@ def test_pack_unpack_compressed(tmp_path):
         path.abspath("convert_prepare/tests/data/pack"), tmp_path / "gz", [], [str(gz_file_path)]
     )
 
-
-def test_pack_unpack_wheel(tmp_path):
+@pytest.mark.asyncio
+async def test_pack_unpack_wheel(tmp_path):
     def _prepare_wheel(target_path):
         subprocess.run(
             ["pip", "wheel", ".", "--wheel-dir", str(target_path / "wheel")],
@@ -107,7 +109,8 @@ def test_pack_unpack_wheel(tmp_path):
     wheel_path = path.join(tmp_path / "wheel", wheel_name)
 
     # unpack and assert the file list
-    unpack_wheel(wheel_path)
+    package_name = wheel_name.split("-")[0]
+    await unpack_wheel(package_name, wheel_path)
     _assert_file_list_same(
         path.abspath("convert_prepare/tests/data/pack"),
         tmp_path / "wheel" / "foo-0.1",
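The new `unpack_wheel` takes the distribution name as its first argument, and the test derives it from the wheel filename: under PEP 427 a wheel is named `{distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl`, so the first dash-separated field is the distribution name. A tiny illustration using the `foo-0.1` fixture name seen above:

```python
# Wheel name matching the "foo-0.1" fixture built by the test above.
wheel_name = "foo-0.1-py3-none-any.whl"
package_name = wheel_name.split("-")[0]  # first PEP 427 field
assert package_name == "foo"
```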
py2docfx/convert_prepare/tests/test_sphinx_caller.py
CHANGED
@@ -1,5 +1,5 @@
-import pytest
 import os
+import pytest
 import shutil
 import sphinx
 import sphinx.cmd.build
@@ -24,20 +24,22 @@ def init_paths(tmp_path):
     package_info.name = 'testcode'
     return rst_path, destination
 
-
-def test_run_apidoc(tmp_path):
+@pytest.mark.asyncio
+async def test_run_apidoc(tmp_path):
     rst_path, source_code_path = init_paths(tmp_path)
-    run_apidoc(rst_path, source_code_path, package_info.get_exluded_command(), package_info)
+    package_name = "testcode"
+    await run_apidoc(package_name, rst_path, source_code_path, package_info.get_exluded_command(), package_info)
 
     # List all files under rst_path
     rst_list = os.listdir(rst_path)
     assert "testcode.fakemodule.rst" in rst_list
     assert "testcode.exclude.rst" not in rst_list
 
-
-def test_run_converter(tmp_path):
+@pytest.mark.asyncio
+async def test_run_converter(tmp_path):
     rst_path, source_code_path = init_paths(tmp_path)
-    run_apidoc(rst_path, source_code_path, package_info.get_exluded_command(), package_info)
+    package_name = "testcode"
+    await run_apidoc(package_name, rst_path, source_code_path, package_info.get_exluded_command(), package_info)
 
     # prepare conf.py, index.rst and docfx_yaml
     conf_path = os.path.abspath("convert_prepare/tests/data/sphinx_caller/conf.py")
@@ -49,7 +51,7 @@ def test_run_converter(tmp_path):
         index_rst.write("")
 
     out_path = os.path.join(tmp_path, "out")
-    out_path = run_converter(rst_path, out_path, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_code_path, conf_path=rst_path)
+    out_path = await run_converter(package_name, rst_path, out_path, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_code_path, conf_path=rst_path)
 
     if os.path.exists(out_path):
         yaml_list = os.listdir(os.path.join(out_path, "docfx_yaml"))
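Both sphinx_caller entry points now take the package name first and must be awaited. A hedged sketch of driving them outside pytest, based only on the signatures visible in this diff (the paths and the `package_info` object are placeholders):

```python
import asyncio
import sphinx.cmd.build
from py2docfx.convert_prepare.sphinx_caller import run_apidoc, run_converter

async def build_docs(package_name, rst_path, source_code_path, out_path, package_info):
    # Generate .rst stubs, then convert them; both calls are coroutines now.
    await run_apidoc(package_name, rst_path, source_code_path,
                     package_info.get_exluded_command(), package_info)
    return await run_converter(package_name, rst_path, out_path,
                               sphinx_build_path=sphinx.cmd.build.__file__,
                               extra_package_path=source_code_path,
                               conf_path=rst_path)

# asyncio.run(build_docs("testcode", rst_path, src_path, out_path, package_info))
```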
py2docfx/docfx_yaml/build_finished.py
CHANGED
@@ -315,7 +315,7 @@ def build_finished(app, exception):
     if len(toc_yaml) == 0:
         msg = "No documentation for this module."
         py2docfx_logger.error(msg)
-        raise RuntimeError()
+        raise RuntimeError(msg)
 
     toc_file = os.path.join(normalized_outdir, 'toc.yml')
     with open(toc_file, 'w') as writable:
py2docfx/docfx_yaml/logger.py
CHANGED
@@ -34,8 +34,13 @@ def check_log_file_exists(log_file_path):
         with open(log_file_path, 'w') as f:
             f.write('')
 
-def setup_log_handlers(logger, log_file_name):
+def setup_log_handlers(logger_name, log_file_name, log_folder_path):
+    check_log_dir_exists(log_folder_path)
+    logger = logging.getLogger(logger_name)
+    logger.setLevel(logging.INFO)
     check_log_file_exists(log_file_name)
+    if logger.hasHandlers():
+        logger.handlers.clear()
     file_handler = logging.FileHandler(filename=log_file_name, mode='a')
     file_handler.setFormatter(logging.Formatter('%(levelname)s - %(name)s - %(message)s'))
     logger.addHandler(file_handler)
@@ -45,57 +50,20 @@ def setup_log_handlers(logger, log_file_name):
 def get_logger(logger_name: str):
     log_folder_path = os.path.join("logs")
     file_name = os.path.join(log_folder_path, "log.txt")
-
-    file_logger = logging.getLogger(logger_name)
-    file_logger.setLevel(logging.INFO)
-    check_log_dir_exists(log_folder_path)
-    if file_logger.hasHandlers():
-        return file_logger
-
-    file_logger = setup_log_handlers(file_logger, file_name)
+
+    file_logger = setup_log_handlers(logger_name, file_name, log_folder_path)
 
     return file_logger
 
-def get_package_logger(logger_name:str):
+def get_package_logger(logger_name:str, package_name:str = None):
     log_folder_path = os.path.join("logs", "package_logs")
-    package_name = os.environ.get('PROCESSING_PACKAGE_NAME')
+    if package_name is None:
+        package_name = os.environ.get('PROCESSING_PACKAGE_NAME')
     file_name = os.path.join(log_folder_path, f"{package_name}.txt")
-
-    file_logger = logging.getLogger(logger_name)
-    file_logger.setLevel(logging.INFO)
-    check_log_dir_exists(log_folder_path)
-    if file_logger.hasHandlers():
-        return file_logger
 
-    file_logger = setup_log_handlers(file_logger, file_name)
-
-    return file_logger
+    file_logger = setup_log_handlers(logger_name, file_name, log_folder_path)
 
-
-def log_subprocess_ouput(subprocess_out: subprocess.CompletedProcess, logger: logging.Logger):
-    if subprocess_out.stdout:
-        logger.info(subprocess_out.stdout)
-    if subprocess_out.stderr:
-        msgs = subprocess_out.stderr.split('\n')
-        for msg in msgs:
-            if msg is None or msg == "":
-                continue
-            logger.warning(msg)
-    if subprocess_out.returncode != 0:
-        logger.error(f"Subprocess failed with return code {subprocess_out.returncode}")
-        raise RuntimeError()
-
-def log_git_clone_subprocess_ouput(subprocess_out: subprocess.CompletedProcess, logger: logging.Logger):
-    if subprocess_out.stdout:
-        logger.info(subprocess_out.stdout)
-    if subprocess_out.stderr:
-        msgs = subprocess_out.stderr.split('\n')
-        for msg in msgs:
-            if msg is None or msg == "":
-                continue
-            logger.info(msg)
-    if subprocess_out.returncode != 0:
-        logger.error(f"Subprocess failed with return code {subprocess_out.returncode}")
-        raise RuntimeError()
+    return file_logger
 
 def counts_errors_warnings(log_file_path):
     error_count = 0
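With this refactor `setup_log_handlers` owns the whole logger lifecycle — it creates the log directory, sets the level, clears any stale handlers, and attaches a fresh file handler — so both `get_logger` and `get_package_logger` reduce to a single call. The package logger can now also be named explicitly instead of relying on the environment variable; a short usage sketch (the package name is illustrative):

```python
# Explicit name: writes to logs/package_logs/azure-common.txt
logger = get_package_logger(__name__, package_name="azure-common")
logger.info("processing started")

# No name given: falls back to the PROCESSING_PACKAGE_NAME environment variable.
fallback_logger = get_package_logger(__name__)
```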
@@ -160,20 +128,53 @@ def output_log_by_log_level():
         log_file_path = os.path.join(package_logs_folder, log_file)
         print_out_log_by_log_level(parse_log(log_file_path), log_level)
 
-async def run_async_subprocess(exe_path, cmd):
-    process = await asyncio.create_subprocess_exec(
-        exe_path, *cmd,
-        stdout=asyncio.subprocess.PIPE,
-        stderr=asyncio.subprocess.PIPE
-    )
+async def run_async_subprocess(exe_path, cmd, logger, cwd=None):
+    if cwd is None:
+        process = await asyncio.create_subprocess_exec(
+            exe_path, *cmd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+    else:
+        process = await asyncio.create_subprocess_exec(
+            exe_path, *cmd,
+            cwd=cwd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+        )
+    stdout, stderr = await process.communicate()
+    if process.returncode != 0:
+        msg = stderr.decode('utf-8')
+        if msg != None and msg != "":
+            logger.error(msg)
+        raise subprocess.CalledProcessError(process.returncode, cmd, stdout, stderr)
+    else:
+        msg = stdout.decode('utf-8')
+        if msg != None and msg != "":
+            logger.info(msg)
+
+async def run_async_subprocess_without_executable(cmd, logger, cwd=None):
+    if cwd is None:
+        process = await asyncio.create_subprocess_exec(
+            *cmd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+    else:
+        process = await asyncio.create_subprocess_exec(
+            *cmd,
+            cwd=cwd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+        )
+
     stdout, stderr = await process.communicate()
-    py2docfx_logger = get_logger(__name__)
     if process.returncode != 0:
         msg = stderr.decode('utf-8')
         if msg != None and msg != "":
-            py2docfx_logger.error(msg)
-        raise RuntimeError()
+            logger.error(msg)
+        raise subprocess.CalledProcessError(process.returncode, cmd, stdout, stderr)
     else:
         msg = stdout.decode('utf-8')
         if msg != None and msg != "":
-            py2docfx_logger.info(msg)
+            logger.info(msg)
py2docfx/venv/venv1/Lib/site-packages/cachetools/__init__.py
CHANGED
@@ -13,7 +13,7 @@ __all__ = (
     "cachedmethod",
 )
 
-__version__ = "5.5.1"
+__version__ = "5.5.2"
 
 import collections
 import collections.abc
@@ -23,6 +23,7 @@ import random
 import time
 
 from . import keys
+from ._decorators import _cached_wrapper
 
 
 class _DefaultSize:
@@ -643,150 +644,28 @@ def cached(cache, key=keys.hashkey, lock=None, info=False):
 
     def decorator(func):
         if info:
-            hits = misses = 0
-
             if isinstance(cache, Cache):
 
-                def getinfo():
-                    nonlocal hits, misses
+                def make_info(hits, misses):
                     return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)
 
             elif isinstance(cache, collections.abc.Mapping):
 
-                def getinfo():
-                    nonlocal hits, misses
+                def make_info(hits, misses):
                     return _CacheInfo(hits, misses, None, len(cache))
 
             else:
 
-                def getinfo():
-                    nonlocal hits, misses
+                def make_info(hits, misses):
                     return _CacheInfo(hits, misses, 0, 0)
 
-            if cache is None:
-
-                def wrapper(*args, **kwargs):
-                    nonlocal misses
-                    misses += 1
-                    return func(*args, **kwargs)
-
-                def cache_clear():
-                    nonlocal hits, misses
-                    hits = misses = 0
-
-                cache_info = getinfo
-
-            elif lock is None:
-
-                def wrapper(*args, **kwargs):
-                    nonlocal hits, misses
-                    k = key(*args, **kwargs)
-                    try:
-                        result = cache[k]
-                        hits += 1
-                        return result
-                    except KeyError:
-                        misses += 1
-                    v = func(*args, **kwargs)
-                    try:
-                        cache[k] = v
-                    except ValueError:
-                        pass  # value too large
-                    return v
-
-                def cache_clear():
-                    nonlocal hits, misses
-                    cache.clear()
-                    hits = misses = 0
-
-                cache_info = getinfo
-
-            else:
-
-                def wrapper(*args, **kwargs):
-                    nonlocal hits, misses
-                    k = key(*args, **kwargs)
-                    try:
-                        with lock:
-                            result = cache[k]
-                            hits += 1
-                        return result
-                    except KeyError:
-                        with lock:
-                            misses += 1
-                    v = func(*args, **kwargs)
-                    # in case of a race, prefer the item already in the cache
-                    try:
-                        with lock:
-                            return cache.setdefault(k, v)
-                    except ValueError:
-                        return v  # value too large
-
-                def cache_clear():
-                    nonlocal hits, misses
-                    with lock:
-                        cache.clear()
-                        hits = misses = 0
-
-                def cache_info():
-                    with lock:
-                        return getinfo()
-
+            wrapper = _cached_wrapper(func, cache, key, lock, make_info)
         else:
-            if cache is None:
-
-                def wrapper(*args, **kwargs):
-                    return func(*args, **kwargs)
-
-                def cache_clear():
-                    pass
-
-            elif lock is None:
-
-                def wrapper(*args, **kwargs):
-                    k = key(*args, **kwargs)
-                    try:
-                        return cache[k]
-                    except KeyError:
-                        pass  # key not found
-                    v = func(*args, **kwargs)
-                    try:
-                        cache[k] = v
-                    except ValueError:
-                        pass  # value too large
-                    return v
-
-                def cache_clear():
-                    cache.clear()
-
-            else:
-
-                def wrapper(*args, **kwargs):
-                    k = key(*args, **kwargs)
-                    try:
-                        with lock:
-                            return cache[k]
-                    except KeyError:
-                        pass  # key not found
-                    v = func(*args, **kwargs)
-                    # in case of a race, prefer the item already in the cache
-                    try:
-                        with lock:
-                            return cache.setdefault(k, v)
-                    except ValueError:
-                        return v  # value too large
-
-                def cache_clear():
-                    with lock:
-                        cache.clear()
-
-            cache_info = None
+            wrapper = _cached_wrapper(func, cache, key, lock, None)
 
         wrapper.cache = cache
         wrapper.cache_key = key
         wrapper.cache_lock = lock
-        wrapper.cache_clear = cache_clear
-        wrapper.cache_info = cache_info
 
         return functools.update_wrapper(wrapper, func)
 
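The observable behavior of `cached` is unchanged by the refactor: the decorator still builds the appropriate `_CacheInfo` factory and now delegates wrapper construction to `_cached_wrapper`. A quick check through the public API:

```python
from cachetools import LRUCache, cached

@cached(cache=LRUCache(maxsize=32), info=True)
def square(x):
    return x * x

square(2)
square(2)  # served from the cache
print(square.cache_info())  # CacheInfo(hits=1, misses=1, maxsize=32, currsize=1)
```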
py2docfx/venv/venv1/Lib/site-packages/cachetools/_decorators.py
ADDED
@@ -0,0 +1,152 @@
+"""Extensible memoizing decorator helpers."""
+
+
+def _cached_locked_info(func, cache, key, lock, info):
+    hits = misses = 0
+
+    def wrapper(*args, **kwargs):
+        nonlocal hits, misses
+        k = key(*args, **kwargs)
+        with lock:
+            try:
+                result = cache[k]
+                hits += 1
+                return result
+            except KeyError:
+                misses += 1
+        v = func(*args, **kwargs)
+        with lock:
+            try:
+                # in case of a race, prefer the item already in the cache
+                return cache.setdefault(k, v)
+            except ValueError:
+                return v  # value too large
+
+    def cache_clear():
+        nonlocal hits, misses
+        with lock:
+            cache.clear()
+            hits = misses = 0
+
+    def cache_info():
+        with lock:
+            return info(hits, misses)
+
+    wrapper.cache_clear = cache_clear
+    wrapper.cache_info = cache_info
+    return wrapper
+
+
+def _cached_unlocked_info(func, cache, key, info):
+    hits = misses = 0
+
+    def wrapper(*args, **kwargs):
+        nonlocal hits, misses
+        k = key(*args, **kwargs)
+        try:
+            result = cache[k]
+            hits += 1
+            return result
+        except KeyError:
+            misses += 1
+        v = func(*args, **kwargs)
+        try:
+            cache[k] = v
+        except ValueError:
+            pass  # value too large
+        return v
+
+    def cache_clear():
+        nonlocal hits, misses
+        cache.clear()
+        hits = misses = 0
+
+    wrapper.cache_clear = cache_clear
+    wrapper.cache_info = lambda: info(hits, misses)
+    return wrapper
+
+
+def _uncached_info(func, info):
+    misses = 0
+
+    def wrapper(*args, **kwargs):
+        nonlocal misses
+        misses += 1
+        return func(*args, **kwargs)
+
+    def cache_clear():
+        nonlocal misses
+        misses = 0
+
+    wrapper.cache_clear = cache_clear
+    wrapper.cache_info = lambda: info(0, misses)
+    return wrapper
+
+
+def _cached_locked(func, cache, key, lock):
+    def wrapper(*args, **kwargs):
+        k = key(*args, **kwargs)
+        with lock:
+            try:
+                return cache[k]
+            except KeyError:
+                pass  # key not found
+        v = func(*args, **kwargs)
+        with lock:
+            try:
+                # in case of a race, prefer the item already in the cache
+                return cache.setdefault(k, v)
+            except ValueError:
+                return v  # value too large
+
+    def cache_clear():
+        with lock:
+            cache.clear()
+
+    wrapper.cache_clear = cache_clear
+    return wrapper
+
+
+def _cached_unlocked(func, cache, key):
+    def wrapper(*args, **kwargs):
+        k = key(*args, **kwargs)
+        try:
+            return cache[k]
+        except KeyError:
+            pass  # key not found
+        v = func(*args, **kwargs)
+        try:
+            cache[k] = v
+        except ValueError:
+            pass  # value too large
+        return v
+
+    wrapper.cache_clear = lambda: cache.clear()
+    return wrapper
+
+
+def _uncached(func):
+    def wrapper(*args, **kwargs):
+        return func(*args, **kwargs)
+
+    wrapper.cache_clear = lambda: None
+    return wrapper
+
+
+def _cached_wrapper(func, cache, key, lock, info):
+    if info is not None:
+        if cache is None:
+            wrapper = _uncached_info(func, info)
+        elif lock is None:
+            wrapper = _cached_unlocked_info(func, cache, key, info)
+        else:
+            wrapper = _cached_locked_info(func, cache, key, lock, info)
+    else:
+        if cache is None:
+            wrapper = _uncached(func)
+        elif lock is None:
+            wrapper = _cached_unlocked(func, cache, key)
+        else:
+            wrapper = _cached_locked(func, cache, key, lock)
+        wrapper.cache_info = None
+    return wrapper
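The new module enumerates the six lock/info combinations as separate closures and picks one at decoration time, so the hot path carries no per-call branching; note also that both locked variants release the lock while calling `func`, re-acquiring it only to store the result. Exercising the private helper directly, just to show the dispatch (real code should go through `cachetools.cached` instead):

```python
from cachetools._decorators import _cached_wrapper

# info=None and lock=None select the _cached_unlocked specialization.
double = _cached_wrapper(lambda x: x * 2, cache={}, key=lambda x: x, lock=None, info=None)
print(double(3), double(3))  # second call is served from the dict cache
print(double.cache_info)     # None: no info callback was supplied
```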