py2docfx 0.1.9.dev1926139-py3-none-any.whl → 0.1.9.dev1927662-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/__main__.py +18 -24
- py2docfx/convert_prepare/conf_templates/conf.py_t +1 -1
- py2docfx/convert_prepare/constants.py +2 -1
- py2docfx/convert_prepare/environment.py +13 -13
- py2docfx/convert_prepare/generate_document.py +6 -6
- py2docfx/convert_prepare/get_source.py +2 -2
- py2docfx/convert_prepare/git.py +12 -12
- py2docfx/convert_prepare/package_info.py +7 -7
- py2docfx/convert_prepare/pip_utils.py +13 -9
- py2docfx/convert_prepare/post_process/merge_toc.py +2 -2
- py2docfx/convert_prepare/sphinx_caller.py +6 -6
- py2docfx/convert_prepare/tests/test_generate_document.py +0 -2
- py2docfx/convert_prepare/tests/test_sphinx_caller.py +0 -2
- py2docfx/docfx_yaml/build_finished.py +5 -5
- py2docfx/docfx_yaml/convert_class.py +2 -2
- py2docfx/docfx_yaml/convert_enum.py +2 -2
- py2docfx/docfx_yaml/convert_module.py +2 -2
- py2docfx/docfx_yaml/convert_package.py +2 -2
- py2docfx/docfx_yaml/logger.py +82 -15
- py2docfx/docfx_yaml/process_doctree.py +4 -4
- py2docfx/docfx_yaml/translator.py +3 -3
- py2docfx/docfx_yaml/writer.py +5 -5
- {py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/METADATA +1 -1
- {py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/RECORD +26 -27
- py2docfx/convert_prepare/tests/utils.py +0 -11
- {py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/top_level.txt +0 -0
py2docfx/__main__.py
CHANGED
@@ -7,8 +7,8 @@ import sys
 import shutil
 
 from py2docfx import PACKAGE_ROOT
-from py2docfx.docfx_yaml.logger import get_logger
-from py2docfx.convert_prepare.constants import SOURCE_REPO, TARGET_REPO, DIST_TEMP
+from py2docfx.docfx_yaml.logger import get_logger, get_package_logger, get_warning_error_count, output_log_by_log_level
+from py2docfx.convert_prepare.constants import SOURCE_REPO, TARGET_REPO, DIST_TEMP, LOG_FOLDER
 from py2docfx.convert_prepare.generate_document import generate_document
 from py2docfx.convert_prepare.get_source import YAML_OUTPUT_ROOT
 from py2docfx.convert_prepare.post_process.merge_toc import merge_toc, move_root_toc_to_target
@@ -227,8 +227,6 @@ async def donwload_package_generate_documents(
         output_doc_folder: os.PathLike | None,
         github_token: str, ado_token: str, required_package_list: list):
 
-    docfx_logger = get_logger(__name__)
-
     start_num = len(required_package_list)
     env_prepare_tasks = []
     env_remove_tasks = []
@@ -244,14 +242,15 @@ async def donwload_package_generate_documents(
     for idx, package in enumerate(package_info_list):
         os.environ['PROCESSING_PACKAGE_NAME'] = package.name
         package_number = start_num + idx
+        py2docfx_logger = get_package_logger(__name__)
         msg = f"Processing package {package.name}, env_prepare_tasks: {len(env_prepare_tasks)}"
-
+        py2docfx_logger.info(msg)
 
         try:
             await env_prepare_tasks[idx]
         except Exception as e:
             msg = f"Failed to setup venv for package {package.name}: {e}"
-
+            py2docfx_logger.error(msg)
             raise
 
         generate_document(package, output_root,
@@ -268,7 +267,7 @@ async def donwload_package_generate_documents(
             buffer_package_idx = idx + py2docfxEnvironment.VENV_BUFFER
 
             msg = f"Creating venv {buffer_package_idx}"
-
+            py2docfx_logger.info(msg)
 
             env_prepare_tasks.append(
                 asyncio.create_task(py2docfxEnvironment.prepare_venv(buffer_package_idx,
@@ -283,7 +282,7 @@ async def donwload_package_generate_documents(
 
         if idx > py2docfxEnvironment.VENV_BUFFER and env_remove_tasks[idx-py2docfxEnvironment.VENV_BUFFER] != None:
             msg = f"Removing venv {idx-py2docfxEnvironment.VENV_BUFFER}"
-
+            py2docfx_logger.info(msg)
             await env_remove_tasks[idx-py2docfxEnvironment.VENV_BUFFER]
 
     if output_doc_folder:
@@ -329,15 +328,6 @@ def decide_global_log_level(verbose: bool, show_warning: bool) -> None:
     # Default log level
     os.environ['LOG_LEVEL'] = 'ERROR'
 
-def prepare_log_folder() -> None:
-    if not os.path.exists('logs'):
-        os.makedirs('logs')
-        os.makedirs(os.path.join('logs', 'package_logs'))
-    else:
-        shutil.rmtree('logs')
-        os.makedirs('logs')
-        os.makedirs(os.path.join('logs', 'package_logs'))
-
 def main(argv) -> int:
     # TODO: may need to purge pip cache
     (package_info_list,
@@ -346,20 +336,19 @@ def main(argv) -> int:
      output_root, verbose,
      show_warning) = parse_command_line_args(argv)
 
+    clean_up_folder_list = [py2docfxEnvironment.VENV_DIR, DIST_TEMP, SOURCE_REPO, TARGET_REPO, LOG_FOLDER]
+    temp_folder_clean_up(clean_up_folder_list)
+
     decide_global_log_level(verbose, show_warning)
-    prepare_log_folder()
 
-
+    py2docfx_logger = get_logger(__name__)
 
     msg = "Adding yaml extension to path"
-
+    py2docfx_logger.info(msg)
 
     sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)),'docfx_yaml'))
    os.chdir(PACKAGE_ROOT)
     output_doc_folder = prepare_out_dir(output_root)
-
-    clean_up_folder_list = [py2docfxEnvironment.VENV_DIR, DIST_TEMP, SOURCE_REPO, TARGET_REPO]
-    temp_folder_clean_up(clean_up_folder_list)
 
     try:
         asyncio.run(donwload_package_generate_documents(
@@ -367,8 +356,13 @@ def main(argv) -> int:
             github_token, ado_token, required_package_list))
     except Exception as e:
         msg = f"An error occurred: {e}"
-
+        py2docfx_logger.error(msg)
         raise
+
+    warning_count, error_count = get_warning_error_count()
+    output_log_by_log_level()
+    print(f"Warning count: {warning_count}, Error count: {error_count}")
+    logging.shutdown()
     return 0
 
 if __name__ == "__main__":
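The reworked main() above now cleans up LOG_FOLDER together with the other temp folders before any logger is created, routes every message to a file during the run, and only prints a summary once the asyncio work has finished. Below is a minimal sketch of that end-of-run reporting step, using the helpers now imported from py2docfx.docfx_yaml.logger; run_docs is a hypothetical stand-in for the real download/generate pipeline, not part of the package.

```python
import logging

from py2docfx.docfx_yaml.logger import (
    get_logger,
    get_warning_error_count,
    output_log_by_log_level,
)

def report_run(run_docs) -> int:
    # Simplified stand-in for main(); run_docs is a hypothetical callable that
    # performs the actual package download and document generation.
    py2docfx_logger = get_logger(__name__)
    try:
        run_docs()
    except Exception as e:
        py2docfx_logger.error(f"An error occurred: {e}")
        raise

    # Summarize what was written to logs/log.txt and logs/package_logs/*.txt,
    # replay the buffered messages that meet the configured LOG_LEVEL, then
    # print the totals, mirroring the new main().
    warning_count, error_count = get_warning_error_count()
    output_log_by_log_level()
    print(f"Warning count: {warning_count}, Error count: {error_count}")
    logging.shutdown()
    return 0
```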
py2docfx/convert_prepare/conf_templates/conf.py_t
CHANGED
@@ -62,7 +62,7 @@ release = '{{PROJECT_RELEASE}}'
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language =
+language = 'en'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
py2docfx/convert_prepare/environment.py
CHANGED
@@ -28,7 +28,7 @@ def install_converter_requirements(executable: str):
     https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/InstallPackage.ps1&line=15&lineEnd=35&lineStartColumn=1&lineEndColumn=87&lineStyle=plain&_a=contents
     """
     pip_install_cmd = [executable, "-m", "pip", "install", "--upgrade"]
-
+    py2docfx_logger = get_logger(__name__)
     pip_install_common_options = [
         "--no-cache-dir",
         "--quiet",
@@ -39,7 +39,7 @@ def install_converter_requirements(executable: str):
 
     for module in REQUIREMENT_MODULES:
         msg = f"<CI INFO>: Upgrading {module}..."
-
+        py2docfx_logger.info(msg)
         subprocess.run(
             pip_install_cmd + [module] + pip_install_common_options, check=True
         )
@@ -106,39 +106,39 @@ async def create_environment(venv_path: int):
     await (await asyncio.create_subprocess_exec("python3", "-m", "venv", venv_path)).wait()
 
 async def prepare_base_venv(required_package_list: list[PackageInfo], github_token: str, ado_token: str):
-
+    py2docfx_logger = get_logger(__name__)
 
     msg = f"<CI INFO>: Creating basevenv..."
-
+    py2docfx_logger.info(msg)
     await create_environment(get_base_venv_path())
 
     msg = f"<CI INFO>: Installing converter requirements in ..."
-
+    py2docfx_logger.info(msg)
     await install_converter_requirement_async(get_base_venv_exe())
 
     msg = f"<CI INFO>: Installing required packages in basevenv..."
-
+    py2docfx_logger.info(msg)
     await install_required_packages(get_base_venv_exe(), required_package_list, github_token, ado_token)
 
     msg = f"<CI INFO>: basevenv setup complete."
-
+    py2docfx_logger.info(msg)
 
 async def prepare_venv(venv_num: int, package_info: PackageInfo, package_number: int, github_token: str, ado_token: str):
-
+    py2docfx_logger = get_logger(__name__)
     await create_environment(get_venv_path(venv_num))
     await install_venv_requirements(venv_num)
     get_source(get_venv_exe(venv_num), package_info, package_number, vststoken=ado_token, githubtoken=github_token)
     package_name, options = package_info.get_install_command()
     await pip_utils.install_in_exe_async(get_venv_exe(venv_num), package_name, options)
     msg = f"<CI INFO>: venv{venv_num} setup complete."
-
+    py2docfx_logger.info(msg)
 
 async def remove_environment(venv_num: int):
-
+    py2docfx_logger = get_logger(__name__)
     venv_path = get_venv_path(venv_num)
     if os.path.exists(venv_path):
         msg = f"<CI INFO>: Removing venv{venv_num}..."
-
+        py2docfx_logger.info(msg)
         # Create a subprocess to run the shell command for removing the directory
         process = await asyncio.create_subprocess_shell(
             f'rm -rf {venv_path}' if os.name != 'nt' else f'rmdir /S /Q {venv_path}',
@@ -148,8 +148,8 @@ async def remove_environment(venv_num: int):
         stdout, stderr = await process.communicate()
         if process.returncode == 0:
             msg = f"<CI INFO>: venv{venv_num} removed."
-
+            py2docfx_logger.info(msg)
         else:
             msg = f"<CI ERROR>: Failed to remove venv{venv_num}. Error: {stderr.decode()}"
-
+            py2docfx_logger.error(msg)
             raise RuntimeError()
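A recurring pattern in this version of environment.py (and in most of the modules below): instead of a module-level docfx_logger, each function calls get_logger(__name__) on entry, so the file handler is attached only after the logs directory exists and output goes to logs/log.txt rather than the console. A small sketch of that pattern, modeled on remove_environment(); remove_directory is a hypothetical helper, not an API of the package.

```python
import asyncio
import os

from py2docfx.docfx_yaml.logger import get_logger

async def remove_directory(path: str) -> None:
    # Acquire the logger inside the coroutine (not at import time), mirroring
    # how remove_environment() now logs instead of printing.
    py2docfx_logger = get_logger(__name__)
    cmd = f'rm -rf {path}' if os.name != 'nt' else f'rmdir /S /Q {path}'
    process = await asyncio.create_subprocess_shell(
        cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    _, stderr = await process.communicate()
    if process.returncode == 0:
        py2docfx_logger.info(f"<CI INFO>: {path} removed.")
    else:
        py2docfx_logger.error(f"<CI ERROR>: Failed to remove {path}. Error: {stderr.decode()}")
        raise RuntimeError()

# Usage (sketch, path is a placeholder): asyncio.run(remove_directory("venv/venv0"))
```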
py2docfx/convert_prepare/generate_document.py
CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations # Avoid A | B annotation break under <= py3.9
 import os
 import sys
-from py2docfx.docfx_yaml.logger import
+from py2docfx.docfx_yaml.logger import get_package_logger
 from py2docfx.convert_prepare.generate_conf import generate_conf
 from py2docfx.convert_prepare.git import checkout
 from py2docfx.convert_prepare.package_info import PackageInfo
@@ -11,7 +11,7 @@ from py2docfx.convert_prepare.subpackage import merge_subpackage_files
 CONF_TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "conf_templates")
 
 def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_build_path: str, extra_package_path: str, executable=sys.executable):
-
+    py2docfx_logger = get_package_logger(__name__)
     # Copy manual written RST from target doc repo
     package_paths = pkg.path
     if output_root:
@@ -31,18 +31,18 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_b
     checkout(package_paths.source_folder, pkg.branch)
 
     msg = f"<CI INFO>: Generating RST files for {pkg.name}."
-
+    py2docfx_logger.info(msg)
 
     subpackages_rst_record = run_apidoc(package_paths.doc_folder, package_paths.source_folder,
                                         exclude_paths, pkg)
 
     msg = f"<CI INFO>: Listing RST files:"
-
+    py2docfx_logger.info(msg)
     for rst_file in os.listdir(package_paths.doc_folder):
-
+        py2docfx_logger.info(rst_file)
 
     msg = "<CI INFO>: Running Sphinx build..."
-
+    py2docfx_logger.info(msg)
 
     generate_conf(pkg, package_paths.doc_folder, CONF_TEMPLATE_DIR)
     run_converter(package_paths.doc_folder, package_paths.yaml_output_folder, sphinx_build_path, extra_package_path, executable=executable)
py2docfx/convert_prepare/get_source.py
CHANGED
@@ -73,7 +73,7 @@ def update_package_info(executable: str, pkg: PackageInfo, source_folder: str):
     )
 
 def get_source(executable: str, pkg: PackageInfo, cnt: int, vststoken=None, githubtoken=None):
-
+    py2docfx_logger = get_logger(__name__)
     path_cnt = str(cnt)
     dist_dir = path.join(DIST_TEMP, path_cnt)
 
@@ -123,7 +123,7 @@ def get_source(executable: str, pkg: PackageInfo, cnt: int, vststoken=None, gith
         )
     else:
         msg = f"Unknown install type: {pkg.install_type}"
-
+        py2docfx_logger.error(msg)
         raise ValueError()
 
     update_package_info(executable, pkg, source_folder)
py2docfx/convert_prepare/git.py
CHANGED
@@ -13,15 +13,15 @@ def clone(repo_location, branch, folder, extra_token=None):
     replacing
     https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/GetPackageCode.ps1&line=50&lineEnd=90&lineStartColumn=1&lineEndColumn=2&lineStyle=plain&_a=contents
     """
-
+    py2docfx_logger = get_logger(__name__)
     msg = "<CI INFO>: Cloning repo: {}...".format(repo_location)
-
+    py2docfx_logger.info(msg)
 
     global repoMap
 
     if not test_url(repo_location, extra_token):
         msg = "Git repo address {} is not a valid URL.".format(repo_location)
-
+        py2docfx_logger.error(msg)
         raise ValueError()
     else:
         # Remove http(s):// from url to record. Further avoid dup-clone.
@@ -62,14 +62,14 @@ def clone(repo_location, branch, folder, extra_token=None):
             msg = "<CI INFO>: Repo {} successfully cloned in {}...".format(
                 repo_location, repoMap[pureURL]
             )
-
+            py2docfx_logger.info(msg)
         else:
             raise ValueError(
                 "Branch {} doesn't exist in repo {}.".format(branch, repo_location)
             )
     else:
         msg = "<CI INFO>: Repo already cloned in {}...".format(repoMap[pureURL])
-
+        py2docfx_logger.info(msg)
 
     return repoMap[pureURL]
 
@@ -78,11 +78,11 @@ def test_url(url, extraHeader):
     """
     Test if the url is a valid git repo url
     """
-
+    py2docfx_logger = get_logger(__name__)
     params = ["git", "ls-remote", url]
     if extraHeader:
         msg = "Using extra header to test git repo url."
-
+        py2docfx_logger.info(msg)
 
         params[1:1] = [
             "-c",
@@ -96,13 +96,13 @@ def test_url(url, extraHeader):
 
 
 def convertBranch(repo_url, branch, extraHeader):
-
+    py2docfx_logger = get_logger(__name__)
     result = branch
     if not branch:
         msg = "Using empty branch of {}, going to use master branch instead.".format(
             repo_url
         )
-
+        py2docfx_logger.info(msg)
         result = "master"
     if result == "master":
         check_params = ["git", "ls-remote", "--heads", repo_url, "refs/heads/main"]
@@ -117,7 +117,7 @@ def convertBranch(repo_url, branch, extraHeader):
         msg = "Using master branch of {}, going to use main branch instead.".format(
             repo_url
         )
-
+        py2docfx_logger.info(msg)
         result = "main"
     return result
 
@@ -129,7 +129,7 @@ def checkout(folder, branch):
     Replacing
     https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/PyCommon.ps1&line=707&lineEnd=763&lineStartColumn=1&lineEndColumn=2&lineStyle=plain&_a=contents
     """
-
+    py2docfx_logger = get_logger(__name__)
     remote = "origin"
 
     if ":" in branch:
@@ -158,7 +158,7 @@ def checkout(folder, branch):
 
     subprocess.run(["git", "-C", folder, "checkout", "--quiet", branch])
     msg = "<CI INFO>: Switched to branch {}.".format(branch)
-
+    py2docfx_logger.info(msg)
 
 
 def commit_and_push(repo_location, folder, extra_token=None):
py2docfx/convert_prepare/package_info.py
CHANGED
@@ -16,7 +16,7 @@ class PackageInfo:
 
     @classmethod
     def report_error(cls, name, value, condition=None):
-
+        py2docfx_logger = get_logger(__name__)
         if condition:
             message = "When {0}, found unexpected property of {1}, loaded value is: {2}".format(
                 condition, name, value
@@ -25,7 +25,7 @@ class PackageInfo:
             message = "Found unexpected property of {0}, loaded value is: {1}".format(
                 name, value
             )
-
+        py2docfx_logger.error(message)
         raise ValueError()
 
     @classmethod
@@ -60,14 +60,14 @@ class PackageInfo:
         package_info.folder = package_info_dict.get("folder", None)
 
         if not package_info.url:
-
+            py2docfx_logger = get_logger(__name__)
             if not package_info.folder:
                 msg = "When install_type is source_code, url or folder should be provided"
-
+                py2docfx_logger.error(msg)
                 raise ValueError()
             else:
                 msg = f'Read source code from local folder: {package_info.folder}'
-
+                py2docfx_logger.info(msg)
 
         prefer_source_distribution = package_info_dict.get(
             "prefer_source_distribution", False
@@ -143,12 +143,12 @@ class PackageInfo:
         return (packageInstallName, pipInstallExtraOptions)
 
     def get_exluded_command(self) -> []:
-
+        py2docfx_logger = get_logger(__name__)
         if hasattr(self, "path"):
             code_location = self.path.source_folder
         else:
             msg = "Should set source code location before build documents"
-
+            py2docfx_logger.error(msg)
             raise ValueError()
         exclude_path = []
         if code_location:
py2docfx/convert_prepare/pip_utils.py
CHANGED
@@ -26,8 +26,8 @@ def download(package_name, path, extra_index_url=None, prefer_source_distributio
     else:
         download_param.append("--prefer-binary")
     output = subprocess.run(download_param, check=True, cwd=PACKAGE_ROOT, capture_output=True, text=True)
-
-    log_subprocess_ouput(output,
+    py2docfx_logger = get_logger(__name__)
+    log_subprocess_ouput(output, py2docfx_logger)
 
 
 def install(package_name, options):
@@ -36,8 +36,8 @@ def install(package_name, options):
         " ".join(pip_install_common_options + options), package_name
     ).split(" ")
     output = subprocess.run(install_param, check=True, cwd=PACKAGE_ROOT, capture_output=True, text=True)
-
-    log_subprocess_ouput(output,
+    py2docfx_logger = get_logger(__name__)
+    log_subprocess_ouput(output, py2docfx_logger)
 
 def install_in_exe(exe_path, package_name, options):
     # Installs a package from PyPI using pip.
@@ -45,8 +45,8 @@ def install_in_exe(exe_path, package_name, options):
         " ".join(pip_install_common_options + options), package_name
     ).split(" ")
     output = subprocess.run(install_param, check=True, cwd=PACKAGE_ROOT, capture_output=True, text=True)
-
-    log_subprocess_ouput(output,
+    py2docfx_logger = get_logger(__name__)
+    log_subprocess_ouput(output, py2docfx_logger)
 
 async def install_in_exe_async(exe_path, package_name, options):
     pip_cmd = ["-m", "pip", "install"]+ pip_install_common_options + options + [package_name]
@@ -60,9 +60,13 @@ async def run_async_subprocess(exe_path, cmd):
         stderr=asyncio.subprocess.PIPE
     )
     stdout, stderr = await process.communicate()
-
+    py2docfx_logger = get_logger(__name__)
     if process.returncode != 0:
-
+        msg = stderr.decode('utf-8')
+        if msg != None and msg != "":
+            py2docfx_logger.error(msg)
         raise RuntimeError()
     else:
-
+        msg = stdout.decode('utf-8')
+        if msg != None and msg != "":
+            py2docfx_logger.info(msg)
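The reworked run_async_subprocess() above decodes both streams after communicate() and routes non-empty stderr to logger.error (before raising) and non-empty stdout to logger.info. A self-contained sketch of the same pattern; run_and_log is illustrative, not an API of the package.

```python
import asyncio

from py2docfx.docfx_yaml.logger import get_logger

async def run_and_log(exe_path: str, cmd: list[str]) -> None:
    # Capture both streams, then log stderr on failure and stdout on success,
    # skipping empty output, following the shape of run_async_subprocess().
    process = await asyncio.create_subprocess_exec(
        exe_path, *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await process.communicate()
    py2docfx_logger = get_logger(__name__)
    if process.returncode != 0:
        msg = stderr.decode('utf-8')
        if msg:
            py2docfx_logger.error(msg)
        raise RuntimeError()
    msg = stdout.decode('utf-8')
    if msg:
        py2docfx_logger.info(msg)
```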
py2docfx/convert_prepare/post_process/merge_toc.py
CHANGED
@@ -8,7 +8,7 @@ TOC_FILE_PATH = "toc.yml"
 PACKAGE_TOC_FILE_PATH = "_build/docfx_yaml/toc.yml"
 def merge_toc(
     root_doc_path: str | os.PathLike, package_doc_path: str | os.PathLike):
-
+    py2docfx_logger = get_logger(__name__)
     root_toc_path = os.path.join(root_doc_path, TOC_FILE_PATH)
     package_toc_path = os.path.join(package_doc_path, PACKAGE_TOC_FILE_PATH)
 
@@ -21,7 +21,7 @@ def merge_toc(
         if not toc_content.endswith("\n"):
             root_toc_handle.write("\n")
     else:
-
+        py2docfx_logger.error(f"TOC content empty: {package_toc_path}")
         raise ValueError()
 
     # delete package toc.yml
py2docfx/convert_prepare/sphinx_caller.py
CHANGED
@@ -5,7 +5,7 @@ import sphinx.ext.apidoc as apidoc
 import subprocess
 
 from py2docfx import PACKAGE_ROOT
-from py2docfx.docfx_yaml.logger import
+from py2docfx.docfx_yaml.logger import get_package_logger,log_subprocess_ouput
 from py2docfx.convert_prepare.package_info import PackageInfo
 from py2docfx.convert_prepare.paths import folder_is_hidden
 from py2docfx.convert_prepare.subpackage import (get_subpackages,
@@ -20,7 +20,7 @@ def run_apidoc(rst_path, source_code_path, exclude_paths, package_info: PackageI
     Replacing
     https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/build.ps1&line=110&lineEnd=126&lineStartColumn=1&lineEndColumn=14&lineStyle=plain&_a=contents
     """
-
+    py2docfx_logger = get_package_logger(__name__)
     subfolderList = [name for name in
                      os.listdir(source_code_path)
                      if os.path.isdir(os.path.join(source_code_path, name))
@@ -31,7 +31,7 @@ def run_apidoc(rst_path, source_code_path, exclude_paths, package_info: PackageI
         subfolderPath = os.path.join(source_code_path, subfolder)
         if os.path.isdir(subfolderPath):
             msg = "<CI INFO>: Subfolder path {}.".format(subfolderPath)
-
+            py2docfx_logger.info(msg)
             if os.environ.get('LOG_LEVEL') == 'INFO':
                 args = [
                     "--module-first",
@@ -71,7 +71,7 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
     Replacing
     https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/build.ps1&line=150&lineEnd=161&lineStartColumn=13&lineEndColumn=52&lineStyle=plain&_a=contents
     """
-
+    py2docfx_logger = get_package_logger(__name__)
     outdir = os.path.join(out_path, "_build")
 
     # Sphinx/docutils have memory leak including linecaches, module-import-caches,
@@ -79,7 +79,7 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
 
     if not sys.executable:
         msg = "Can't get the executable binary for the Python interpreter."
-
+        py2docfx_logger.error(msg)
         raise ValueError()
     sphinx_param = [
         executable,
@@ -100,5 +100,5 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
     else:
         env_tmp["PYTHONPATH"] = f"{extra_package_path}:{package_root_parent}:"
     output = subprocess.run(sphinx_param, check=True, cwd=PACKAGE_ROOT, env=env_tmp, capture_output=True ,text=True)
-    log_subprocess_ouput(output,
+    log_subprocess_ouput(output, py2docfx_logger)
     return outdir
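run_apidoc() and run_converter() now pass their own get_package_logger(__name__) instance to log_subprocess_ouput(), which writes the captured output to the per-package log file under logs/package_logs/ and raises RuntimeError on a non-zero return code. A short usage sketch of that pairing; the command and package name below are placeholders.

```python
import os
import subprocess

from py2docfx.docfx_yaml.logger import get_package_logger, log_subprocess_ouput

# The package logger derives its file name from this environment variable,
# which __main__.py sets per package; "dummy-package" is a placeholder.
os.environ['PROCESSING_PACKAGE_NAME'] = 'dummy-package'
py2docfx_logger = get_package_logger(__name__)

output = subprocess.run(
    ["python", "--version"],  # placeholder command
    check=True, capture_output=True, text=True,
)
# Forwards the captured stdout/stderr lines to the package log and raises
# RuntimeError if the subprocess reported a non-zero return code.
log_subprocess_ouput(output, py2docfx_logger)
```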
py2docfx/convert_prepare/tests/test_generate_document.py
CHANGED
@@ -10,14 +10,12 @@ import sphinx.cmd.build
 from py2docfx.convert_prepare.generate_document import generate_document
 from py2docfx.convert_prepare.package_info import PackageInfo
 from py2docfx.convert_prepare.source import Source
-from py2docfx.convert_prepare.tests.utils import prepare_log_folder
 
 def test_generate_document(tmp_path):
     """
     Test the generate_document function.
     """
     # init test case
-    prepare_log_folder()
     source_code_path = os.path.join("convert_prepare", "tests", "data", "generate_document")
     output_root = os.path.join(tmp_path, "output")
     shutil.copytree(source_code_path, os.path.join(tmp_path, "source", "0"))
py2docfx/convert_prepare/tests/test_sphinx_caller.py
CHANGED
@@ -7,7 +7,6 @@ import sphinx.cmd.build
 from py2docfx.convert_prepare.sphinx_caller import run_apidoc, run_converter
 from py2docfx.convert_prepare.package_info import PackageInfo
 from py2docfx.convert_prepare.source import Source
-from py2docfx.convert_prepare.tests.utils import prepare_log_folder
 
 package_info = PackageInfo()
 
@@ -37,7 +36,6 @@ def test_run_apidoc(tmp_path):
 
 
 def test_run_converter(tmp_path):
-    prepare_log_folder()
     rst_path, source_code_path = init_paths(tmp_path)
     run_apidoc(rst_path, source_code_path, package_info.get_exluded_command(), package_info)
 
py2docfx/docfx_yaml/build_finished.py
CHANGED
@@ -92,7 +92,7 @@ def add_isrequired_if_needed(obj, key: str):
         args['isRequired'] = True
 
 def get_merged_params(obj, info_field_data, key: str):
-
+    py2docfx_logger = get_package_logger(__name__)
     merged_params = []
     arg_params = obj.get('syntax', {}).get(key, [])
     if key in info_field_data[obj['uid']]:
@@ -101,7 +101,7 @@ def get_merged_params(obj, info_field_data, key: str):
     if arg_params and doc_params:
         if len(arg_params) - len(doc_params) > 0:
             msg = f'Documented params don\'t match size of params:{obj["uid"]}' # CodeQL: [py/clear-text-logging-sensitive-data] There is no sensitive data in the print statement.
-
+            py2docfx_logger.warning(msg)
         doc_params = remove_params_without_id(doc_params)
         merged_params = merge_params(arg_params, doc_params)
     else:
@@ -186,7 +186,7 @@ def build_finished(app, exception):
     Output YAML on the file system.
     """
 
-
+    py2docfx_logger = get_package_logger(__name__)
 
     def convert_class_to_enum_if_needed(obj):
         if (obj.get('inheritance'), None):
@@ -305,7 +305,7 @@ def build_finished(app, exception):
 
         if transformed_obj == None:
             msg = f"Unknown yml, uid is: {uid}"
-
+            py2docfx_logger.warning(msg)
         else:
             # save file
             common.write_yaml(transformed_obj, out_file, mime)
@@ -314,7 +314,7 @@ def build_finished(app, exception):
     # Write TOC, the toc should include at least 1
     if len(toc_yaml) == 0:
         msg = "No documentation for this module."
-
+        py2docfx_logger.error(msg)
         raise RuntimeError()
 
     toc_file = os.path.join(normalized_outdir, 'toc.yml')
py2docfx/docfx_yaml/convert_class.py
CHANGED
@@ -6,7 +6,7 @@ from common import remove_empty_values, parse_references, convert_member, conver
 from logger import get_package_logger
 
 def convert_class(obj):
-
+    py2docfx_logger = get_package_logger(__name__)
     record = {}
     reference_mapping = {}
     old_class_object = {}
@@ -50,5 +50,5 @@ def convert_class(obj):
         new_class_object['attributes'] = list(map(convert_member_partial, attributes))
 
     msg = "class: " + new_class_object['uid'] # CodeQL: [py/clear-text-logging-sensitive-data] There is no sensitive data in the print statement.
-
+    py2docfx_logger.info(msg)
     return remove_empty_values(new_class_object)
py2docfx/docfx_yaml/convert_enum.py
CHANGED
@@ -7,7 +7,7 @@ def convert_enum(obj):
 
     :param obj: The object is generated from the Enum yaml file
     '''
-
+    py2docfx_logger = get_package_logger(__name__)
     record = {}
     reference_mapping = {}
     old_enum_object = {}
@@ -45,7 +45,7 @@ def convert_enum(obj):
         new_enum_object['fields'] = list(map(convert_fields, fields))
 
     msg = "enum: " + new_enum_object['uid'] # CodeQL: [py/clear-text-logging-sensitive-data] There is no sensitive data in the print statement.
-
+    py2docfx_logger.info(msg)
     return remove_empty_values(new_enum_object)
 
 
py2docfx/docfx_yaml/convert_module.py
CHANGED
@@ -6,7 +6,7 @@ from common import remove_empty_values, parse_references, convert_member
 from logger import get_package_logger
 
 def convert_module(obj, uid_type_mapping):
-
+    py2docfx_logger = get_package_logger(__name__)
     record = {}
     reference_mapping = {}
     old_object = {}
@@ -56,5 +56,5 @@ def convert_module(obj, uid_type_mapping):
 
     toreturn = remove_empty_values(new_object)
     msg = "module: " + toreturn['uid'] # CodeQL: [py/clear-text-logging-sensitive-data] There is no sensitive data in the print statement.
-
+    py2docfx_logger.info(msg)
     return toreturn
py2docfx/docfx_yaml/convert_package.py
CHANGED
@@ -6,7 +6,7 @@ from common import remove_empty_values, parse_references, convert_member
 from logger import get_package_logger
 
 def convert_package(obj, uid_type_mapping):
-
+    py2docfx_logger = get_package_logger(__name__)
     record = {}
     reference_mapping = {}
     old_object = {}
@@ -51,5 +51,5 @@ def convert_package(obj, uid_type_mapping):
 
     toreturn = remove_empty_values(new_object)
     msg = "package: " + toreturn['uid'] # CodeQL: [py/clear-text-logging-sensitive-data] There is no sensitive data in the print statement.
-
+    py2docfx_logger.info(msg)
     return toreturn
py2docfx/docfx_yaml/logger.py
CHANGED
@@ -10,24 +10,32 @@ def get_log_level():
 
     return logging.ERROR
 
+def check_log_dir_exists(log_dir_path):
+    if not os.path.exists(log_dir_path):
+        os.makedirs(log_dir_path)
+
+def check_log_file_exists(log_file_path):
+    # create log file if it doesn't exist
+    if not os.path.exists(log_file_path):
+        with open(log_file_path, 'w') as f:
+            f.write('')
+
 def setup_log_handlers(logger, log_file_name):
+    check_log_file_exists(log_file_name)
     file_handler = logging.FileHandler(filename=log_file_name, mode='a')
-    file_handler.setFormatter(logging.Formatter('%(
-
-    stream_handeler = logging.StreamHandler()
-    stream_handeler.setLevel(get_log_level())
+    file_handler.setFormatter(logging.Formatter('%(levelname)s - %(name)s - %(message)s'))
 
     logger.addHandler(file_handler)
-    logger.addHandler(stream_handeler)
 
     return logger
 
 def get_logger(logger_name: str):
-
+    log_folder_path = os.path.join("logs")
+    file_name = os.path.join(log_folder_path, "log.txt")
 
     file_logger = logging.getLogger(logger_name)
     file_logger.setLevel(logging.INFO)
-
+    check_log_dir_exists(log_folder_path)
     if file_logger.hasHandlers():
         return file_logger
 
@@ -36,12 +44,13 @@ def get_logger(logger_name: str):
     return file_logger
 
 def get_package_logger(logger_name:str):
+    log_folder_path = os.path.join("logs", "package_logs")
     package_name = os.environ.get('PROCESSING_PACKAGE_NAME')
-    file_name = os.path.join(
+    file_name = os.path.join(log_folder_path, f"{package_name}.txt")
 
     file_logger = logging.getLogger(logger_name)
     file_logger.setLevel(logging.INFO)
-
+    check_log_dir_exists(log_folder_path)
     if file_logger.hasHandlers():
         return file_logger
 
@@ -57,12 +66,70 @@ def log_subprocess_ouput(subprocess_out: subprocess.CompletedProcess, logger: lo
     for msg in msgs:
         if msg is None or msg == "":
             continue
-
-            logger.warning(msg)
-        elif "ERROR:" in msg:
-            logger.error(msg)
-        else:
-            logger.info(msg)
+        logger.warning(msg)
     if subprocess_out.returncode != 0:
         logger.error(f"Subprocess failed with return code {subprocess_out.returncode}")
         raise RuntimeError()
+
+def counts_errors_warnings(log_file_path):
+    error_count = 0
+    warning_count = 0
+    with open(log_file_path, 'r', encoding='utf-8') as f:
+        lines = f.readlines()
+        for line in lines:
+            if line.startswith("ERROR -"):
+                error_count += 1
+            elif line.startswith("WARNING -"):
+                warning_count += 1
+    return warning_count, error_count
+
+def get_warning_error_count():
+    main_log_file_path = os.path.join("logs", "log.txt")
+    warning_count, error_count = counts_errors_warnings(main_log_file_path)
+
+    log_folder_path = os.path.join("logs", "package_logs")
+    for log_file in os.listdir(log_folder_path):
+        log_file_path = os.path.join(log_folder_path, log_file)
+        warnings, errors = counts_errors_warnings(log_file_path)
+        warning_count += warnings
+        error_count += errors
+
+    return warning_count, error_count
+
+def parse_log(log_file_path):
+    # reads the log file and parse it into a list of dictionaries
+    # the dictionary has the keys: 'name', 'level', 'message'
+    log_list = []
+    with open(log_file_path, 'r', encoding='utf-8') as f:
+        lines = f.readlines()
+        for line in lines:
+            if line.startswith("ERROR -"):
+                level = logging.INFO
+            elif line.startswith("WARNING -"):
+                level = logging.WARNING
+            elif line.startswith("INFO -"):
+                level = logging.INFO
+            else:
+                # put everything to the previous message
+                log_list[-1]['message'] += line
+                continue
+            temp_info = line.split(" - ")
+            logger_name = temp_info[1]
+            message = temp_info[2]
+            log_list.append({'name': logger_name, 'level': level, 'message': message})
+    return log_list
+
+def print_out_log_by_log_level(log_list, log_level):
+    for log in log_list:
+        if log['level'] >= log_level and log['message'] not in ['', '\n', '\r\n']:
+            print(log['message'])
+
+def output_log_by_log_level():
+    log_level = get_log_level()
+    main_log_file_path = os.path.join("logs", "log.txt")
+    print_out_log_by_log_level(parse_log(main_log_file_path), log_level)
+
+    package_logs_folder = os.path.join("logs", "package_logs")
+    for log_file in os.listdir(package_logs_folder):
+        log_file_path = os.path.join(package_logs_folder, log_file)
+        print_out_log_by_log_level(parse_log(log_file_path), log_level)
py2docfx/docfx_yaml/process_doctree.py
CHANGED
@@ -182,7 +182,7 @@ def _create_datam(app, cls, module, name, _type, obj, lines=None):
     """
     Build the data structure for an autodoc class
     """
-
+    py2docfx_logger = get_package_logger(__name__)
     if lines is None:
         lines = []
     short_name = name.split('.')[-1]
@@ -206,7 +206,7 @@ def _create_datam(app, cls, module, name, _type, obj, lines=None):
 
     except Exception as e:
         msg = "Can't get argspec for {}: {}. Exception: {}".format(type(obj), name, e)
-
+        py2docfx_logger.warning(msg)
 
     datam = {
         'module': module if module else None,
@@ -390,7 +390,7 @@ def process_docstring(app, _type, name, obj, options, lines):
     This function takes the docstring and indexes it into memory.
     """
     # Use exception as class
-
+    py2docfx_logger = get_package_logger(__name__)
     def check_convert_package_type(obj, _type):
         if _type == MODULE:
             filename = getattr(obj, '__file__', None)
@@ -411,7 +411,7 @@ def process_docstring(app, _type, name, obj, options, lines):
     cls, module = _get_cls_module(_type, name)
 
     if _type != PACKAGE and not module:
-
+        py2docfx_logger.warning('Unknown Type: %s' % _type)
         return None
 
     if app.config.__contains__('autoclass_content') and app.config.autoclass_content.lower() == 'both':
|
@@ -28,7 +28,7 @@ types_contain_attributes = {CLASS_TYPE, EXCEPTION_TYPE}
|
|
28
28
|
|
29
29
|
def translator(app, docname, doctree):
|
30
30
|
|
31
|
-
|
31
|
+
py2docfx_logger = get_package_logger(__name__)
|
32
32
|
transform_node = app.docfx_transform_node
|
33
33
|
|
34
34
|
class_obj_cache = app.env.domains['py'].objects
|
@@ -58,14 +58,14 @@ def translator(app, docname, doctree):
|
|
58
58
|
assert node.tagname == 'desc'
|
59
59
|
if node.attributes['domain'] != 'py':
|
60
60
|
msg = str('Skipping Domain Object (%s)' % node.attributes['domain'])
|
61
|
-
|
61
|
+
py2docfx_logger.info(msg)
|
62
62
|
return None, None
|
63
63
|
|
64
64
|
try:
|
65
65
|
module = node[0].attributes['module']
|
66
66
|
full_name = node[0].attributes['fullname']
|
67
67
|
except KeyError as e:
|
68
|
-
|
68
|
+
py2docfx_logger.error("There maybe some syntax error in docstring near: " + node.astext())
|
69
69
|
raise e
|
70
70
|
|
71
71
|
uid = '{module}.{full_name}'.format(module=module, full_name=full_name)
|
py2docfx/docfx_yaml/writer.py
CHANGED
@@ -604,7 +604,7 @@ class MarkdownTranslator(nodes.NodeVisitor):
         except ValueError as e:
             msg = "Image not found where expected {}".format(
                 node.attributes['uri'])
-
+            py2docfx_logger.warning(msg)
             raise nodes.SkipNode
         image_name = ''.join(image_name.split())
         self.new_state(0)
@@ -1085,10 +1085,10 @@ class MarkdownTranslator(nodes.NodeVisitor):
         self.add_text('<<')
 
     def visit_system_message(self, node):
-
+        py2docfx_logger = get_package_logger(__name__)
         msg = str(bcolors.WARNING + "System message warnings: %s" %
                   self.replace_special_unicode(node.astext()) + bcolors.ENDC)
-
+        py2docfx_logger.warning(msg)
         raise nodes.SkipNode
 
     def visit_comment(self, node):
@@ -1106,14 +1106,14 @@ class MarkdownTranslator(nodes.NodeVisitor):
         raise nodes.SkipNode
 
     def visit_math(self, node):
-
+        py2docfx_logger = get_package_logger(__name__)
         msg = str(bcolors.WARNING +
                   ('using "math" markup without a Sphinx math extension '
                    'active, please use one of the math extensions '
                    'described at http://sphinx-doc.org/ext/math.html. Source_path is: {}; Line is: {}').format(
                        node.source if node.source else node.parent.source,
                        node.line if node.line else node.parent.line))
-
+        py2docfx_logger.warning(msg)
         raise nodes.SkipNode
 
     visit_math_block = visit_math
{py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2docfx
-Version: 0.1.9.dev1926139
+Version: 0.1.9.dev1927662
 Summary: A package built based on Sphinx which download source code package and generate yaml files supported by docfx.
 Author: Microsoft Corporation
 License: MIT License
{py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/RECORD
CHANGED
@@ -1,40 +1,39 @@
 py2docfx/__init__.py,sha256=kPRhPGPC1JknDotkksG428c1iIgfFr_4_7Jm-llrowY,72
-py2docfx/__main__.py,sha256=
+py2docfx/__main__.py,sha256=UvFVxhJNQCn9EHyDfHPwDqJrlqGpoadFVIvDavaYpiI,14456
 py2docfx/convert_prepare/__init__.py,sha256=XxtxrP0kmW3ZBHIAoxsPDEHzcgeC0WSnole8Lk6CjKs,11
-py2docfx/convert_prepare/constants.py,sha256=
-py2docfx/convert_prepare/environment.py,sha256=
+py2docfx/convert_prepare/constants.py,sha256=RC5DqNkqWvx4hb91FrajZ1R9dBFLxcPyoEJ43jdm36E,102
+py2docfx/convert_prepare/environment.py,sha256=7EmlodXVG0S5wnn0_VFPwPJ_3OHfJfGpN33SUL3HSRI,7233
 py2docfx/convert_prepare/generate_conf.py,sha256=wqs6iyElzJarH-20_qEL9zvZvt5xfBMsGXSXPSZy6wg,2295
-py2docfx/convert_prepare/generate_document.py,sha256=
-py2docfx/convert_prepare/get_source.py,sha256=
-py2docfx/convert_prepare/git.py,sha256=
+py2docfx/convert_prepare/generate_document.py,sha256=mNfJpLViIfzwRnjVwXmg5H5lVWzKJkUHeXTiJouA39o,2879
+py2docfx/convert_prepare/get_source.py,sha256=6EMSLiDOfid4d7SDxweV_-TGV4McxhSxKQPJju2Mi4Y,5221
+py2docfx/convert_prepare/git.py,sha256=AFCot6xJnZwPGkRsXwZl_EnSG60P5r_84lvSGp7LtB0,6263
 py2docfx/convert_prepare/install_package.py,sha256=hATmgazcSX7k2n4jQXh9sQMyNUc1k1YqHv5K5UMALq4,262
 py2docfx/convert_prepare/pack.py,sha256=bptweiUnlk_jtu1FxgqlqvHVOCfvoeAqemxli6ep0-o,1231
-py2docfx/convert_prepare/package_info.py,sha256=
+py2docfx/convert_prepare/package_info.py,sha256=2fDyalHtxhZQICehDK4zv7k69HE427c1MLEuHVnPzBU,7624
 py2docfx/convert_prepare/package_info_extra_settings.py,sha256=u5B5e8hc0m9PA_-0kJzq1LtKn-xzZlucwXHTFy49mDg,1475
 py2docfx/convert_prepare/params.py,sha256=PXMB8pLtb4XbfI322avA47q0AO-TyBE6kZf7FU8I6v4,1771
 py2docfx/convert_prepare/paths.py,sha256=964RX81Qf__rzXgEATfqBNFCKTYVjLt9J7WCz2TnNdc,485
-py2docfx/convert_prepare/pip_utils.py,sha256=
+py2docfx/convert_prepare/pip_utils.py,sha256=SGWS07Rn0sWlZsr_fdGIqLFHsgma4UvsghjzkNogFkc,2930
 py2docfx/convert_prepare/repo_info.py,sha256=6ASJlhBwf6vZTSENgrWCVlJjlJVhuBxzdQyWEdWAC4c,117
 py2docfx/convert_prepare/source.py,sha256=6-A7oof3-WAQcQZZVpT9pKiFLH4CCIZeYqq0MN0O3gw,1710
-py2docfx/convert_prepare/sphinx_caller.py,sha256=
+py2docfx/convert_prepare/sphinx_caller.py,sha256=KM5SanFk67Kkd3oVKzNN-5KCAAiyNUUgnJl5mDs-3kw,4439
 py2docfx/convert_prepare/subpackage.py,sha256=mXAi_07pXvnPkSLZfykDh_7VeFxfLy74pYlzhMO8N_Q,5183
-py2docfx/convert_prepare/conf_templates/conf.py_t,sha256=
+py2docfx/convert_prepare/conf_templates/conf.py_t,sha256=8zxvY1WiG-z2aiSNDY0719C08QxZLXXEMwKfYSGN0ZE,3811
 py2docfx/convert_prepare/conf_templates/root_doc.rst_t,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-py2docfx/convert_prepare/post_process/merge_toc.py,sha256=
+py2docfx/convert_prepare/post_process/merge_toc.py,sha256=DPUvL6eS0Q4gcp_fdGgWQJeYmt0sa9voWnVe0O-2giM,1265
 py2docfx/convert_prepare/subpackage_merge/merge_root_package.py,sha256=uK96qL2asuSfo_3SZaoP8XZaUvjf5mNkr17JNbZR4Lg,1026
 py2docfx/convert_prepare/subpackage_merge/merge_toc.py,sha256=nkVqe8R0m8D6cyTYV7aIpMDXorvn4-LXfU_vIK_hJBg,1706
 py2docfx/convert_prepare/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/convert_prepare/tests/test_environment.py,sha256=1jONzb7saZN46h1GVI1jAvO8ws-UrjiQzl7bGEAvZBM,2131
-py2docfx/convert_prepare/tests/test_generate_document.py,sha256=
+py2docfx/convert_prepare/tests/test_generate_document.py,sha256=Llt3szDunwmFKhHxc8fKuov_ci5rQokMcOgu5QNqt0U,2572
 py2docfx/convert_prepare/tests/test_get_source.py,sha256=TXaIPjKyA-KJopQBp8msUubT-rJLaq3llf1upfa10RM,7709
 py2docfx/convert_prepare/tests/test_pack.py,sha256=46JWMNzknIptDVs7D3CuxcmqBr_OKMmaw1br9H7wqco,4134
 py2docfx/convert_prepare/tests/test_package_info.py,sha256=hdNpAH9hlLF-cX9sKAu3cmiCtphX4USy9G_gWI_iaHo,3883
 py2docfx/convert_prepare/tests/test_params.py,sha256=x-UXun7mz4DA8zyHQCim6bTPdW4SUxxne13vn7EjJKw,2054
 py2docfx/convert_prepare/tests/test_post_process_merge_toc.py,sha256=YKOcn4_lf4syGsAvJ9BqpdUUc3SLfK4TiOX1lpXJT_Y,885
 py2docfx/convert_prepare/tests/test_source.py,sha256=LNFZtvjz6QhVLOxatjWokYCCcoSm0bhTikMF9KoTPIE,2025
-py2docfx/convert_prepare/tests/test_sphinx_caller.py,sha256=
+py2docfx/convert_prepare/tests/test_sphinx_caller.py,sha256=1ZR3uoNl1b_cpVRBcoqVpYry6tT4gNhYX_r5OpGC36M,2612
 py2docfx/convert_prepare/tests/test_subpackage.py,sha256=n0lCcdrTE1gkmmfGE85tSBMlpOEBszZafaHXffxo3Oc,4982
-py2docfx/convert_prepare/tests/utils.py,sha256=jdOQO2pHMwcksKYaK-2bQh-s6l4n8AxwonXBgvhpwrI,315
 py2docfx/convert_prepare/tests/data/generate_document/azure-dummy-sourcecode/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/convert_prepare/tests/data/generate_document/azure-dummy-sourcecode/azure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/convert_prepare/tests/data/generate_document/azure-dummy-sourcecode/azure/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -56,25 +55,25 @@ py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure
 py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/docfx_yaml/__init__.py,sha256=KCEizAXv-SXtrYhvFfLHdBWDhz51AA9uagaeTL-Itpo,100
-py2docfx/docfx_yaml/build_finished.py,sha256=
+py2docfx/docfx_yaml/build_finished.py,sha256=BPsb8YiJjIwiUDwVpvsHjFjXnND2zwhM--LNsDwtrsM,13823
 py2docfx/docfx_yaml/build_init.py,sha256=lAw-fnBVQbySfZ7Sut_NpFQUjnqLOmnGQrTBBH2RXcg,1860
 py2docfx/docfx_yaml/common.py,sha256=UN1MUmjUoN1QSFDR1Cm_bfRuHr6FQiOe5VQV6s8xzjc,6841
-py2docfx/docfx_yaml/convert_class.py,sha256=
-py2docfx/docfx_yaml/convert_enum.py,sha256=
-py2docfx/docfx_yaml/convert_module.py,sha256=
-py2docfx/docfx_yaml/convert_package.py,sha256=
+py2docfx/docfx_yaml/convert_class.py,sha256=_s-NA1Iz4Pjnnxr5t_sD7VPXxbA011WEYn-LaEzb75g,2269
+py2docfx/docfx_yaml/convert_enum.py,sha256=QH0OkZq8pFi_nAXem6ddkO_7TSsoY2tz7_wxIwUjegI,2192
+py2docfx/docfx_yaml/convert_module.py,sha256=GptO1MRwaQ2Qbu724F0kCDDQQTZe7mWOtrOp3Rzgl-I,2259
+py2docfx/docfx_yaml/convert_package.py,sha256=Ep7PmvoLInDvY6OU5dveR6iVwyzGRkW3q6lX7yGJ0JE,2109
 py2docfx/docfx_yaml/directives.py,sha256=zVVuNM_6AU9G6sbqL1UAyHHgPe7bkBWbthXI-PO5ez0,879
-py2docfx/docfx_yaml/logger.py,sha256=
+py2docfx/docfx_yaml/logger.py,sha256=_XZRN43HfFhoi-1Kk0VztAUlzKRe-wuT4W85GAERRnE,4950
 py2docfx/docfx_yaml/miss_reference.py,sha256=Btoj9wAvA4u_wU7JHH0Cei3910N8a7MS34OUqJvXAd4,2443
 py2docfx/docfx_yaml/nodes.py,sha256=tBDi35jLJArlobl07DKOkmH2qz7dudXLp_kTUfR_r2w,412
 py2docfx/docfx_yaml/parameter_utils.py,sha256=zGSIQrUfbXf9PUK-W_1K83Uo5Zk797Zlze6aMurbHIA,8706
-py2docfx/docfx_yaml/process_doctree.py,sha256=
+py2docfx/docfx_yaml/process_doctree.py,sha256=n2D-DCR7JmqVXm0oeuYZVnHIaHJZOegM8-kHXXx8IZ4,17857
 py2docfx/docfx_yaml/return_type_utils.py,sha256=nmdCUOvwdYk2jF6RqmOvU6gjXmXUTPUeCqyHPdKZNUQ,7483
 py2docfx/docfx_yaml/settings.py,sha256=JQZNwFebczl-zn8Yk2taAGANRi-Hw8hywtDWxqXXFyQ,373
-py2docfx/docfx_yaml/translator.py,sha256=
+py2docfx/docfx_yaml/translator.py,sha256=LSzNl4C-07bLbUZ5myfyWwh25cTNIIBih77Cp4tBWvo,25999
 py2docfx/docfx_yaml/utils.py,sha256=m5jC_qP2NKqzUx_z0zgZ-HAmxQdNTpJYKkL_F9vGeII,1555
 py2docfx/docfx_yaml/write_utils.py,sha256=q5qoYWw6GVDV8a3E8IxcSLWnN9sAer42VFRgadHBkgk,305
-py2docfx/docfx_yaml/writer.py,sha256=
+py2docfx/docfx_yaml/writer.py,sha256=rB_mwwCJfDNATKGHKnHBzWqxaOGfIHOTtJ_f_qsGB90,35313
 py2docfx/docfx_yaml/yaml_builder.py,sha256=S3xty_ILxEUsw1J9VCwUkSLLYAUfQDm3fYbciv70gXc,2573
 py2docfx/docfx_yaml/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/docfx_yaml/tests/conftest.py,sha256=CykkZxaDZ-3a1EIQdGBieSmHL9FdyTE2xTJZe9QgKcg,1214
@@ -4177,7 +4176,7 @@ py2docfx/venv/venv1/Lib/site-packages/win32comext/taskscheduler/test/test_addtas
 py2docfx/venv/venv1/Lib/site-packages/win32comext/taskscheduler/test/test_localsystem.py,sha256=08ojAS48W6RLsUbRD45j0SJhg_Y2NFHZT6qjT4Vrig0,75
 py2docfx/venv/venv1/Scripts/pywin32_postinstall.py,sha256=u95n7QQUxpCjrZistYE-3gN451zXzopuJna8cXRQ4Jw,28115
 py2docfx/venv/venv1/Scripts/pywin32_testall.py,sha256=-6yvZmd2lPQc4e8i6PgLsr_totF6mScvoq0Jqr0V2fM,3844
-py2docfx-0.1.9.
-py2docfx-0.1.9.
-py2docfx-0.1.9.
-py2docfx-0.1.9.
+py2docfx-0.1.9.dev1927662.dist-info/METADATA,sha256=P_XMnj_2hUmGQfQ2Q0YaydAkyKbWiJR-ADOLE67CEdA,600
+py2docfx-0.1.9.dev1927662.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+py2docfx-0.1.9.dev1927662.dist-info/top_level.txt,sha256=5dH2uP81dczt_qQJ38wiZ-gzoVWasfiJALWRSjdbnYU,9
+py2docfx-0.1.9.dev1927662.dist-info/RECORD,,
py2docfx/convert_prepare/tests/utils.py
DELETED
@@ -1,11 +0,0 @@
-import os
-import shutil
-
-def prepare_log_folder() -> None:
-    if not os.path.exists('logs'):
-        os.makedirs('logs')
-        os.makedirs(os.path.join('logs', 'package_logs'))
-    else:
-        shutil.rmtree('logs')
-        os.makedirs('logs')
-        os.makedirs(os.path.join('logs', 'package_logs'))
{py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/WHEEL
File without changes
{py2docfx-0.1.9.dev1926139.dist-info → py2docfx-0.1.9.dev1927662.dist-info}/top_level.txt
File without changes