py2docfx 0.1.11.dev1985872__py3-none-any.whl → 0.1.11.dev1989123__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. py2docfx/__main__.py +13 -8
  2. py2docfx/convert_prepare/environment.py +1 -1
  3. py2docfx/convert_prepare/generate_document.py +5 -5
  4. py2docfx/convert_prepare/get_source.py +2 -2
  5. py2docfx/convert_prepare/pack.py +4 -4
  6. py2docfx/convert_prepare/sphinx_caller.py +34 -8
  7. py2docfx/convert_prepare/tests/test_environment.py +0 -2
  8. py2docfx/convert_prepare/tests/test_generate_document.py +4 -2
  9. py2docfx/convert_prepare/tests/test_get_source.py +1 -0
  10. py2docfx/convert_prepare/tests/test_pack.py +3 -1
  11. py2docfx/convert_prepare/tests/test_params.py +0 -1
  12. py2docfx/convert_prepare/tests/test_sphinx_caller.py +8 -6
  13. py2docfx/convert_prepare/tests/test_subpackage.py +1 -0
  14. py2docfx/docfx_yaml/build_finished.py +1 -1
  15. py2docfx/docfx_yaml/logger.py +12 -11
  16. py2docfx/docfx_yaml/tests/roots/test-writer-uri/code_with_uri.py +0 -7
  17. py2docfx/docfx_yaml/tests/test_writer_uri.py +0 -4
  18. py2docfx/docfx_yaml/writer.py +1 -13
  19. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/authorization_code.py +1 -1
  20. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azd_cli.py +20 -14
  21. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azure_arc.py +1 -1
  22. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azure_cli.py +36 -14
  23. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azure_powershell.py +1 -1
  24. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/chained.py +2 -2
  25. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/default.py +4 -3
  26. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/imds.py +2 -2
  27. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/managed_identity.py +1 -1
  28. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/__init__.py +2 -0
  29. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/auth_code_redirect_handler.py +1 -1
  30. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/decorators.py +15 -7
  31. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/interactive.py +1 -1
  32. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/managed_identity_client.py +0 -1
  33. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/msal_client.py +1 -1
  34. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/msal_managed_identity_client.py +2 -1
  35. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/shared_token_cache.py +3 -3
  36. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/utils.py +17 -2
  37. py2docfx/venv/venv1/Lib/site-packages/azure/identity/_version.py +1 -1
  38. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/azd_cli.py +14 -11
  39. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/azure_cli.py +30 -12
  40. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/default.py +2 -2
  41. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/imds.py +3 -3
  42. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/managed_identity.py +1 -1
  43. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_internal/decorators.py +15 -7
  44. py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_internal/managed_identity_client.py +1 -1
  45. py2docfx/venv/venv1/Lib/site-packages/cryptography/__about__.py +1 -1
  46. {py2docfx-0.1.11.dev1985872.dist-info → py2docfx-0.1.11.dev1989123.dist-info}/METADATA +1 -1
  47. {py2docfx-0.1.11.dev1985872.dist-info → py2docfx-0.1.11.dev1989123.dist-info}/RECORD +49 -49
  48. {py2docfx-0.1.11.dev1985872.dist-info → py2docfx-0.1.11.dev1989123.dist-info}/WHEEL +0 -0
  49. {py2docfx-0.1.11.dev1985872.dist-info → py2docfx-0.1.11.dev1989123.dist-info}/top_level.txt +0 -0
py2docfx/__main__.py CHANGED
@@ -38,7 +38,7 @@ async def donwload_package_generate_documents(
  for idx, package in enumerate(package_info_list):
  os.environ['PROCESSING_PACKAGE_NAME'] = package.name
  package_number = start_num + idx
- py2docfx_logger = py2docfxLogger.get_package_logger(__name__)
+ py2docfx_logger = py2docfxLogger.get_logger(__name__)
  msg = f"Processing package {package.name}, env_prepare_tasks: {len(env_prepare_tasks)}"
  py2docfx_logger.info(msg)

@@ -49,9 +49,9 @@ async def donwload_package_generate_documents(
  py2docfx_logger.error(msg)
  raise

- generate_document(package, output_root,
+ await generate_document(package, output_root,
  py2docfxEnvironment.get_base_venv_sphinx_build_path(),
- py2docfxEnvironment.get_venv_package_path(idx),
+ py2docfxEnvironment.get_venv_package_path(idx),
  py2docfxEnvironment.get_base_venv_exe())

  merge_toc(YAML_OUTPUT_ROOT, package.path.yaml_output_folder)
@@ -80,7 +80,7 @@ async def donwload_package_generate_documents(
  msg = f"Removing venv {idx-py2docfxEnvironment.VENV_BUFFER}"
  py2docfx_logger.info(msg)
  await env_remove_tasks[idx-py2docfxEnvironment.VENV_BUFFER]
-
+
  if output_doc_folder:
  move_root_toc_to_target(YAML_OUTPUT_ROOT, output_doc_folder)

@@ -88,6 +88,12 @@ async def donwload_package_generate_documents(
  if env_remove_tasks[idx] != None and not env_remove_tasks[idx].done():
  await env_remove_tasks[idx]

+ def fishish_up():
+ warning_count, error_count = py2docfxLogger.get_warning_error_count()
+ py2docfxLogger.output_log_by_log_level()
+ print(f"Warning count: {warning_count}, Error count: {error_count}")
+ logging.shutdown()
+
  def main(argv) -> int:
  # TODO: may need to purge pip cache
  (package_info_list,
@@ -116,12 +122,11 @@ def main(argv) -> int:
  except Exception as e:
  msg = f"An error occurred: {e}"
  py2docfx_logger.error(msg)
+ fishish_up()
+ # asyncio.get_event_loop().stop()
  raise

- warning_count, error_count = py2docfxLogger.get_warning_error_count()
- py2docfxLogger.output_log_by_log_level()
- print(f"Warning count: {warning_count}, Error count: {error_count}")
- logging.shutdown()
+ fishish_up()
  return 0

  if __name__ == "__main__":
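
The `__main__.py` change above factors the log-finalization steps into one helper that runs on both the failure path (before re-raising) and the success path. A minimal sketch of that shape, with illustrative names only (the real module uses `fishish_up` and the py2docfx logger helpers):

```python
# Minimal sketch of the cleanup-on-both-paths pattern shown above.
# Names here are illustrative; they are not py2docfx's actual API.
import logging


def finish_up() -> None:
    """Flush and close every logging handler before the process exits."""
    logging.shutdown()


def do_work() -> None:
    logging.getLogger(__name__).info("working")  # stand-in for the conversion pipeline


def main() -> int:
    try:
        do_work()
    except Exception:
        finish_up()  # still emit the collected logs before propagating the error
        raise
    finish_up()
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
```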
py2docfx/convert_prepare/environment.py CHANGED
@@ -150,4 +150,4 @@ async def remove_environment(venv_num: int):
  else:
  msg = f"<CI ERROR>: Failed to remove venv{venv_num}. Error: {stderr.decode()}"
  py2docfx_logger.error(msg)
- raise RuntimeError()
+ raise RuntimeError(msg)
py2docfx/convert_prepare/generate_document.py CHANGED
@@ -10,8 +10,8 @@ from py2docfx.convert_prepare.subpackage import merge_subpackage_files

  CONF_TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "conf_templates")

- def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_build_path: str, extra_package_path: str, executable=sys.executable):
- py2docfx_logger = get_package_logger(__name__)
+ async def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_build_path: str, extra_package_path: str, executable=sys.executable):
+ py2docfx_logger = get_package_logger(__name__, pkg.name)
  # Copy manual written RST from target doc repo
  package_paths = pkg.path
  if output_root:
@@ -33,7 +33,7 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_b
  msg = f"<CI INFO>: Generating RST files for {pkg.name}."
  py2docfx_logger.info(msg)

- subpackages_rst_record = run_apidoc(package_paths.doc_folder, package_paths.source_folder,
+ subpackages_rst_record = run_apidoc(pkg.name, package_paths.doc_folder, package_paths.source_folder,
  exclude_paths, pkg)

  msg = f"<CI INFO>: Listing RST files:"
@@ -45,7 +45,7 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_b
  py2docfx_logger.info(msg)

  generate_conf(pkg, package_paths.doc_folder, CONF_TEMPLATE_DIR)
- run_converter(package_paths.doc_folder, package_paths.yaml_output_folder, sphinx_build_path, extra_package_path, executable=executable)
+ await run_converter(pkg.name, package_paths.doc_folder, package_paths.yaml_output_folder, sphinx_build_path, extra_package_path, executable=executable)

  subpackages_path_record = {}
  if pkg.build_in_subpackage:
@@ -53,6 +53,6 @@ def generate_document(pkg: PackageInfo, output_root: str | os.PathLike, sphinx_b
  for (subpackage_name, subpackage_path) in subpackages_rst_record.items():
  subpackage_yaml_path = os.path.join(subpackages_yaml_path, subpackage_name)
  subpackages_path_record[subpackage_name] = subpackage_yaml_path
- run_converter(subpackage_path, subpackage_yaml_path, sphinx_build_path, extra_package_path, executable=executable)
+ await run_converter(pkg.name, subpackage_path, subpackage_yaml_path, sphinx_build_path, extra_package_path, executable=executable)

  merge_subpackage_files(subpackages_path_record, package_paths.yaml_output_folder, pkg.name)
py2docfx/convert_prepare/get_source.py CHANGED
@@ -102,7 +102,7 @@ def get_source(executable: str, pkg: PackageInfo, cnt: int, vststoken=None, gith
  )
  # unpack the downloaded wheel file.
  downloaded_dist_file = path.join(dist_dir, os.listdir(dist_dir)[0])
- pack.unpack_dist(downloaded_dist_file)
+ pack.unpack_dist(pkg.name, downloaded_dist_file)
  os.remove(downloaded_dist_file)
  source_folder = path.join(
  path.dirname(downloaded_dist_file),
@@ -112,7 +112,7 @@ def get_source(executable: str, pkg: PackageInfo, cnt: int, vststoken=None, gith
  pip_utils.download(pkg.location, dist_dir, prefer_source_distribution=False)
  # unpack the downloaded dist file.
  downloaded_dist_file = path.join(dist_dir, os.listdir(dist_dir)[0])
- pack.unpack_dist(downloaded_dist_file)
+ pack.unpack_dist(pkg.name, downloaded_dist_file)
  os.remove(downloaded_dist_file)
  if downloaded_dist_file.endswith(".tar.gz"):
  downloaded_dist_file = downloaded_dist_file.rsplit(".", maxsplit=1)[
py2docfx/convert_prepare/pack.py CHANGED
@@ -6,9 +6,9 @@ import sys

  from py2docfx.docfx_yaml.logger import get_package_logger, log_subprocess_ouput

- def unpack_dist(dist_file):
+ def unpack_dist(package_name, dist_file):
  if dist_file.endswith(".whl"):
- unpack_wheel(dist_file)
+ unpack_wheel(package_name, dist_file)
  else:
  unpack_compressed(dist_file)

@@ -20,11 +20,11 @@ def unpack_compressed(file_path):
  shutil.unpack_archive(file_path, path.dirname(file_path))


- def unpack_wheel(file_path):
+ def unpack_wheel(package_name, file_path):
  """
  Transform a wheel file to a folder containing source code
  """
- py2docfx_logger = get_package_logger(__name__)
+ py2docfx_logger = get_package_logger(__name__, package_name)
  command = [sys.executable,
  '-m',
  'wheel',
py2docfx/convert_prepare/sphinx_caller.py CHANGED
@@ -1,6 +1,7 @@
+ import asyncio
  import os
- import sys
  import subprocess
+ import sys

  from py2docfx import PACKAGE_ROOT
  from py2docfx.docfx_yaml.logger import get_package_logger,log_subprocess_ouput
@@ -11,14 +12,14 @@ from py2docfx.convert_prepare.subpackage import (get_subpackages,

  DEBUG_SPHINX_FLAG = 'PY2DOCFX_DEBUG_SPHINX'

- def run_apidoc(rst_path, source_code_path, exclude_paths, package_info: PackageInfo):
+ def run_apidoc(package_name, rst_path, source_code_path, exclude_paths, package_info: PackageInfo):
  """
  Run sphinx-apidoc to generate RST inside rst_path folder

  Replacing
  https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/build.ps1&line=110&lineEnd=126&lineStartColumn=1&lineEndColumn=14&lineStyle=plain&_a=contents
  """
- py2docfx_logger = get_package_logger(__name__)
+ py2docfx_logger = get_package_logger(__name__, package_name)
  subfolderList = [name for name in
  os.listdir(source_code_path)
  if os.path.isdir(os.path.join(source_code_path, name))
@@ -49,7 +50,13 @@ def run_apidoc(rst_path, source_code_path, exclude_paths, package_info: PackageI
  return subpackages_rst_record


- def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path: str, conf_path = None, executable = sys.executable):
+ async def run_converter(package_name: str,
+ rst_path,
+ out_path,
+ sphinx_build_path: str,
+ extra_package_path: str,
+ conf_path = None,
+ executable = sys.executable):
  """
  Take rst files as input and run sphinx converter

@@ -58,7 +65,7 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
  Replacing
  https://apidrop.visualstudio.com/Content%20CI/_git/ReferenceAutomation?path=/Python/build.ps1&line=150&lineEnd=161&lineStartColumn=13&lineEndColumn=52&lineStyle=plain&_a=contents
  """
- py2docfx_logger = get_package_logger(__name__)
+ py2docfx_logger = get_package_logger(__name__, package_name)
  outdir = os.path.join(out_path, "_build")

  # Sphinx/docutils have memory leak including linecaches, module-import-caches,
@@ -69,7 +76,7 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
  py2docfx_logger.error(msg)
  raise ValueError()
  sphinx_param = [
- executable,
+ # executable,
  sphinx_build_path,
  rst_path,
  outdir,
@@ -86,6 +93,25 @@ def run_converter(rst_path, out_path, sphinx_build_path: str, extra_package_path
  env_tmp["PYTHONPATH"] = f"{extra_package_path};{package_root_parent};"
  else:
  env_tmp["PYTHONPATH"] = f"{extra_package_path}:{package_root_parent}:"
- output = subprocess.run(sphinx_param, check=True, cwd=PACKAGE_ROOT, env=env_tmp, capture_output=True ,text=True)
- log_subprocess_ouput(output, py2docfx_logger)
+
+ proc = await asyncio.create_subprocess_exec(
+ executable, *sphinx_param,
+ cwd=PACKAGE_ROOT,
+ env=env_tmp,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE
+ )
+ stdout, stderr = await proc.communicate()
+ return_code = proc.returncode
+
+ if return_code == 0:
+ py2docfx_logger.info(f"{stdout}")
+ py2docfx_logger.info(f"{stderr}")
+ else:
+ py2docfx_logger.error(f"{stderr}")
+ raise subprocess.CalledProcessError(return_code, sphinx_param, stdout, stderr)
+
+ # output = subprocess.run(sphinx_param, check=False, cwd=PACKAGE_ROOT, env=env_tmp, capture_output=True ,text=True)
+ # log_subprocess_ouput(output, py2docfx_logger)
+
  return outdir
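
The `sphinx_caller.py` hunks above swap a blocking `subprocess.run` call for `asyncio.create_subprocess_exec`, so the Sphinx build can be awaited without tying up the event loop while a non-zero exit still surfaces as `CalledProcessError`. A minimal sketch of that pattern (the helper name and arguments are illustrative, not py2docfx's API):

```python
# Sketch of the awaitable-subprocess pattern adopted above: capture output,
# await completion, and mirror subprocess.run(check=True) on failure.
import asyncio
import subprocess
import sys


async def run_tool(executable: str, args: list[str], cwd: str | None = None) -> str:
    proc = await asyncio.create_subprocess_exec(
        executable, *args,
        cwd=cwd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate()  # waits without blocking the event loop
    if proc.returncode != 0:
        raise subprocess.CalledProcessError(proc.returncode, [executable, *args], stdout, stderr)
    return stdout.decode()


if __name__ == "__main__":
    # The interpreter itself stands in for a sphinx-build invocation.
    print(asyncio.run(run_tool(sys.executable, ["--version"])))
```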
py2docfx/convert_prepare/tests/test_environment.py CHANGED
@@ -1,8 +1,6 @@
- import asyncio
  import os
  import shutil
  import pytest
- import sys

  from py2docfx.convert_prepare.environment import create_environment
  from py2docfx.convert_prepare.environment import remove_environment
py2docfx/convert_prepare/tests/test_generate_document.py CHANGED
@@ -4,6 +4,7 @@ Test the generate_document function.
  import os
  import sys
  import shutil
+ import pytest
  import sphinx
  import sphinx.cmd.build

@@ -11,7 +12,8 @@ from py2docfx.convert_prepare.generate_document import generate_document
  from py2docfx.convert_prepare.package_info import PackageInfo
  from py2docfx.convert_prepare.source import Source

- def test_generate_document(tmp_path):
+ @pytest.mark.asyncio
+ async def test_generate_document(tmp_path):
  """
  Test the generate_document function.
  """
@@ -42,7 +44,7 @@ def test_generate_document(tmp_path):

  # call the function

- generate_document(package, output_root, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_folder)
+ await generate_document(package, output_root, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_folder)

  #assert the result
  yaml_path = os.path.join(yaml_output_folder, "_build", "docfx_yaml")
py2docfx/convert_prepare/tests/test_get_source.py CHANGED
@@ -6,6 +6,7 @@ import shutil
  import stat
  import pytest
  import sys
+
  from py2docfx.convert_prepare import git
  from py2docfx.convert_prepare import get_source
  from py2docfx.convert_prepare.package_info import PackageInfo
py2docfx/convert_prepare/tests/test_pack.py CHANGED
@@ -5,6 +5,7 @@ import subprocess
  import shutil
  import glob
  from os import path
+
  from py2docfx.convert_prepare.pack import unpack_compressed, unpack_wheel

  SRC_DIR = path.abspath("convert_prepare/tests/data/pack/")
@@ -107,7 +108,8 @@ def test_pack_unpack_wheel(tmp_path):
  wheel_path = path.join(tmp_path / "wheel", wheel_name)

  # unpack and assert the file list
- unpack_wheel(wheel_path)
+ package_name = wheel_name.split("-")[0]
+ unpack_wheel(package_name, wheel_path)
  _assert_file_list_same(
  path.abspath("convert_prepare/tests/data/pack"),
  tmp_path / "wheel" / "foo-0.1",
py2docfx/convert_prepare/tests/test_params.py CHANGED
@@ -1,4 +1,3 @@
- import pytest
  import os

  from py2docfx.convert_prepare.params import load_file_params, load_command_params
py2docfx/convert_prepare/tests/test_sphinx_caller.py CHANGED
@@ -1,6 +1,6 @@
- import pytest
  import os
  import shutil
+ import pytest
  import sphinx
  import sphinx.cmd.build

@@ -27,17 +27,19 @@ def init_paths(tmp_path):

  def test_run_apidoc(tmp_path):
  rst_path, source_code_path = init_paths(tmp_path)
- run_apidoc(rst_path, source_code_path, package_info.get_exluded_command(), package_info)
+ package_name = "testcode"
+ run_apidoc(package_name, rst_path, source_code_path, package_info.get_exluded_command(), package_info)

  # List all files under rst_path
  rst_list = os.listdir(rst_path)
  assert "testcode.fakemodule.rst" in rst_list
  assert "testcode.exclude.rst" not in rst_list

-
- def test_run_converter(tmp_path):
+ @pytest.mark.asyncio
+ async def test_run_converter(tmp_path):
  rst_path, source_code_path = init_paths(tmp_path)
- run_apidoc(rst_path, source_code_path, package_info.get_exluded_command(), package_info)
+ package_name = "testcode"
+ run_apidoc(package_name, rst_path, source_code_path, package_info.get_exluded_command(), package_info)

  # prepare conf.py, index.rst and docfx_yaml
  conf_path = os.path.abspath("convert_prepare/tests/data/sphinx_caller/conf.py")
@@ -49,7 +51,7 @@ def test_run_converter(tmp_path):
  index_rst.write("")

  out_path = os.path.join(tmp_path, "out")
- out_path = run_converter(rst_path, out_path, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_code_path, conf_path=rst_path)
+ out_path = await run_converter(package_name, rst_path, out_path, sphinx_build_path = sphinx.cmd.build.__file__, extra_package_path = source_code_path, conf_path=rst_path)

  if os.path.exists(out_path):
  yaml_list = os.listdir(os.path.join(out_path, "docfx_yaml"))
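
The test changes above rely on coroutine test functions marked with `@pytest.mark.asyncio`. A tiny illustration, assuming the pytest-asyncio plugin (which provides that marker) is installed; the coroutine under test is a stand-in, not py2docfx code:

```python
# Assumes pytest-asyncio; the awaited coroutine stands in for run_converter.
import asyncio

import pytest


async def double(value: int) -> int:
    await asyncio.sleep(0)  # placeholder for real awaited work
    return value * 2


@pytest.mark.asyncio
async def test_double():
    assert await double(21) == 42
```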
py2docfx/convert_prepare/tests/test_subpackage.py CHANGED
@@ -1,5 +1,6 @@
  import shutil, os, yaml
  from os import path
+
  from py2docfx.convert_prepare.subpackage import (
  get_subpackages,
  move_rst_files_to_subfolder,
py2docfx/docfx_yaml/build_finished.py CHANGED
@@ -315,7 +315,7 @@ def build_finished(app, exception):
  if len(toc_yaml) == 0:
  msg = "No documentation for this module."
  py2docfx_logger.error(msg)
- raise RuntimeError()
+ raise RuntimeError(msg)

  toc_file = os.path.join(normalized_outdir, 'toc.yml')
  with open(toc_file, 'w') as writable:
py2docfx/docfx_yaml/logger.py CHANGED
@@ -36,6 +36,8 @@ def check_log_file_exists(log_file_path):

  def setup_log_handlers(logger, log_file_name):
  check_log_file_exists(log_file_name)
+ if logger.hasHandlers():
+ logger.handlers.clear()
  file_handler = logging.FileHandler(filename=log_file_name, mode='a')
  file_handler.setFormatter(logging.Formatter('%(levelname)s - %(name)s - %(message)s'))
  logger.addHandler(file_handler)
@@ -49,23 +51,20 @@ def get_logger(logger_name: str):
  file_logger = logging.getLogger(logger_name)
  file_logger.setLevel(logging.INFO)
  check_log_dir_exists(log_folder_path)
- if file_logger.hasHandlers():
- return file_logger

  file_logger = setup_log_handlers(file_logger, file_name)

  return file_logger

- def get_package_logger(logger_name:str):
+ def get_package_logger(logger_name:str, package_name:str = None):
  log_folder_path = os.path.join("logs", "package_logs")
- package_name = os.environ.get('PROCESSING_PACKAGE_NAME')
+ if package_name is None:
+ package_name = os.environ.get('PROCESSING_PACKAGE_NAME')
  file_name = os.path.join(log_folder_path, f"{package_name}.txt")

  file_logger = logging.getLogger(logger_name)
  file_logger.setLevel(logging.INFO)
  check_log_dir_exists(log_folder_path)
- if file_logger.hasHandlers():
- return file_logger

  file_logger = setup_log_handlers(file_logger, file_name)

@@ -81,8 +80,9 @@ def log_subprocess_ouput(subprocess_out: subprocess.CompletedProcess, logger: lo
  continue
  logger.warning(msg)
  if subprocess_out.returncode != 0:
- logger.error(f"Subprocess failed with return code {subprocess_out.returncode}")
- raise RuntimeError()
+ msg = f"Subprocess failed with return code {subprocess_out.returncode}"
+ logger.error(msg)
+ raise RuntimeError(msg)

  def log_git_clone_subprocess_ouput(subprocess_out: subprocess.CompletedProcess, logger: logging.Logger):
  if subprocess_out.stdout:
@@ -94,8 +94,9 @@ def log_git_clone_subprocess_ouput(subprocess_out: subprocess.CompletedProcess,
  continue
  logger.info(msg)
  if subprocess_out.returncode != 0:
- logger.error(f"Subprocess failed with return code {subprocess_out.returncode}")
- raise RuntimeError()
+ msg = f"Subprocess failed with return code {subprocess_out.returncode}"
+ logger.error(msg)
+ raise RuntimeError(msg)

  def counts_errors_warnings(log_file_path):
  error_count = 0
@@ -172,7 +173,7 @@ async def run_async_subprocess(exe_path, cmd):
  msg = stderr.decode('utf-8')
  if msg != None and msg != "":
  py2docfx_logger.error(msg)
- raise RuntimeError()
+ raise RuntimeError(msg)
  else:
  msg = stdout.decode('utf-8')
  if msg != None and msg != "":
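
The `logger.py` hunks above make `get_package_logger` take an explicit package name (falling back to the `PROCESSING_PACKAGE_NAME` environment variable) and clear any existing handlers before attaching a fresh `FileHandler`, so a logger reused across packages writes to the current package's log file rather than the first one that configured it. A sketch of that handler-reset pattern, with illustrative names:

```python
# Handler-reset sketch: clear stale handlers, then attach a file handler
# keyed by the package currently being processed. Names are illustrative.
import logging
import os


def per_package_logger(name: str, package_name: str, log_dir: str = "logs") -> logging.Logger:
    os.makedirs(log_dir, exist_ok=True)
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if logger.hasHandlers():
        logger.handlers.clear()  # drop handlers pointing at a previous package's file
    handler = logging.FileHandler(os.path.join(log_dir, f"{package_name}.txt"), mode="a")
    handler.setFormatter(logging.Formatter("%(levelname)s - %(name)s - %(message)s"))
    logger.addHandler(handler)
    return logger


if __name__ == "__main__":
    per_package_logger(__name__, "azure-identity").info("hello")
```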
py2docfx/docfx_yaml/tests/roots/test-writer-uri/code_with_uri.py CHANGED
@@ -17,12 +17,5 @@ class SampleClass():
  """
  pass

- def dummy_summary3(self):
- """
- This is a bare URL that shouldn't be transformed into a link
- because it's in the exclusion list: https://management.azure.com
- """
- pass
-
  pass
py2docfx/docfx_yaml/tests/test_writer_uri.py CHANGED
@@ -20,7 +20,6 @@ def test_http_link_in_summary_should_not_nest_parenthesis(app):
  class_summary_result = transform_node(app, doctree[1][1][0])
  method1_summary_result = transform_node(app, doctree[1][1][2][1])
  method2_summary_result = transform_node(app, doctree[1][1][4][1])
- method3_summary_result = transform_node(app, doctree[1][1][6][1])

  # Assert
  # Shouldn't see something like [title]((link))
@@ -29,10 +28,7 @@ def test_http_link_in_summary_should_not_nest_parenthesis(app):
  "We should not generate nested parenthesis causing docs validation warnings\n")
  method2_summary_expected = ("\n\n This isn't a content issue link ([https://www.microsoft.com](https://www.microsoft.com))\n "
  "Should expect a transformed Markdown link.\n")
- method3_summary_expected = ("\n\n This is a bare URL that shouldn't be transformed into a link\n "
- "because it's in the exclusion list: `https://management.azure.com`\n")
  assert(class_summary_expected == class_summary_result)
  assert(method1_summary_expected == method1_summary_result)
  assert(method2_summary_expected == method2_summary_result)
- assert(method3_summary_expected == method3_summary_result)
py2docfx/docfx_yaml/writer.py CHANGED
@@ -184,15 +184,6 @@ class MarkdownTranslator(nodes.NodeVisitor):
  sectionchars = '*=-~"+`'
  xref_template = "<xref:{0}>"

- # URLs that shouldn't be automatically rendered as hyperlinks if found bare. Included because they appear
- # frequently, get flagged by the broken link validator and/or there's no value to the user in
- # making them clickable links.
- urls_that_shouldnt_be_rendered_as_links = {
- "https://management.azure.com",
- "https://management.chinacloudapi.cn",
- "https://management.usgovcloudapi.net"
- }
-
  def __init__(self, document, builder):
  self.invdata = []
  nodes.NodeVisitor.__init__(self, document)
@@ -936,10 +927,7 @@ class MarkdownTranslator(nodes.NodeVisitor):
  match_content_issue_pattern = True
  ref_string = node.attributes["refuri"]
  if not match_content_issue_pattern:
- if inner_text == node.attributes['refuri'] and inner_text in cls.urls_that_shouldnt_be_rendered_as_links:
- ref_string = f'`{inner_text}`'
- else:
- ref_string = '[{}]({})'.format(node.astext(), node.attributes['refuri'])
+ ref_string = '[{}]({})'.format(node.astext(), node.attributes['refuri'])
  else:
  # only use id in class and func refuri if its id exists
  # otherwise, remove '.html#' in refuri
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/authorization_code.py CHANGED
@@ -85,7 +85,7 @@ class AuthorizationCodeCredential(GetTokenMixin):
  attribute gives a reason. Any error response from Microsoft Entra ID is available as the error's
  ``response`` attribute.
  """
- # pylint:disable=useless-super-delegation
+
  return super(AuthorizationCodeCredential, self).get_token(
  *scopes, claims=claims, tenant_id=tenant_id, client_secret=self._client_secret, **kwargs
  )
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azd_cli.py CHANGED
@@ -5,6 +5,7 @@

  from datetime import datetime
  import json
+ import logging
  import os
  import re
  import shutil
@@ -19,12 +20,15 @@ from .. import CredentialUnavailableError
  from .._internal import resolve_tenant, within_dac, validate_tenant_id, validate_scope
  from .._internal.decorators import log_get_token

+
+ _LOGGER = logging.getLogger(__name__)
+
  CLI_NOT_FOUND = (
  "Azure Developer CLI could not be found. "
  "Please visit https://aka.ms/azure-dev for installation instructions and then,"
  "once installed, authenticate to your Azure account using 'azd auth login'."
  )
- COMMAND_LINE = "azd auth token --output json --scope {}"
+ COMMAND_LINE = ["auth", "token", "--output", "json"]
  EXECUTABLE_NAME = "azd"
  NOT_LOGGED_IN = "Please run 'azd auth login' from a command prompt to authenticate before using this credential."

@@ -160,8 +164,9 @@ class AzureDeveloperCliCredential:
  for scope in scopes:
  validate_scope(scope)

- commandString = " --scope ".join(scopes)
- command = COMMAND_LINE.format(commandString)
+ command_args = COMMAND_LINE.copy()
+ for scope in scopes:
+ command_args += ["--scope", scope]
  tenant = resolve_tenant(
  default_tenant=self.tenant_id,
  tenant_id=tenant_id,
@@ -169,8 +174,8 @@ class AzureDeveloperCliCredential:
  **kwargs,
  )
  if tenant:
- command += " --tenant-id " + tenant
- output = _run_command(command, self._process_timeout)
+ command_args += ["--tenant-id", tenant]
+ output = _run_command(command_args, self._process_timeout)

  token = parse_token(output)
  if not token:
@@ -236,15 +241,13 @@ def sanitize_output(output: str) -> str:
  return re.sub(r"\"token\": \"(.*?)(\"|$)", "****", output)


- def _run_command(command: str, timeout: int) -> str:
+ def _run_command(command_args: List[str], timeout: int) -> str:
  # Ensure executable exists in PATH first. This avoids a subprocess call that would fail anyway.
- if shutil.which(EXECUTABLE_NAME) is None:
+ azd_path = shutil.which(EXECUTABLE_NAME)
+ if not azd_path:
  raise CredentialUnavailableError(message=CLI_NOT_FOUND)

- if sys.platform.startswith("win"):
- args = ["cmd", "/c", command]
- else:
- args = ["/bin/sh", "-c", command]
+ args = [azd_path] + command_args
  try:
  working_directory = get_safe_working_dir()

@@ -257,13 +260,16 @@ def _run_command(command: str, timeout: int) -> str:
  "timeout": timeout,
  }

+ _LOGGER.debug("Executing subprocess with the following arguments %s", args)
  return subprocess.check_output(args, **kwargs)
  except subprocess.CalledProcessError as ex:
  # non-zero return from shell
  # Fallback check in case the executable is not found while executing subprocess.
- if ex.returncode == 127 or ex.stderr.startswith("'azd' is not recognized"):
+ if ex.returncode == 127 or (ex.stderr is not None and ex.stderr.startswith("'azd' is not recognized")):
  raise CredentialUnavailableError(message=CLI_NOT_FOUND) from ex
- if "not logged in, run `azd auth login` to login" in ex.stderr and "AADSTS" not in ex.stderr:
+ if ex.stderr is not None and (
+ "not logged in, run `azd auth login` to login" in ex.stderr and "AADSTS" not in ex.stderr
+ ):
  raise CredentialUnavailableError(message=NOT_LOGGED_IN) from ex

  # return code is from the CLI -> propagate its output
@@ -278,7 +284,7 @@ def _run_command(command: str, timeout: int) -> str:
  # failed to execute 'cmd' or '/bin/sh'
  error = CredentialUnavailableError(message="Failed to execute '{}'".format(args[0]))
  raise error from ex
- except Exception as ex: # pylint:disable=broad-except
+ except Exception as ex:
  # could be a timeout, for example
  error = CredentialUnavailableError(message="Failed to invoke the Azure Developer CLI")
  raise error from ex
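
The `azd_cli.py` change above stops building a shell-interpreted command string and instead resolves the `azd` executable with `shutil.which` and passes each flag and value as its own argv element, so scopes and tenant IDs are never parsed by a shell. A sketch of that argv-list pattern (the helper and its defaults are illustrative, not the credential's API):

```python
# Argv-list sketch: resolve the binary, pass arguments as a list, no shell.
import shutil
import subprocess


def run_cli(executable_name: str, args: list[str], timeout: int = 10) -> str:
    path = shutil.which(executable_name)
    if not path:
        raise FileNotFoundError(f"{executable_name} is not on PATH")
    # Each list element reaches the program as exactly one argument.
    return subprocess.check_output([path, *args], text=True, timeout=timeout)


# e.g. run_cli("azd", ["auth", "token", "--output", "json", "--scope", "https://example/.default"])
```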
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/azure_arc.py CHANGED
@@ -54,7 +54,7 @@ def _get_secret_key(response: PipelineResponse) -> str:
  with open(key_file, "r", encoding="utf-8") as file:
  try:
  return file.read()
- except Exception as error: # pylint:disable=broad-except
+ except Exception as error:
  # user is expected to have obtained read permission prior to this being called
  raise ClientAuthenticationError(
  message="Could not read file {} contents: {}".format(key_file, error)