py2docfx 0.1.2rc1679015__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py2docfx/convert_prepare/package_info.py
@@ -2,6 +2,7 @@ from enum import Enum
 import os
 import re
 from py2docfx.convert_prepare.source import Source
+from py2docfx.convert_prepare.package_info_extra_settings import extra_exclude_path_by_package
 
 class PackageInfo:
     class InstallType(int, Enum):
@@ -161,4 +162,12 @@ class PackageInfo:
             if idx != len(package_name_segs)-1:
                 current_parent_packages = f'{current_parent_packages}/{package_seg}' if current_parent_packages else package_seg
                 exclude_path.append(os.path.join(code_location, f'{current_parent_packages}/__init__.py'))
+
+        if self.name in extra_exclude_path_by_package:
+            exclude_path.extend(
+                [
+                    os.path.join(code_location, path)
+                    for path in extra_exclude_path_by_package[self.name]
+                ]
+            )
         return exclude_path
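
For reference, a minimal standalone sketch of what the added branch does, using a one-entry copy of the lookup table from the new package_info_extra_settings module shown below; the helper name extend_exclude_path and the sample code_location are illustrative, not part of the package:

```python
import os

# Trimmed, illustrative copy of the table defined in package_info_extra_settings.
extra_exclude_path_by_package = {
    'azure-core-tracing-opencensus': ['azure/core/tracing/ext/__init__.py'],
}

def extend_exclude_path(name, code_location, exclude_path):
    # Mirrors the added branch: join each configured relative path onto the
    # package's source location and append it to the sphinx-apidoc exclusion list.
    if name in extra_exclude_path_by_package:
        exclude_path.extend(
            os.path.join(code_location, path)
            for path in extra_exclude_path_by_package[name]
        )
    return exclude_path

print(extend_exclude_path('azure-core-tracing-opencensus', 'dummy_location', []))
# On POSIX: ['dummy_location/azure/core/tracing/ext/__init__.py']
```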
py2docfx/convert_prepare/package_info_extra_settings.py (new file)
@@ -0,0 +1,22 @@
+"""
+Sphinx by default generates namespace pages like azure, azure-core.
+We handle this differently from the Azure SDK scripts:
+we pass --implicit-namespaces to mark parent-level namespaces, which would otherwise be treated as packages,
+and we exclude parent namespaces' __init__.py when running sphinx-apidoc, otherwise it throws an exception.
+Azure SDK scripts don't add the --implicit-namespaces flag;
+they remove the parent-level RSTs instead.
+Check https://github.com/Azure/azure-sdk-for-python/blob/efad456552b8e4aa48db7ee96930223b95144947/eng/tox/run_sphinx_apidoc.py#L37C1-L48C10
+
+That difference causes our behavior to differ from the Azure SDK HTML when a package name and its namespace
+structure are inconsistent. For example, azure-core-tracing-opencensus and azure-core-tracing-opentelemetry
+have a layer of azure/core/tracing/ext/opencensus_span or azure/core/tracing/ext/opentelemetry_span,
+so we generate 2 extra pages for azure.core.tracing.ext because we aren't able to know it is a parent-level namespace.
+
+The map below works around this issue by excluding known extra parent-level __init__.py files.
+"""
+extra_exclude_path_by_package = {
+    'azure-core-tracing-opencensus': ['azure/core/tracing/ext/__init__.py'],
+    'azure-core-tracing-opentelemetry': ['azure/core/tracing/ext/__init__.py'],
+    'azure-eventhub-checkpointstoreblob': ['azure/eventhub/extensions/__init__.py'],
+    'azure-eventhub-checkpointstoreblob-aio': ['azure/eventhub/extensions/__init__.py'],
+}
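
The docstring above explains why parent-level __init__.py files are excluded when sphinx-apidoc runs with --implicit-namespaces. As a rough, hypothetical illustration of how such an exclusion list reaches sphinx-apidoc (the actual command construction in py2docfx lives elsewhere in convert_prepare and may differ), trailing positional arguments are treated as exclude patterns:

```python
import subprocess

source_folder = "source_folder"  # assumed extraction layout, for illustration only
exclude_paths = [
    f"{source_folder}/azure/__init__.py",
    f"{source_folder}/azure/core/__init__.py",
    f"{source_folder}/azure/core/tracing/__init__.py",
    f"{source_folder}/azure/core/tracing/ext/__init__.py",  # entry contributed by the new map
]

# sphinx-apidoc CLI: -o <output dir>, then the module path, then exclude patterns.
# --implicit-namespaces treats parent directories as namespace packages instead
# of requiring __init__.py-based packages.
subprocess.run(
    ["sphinx-apidoc", "--implicit-namespaces", "-o", "rst_output", source_folder, *exclude_paths],
    check=True,
)
```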
py2docfx/convert_prepare/tests/test_generate_document.py
@@ -51,5 +51,4 @@ def test_generate_document(tmp_path):
     assert not os.path.exists(os.path.join(yaml_path, "azure.dummy.yml"))
     assert not os.path.exists(os.path.join(yaml_path, "azure.yml"))
     assert os.path.exists(os.path.join(yaml_path, "toc.yml"))
-    assert os.path.exists(os.path.join(yaml_path, "index.yml"))
 
py2docfx/convert_prepare/tests/test_package_info.py
@@ -13,10 +13,10 @@ with open(full_test_file_path, "r", encoding="utf-8") as json_file:
 package_info_0 = PackageInfo.parse_from(test_dict["packages"][0], False)
 package_info_0.code_location = "dummy_location"
 
-with open(full_test_file_path, "r", encoding="utf-8") as json_file:
-    test_dict = json.load(json_file)
 package_info_1 = PackageInfo.parse_from(test_dict["packages"][1], False)
 
+package_info_2 = PackageInfo.parse_from(test_dict["packages"][2], False)
+
 def test_parse_from():
     assert package_info_0.exclude_path == ["test*", "example*", "sample*", "doc*"]
     assert package_info_0.name == "azure-mltable-py2docfxtest"
@@ -55,3 +55,44 @@ def test_get_exclude_command(tmp_path):
     def form_exclude_path(raletive_path):
         return os.path.join(source_folder, raletive_path)
     assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
+
+
+def test_get_exclude_command(tmp_path):
+    source_folder = os.path.join(tmp_path,"source_folder")
+    yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
+    package_info_0.path = Source(
+        source_folder = source_folder, yaml_output_folder = yaml_output_folder, package_name = "azure-mltable-py2docfxtest"
+    )
+    exclude_path = package_info_0.get_exluded_command()
+    expected_exclude_path = [
+        "build/*",
+        "setup.py",
+        "test*",
+        "example*",
+        "sample*",
+        "doc*",
+        "azure/__init__.py",
+        "azure/mltable/__init__.py"
+    ]
+    def form_exclude_path(raletive_path):
+        return os.path.join(source_folder, raletive_path)
+    assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
+
+def test_get_exclude_command_check_extra_exclude(tmp_path):
+    source_folder = os.path.join(tmp_path,"source_folder")
+    yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
+    package_info_2.path = Source(
+        source_folder = source_folder, yaml_output_folder = yaml_output_folder, package_name = 'azure-core-tracing-opencensus'
+    )
+    exclude_path = package_info_2.get_exluded_command()
+    expected_exclude_path = [
+        "build/*",
+        "setup.py",
+        "azure/__init__.py",
+        "azure/core/__init__.py",
+        "azure/core/tracing/__init__.py",
+        'azure/core/tracing/ext/__init__.py'
+    ]
+    def form_exclude_path(raletive_path):
+        return os.path.join(source_folder, raletive_path)
+    assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
py2docfx/docfx_yaml/build_finished.py
@@ -26,9 +26,37 @@ scientific_notation_regex = re.compile(r'^[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)$
 def string_representer(dumper, data):
     return dumper.represent_scalar(u"tag:yaml.org,2002:str", data,
                                    style="'" if (scientific_notation_regex.match(data)) else None)
-
 yml.add_representer(str, string_representer)
 
+def insert_node_to_toc_tree(toc_yaml, uid, project_name, toc_node_map):
+    # Build nested TOC
+    parent_level = uid
+    cur_node = None
+
+    # Try all ancestors: azure.core.class1 -> azure.core -> azure
+    while parent_level.count('.') >= 1:
+        parent_level = '.'.join(parent_level.split('.')[:-1])
+        found_node = toc_node_map[parent_level] if parent_level in toc_node_map else None
+        if found_node:
+            # If an ancestor is already in the current TOC, insert into its items
+            name = uid.split('.')[-1] if '.' in uid and project_name != uid else uid
+            cur_node = {'name': name, 'uid': uid}
+            if 'uid' in found_node:
+                # Only leaf nodes should have a uid
+                found_node.pop('uid', 'No uid found')
+            # Subpackages should have their own Overview page
+            found_node.setdefault('items', [{'name': 'Overview', 'uid': parent_level}]).append(cur_node)
+            break
+
+    # uid represents a package added to the TOC as a root node
+    if cur_node is None:
+        # if uid doesn't contain '.', the name doesn't need to be simplified
+        cur_node = {'name': uid}
+        toc_yaml.append(cur_node)
+
+    # insert into the uid-to-node map
+    toc_node_map[uid] = cur_node
+
 def merge_params(arg_params, doc_params):
     merged_params = deepcopy(doc_params)
     # merge arg_params into merged_params
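
A small walkthrough of the nested TOC this function produces, mirroring the package's own test further down; the import path follows the tests' convention of having docfx_yaml on sys.path, and the uids are illustrative:

```python
from build_finished import insert_node_to_toc_tree  # import path as used by the package's tests

toc_yaml, toc_node_map = [], {}
project_name = 'azure.durable.functions'  # app.config.project with '-' replaced by '.'

for uid in ('azure.durable_functions',
            'azure.durable_functions.models',
            'azure.durable_functions.models.DurableEntityContext'):
    insert_node_to_toc_tree(toc_yaml, uid, project_name, toc_node_map)

# toc_yaml is now roughly:
# [{'name': 'azure.durable_functions',
#   'items': [{'name': 'Overview', 'uid': 'azure.durable_functions'},
#             {'name': 'models',
#              'items': [{'name': 'Overview', 'uid': 'azure.durable_functions.models'},
#                        {'name': 'DurableEntityContext',
#                         'uid': 'azure.durable_functions.models.DurableEntityContext'}]}]}]
```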
@@ -55,10 +83,10 @@ def remove_params_without_id(params):
     return new_params
 
 def add_isrequired_if_needed(obj, key: str):
-    if key in obj['syntax'] and obj['type'] in ['class', 'function', 'method']:
-        for args in obj['syntax'][key]:
-            if 'isRequired' not in args and 'defaultValue' not in args:
-                args['isRequired'] = True
+    if key in obj['syntax'] and obj['type'] in ['class', 'function', 'method']:
+        for args in obj['syntax'][key]:
+            if 'isRequired' not in args and 'defaultValue' not in args:
+                args['isRequired'] = True
 
 def get_merged_params(obj, info_field_data, key: str):
     merged_params = []
@@ -68,7 +96,7 @@ def get_merged_params(obj, info_field_data, key: str):
                                                      key, [])
     if arg_params and doc_params:
         if len(arg_params) - len(doc_params) > 0:
-            print("Documented params don't match size of params:"" {}".format(obj['uid'])) # lgtm [py/clear-text-logging-sensitive-data]
+            print(f'Documented params don\'t match size of params:{obj["uid"]}') # lgtm [py/clear-text-logging-sensitive-data]
         doc_params = remove_params_without_id(doc_params)
         merged_params = merge_params(arg_params, doc_params)
     else:
@@ -81,7 +109,7 @@ def raise_up_fields(obj):
     if 'summary' in obj['syntax'] and obj['syntax']['summary']:
         obj['summary'] = obj['syntax'].pop(
             'summary').strip(" \n\r\r")
-
+
     # Raise up remarks
     if 'remarks' in obj['syntax'] and obj['syntax']['remarks']:
         obj['remarks'] = obj['syntax'].pop('remarks')
@@ -148,35 +176,6 @@ def merge_data(obj, info_field_data, yaml_data):
     # Revert `type` for other objects to use
     info_field_data[obj['uid']]['type'] = obj['type']
 
-def find_node_in_toc_tree(toc_yaml, to_add_node):
-    for module in toc_yaml:
-        if module['name'] == to_add_node:
-            return module
-
-        if 'items' in module:
-            items = module['items']
-            found_module = find_node_in_toc_tree(items, to_add_node)
-            if found_module != None:
-                return found_module
-
-    return None
-
-def build_nested_toc(toc_yaml, uid):
-    # Build nested TOC
-    if uid.count('.') >= 1:
-        parent_level = '.'.join(uid.split('.')[:-1])
-        found_node = find_node_in_toc_tree(toc_yaml, parent_level)
-
-        if found_node:
-            found_node.pop('uid', 'No uid found')
-            found_node.setdefault('items', [{'name': 'Overview', 'uid': parent_level}]).append(
-                {'name': uid, 'uid': uid})
-        else:
-            toc_yaml.append({'name': uid, 'uid': uid})
-
-    else:
-        toc_yaml.append({'name': uid, 'uid': uid})
-
 def build_finished(app, exception):
     """
     Output YAML on the file system.
@@ -207,6 +206,8 @@ def build_finished(app, exception):
         ))
 
     ensuredir(normalized_outdir)
+    project_name = app.config.project.replace('-','.')
+    toc_node_map = {}
 
     def filter_out_self_from_args(obj):
         arg_params = obj.get('syntax', {}).get('parameters', [])
@@ -260,30 +261,13 @@ def build_finished(app, exception):
             if (obj['type'] == 'class' and obj['inheritance']):
                 convert_class_to_enum_if_needed(obj)
 
-            build_nested_toc(toc_yaml, uid)
-
-    index_children = []
-    index_references = []
-
-    def form_index_references_and_children(yaml_data, index_children, index_references):
-        if yaml_data[0].get('type', None) in ['package', 'module']:
-            index_children.append(yaml_data[0].get('fullName', ''))
-            index_references.append({
-                'uid': yaml_data[0].get('fullName', ''),
-                'name': yaml_data[0].get('fullName', ''),
-                'fullname': yaml_data[0].get('fullName', ''),
-                'isExternal': False
-            })
+            insert_node_to_toc_tree(toc_yaml, uid, project_name, toc_node_map)
 
     for data_set in (app.env.docfx_yaml_packages,
                      app.env.docfx_yaml_modules,
                      app.env.docfx_yaml_classes,
                      app.env.docfx_yaml_functions): # noqa
-
         for uid, yaml_data in iter(sorted(data_set.items())):
-
-            form_index_references_and_children(yaml_data, index_children, index_references)
-
             # Output file
             if uid.lower() in file_name_set:
                 filename = uid + "(%s)" % app.env.docfx_info_uid_types[uid]
291
275
  filename = uid
292
276
  out_file = os.path.join(normalized_outdir, '%s.yml' % filename)
293
277
  ensuredir(os.path.dirname(out_file))
294
- new_object = {}
295
278
 
296
279
  transformed_obj = None
297
280
  if yaml_data[0]['type'] == 'package':
@@ -310,12 +293,13 @@ def build_finished(app, exception):
                 mime = "PythonModule"
 
             if transformed_obj == None:
-                print("Unknown yml: " + yamlfile_path)
+                print(f"Unknown yml, uid is: {uid}")
             else:
                 # save file
                 common.write_yaml(transformed_obj, out_file, mime)
                 file_name_set.add(filename)
 
+    # Write the TOC; it should include at least one node
     if len(toc_yaml) == 0:
         raise RuntimeError("No documentation for this module.")
 
@@ -323,31 +307,7 @@ def build_finished(app, exception):
     with open(toc_file, 'w') as writable:
         writable.write(
             dump(
-                [{
-                    'name': app.config.project,
-                    'items': [{'name': 'Overview', 'uid': 'project-' + app.config.project}] + toc_yaml
-                }],
+                toc_yaml,
                 default_flow_style=False,
             )
         )
-
-    index_file = os.path.join(normalized_outdir, 'index.yml')
-
-    index_obj = [{
-        'uid': 'project-' + app.config.project,
-        'name': app.config.project,
-        'fullName': app.config.project,
-        'langs': ['python'],
-        'type': 'package',
-        'kind': 'distribution',
-        'summary': '',
-        'children': index_children
-    }]
-    transformed_obj = convert_package(index_obj, app.env.docfx_info_uid_types)
-    mime = "PythonPackage"
-    if transformed_obj == None:
-        print("Unknown yml: " + yamlfile_path)
-    else:
-        # save file
-        common.write_yaml(transformed_obj, index_file, mime)
-        file_name_set.add(filename)
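
With the removed block above, build_finished no longer writes an index.yml or wraps the TOC in a 'project-<name>' root node; the nested toc_yaml list is dumped as-is. A hypothetical example of the resulting toc.yml shape for a root package with one submodule (names below are illustrative, matching the structure insert_node_to_toc_tree produces):

```python
import yaml

toc_yaml = [{
    'name': 'azure.dummy',
    'items': [
        {'name': 'Overview', 'uid': 'azure.dummy'},
        {'name': 'mod', 'uid': 'azure.dummy.mod'},
    ],
}]
# Roughly what build_finished now writes to toc.yml
print(yaml.dump(toc_yaml, default_flow_style=False))
```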
py2docfx/docfx_yaml/tests/test_build_finished.py
@@ -1,8 +1,9 @@
+import os
 import pytest
-
+import yaml
 from translator import translator
 from build_finished import build_finished, merge_data
-
+from docfx_yaml.settings import API_ROOT
 from .utils.test_utils import prepare_app_envs,load_rst_transform_to_doctree
 
 @pytest.mark.sphinx('yaml', testroot='build-finished')
@@ -130,4 +131,52 @@ def test_merge_data_added_attribute():
     # Assert the results
     assert 'added_attribute' not in obj['syntax']
     assert len(yaml_data) == 1
-    assert yaml_data[0]['uid'] == 'attr1'
+    assert yaml_data[0]['uid'] == 'attr1'
+
+
+@pytest.mark.sphinx('yaml', testroot='build-finished')
+def test_build_finished_check_toc(tmp_path, app):
+    app.builder.outdir = tmp_path
+    app.config.project = 'azure-durable-functions'
+    app.env.docfx_yaml_packages = {
+        'azure.durable_functions':[{'uid':'azure.durable_functions','type':'package'}],
+        'azure.durable_functions.models':[{'uid':'azure.durable_functions.models','type':'package'}],
+    }
+    app.env.docfx_yaml_modules = {
+        'azure.durable_functions.constants':[{'uid':'azure.durable_functions.constants','type':'module'}],
+        'azure.durable_functions.models.DurableEntityContext':[{'uid':'azure.durable_functions.models.DurableEntityContext','type':'module'}],
+    }
+    app.env.docfx_yaml_classes = {'azure.durable_functions.Blueprint':[{'uid':'azure.durable_functions.Blueprint','type':'class', 'inheritance':[]}]}
+
+    # Act
+    build_finished(app, None)
+
+    # Assert after build_finished
+    toc = None
+    with open(os.path.join(tmp_path, API_ROOT, 'toc.yml'), 'r', encoding='utf-8') as file:
+        toc = yaml.safe_load(file)
+
+    assert 1 == len(toc)
+    assert 'azure.durable_functions' == toc[0]['name']
+    assert 'uid' not in toc[0] # root packages shouldn't have uid
+
+    assert 4 == len(toc[0]['items'])
+    assert 'Overview' == toc[0]['items'][0]['name']
+    assert 'azure.durable_functions' == toc[0]['items'][0]['uid']
+
+    assert 'models' == toc[0]['items'][1]['name']
+    assert 'uid' not in toc[0]['items'][1]
+    assert 2 == len(toc[0]['items'][1]['items'])
+    assert 'Overview' == toc[0]['items'][1]['items'][0]['name']
+    assert 'azure.durable_functions.models' == toc[0]['items'][1]['items'][0]['uid']
+    assert 'DurableEntityContext' == toc[0]['items'][1]['items'][1]['name']
+    assert 'azure.durable_functions.models.DurableEntityContext' == toc[0]['items'][1]['items'][1]['uid']
+
+    assert 'constants' == toc[0]['items'][2]['name']
+    assert 'azure.durable_functions.constants' == toc[0]['items'][2]['uid']
+
+    assert 'Blueprint' == toc[0]['items'][3]['name']
+    assert 'azure.durable_functions.Blueprint' == toc[0]['items'][3]['uid']
+
+
+1
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2docfx
-Version: 0.1.2rc1679015
+Version: 0.1.3
 Summary: A package built based on Sphinx which download source code package and generate yaml files supported by docfx.
 Author: Microsoft Corporation
 License: MIT License
@@ -8,7 +8,8 @@ py2docfx/convert_prepare/get_source.py,sha256=Rl6WYWpkalAWmc-02ZLb9zXxrHNDFaOoFT
 py2docfx/convert_prepare/git.py,sha256=xGJp2nDWLfVljrxyPnFKPoLIqmBh6by-QdITogIuxi0,5893
 py2docfx/convert_prepare/install_package.py,sha256=hATmgazcSX7k2n4jQXh9sQMyNUc1k1YqHv5K5UMALq4,262
 py2docfx/convert_prepare/pack.py,sha256=vZS67_GzEhUmZWHU1dxm8gnWyRBs-kB6-KjX1d_FdOU,1260
-py2docfx/convert_prepare/package_info.py,sha256=RCN3enfwXeU_9H8xBHLalvbhUrGxMtmlX0F9omGoPjU,6856
+py2docfx/convert_prepare/package_info.py,sha256=V-xasck6ZfqJdUjlGHtkC1iE-DCBDAMK8Z0q-NronkE,7254
+py2docfx/convert_prepare/package_info_extra_settings.py,sha256=u5B5e8hc0m9PA_-0kJzq1LtKn-xzZlucwXHTFy49mDg,1475
 py2docfx/convert_prepare/params.py,sha256=PXMB8pLtb4XbfI322avA47q0AO-TyBE6kZf7FU8I6v4,1771
 py2docfx/convert_prepare/paths.py,sha256=964RX81Qf__rzXgEATfqBNFCKTYVjLt9J7WCz2TnNdc,485
 py2docfx/convert_prepare/pip_utils.py,sha256=W8PJQQSZXUW7W_mdBxaK6KRuxMEskO1-Hw6hjRazqTY,1127
@@ -22,10 +23,10 @@ py2docfx/convert_prepare/post_process/merge_toc.py,sha256=coyae54OB1nGcCkxz9oAu-
 py2docfx/convert_prepare/subpackage_merge/merge_root_package.py,sha256=uK96qL2asuSfo_3SZaoP8XZaUvjf5mNkr17JNbZR4Lg,1026
 py2docfx/convert_prepare/subpackage_merge/merge_toc.py,sha256=nkVqe8R0m8D6cyTYV7aIpMDXorvn4-LXfU_vIK_hJBg,1706
 py2docfx/convert_prepare/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-py2docfx/convert_prepare/tests/test_generate_document.py,sha256=BKw8pMSsygLnv6bETlA2MIVarECzfWoNo4JglxHb-T4,2480
+py2docfx/convert_prepare/tests/test_generate_document.py,sha256=hOAtib9pTFaZCS8QbYS6Dhz0hNGESDHPXumRIuIWp4Y,2415
 py2docfx/convert_prepare/tests/test_get_source.py,sha256=c22JfobgbEbWWiNzBNYpZm2yDfo5LwBioUuRwft9WZE,5858
 py2docfx/convert_prepare/tests/test_pack.py,sha256=46JWMNzknIptDVs7D3CuxcmqBr_OKMmaw1br9H7wqco,4134
-py2docfx/convert_prepare/tests/test_package_info.py,sha256=L2ax9dItnz5QNSsSjSjEcaS6UPZxiq3MwysBB1FdJxI,2262
+py2docfx/convert_prepare/tests/test_package_info.py,sha256=hdNpAH9hlLF-cX9sKAu3cmiCtphX4USy9G_gWI_iaHo,3883
 py2docfx/convert_prepare/tests/test_params.py,sha256=p9DaGveocMBRih02KjpudJZE752neFBTLzOFbS47izQ,2036
 py2docfx/convert_prepare/tests/test_post_process_merge_toc.py,sha256=YKOcn4_lf4syGsAvJ9BqpdUUc3SLfK4TiOX1lpXJT_Y,885
 py2docfx/convert_prepare/tests/test_source.py,sha256=LNFZtvjz6QhVLOxatjWokYCCcoSm0bhTikMF9KoTPIE,2025
@@ -52,7 +53,7 @@ py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure
 py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/docfx_yaml/__init__.py,sha256=KCEizAXv-SXtrYhvFfLHdBWDhz51AA9uagaeTL-Itpo,100
-py2docfx/docfx_yaml/build_finished.py,sha256=kJemkgnh9NIzn4E8jcAIaP7vb7wcHzdLl7yWUjoZbi8,14218
+py2docfx/docfx_yaml/build_finished.py,sha256=yMA2Q3tuaXch6xzSe12ehpEIFs0r0S-M79w_xueCcsk,13115
 py2docfx/docfx_yaml/build_init.py,sha256=lAw-fnBVQbySfZ7Sut_NpFQUjnqLOmnGQrTBBH2RXcg,1860
 py2docfx/docfx_yaml/common.py,sha256=UN1MUmjUoN1QSFDR1Cm_bfRuHr6FQiOe5VQV6s8xzjc,6841
 py2docfx/docfx_yaml/convert_class.py,sha256=boKDaxnXbnLxja62UFXi3eChGDB_WBW6ouUUJgOhdpE,2098
@@ -72,7 +73,7 @@ py2docfx/docfx_yaml/writer.py,sha256=0ZqyVGDHa4Cr3NsuOPRf4pGUStl6g6IBxpSgIZeDT9I
 py2docfx/docfx_yaml/yaml_builder.py,sha256=qSxXVS4iFCc1ZdL5QzLrv8hy3LHIQCrhO4WcTp01vag,2575
 py2docfx/docfx_yaml/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 py2docfx/docfx_yaml/tests/conftest.py,sha256=CykkZxaDZ-3a1EIQdGBieSmHL9FdyTE2xTJZe9QgKcg,1214
-py2docfx/docfx_yaml/tests/test_build_finished.py,sha256=ShCRhIHGu2IUaM6iWsC0w9i_qmdc_kadPE3oPRBBZbQ,4720
+py2docfx/docfx_yaml/tests/test_build_finished.py,sha256=x1aaOzUJxQkmwEaVgKxtC7kvVPeO7cCgFNDQq2GApxQ,6979
 py2docfx/docfx_yaml/tests/test_method_arguments.py,sha256=Cvj9aoADtacKciVN8nempXW-KQL8nujSa9GNVuk6l_8,1578
 py2docfx/docfx_yaml/tests/test_numpy_syntax.py,sha256=ssb3J_-Jzjybhh4eycCA_LkXbGflyZyIUAiTjlEYLiw,863
 py2docfx/docfx_yaml/tests/test_translator_attributes.py,sha256=qZCsQGffq31k3UzpXkJpycplOXIq9gi2SxY6vu0DTfw,5224
@@ -118,7 +119,7 @@ py2docfx/docfx_yaml/tests/roots/test-writer-table/conf.py,sha256=avcbnIOV2mlGQwh
 py2docfx/docfx_yaml/tests/roots/test-writer-uri/code_with_uri.py,sha256=bzWTZpY2yf_By2bOSl1GFaY3BsZpkAvwQuGztlcHKkQ,537
 py2docfx/docfx_yaml/tests/roots/test-writer-uri/conf.py,sha256=avcbnIOV2mlGQwhMQJZC4W6UGRBRhnq1QBxjPWlySxQ,260
 py2docfx/docfx_yaml/tests/utils/test_utils.py,sha256=d0OYSUQ6NyoZx5mlLdNGGNhiNmmQhjVT4hQ6jY3VE_M,3383
-py2docfx-0.1.2rc1679015.dist-info/METADATA,sha256=QIvuMtAUJkvTqHAYy2H64EGG2lSasnKDtIxKDL_ThN4,599
-py2docfx-0.1.2rc1679015.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-py2docfx-0.1.2rc1679015.dist-info/top_level.txt,sha256=5dH2uP81dczt_qQJ38wiZ-gzoVWasfiJALWRSjdbnYU,9
-py2docfx-0.1.2rc1679015.dist-info/RECORD,,
+py2docfx-0.1.3.dist-info/METADATA,sha256=qXd3WEPkRlOwRnj6k2njHzWfx23B9bBXS9c5hZbccAo,590
+py2docfx-0.1.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+py2docfx-0.1.3.dist-info/top_level.txt,sha256=5dH2uP81dczt_qQJ38wiZ-gzoVWasfiJALWRSjdbnYU,9
+py2docfx-0.1.3.dist-info/RECORD,,