py2docfx 0.1.2.dev1676273-py3-none-any.whl → 0.1.2.dev1678928-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/convert_prepare/package_info.py +0 -9
- py2docfx/convert_prepare/tests/test_generate_document.py +1 -0
- py2docfx/convert_prepare/tests/test_package_info.py +2 -43
- py2docfx/docfx_yaml/build_finished.py +81 -41
- py2docfx/docfx_yaml/tests/test_build_finished.py +3 -52
- {py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/METADATA +1 -1
- {py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/RECORD +9 -10
- py2docfx/convert_prepare/package_info_extra_settings.py +0 -20
- {py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/top_level.txt +0 -0

py2docfx/convert_prepare/package_info.py
@@ -2,7 +2,6 @@ from enum import Enum
 import os
 import re
 from py2docfx.convert_prepare.source import Source
-from py2docfx.convert_prepare.package_info_extra_settings import extra_exclude_path_by_package

 class PackageInfo:
     class InstallType(int, Enum):
@@ -162,12 +161,4 @@ class PackageInfo:
             if idx != len(package_name_segs)-1:
                 current_parent_packages = f'{current_parent_packages}/{package_seg}' if current_parent_packages else package_seg
                 exclude_path.append(os.path.join(code_location, f'{current_parent_packages}/__init__.py'))
-
-        if self.name in extra_exclude_path_by_package:
-            exclude_path.extend(
-                [
-                    os.path.join(code_location, path)
-                    for path in extra_exclude_path_by_package[self.name]
-                ]
-            )
         return exclude_path
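
Taken together, the two package_info.py hunks drop the package_info_extra_settings import and the per-package extra excludes, so get_exluded_command now derives its exclusions purely from the package name's namespace segments. A minimal standalone sketch of that surviving logic (the helper name and inputs below are illustrative, not part of the package):

import os

def build_parent_init_excludes(package_name, code_location):
    # Mirrors the remaining loop: every parent-level namespace __init__.py
    # derived from the dashed package name is excluded, nothing else.
    exclude_path = []
    package_name_segs = package_name.split("-")
    current_parent_packages = ""
    for idx, package_seg in enumerate(package_name_segs):
        if idx != len(package_name_segs) - 1:
            current_parent_packages = (
                f"{current_parent_packages}/{package_seg}"
                if current_parent_packages else package_seg)
            exclude_path.append(
                os.path.join(code_location, f"{current_parent_packages}/__init__.py"))
    return exclude_path

# With POSIX separators this prints:
# ['src/azure/__init__.py', 'src/azure/mltable/__init__.py']
print(build_parent_init_excludes("azure-mltable-py2docfxtest", "src"))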

py2docfx/convert_prepare/tests/test_generate_document.py
@@ -51,4 +51,5 @@ def test_generate_document(tmp_path):
     assert not os.path.exists(os.path.join(yaml_path, "azure.dummy.yml"))
     assert not os.path.exists(os.path.join(yaml_path, "azure.yml"))
     assert os.path.exists(os.path.join(yaml_path, "toc.yml"))
+    assert os.path.exists(os.path.join(yaml_path, "index.yml"))


py2docfx/convert_prepare/tests/test_package_info.py
@@ -13,10 +13,10 @@ with open(full_test_file_path, "r", encoding="utf-8") as json_file:
     package_info_0 = PackageInfo.parse_from(test_dict["packages"][0], False)
     package_info_0.code_location = "dummy_location"

+with open(full_test_file_path, "r", encoding="utf-8") as json_file:
+    test_dict = json.load(json_file)
     package_info_1 = PackageInfo.parse_from(test_dict["packages"][1], False)

-    package_info_2 = PackageInfo.parse_from(test_dict["packages"][2], False)
-
 def test_parse_from():
     assert package_info_0.exclude_path == ["test*", "example*", "sample*", "doc*"]
     assert package_info_0.name == "azure-mltable-py2docfxtest"
@@ -55,44 +55,3 @@ def test_get_exclude_command(tmp_path):
     def form_exclude_path(raletive_path):
         return os.path.join(source_folder, raletive_path)
     assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
-
-
-def test_get_exclude_command(tmp_path):
-    source_folder = os.path.join(tmp_path,"source_folder")
-    yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
-    package_info_0.path = Source(
-        source_folder = source_folder, yaml_output_folder = yaml_output_folder, package_name = "azure-mltable-py2docfxtest"
-    )
-    exclude_path = package_info_0.get_exluded_command()
-    expected_exclude_path = [
-        "build/*",
-        "setup.py",
-        "test*",
-        "example*",
-        "sample*",
-        "doc*",
-        "azure/__init__.py",
-        "azure/mltable/__init__.py"
-    ]
-    def form_exclude_path(raletive_path):
-        return os.path.join(source_folder, raletive_path)
-    assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]
-
-def test_get_exclude_command_check_extra_exclude(tmp_path):
-    source_folder = os.path.join(tmp_path,"source_folder")
-    yaml_output_folder = os.path.join(tmp_path,"yaml_output_folder")
-    package_info_2.path = Source(
-        source_folder = source_folder, yaml_output_folder = yaml_output_folder, package_name = 'azure-core-tracing-opencensus'
-    )
-    exclude_path = package_info_2.get_exluded_command()
-    expected_exclude_path = [
-        "build/*",
-        "setup.py",
-        "azure/__init__.py",
-        "azure/core/__init__.py",
-        "azure/core/tracing/__init__.py",
-        'azure/core/tracing/ext/__init__.py'
-    ]
-    def form_exclude_path(raletive_path):
-        return os.path.join(source_folder, raletive_path)
-    assert exclude_path == [form_exclude_path(path) for path in expected_exclude_path]

py2docfx/docfx_yaml/build_finished.py
@@ -26,36 +26,8 @@ scientific_notation_regex = re.compile(r'^[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)$
 def string_representer(dumper, data):
     return dumper.represent_scalar(u"tag:yaml.org,2002:str", data,
                                    style="'" if (scientific_notation_regex.match(data)) else None)
-yml.add_representer(str, string_representer)
-
-def insert_node_to_toc_tree(toc_yaml, uid, project_name, toc_node_map):
-    # Build nested TOC
-    parent_level = uid
-    cur_node = None

-
-    while parent_level.count('.') >= 1:
-        parent_level = '.'.join(parent_level.split('.')[:-1])
-        found_node = toc_node_map[parent_level] if parent_level in toc_node_map else None
-        if found_node:
-            # If ancestor already in current TOC, insert to its items
-            name = uid.split('.')[-1] if '.' in uid and project_name != uid else uid
-            cur_node = {'name': name, 'uid': uid}
-            if 'uid' in found_node:
-                # Only leaf nodes should have uid
-                found_node.pop('uid', 'No uid found')
-            # Subpackages should have its Overview page
-            found_node.setdefault('items', [{'name': 'Overview', 'uid': parent_level}]).append(cur_node)
-            break
-
-    # uid is representing a package in TOC as root node
-    if cur_node is None:
-        # if uid doesn't contain '.', the name needn't to simplify
-        cur_node = {'name': uid}
-        toc_yaml.append(cur_node)
-
-    # insert to uid-toc map
-    toc_node_map[uid] = cur_node
+yml.add_representer(str, string_representer)

 def merge_params(arg_params, doc_params):
     merged_params = deepcopy(doc_params)
@@ -83,10 +55,10 @@ def remove_params_without_id(params):
     return new_params

 def add_isrequired_if_needed(obj, key: str):
-
-
-
-
+    if key in obj['syntax'] and obj['type'] in ['class', 'function', 'method']:
+        for args in obj['syntax'][key]:
+            if 'isRequired' not in args and 'defaultValue' not in args:
+                args['isRequired'] = True

 def get_merged_params(obj, info_field_data, key: str):
     merged_params = []
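
For reference, the rewritten add_isrequired_if_needed marks any documented argument that carries neither isRequired nor defaultValue. A self-contained illustration of the new behaviour (the obj dictionary below is a made-up example of the YAML object shape, not data from the package):

def add_isrequired_if_needed(obj, key):
    # Same body as the added lines above.
    if key in obj['syntax'] and obj['type'] in ['class', 'function', 'method']:
        for args in obj['syntax'][key]:
            if 'isRequired' not in args and 'defaultValue' not in args:
                args['isRequired'] = True

obj = {
    'type': 'function',
    'syntax': {'parameters': [
        {'id': 'name'},                            # no default -> marked required
        {'id': 'timeout', 'defaultValue': '30'},   # has a default -> left untouched
    ]},
}
add_isrequired_if_needed(obj, 'parameters')
print(obj['syntax']['parameters'])
# [{'id': 'name', 'isRequired': True}, {'id': 'timeout', 'defaultValue': '30'}]
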
@@ -96,7 +68,7 @@ def get_merged_params(obj, info_field_data, key: str):
         key, [])
     if arg_params and doc_params:
         if len(arg_params) - len(doc_params) > 0:
-            print(
+            print("Documented params don't match size of params:"" {}".format(obj['uid'])) # lgtm [py/clear-text-logging-sensitive-data]
             doc_params = remove_params_without_id(doc_params)
             merged_params = merge_params(arg_params, doc_params)
     else:
@@ -109,7 +81,7 @@ def raise_up_fields(obj):
     if 'summary' in obj['syntax'] and obj['syntax']['summary']:
         obj['summary'] = obj['syntax'].pop(
             'summary').strip(" \n\r\r")
-
+
     # Raise up remarks
     if 'remarks' in obj['syntax'] and obj['syntax']['remarks']:
         obj['remarks'] = obj['syntax'].pop('remarks')
@@ -176,6 +148,35 @@ def merge_data(obj, info_field_data, yaml_data):
     # Revert `type` for other objects to use
     info_field_data[obj['uid']]['type'] = obj['type']

+def find_node_in_toc_tree(toc_yaml, to_add_node):
+    for module in toc_yaml:
+        if module['name'] == to_add_node:
+            return module
+
+        if 'items' in module:
+            items = module['items']
+            found_module = find_node_in_toc_tree(items, to_add_node)
+            if found_module != None:
+                return found_module
+
+    return None
+
+def build_nested_toc(toc_yaml, uid):
+    # Build nested TOC
+    if uid.count('.') >= 1:
+        parent_level = '.'.join(uid.split('.')[:-1])
+        found_node = find_node_in_toc_tree(toc_yaml, parent_level)
+
+        if found_node:
+            found_node.pop('uid', 'No uid found')
+            found_node.setdefault('items', [{'name': 'Overview', 'uid': parent_level}]).append(
+                {'name': uid, 'uid': uid})
+        else:
+            toc_yaml.append({'name': uid, 'uid': uid})
+
+    else:
+        toc_yaml.append({'name': uid, 'uid': uid})
+
 def build_finished(app, exception):
     """
     Output YAML on the file system.
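
The find_node_in_toc_tree/build_nested_toc pair added here replaces the toc_node_map-based insert_node_to_toc_tree removed earlier in this file: the parent is now located by a recursive search over the TOC tree, a parent that gains children loses its uid and receives an 'Overview' leaf, and full dotted uids are used as node names. A runnable sketch of the resulting structure, with the two functions adapted from the hunk above and illustrative uids:

def find_node_in_toc_tree(toc_yaml, to_add_node):
    # Depth-first search for a node whose name matches to_add_node.
    for module in toc_yaml:
        if module['name'] == to_add_node:
            return module
        if 'items' in module:
            found_module = find_node_in_toc_tree(module['items'], to_add_node)
            if found_module is not None:
                return found_module
    return None

def build_nested_toc(toc_yaml, uid):
    # Nest uid under its dotted parent if that parent is already in the TOC.
    if uid.count('.') >= 1:
        parent_level = '.'.join(uid.split('.')[:-1])
        found_node = find_node_in_toc_tree(toc_yaml, parent_level)
        if found_node:
            found_node.pop('uid', 'No uid found')
            found_node.setdefault(
                'items', [{'name': 'Overview', 'uid': parent_level}]
            ).append({'name': uid, 'uid': uid})
        else:
            toc_yaml.append({'name': uid, 'uid': uid})
    else:
        toc_yaml.append({'name': uid, 'uid': uid})

toc = []
for uid in ('azure.durable_functions', 'azure.durable_functions.models'):
    build_nested_toc(toc, uid)
print(toc)
# [{'name': 'azure.durable_functions',
#   'items': [{'name': 'Overview', 'uid': 'azure.durable_functions'},
#             {'name': 'azure.durable_functions.models',
#              'uid': 'azure.durable_functions.models'}]}]
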
@@ -206,8 +207,6 @@ def build_finished(app, exception):
     ))

     ensuredir(normalized_outdir)
-    project_name = app.config.project.replace('-','.')
-    toc_node_map = {}

     def filter_out_self_from_args(obj):
         arg_params = obj.get('syntax', {}).get('parameters', [])
@@ -261,13 +260,30 @@ def build_finished(app, exception):
             if (obj['type'] == 'class' and obj['inheritance']):
                 convert_class_to_enum_if_needed(obj)

-
+            build_nested_toc(toc_yaml, uid)
+
+    index_children = []
+    index_references = []
+
+    def form_index_references_and_children(yaml_data, index_children, index_references):
+        if yaml_data[0].get('type', None) in ['package', 'module']:
+            index_children.append(yaml_data[0].get('fullName', ''))
+            index_references.append({
+                'uid': yaml_data[0].get('fullName', ''),
+                'name': yaml_data[0].get('fullName', ''),
+                'fullname': yaml_data[0].get('fullName', ''),
+                'isExternal': False
+            })

     for data_set in (app.env.docfx_yaml_packages,
                      app.env.docfx_yaml_modules,
                      app.env.docfx_yaml_classes,
                      app.env.docfx_yaml_functions): # noqa
+
         for uid, yaml_data in iter(sorted(data_set.items())):
+
+            form_index_references_and_children(yaml_data, index_children, index_references)
+
             # Output file
             if uid.lower() in file_name_set:
                 filename = uid + "(%s)" % app.env.docfx_info_uid_types[uid]
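
form_index_references_and_children is called once per uid in the write loop; it only records packages and modules, reusing the object's fullName for uid, name, and fullname alike. A small self-contained check of that filter (the yaml_data values below are made-up examples of the structures stored in app.env.docfx_yaml_*):

def form_index_references_and_children(yaml_data, index_children, index_references):
    # Same body as the added helper above.
    if yaml_data[0].get('type', None) in ['package', 'module']:
        index_children.append(yaml_data[0].get('fullName', ''))
        index_references.append({
            'uid': yaml_data[0].get('fullName', ''),
            'name': yaml_data[0].get('fullName', ''),
            'fullname': yaml_data[0].get('fullName', ''),
            'isExternal': False
        })

index_children, index_references = [], []
form_index_references_and_children(
    [{'type': 'module', 'fullName': 'azure.example.models'}],
    index_children, index_references)
form_index_references_and_children(
    [{'type': 'class', 'fullName': 'azure.example.Widget'}],   # classes are skipped
    index_children, index_references)
print(index_children)  # ['azure.example.models']
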
@@ -275,6 +291,7 @@ def build_finished(app, exception):
                 filename = uid
             out_file = os.path.join(normalized_outdir, '%s.yml' % filename)
             ensuredir(os.path.dirname(out_file))
+            new_object = {}

             transformed_obj = None
             if yaml_data[0]['type'] == 'package':
@@ -293,13 +310,12 @@ def build_finished(app, exception):
                 mime = "PythonModule"

             if transformed_obj == None:
-                print(
+                print("Unknown yml: " + yamlfile_path)
             else:
                 # save file
                 common.write_yaml(transformed_obj, out_file, mime)
                 file_name_set.add(filename)

-    # Write TOC, the toc should include at least 1
     if len(toc_yaml) == 0:
         raise RuntimeError("No documentation for this module.")

@@ -307,7 +323,31 @@ def build_finished(app, exception):
     with open(toc_file, 'w') as writable:
         writable.write(
             dump(
-
+                [{
+                    'name': app.config.project,
+                    'items': [{'name': 'Overview', 'uid': 'project-' + app.config.project}] + toc_yaml
+                }],
                 default_flow_style=False,
             )
         )
+
+    index_file = os.path.join(normalized_outdir, 'index.yml')
+
+    index_obj = [{
+        'uid': 'project-' + app.config.project,
+        'name': app.config.project,
+        'fullName': app.config.project,
+        'langs': ['python'],
+        'type': 'package',
+        'kind': 'distribution',
+        'summary': '',
+        'children': index_children
+    }]
+    transformed_obj = convert_package(index_obj, app.env.docfx_info_uid_types)
+    mime = "PythonPackage"
+    if transformed_obj == None:
+        print("Unknown yml: " + yamlfile_path)
+    else:
+        # save file
+        common.write_yaml(transformed_obj, index_file, mime)
+        file_name_set.add(filename)
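
Together with form_index_references_and_children earlier in this file, this block produces the index.yml that the updated test_generate_document now asserts on: a synthetic 'project-<name>' distribution package whose children are the collected packages and modules, converted through convert_package and written next to toc.yml. A minimal sketch of the object handed to the converter (project name and children are illustrative; convert_package itself is py2docfx's existing converter and is not reproduced here):

project = 'azure-example'                       # stand-in for app.config.project
index_children = ['azure.example', 'azure.example.models']  # gathered per package/module

index_obj = [{
    'uid': 'project-' + project,   # same uid the TOC's root 'Overview' entry points at
    'name': project,
    'fullName': project,
    'langs': ['python'],
    'type': 'package',
    'kind': 'distribution',
    'summary': '',
    'children': index_children,
}]
print(index_obj[0]['uid'])  # project-azure-example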

py2docfx/docfx_yaml/tests/test_build_finished.py
@@ -1,9 +1,8 @@
-import os
 import pytest
-
+
 from translator import translator
 from build_finished import build_finished, merge_data
-
+
 from .utils.test_utils import prepare_app_envs,load_rst_transform_to_doctree

 @pytest.mark.sphinx('yaml', testroot='build-finished')
@@ -131,52 +130,4 @@ def test_merge_data_added_attribute():
     # Assert the results
     assert 'added_attribute' not in obj['syntax']
     assert len(yaml_data) == 1
-    assert yaml_data[0]['uid'] == 'attr1'
-
-
-@pytest.mark.sphinx('yaml', testroot='build-finished')
-def test_build_finished_check_toc(tmp_path, app):
-    app.builder.outdir = tmp_path
-    app.config.project = 'azure-durable-functions'
-    app.env.docfx_yaml_packages = {
-        'azure.durable_functions':[{'uid':'azure.durable_functions','type':'package'}],
-        'azure.durable_functions.models':[{'uid':'azure.durable_functions.models','type':'package'}],
-    }
-    app.env.docfx_yaml_modules = {
-        'azure.durable_functions.constants':[{'uid':'azure.durable_functions.constants','type':'module'}],
-        'azure.durable_functions.models.DurableEntityContext':[{'uid':'azure.durable_functions.models.DurableEntityContext','type':'module'}],
-    }
-    app.env.docfx_yaml_classes = {'azure.durable_functions.Blueprint':[{'uid':'azure.durable_functions.Blueprint','type':'class', 'inheritance':[]}]}
-
-    # Act
-    build_finished(app, None)
-
-    # Assert after build_finished
-    toc = None
-    with open(os.path.join(tmp_path, API_ROOT, 'toc.yml'), 'r', encoding='utf-8') as file:
-        toc = yaml.safe_load(file)
-
-    assert 1 == len(toc)
-    assert 'azure.durable_functions' == toc[0]['name']
-    assert 'uid' not in toc[0] # root packages shouldn't have uid
-
-    assert 4 == len(toc[0]['items'])
-    assert 'Overview' == toc[0]['items'][0]['name']
-    assert 'azure.durable_functions' == toc[0]['items'][0]['uid']
-
-    assert 'models' == toc[0]['items'][1]['name']
-    assert 'uid' not in toc[0]['items'][1]
-    assert 2 == len(toc[0]['items'][1]['items'])
-    assert 'Overview' == toc[0]['items'][1]['items'][0]['name']
-    assert 'azure.durable_functions.models' == toc[0]['items'][1]['items'][0]['uid']
-    assert 'DurableEntityContext' == toc[0]['items'][1]['items'][1]['name']
-    assert 'azure.durable_functions.models.DurableEntityContext' == toc[0]['items'][1]['items'][1]['uid']
-
-    assert 'constants' == toc[0]['items'][2]['name']
-    assert 'azure.durable_functions.constants' == toc[0]['items'][2]['uid']
-
-    assert 'Blueprint' == toc[0]['items'][3]['name']
-    assert 'azure.durable_functions.Blueprint' == toc[0]['items'][3]['uid']
-
-
-
+    assert yaml_data[0]['uid'] == 'attr1'

{py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2docfx
-Version: 0.1.2.dev1676273
+Version: 0.1.2.dev1678928
 Summary: A package built based on Sphinx which download source code package and generate yaml files supported by docfx.
 Author: Microsoft Corporation
 License: MIT License

{py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/RECORD
@@ -8,8 +8,7 @@ py2docfx/convert_prepare/get_source.py,sha256=Rl6WYWpkalAWmc-02ZLb9zXxrHNDFaOoFT
 py2docfx/convert_prepare/git.py,sha256=xGJp2nDWLfVljrxyPnFKPoLIqmBh6by-QdITogIuxi0,5893
 py2docfx/convert_prepare/install_package.py,sha256=hATmgazcSX7k2n4jQXh9sQMyNUc1k1YqHv5K5UMALq4,262
 py2docfx/convert_prepare/pack.py,sha256=vZS67_GzEhUmZWHU1dxm8gnWyRBs-kB6-KjX1d_FdOU,1260
-py2docfx/convert_prepare/package_info.py,sha256=
-py2docfx/convert_prepare/package_info_extra_settings.py,sha256=pjZ2-DAOljBrx3L9N6sB5QohntFRa0uCCRiPEBXTvQg,1299
+py2docfx/convert_prepare/package_info.py,sha256=RCN3enfwXeU_9H8xBHLalvbhUrGxMtmlX0F9omGoPjU,6856
 py2docfx/convert_prepare/params.py,sha256=PXMB8pLtb4XbfI322avA47q0AO-TyBE6kZf7FU8I6v4,1771
 py2docfx/convert_prepare/paths.py,sha256=964RX81Qf__rzXgEATfqBNFCKTYVjLt9J7WCz2TnNdc,485
 py2docfx/convert_prepare/pip_utils.py,sha256=W8PJQQSZXUW7W_mdBxaK6KRuxMEskO1-Hw6hjRazqTY,1127
|
|
23
22
|
py2docfx/convert_prepare/subpackage_merge/merge_root_package.py,sha256=uK96qL2asuSfo_3SZaoP8XZaUvjf5mNkr17JNbZR4Lg,1026
|
24
23
|
py2docfx/convert_prepare/subpackage_merge/merge_toc.py,sha256=nkVqe8R0m8D6cyTYV7aIpMDXorvn4-LXfU_vIK_hJBg,1706
|
25
24
|
py2docfx/convert_prepare/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
26
|
-
py2docfx/convert_prepare/tests/test_generate_document.py,sha256=
|
25
|
+
py2docfx/convert_prepare/tests/test_generate_document.py,sha256=BKw8pMSsygLnv6bETlA2MIVarECzfWoNo4JglxHb-T4,2480
|
27
26
|
py2docfx/convert_prepare/tests/test_get_source.py,sha256=c22JfobgbEbWWiNzBNYpZm2yDfo5LwBioUuRwft9WZE,5858
|
28
27
|
py2docfx/convert_prepare/tests/test_pack.py,sha256=46JWMNzknIptDVs7D3CuxcmqBr_OKMmaw1br9H7wqco,4134
|
29
|
-
py2docfx/convert_prepare/tests/test_package_info.py,sha256=
|
28
|
+
py2docfx/convert_prepare/tests/test_package_info.py,sha256=L2ax9dItnz5QNSsSjSjEcaS6UPZxiq3MwysBB1FdJxI,2262
|
30
29
|
py2docfx/convert_prepare/tests/test_params.py,sha256=p9DaGveocMBRih02KjpudJZE752neFBTLzOFbS47izQ,2036
|
31
30
|
py2docfx/convert_prepare/tests/test_post_process_merge_toc.py,sha256=YKOcn4_lf4syGsAvJ9BqpdUUc3SLfK4TiOX1lpXJT_Y,885
|
32
31
|
py2docfx/convert_prepare/tests/test_source.py,sha256=LNFZtvjz6QhVLOxatjWokYCCcoSm0bhTikMF9KoTPIE,2025
|
@@ -53,7 +52,7 @@ py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure
|
|
53
52
|
py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
54
53
|
py2docfx/convert_prepare/tests/data/subpackage/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_03_31/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
55
54
|
py2docfx/docfx_yaml/__init__.py,sha256=KCEizAXv-SXtrYhvFfLHdBWDhz51AA9uagaeTL-Itpo,100
|
56
|
-
py2docfx/docfx_yaml/build_finished.py,sha256=
|
55
|
+
py2docfx/docfx_yaml/build_finished.py,sha256=kJemkgnh9NIzn4E8jcAIaP7vb7wcHzdLl7yWUjoZbi8,14218
|
57
56
|
py2docfx/docfx_yaml/build_init.py,sha256=lAw-fnBVQbySfZ7Sut_NpFQUjnqLOmnGQrTBBH2RXcg,1860
|
58
57
|
py2docfx/docfx_yaml/common.py,sha256=UN1MUmjUoN1QSFDR1Cm_bfRuHr6FQiOe5VQV6s8xzjc,6841
|
59
58
|
py2docfx/docfx_yaml/convert_class.py,sha256=boKDaxnXbnLxja62UFXi3eChGDB_WBW6ouUUJgOhdpE,2098
|
@@ -73,7 +72,7 @@ py2docfx/docfx_yaml/writer.py,sha256=0ZqyVGDHa4Cr3NsuOPRf4pGUStl6g6IBxpSgIZeDT9I
|
|
73
72
|
py2docfx/docfx_yaml/yaml_builder.py,sha256=qSxXVS4iFCc1ZdL5QzLrv8hy3LHIQCrhO4WcTp01vag,2575
|
74
73
|
py2docfx/docfx_yaml/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
75
74
|
py2docfx/docfx_yaml/tests/conftest.py,sha256=CykkZxaDZ-3a1EIQdGBieSmHL9FdyTE2xTJZe9QgKcg,1214
|
76
|
-
py2docfx/docfx_yaml/tests/test_build_finished.py,sha256=
|
75
|
+
py2docfx/docfx_yaml/tests/test_build_finished.py,sha256=ShCRhIHGu2IUaM6iWsC0w9i_qmdc_kadPE3oPRBBZbQ,4720
|
77
76
|
py2docfx/docfx_yaml/tests/test_method_arguments.py,sha256=Cvj9aoADtacKciVN8nempXW-KQL8nujSa9GNVuk6l_8,1578
|
78
77
|
py2docfx/docfx_yaml/tests/test_numpy_syntax.py,sha256=ssb3J_-Jzjybhh4eycCA_LkXbGflyZyIUAiTjlEYLiw,863
|
79
78
|
py2docfx/docfx_yaml/tests/test_translator_attributes.py,sha256=qZCsQGffq31k3UzpXkJpycplOXIq9gi2SxY6vu0DTfw,5224
|
@@ -119,7 +118,7 @@ py2docfx/docfx_yaml/tests/roots/test-writer-table/conf.py,sha256=avcbnIOV2mlGQwh
|
|
119
118
|
py2docfx/docfx_yaml/tests/roots/test-writer-uri/code_with_uri.py,sha256=bzWTZpY2yf_By2bOSl1GFaY3BsZpkAvwQuGztlcHKkQ,537
|
120
119
|
py2docfx/docfx_yaml/tests/roots/test-writer-uri/conf.py,sha256=avcbnIOV2mlGQwhMQJZC4W6UGRBRhnq1QBxjPWlySxQ,260
|
121
120
|
py2docfx/docfx_yaml/tests/utils/test_utils.py,sha256=d0OYSUQ6NyoZx5mlLdNGGNhiNmmQhjVT4hQ6jY3VE_M,3383
|
122
|
-
py2docfx-0.1.2.
|
123
|
-
py2docfx-0.1.2.
|
124
|
-
py2docfx-0.1.2.
|
125
|
-
py2docfx-0.1.2.
|
121
|
+
py2docfx-0.1.2.dev1678928.dist-info/METADATA,sha256=3_qDauKjSh9KXv953REVtX9U0KyowlxQHtEvTYXgDYI,601
|
122
|
+
py2docfx-0.1.2.dev1678928.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
123
|
+
py2docfx-0.1.2.dev1678928.dist-info/top_level.txt,sha256=5dH2uP81dczt_qQJ38wiZ-gzoVWasfiJALWRSjdbnYU,9
|
124
|
+
py2docfx-0.1.2.dev1678928.dist-info/RECORD,,
|

py2docfx/convert_prepare/package_info_extra_settings.py
@@ -1,20 +0,0 @@
-"""
-Sphinx defaultly generate namespace pages liek azure azure-core
-We have different ways to deal with it compare to Azure SDK scripts
-We pass --implicit-namespaces to mark parent level namespaces, instead they're defaultly packages
-We exclude parent namespaces' __init__.py when running sphinx-apidoc otherwise it throws exception
-Azure SDK scripts don't add the --implicit-namespaces flag
-They removed parent level RSTs
-Check https://github.com/Azure/azure-sdk-for-python/blob/efad456552b8e4aa48db7ee96930223b95144947/eng/tox/run_sphinx_apidoc.py#L37C1-L48C10
-
-That difference causes our behavior differs from Azure SDK html when package name and its namespace
-structure are inconsistent. For example, azure-core-tracing-opencensus and azure-core-tracing-opentelemetry
-have layer of azure/core/tracing/ext/opencensus_span or azure/core/tracing/ext/opentelemetry_span,
-We generates 2 extra pages of azure.core.tracing.ext because we aren't able to know it is a parent level namespace
-
-Below map worksaround this issue by excluding know extra parent level __init__.py
-"""
-extra_exclude_path_by_package = {
-    'azure-core-tracing-opencensus': ['azure/core/tracing/ext/__init__.py'],
-    'azure-core-tracing-opentelemetry': ['azure/core/tracing/ext/__init__.py'],
-}

{py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/WHEEL: file without changes
{py2docfx-0.1.2.dev1676273.dist-info → py2docfx-0.1.2.dev1678928.dist-info}/top_level.txt: file without changes