py2docfx 0.1.21rc2249766__py3-none-any.whl → 0.1.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py2docfx/convert_prepare/generate_conf.py +8 -1
- py2docfx/docfx_yaml/process_doctree.py +9 -20
- py2docfx/docfx_yaml/translator.py +34 -44
- py2docfx/docfx_yaml/type_mapping.py +102 -0
- py2docfx/venv/basevenv/Lib/site-packages/certifi/__init__.py +1 -1
- py2docfx/venv/basevenv/Lib/site-packages/markupsafe/__init__.py +3 -2
- py2docfx/venv/basevenv/Lib/site-packages/yaml/__init__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/default.py +8 -9
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/imds.py +7 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/managed_identity.py +7 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/shared_cache.py +2 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/interactive.py +2 -2
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/msal_managed_identity_client.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/_version.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/default.py +8 -9
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/imds.py +7 -3
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/managed_identity.py +7 -1
- py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/shared_cache.py +2 -2
- py2docfx/venv/venv1/Lib/site-packages/cachetools/__init__.py +96 -122
- py2docfx/venv/venv1/Lib/site-packages/cachetools/{_decorators.py → _cached.py} +106 -13
- py2docfx/venv/venv1/Lib/site-packages/cachetools/_cachedmethod.py +128 -0
- py2docfx/venv/venv1/Lib/site-packages/cachetools/func.py +5 -25
- py2docfx/venv/venv1/Lib/site-packages/certifi/__init__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/cryptography/__about__.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/client_options.py +9 -2
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/general_helpers.py +36 -0
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/grpc_helpers.py +10 -7
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/grpc_helpers_async.py +8 -3
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/operations_v1/transports/base.py +13 -7
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/operations_v1/transports/rest.py +19 -12
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/operations_v1/transports/rest_asyncio.py +21 -0
- py2docfx/venv/venv1/Lib/site-packages/google/api_core/version.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/google/auth/_default.py +66 -12
- py2docfx/venv/venv1/Lib/site-packages/google/auth/_default_async.py +16 -10
- py2docfx/venv/venv1/Lib/site-packages/google/auth/_helpers.py +41 -0
- py2docfx/venv/venv1/Lib/site-packages/google/auth/compute_engine/credentials.py +67 -6
- py2docfx/venv/venv1/Lib/site-packages/google/auth/credentials.py +161 -18
- py2docfx/venv/venv1/Lib/site-packages/google/auth/environment_vars.py +4 -0
- py2docfx/venv/venv1/Lib/site-packages/google/auth/external_account.py +33 -10
- py2docfx/venv/venv1/Lib/site-packages/google/auth/external_account_authorized_user.py +24 -1
- py2docfx/venv/venv1/Lib/site-packages/google/auth/identity_pool.py +25 -1
- py2docfx/venv/venv1/Lib/site-packages/google/auth/impersonated_credentials.py +57 -9
- py2docfx/venv/venv1/Lib/site-packages/google/auth/pluggable.py +25 -1
- py2docfx/venv/venv1/Lib/site-packages/google/auth/version.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/google/oauth2/_client.py +117 -0
- py2docfx/venv/venv1/Lib/site-packages/google/oauth2/service_account.py +39 -4
- py2docfx/venv/venv1/Lib/site-packages/msal/application.py +3 -1
- py2docfx/venv/venv1/Lib/site-packages/msal/oauth2cli/oauth2.py +5 -2
- py2docfx/venv/venv1/Lib/site-packages/msal/sku.py +1 -1
- py2docfx/venv/venv1/Lib/site-packages/msal/token_cache.py +7 -1
- {py2docfx-0.1.21rc2249766.dist-info → py2docfx-0.1.22.dist-info}/METADATA +1 -1
- {py2docfx-0.1.21rc2249766.dist-info → py2docfx-0.1.22.dist-info}/RECORD +54 -52
- {py2docfx-0.1.21rc2249766.dist-info → py2docfx-0.1.22.dist-info}/WHEEL +0 -0
- {py2docfx-0.1.21rc2249766.dist-info → py2docfx-0.1.22.dist-info}/top_level.txt +0 -0
py2docfx/convert_prepare/generate_conf.py
CHANGED
@@ -35,7 +35,14 @@ def generate_conf(package: PackageInfo, output: str, template_dir: str):
     extension_config = []
     if hasattr(package, "extension_config"):
         for key, value in package.extension_config.items():
-
+            # Convert JSON boolean strings to Python boolean string representations
+            if value == True:
+                formatted_value = "True"
+            elif value == False:
+                formatted_value = "False"
+            else:
+                formatted_value = json.dumps(value)
+            extension_config.append(" = ".join([key, formatted_value]))
     params["EXTENSION_CONFIG"] = "\n".join(extension_config)

     # Write the final conf.py file.
py2docfx/docfx_yaml/process_doctree.py
CHANGED
@@ -12,16 +12,8 @@ from utils import transform_string
 from enum import EnumMeta
 from importlib import import_module
 from logger import get_package_logger
+from type_mapping import map_type_transformations, PACKAGE, METHOD, FUNCTION, DATA, MODULE, CLASS, EXCEPTION, ATTRIBUTE, PROPERTY, PYDANTIC_MODEL, PYDANTIC_FIELD, PYDANTIC_SETTINGS, PYDANTIC_VALIDATOR, PYDANTIC_CONFIG

-PACKAGE = 'package'
-METHOD = 'method'
-FUNCTION = 'function'
-DATA = 'data'
-MODULE = 'module'
-CLASS = 'class'
-EXCEPTION = 'exception'
-ATTRIBUTE = 'attribute'
-PROPERTY = 'property'
 REFMETHOD = 'meth'
 REFFUNCTION = 'func'
 REF_PATTERN = ':(py:)?(func|class|meth|mod|ref):`~?[a-zA-Z_\.<> ]*?`'

@@ -151,10 +143,10 @@ def getpositionalOnlyParameters(signature):
         except KeyError:
             # if the default value is not available, set it to inspect._empty
             default_value = "<class 'inspect._empty'>"
-
+
         if default_value != "<class 'inspect._empty'>":
             positional_only_param[count]['defaultValue'] = default_value
-
+
         count += 1
     return positional_only_param

@@ -180,7 +172,7 @@ def _create_datam(app, cls, module, name, _type, obj, lines=None):
     positional_only_params = []
     try:
         if _type in [CLASS, METHOD, FUNCTION]:
-            if not (_type == CLASS and isinstance(type(obj).__call__, type(EnumMeta.__call__))):
+            if not (_type == CLASS and isinstance(type(obj).__call__, type(EnumMeta.__call__))):
                 signature = inspect.signature(obj)
                 args = getParameterArgs(signature)
                 keyword_only_args = getKeywordOnlyParameters(signature)

@@ -329,10 +321,10 @@ def insert_children_on_package(app, _type, datam):
     if datam[MODULE] not in app.env.docfx_yaml_packages:
         return
     insert_package = app.env.docfx_yaml_packages[datam[MODULE]]
-
+
     for obj in insert_package:
         if obj['type'] == PACKAGE and obj['uid'] == datam[MODULE]:
-            if _type in [CLASS, EXCEPTION]:
+            if _type in [CLASS, EXCEPTION]:
                 obj['children'].append(datam['uid'])
                 break
     if _type in [FUNCTION, DATA]:

@@ -388,11 +380,8 @@ def process_docstring(app, _type, name, obj, options, lines):
             return PACKAGE
         return _type

-
-
-
-    if _type == PROPERTY:
-        _type = ATTRIBUTE
+    # Apply type transformations using shared mapping function
+    _type = map_type_transformations(_type)

     _type = check_convert_package_type(obj, _type)
     cls, module = _get_cls_module(_type, name)

@@ -451,4 +440,4 @@ def process_docstring(app, _type, name, obj, options, lines):
     insert_children_on_class(app, _type, datam)
     insert_children_on_function(app, _type, datam)

-    app.env.docfx_info_uid_types[datam['uid']] = _type
+    app.env.docfx_info_uid_types[datam['uid']] = _type
py2docfx/docfx_yaml/translator.py
CHANGED
@@ -15,16 +15,15 @@ from sphinx.util.docfields import _is_single_paragraph
 from collections import OrderedDict
 from nodes import remarks
 from logger import get_package_logger
+from type_mapping import (
+    translator_type_mapping, CLASS_TYPE, EXCEPTION_TYPE, ATTRIBUTE_TYPE,
+    PYDANTIC_MODEL_TYPE, PYDANTIC_SETTINGS_TYPE, PYDANTIC_FIELD_TYPE, PYDANTIC_CONFIG_TYPE,
+    types_contain_constructor, types_contain_attributes, attribute_types
+)

 TYPE_SEP_PATTERN = '(\[|\]|, |\(|\))'
 PARAMETER_NAME = "[*][*](.*?)[*][*]"
 PARAMETER_TYPE = "[(]((?:.|\n)*)[)]"
-CLASS_TYPE = 'class'
-EXCEPTION_TYPE = 'exception'
-ATTRIBUTE_TYPE = 'attribute'
-
-types_contain_constructor = {CLASS_TYPE, EXCEPTION_TYPE}
-types_contain_attributes = {CLASS_TYPE, EXCEPTION_TYPE}

 def translator(app, docname, doctree):


@@ -45,15 +44,6 @@ def translator(app, docname, doctree):
         else:
             return para_field.astext()

-    def type_mapping(type_name):
-        mapping = {
-            "staticmethod": "method",
-            "classmethod": "method",
-            "exception": "class",
-        }
-
-        return mapping[type_name] if type_name in mapping else type_name
-
     def _get_uid_and_type_from_desc(node):
         assert node.tagname == 'desc'
         if node.attributes['domain'] != 'py':

@@ -78,7 +68,7 @@ def translator(app, docname, doctree):
                 return True

         return False
-
+
     def extract_exception_desc(exception_fieldbody_node):
         def extract_exception_type(exception_node):
             _type_without_xref = transform_node(exception_node).strip(" \n\r\t")

@@ -90,7 +80,7 @@ def translator(app, docname, doctree):
             else:
                 exception_type = _type_without_xref
             return exception_type
-
+
         extractedExceptions = []
         for pararaph_node in exception_fieldbody_node.traverse(nodes.paragraph):
             for exception_node in pararaph_node:

@@ -148,8 +138,8 @@ def translator(app, docname, doctree):
             if fieldtype == 'Raises':
                 if data['exceptions']:
                     data['exceptions'].extend(extract_exception_desc(fieldbody))
-                else:
-                    data['exceptions'] = extract_exception_desc(fieldbody)
+                else:
+                    data['exceptions'] = extract_exception_desc(fieldbody)

             if fieldtype == 'Returns':
                 returnvalue_ret = transform_node(content[0])

@@ -239,14 +229,14 @@ def translator(app, docname, doctree):
         signature_child = extract_sig_child_from_attribute_desc_node(node)
         content_child = extract_content_child_from_attribute_desc_node(node)
         curuid = signature_child.get('module', '') + '.' + signature_child.get('fullname', '')
-        addedData = {}
+        addedData = {}
         name = signature_child.children[0].astext()
-        if isinstance(signature_child, desc_signature) and any(isinstance(n, addnodes.desc_annotation) for n in signature_child):
-            signature_child_ids = signature_child.get('ids', [''])
-
+        if isinstance(signature_child, desc_signature) and any(isinstance(n, addnodes.desc_annotation) for n in signature_child):
+            signature_child_ids = signature_child.get('ids', [''])
+
         if len(curuid) > 0:
             parent = curuid[:curuid.rfind('.')]
-
+
         if curuid in attribute_map:
             # ensure the order of docstring attributes and real attributes is fixed
             if len(signature_child_ids) == 0:

@@ -254,7 +244,7 @@ def translator(app, docname, doctree):
                     ' ' + signature_child.astext())
             # concat the description of duplicated nodes
             else:
-                attribute_map[curuid]['syntax']['content'] = signature_child.astext()
+                attribute_map[curuid]['syntax']['content'] = signature_child.astext()
                     + ' ' + attribute_map[curuid]['syntax']['content']
         else:
             ancestor_class_content_node = find_ancestor_class_content_node(signature_child['class'], signature_child['module'], signature_child['ids'], class_nodes)

@@ -292,14 +282,14 @@ def translator(app, docname, doctree):
                 raise Exception('ids of node: ' + repr(signature_child) + ' is missing.')
                 # no ids and no duplicate or uid can not be generated.

-        # Currently only utilize summary to avoid code repetition,
-        # if we need to change other attribute generator logic,
+        # Currently only utilize summary to avoid code repetition,
+        # if we need to change other attribute generator logic,
         # better to get from extracted_content_data below too
-
+
         extracted_content_data = extract_content(content_child, ATTRIBUTE_TYPE, module_name)
         if not addedData:
             # If current attribute doesn't have correct signature child, fill in basic information
-            # TODO: append fullName here, currently when fallback to here,
+            # TODO: append fullName here, currently when fallback to here,
             # information like fullname, name of attribute comes from process_docstring
             addedData = {
                 'uid': curuid,

@@ -351,9 +341,9 @@ def translator(app, docname, doctree):
         if (first_child.astext().strip(" \n\r\t") == 'property'):
             return None # Don't generate signature for property
         elif (first_child.astext().strip(" \n\r\t") in annotation_to_skip):
-            # Don't include 'class' declaration for constructors,
+            # Don't include 'class' declaration for constructors,
             # don't include 'classmethod' front of signature (To keep behavior consistent)
-            included_child_start = 1
+            included_child_start = 1
             isClass = True
         for included_child in node.children[included_child_start:]:
             # Skip class name when write signature (To keep same behavior as before signature async support)

@@ -416,15 +406,15 @@ def translator(app, docname, doctree):
         # Because summary can contain code examples,
         # need to allow summary line to contain punctuation ony
         if len(content) > 0:
-            summary.append(content)
-
+            summary.append(content)
+
         if "desctype" in node.parent and node.parent["desctype"] == CLASS_TYPE:
             # Make sure class doesn't have 'exceptions' field.
             data.pop('exceptions', '')

         if summary:
             data['summary'] = '\n\n'.join(summary)
-
+
         return data


@@ -434,24 +424,24 @@ def translator(app, docname, doctree):
             if (desc_node['desctype'] in types_contain_attributes):
                 class_nodes.append(desc_node)
         return class_nodes
-
+
     class_nodes = extract_class_nodes_from_doctree(doctree)
     class_added_attributes = {}
     class_data = {}
-    for node in doctree.traverse(addnodes.desc):
+    for node in doctree.traverse(addnodes.desc):
         (uid, module_name, node_type) = _get_uid_and_type_from_desc(node)
         data = {}
         signature_child = node.children[node.first_child_matching_class(addnodes.desc_signature)]
         content_child = node.children[node.first_child_matching_class(addnodes.desc_content)]
-        if node_type
+        if node_type in attribute_types:
            attribute_sig_child = extract_sig_child_from_attribute_desc_node(node)

            if content_child.astext().startswith('alias of'):
                # Ignore alias attribute
-               # e.g. azure.cognitiveservices.speech.intent.IntentRecognizer.IntentsIte (alias of Iterable[Tuple[Union[str, azure.cognitiveservices.speech.intent.LanguageUnderstandingModel], str]])
+               # e.g. azure.cognitiveservices.speech.intent.IntentRecognizer.IntentsIte (alias of Iterable[Tuple[Union[str, azure.cognitiveservices.speech.intent.LanguageUnderstandingModel], str]])
                continue

-           if attribute_sig_child['class']:
+           if attribute_sig_child['class']:
                attribute_class = attribute_sig_child['module'] + '.' + attribute_sig_child['class']
                class_added_attributes.setdefault(attribute_class, OrderedDict())
                # TODO: Merge attribute_data if same uid

@@ -470,19 +460,19 @@ def translator(app, docname, doctree):
            else:
                raise Exception('Attribute doesn\'t have class information. Attribute_name: {0}'.format(attribute_sig_child['fullname']))
            continue
-
+
        data.update(extract_content(content_child, node_type, module_name))
        data['content'] = extract_signature(signature_child)

-       data['type'] =
+       data['type'] = translator_type_mapping(node_type) if node_type else 'unknown'
        if _is_property_node(signature_child):
            data['type'] = ATTRIBUTE_TYPE
-
+
        # Don't include empty data
        for key, val in data.copy().items():
            if not val:
                del data[key]
-
+
        if uid in app.env.docfx_info_field_data:
            # Sphinx autodoc already provides method signature, skip declaration in RST comments (py:class/py:method)
            sig_id = signature_child.get('ids', [''])[0].lower()

@@ -502,7 +492,7 @@ def translator(app, docname, doctree):
    for class_name, added_attributes in class_added_attributes.items():
        if not added_attributes:
            # `class_added_attributes` Maybe be in default value []
-           # Indicates that all doctree attribute desc nodes under this class
+           # Indicates that all doctree attribute desc nodes under this class
            # are skipped attributes/properties (like alias)
            continue

py2docfx/docfx_yaml/type_mapping.py
ADDED
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+#
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+"""
+Type mapping utilities for converting various documentation types to standardized forms.
+"""
+
+# Standard types
+PACKAGE = 'package'
+METHOD = 'method'
+FUNCTION = 'function'
+DATA = 'data'
+MODULE = 'module'
+CLASS = 'class'
+EXCEPTION = 'exception'
+ATTRIBUTE = 'attribute'
+PROPERTY = 'property'
+
+# Pydantic specific types
+PYDANTIC_MODEL = 'pydantic_model'
+PYDANTIC_FIELD = 'pydantic_field'
+PYDANTIC_SETTINGS = 'pydantic_settings'
+PYDANTIC_VALIDATOR = 'pydantic_validator'
+PYDANTIC_CONFIG = 'pydantic_config'
+
+# Translator-style constants (for compatibility)
+CLASS_TYPE = 'class'
+EXCEPTION_TYPE = 'exception'
+ATTRIBUTE_TYPE = 'attribute'
+PYDANTIC_MODEL_TYPE = "pydantic_model"
+PYDANTIC_SETTINGS_TYPE = "pydantic_settings"
+PYDANTIC_FIELD_TYPE = "pydantic_field"
+PYDANTIC_CONFIG_TYPE = "pydantic_config"
+
+# Type groupings for translator functionality
+types_contain_constructor = {
+    CLASS_TYPE,
+    PYDANTIC_MODEL_TYPE,
+    PYDANTIC_SETTINGS_TYPE,
+    EXCEPTION_TYPE,
+    PYDANTIC_CONFIG_TYPE,
+}
+
+types_contain_attributes = {
+    CLASS_TYPE,
+    PYDANTIC_MODEL_TYPE,
+    PYDANTIC_SETTINGS_TYPE,
+    EXCEPTION_TYPE,
+    PYDANTIC_CONFIG_TYPE,
+}
+
+attribute_types = {PYDANTIC_FIELD_TYPE, ATTRIBUTE_TYPE}
+
+
+def map_type_transformations(type_name):
+    """
+    Apply type transformations to convert various documentation types to standardized forms.
+    Used by process_doctree.py for initial type processing.
+
+    Args:
+        type_name (str): The original type name
+
+    Returns:
+        str: The transformed type name
+    """
+    # Type transformations
+    if type_name == EXCEPTION or type_name in {PYDANTIC_MODEL, PYDANTIC_SETTINGS, PYDANTIC_CONFIG}:
+        return CLASS
+    elif type_name == PROPERTY or type_name == PYDANTIC_FIELD:
+        return ATTRIBUTE
+    elif type_name == PYDANTIC_VALIDATOR:
+        return METHOD
+
+    # Return original type if no transformation needed
+    return type_name
+
+
+def translator_type_mapping(type_name):
+    """
+    Apply type mapping transformations for translator processing.
+    Used by translator.py for docstring processing.
+    Includes both original translator mappings and process_doctree transformations.
+
+    Args:
+        type_name (str): The original type name
+
+    Returns:
+        str: The mapped type name
+    """
+    # First apply the process_doctree style transformations
+    transformed_type = map_type_transformations(type_name)
+
+    # Then apply the original translator mappings
+    mapping = {
+        "staticmethod": "method",
+        "classmethod": "method",
+    }
+
+    return mapping[transformed_type] if transformed_type in mapping else transformed_type
py2docfx/venv/basevenv/Lib/site-packages/markupsafe/__init__.py
CHANGED
@@ -214,14 +214,14 @@ class Markup(str):
             if (end := value.find("-->", start)) == -1:
                 break

-            value = f"{value[:start]}{value[end + 3:]}"
+            value = f"{value[:start]}{value[end + 3 :]}"

         # remove tags using the same method
         while (start := value.find("<")) != -1:
             if (end := value.find(">", start)) == -1:
                 break

-            value = f"{value[:start]}{value[end + 1:]}"
+            value = f"{value[:start]}{value[end + 1 :]}"

         # collapse spaces
         value = " ".join(value.split())

@@ -388,6 +388,7 @@ def __getattr__(name: str) -> t.Any:
         "The '__version__' attribute is deprecated and will be removed in"
         " MarkupSafe 3.1. Use feature detection, or"
         ' `importlib.metadata.version("markupsafe")`, instead.',
+        DeprecationWarning,
         stacklevel=2,
     )
     return importlib.metadata.version("markupsafe")
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/default.py
CHANGED
@@ -172,7 +172,8 @@ class DefaultAzureCredential(ChainedTokenCredential):

         process_timeout = kwargs.pop("process_timeout", 10)
         require_envvar = kwargs.pop("require_envvar", False)
-
+        token_credentials_env = os.environ.get(EnvironmentVariables.AZURE_TOKEN_CREDENTIALS, "").strip().lower()
+        if require_envvar and not token_credentials_env:
             raise ValueError(
                 "AZURE_TOKEN_CREDENTIALS environment variable is required but is not set or is empty. "
                 "Set it to 'dev', 'prod', or a specific credential name."

@@ -274,18 +275,16 @@ class DefaultAzureCredential(ChainedTokenCredential):
                 ManagedIdentityCredential(
                     client_id=managed_identity_client_id,
                     _exclude_workload_identity_credential=exclude_workload_identity_credential,
+                    _enable_imds_probe=token_credentials_env != "managedidentitycredential",
                     **kwargs,
                 )
             )
         if not exclude_shared_token_cache_credential and SharedTokenCacheCredential.supported():
-
-
-
-
-
-                credentials.append(shared_cache)
-            except Exception as ex:  # pylint:disable=broad-except
-                _LOGGER.info("Shared token cache is unavailable: '%s'", ex)
+            # username and/or tenant_id are only required when the cache contains tokens for multiple identities
+            shared_cache = SharedTokenCacheCredential(
+                username=shared_cache_username, tenant_id=shared_cache_tenant_id, authority=authority, **kwargs
+            )
+            credentials.append(shared_cache)
         if not exclude_visual_studio_code_credential:
             credentials.append(VisualStudioCodeCredential(tenant_id=vscode_tenant_id))
         if not exclude_cli_credential:
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/imds.py
CHANGED
@@ -82,6 +82,10 @@ def _check_forbidden_response(ex: HttpResponseError) -> None:

 class ImdsCredential(MsalManagedIdentityClient):
     def __init__(self, **kwargs: Any) -> None:
+        # If set to True/False, _enable_imds_probe forces whether or not the credential
+        # probes for the IMDS endpoint before attempting to get a token. If None (the default),
+        # the credential probes only if it's part of a ChainedTokenCredential chain.
+        self._enable_imds_probe = kwargs.pop("_enable_imds_probe", None)
         super().__init__(retry_policy_class=ImdsRetryPolicy, **dict(PIPELINE_SETTINGS, **kwargs))
         self._config = kwargs


@@ -102,9 +106,9 @@ class ImdsCredential(MsalManagedIdentityClient):

     def _request_token(self, *scopes: str, **kwargs: Any) -> AccessTokenInfo:

-        if
-
-        # is available to avoid hanging for a long time if
+        do_probe = self._enable_imds_probe if self._enable_imds_probe is not None else within_credential_chain.get()
+        if do_probe and not self._endpoint_available:
+            # Probe to see if the IMDS endpoint is available to avoid hanging for a long time if it's not.
             try:
                 client = ManagedIdentityClient(_get_request, **dict(PIPELINE_SETTINGS, **self._config))
                 client.request_token(*scopes, connection_timeout=1, retry_total=0)
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/managed_identity.py
CHANGED
@@ -76,6 +76,7 @@ class ManagedIdentityCredential:
         user_identity_info = validate_identity_config(client_id, identity_config)
         self._credential: Optional[SupportsTokenInfo] = None
         exclude_workload_identity = kwargs.pop("_exclude_workload_identity_credential", False)
+        self._enable_imds_probe = kwargs.pop("_enable_imds_probe", None)
         managed_identity_type = None

         if os.environ.get(EnvironmentVariables.IDENTITY_ENDPOINT):

@@ -136,7 +137,12 @@ class ManagedIdentityCredential:
             managed_identity_type = "IMDS"
             from .imds import ImdsCredential

-            self._credential = ImdsCredential(
+            self._credential = ImdsCredential(
+                client_id=client_id,
+                identity_config=identity_config,
+                _enable_imds_probe=self._enable_imds_probe,
+                **kwargs,
+            )

         if managed_identity_type:
             log_msg = f"{self.__class__.__name__} will use {managed_identity_type}"
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_credentials/shared_cache.py
CHANGED
@@ -198,9 +198,9 @@ class _SharedTokenCacheCredential(SharedTokenCacheBase):
                 return token
             except Exception as e:  # pylint: disable=broad-except
                 if within_dac.get():
-                    raise CredentialUnavailableError(
+                    raise CredentialUnavailableError(
                         message=getattr(e, "message", str(e)), response=getattr(e, "response", None)
-                    )
+                    ) from e
                 raise

         raise CredentialUnavailableError(message=NO_TOKEN.format(account.get("username")))
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/interactive.py
CHANGED
@@ -203,7 +203,7 @@ class InteractiveCredential(MsalCredential, ABC):
             return token
         except Exception as ex:  # pylint:disable=broad-except
             if not (isinstance(ex, AuthenticationRequiredError) and allow_prompt):
-                _LOGGER.warning(
+                _LOGGER.warning(  # pylint: disable=do-not-log-raised-errors
                     "%s.%s failed: %s",
                     self.__class__.__name__,
                     base_method_name,

@@ -225,7 +225,7 @@ class InteractiveCredential(MsalCredential, ABC):
             # this may be the first authentication, or the user may have authenticated a different identity
             self._auth_record = _build_auth_record(result)
         except Exception as ex:
-            _LOGGER.warning(
+            _LOGGER.warning(  # pylint: disable=do-not-log-raised-errors
                 "%s.%s failed: %s",
                 self.__class__.__name__,
                 base_method_name,
py2docfx/venv/venv1/Lib/site-packages/azure/identity/_internal/msal_managed_identity_client.py
CHANGED
@@ -61,7 +61,7 @@ class MsalManagedIdentityClient(abc.ABC): # pylint:disable=client-accepts-api-v
         )
         error_desc = ""
         if result and "error" in result:
-            error_desc =
+            error_desc = f"Token request error: ({result['error']}) {result.get('error_description', '')}"
         error_message = self.get_unavailable_message(error_desc)
         raise CredentialUnavailableError(error_message)

py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/default.py
CHANGED
@@ -144,7 +144,8 @@ class DefaultAzureCredential(ChainedTokenCredential):

         process_timeout = kwargs.pop("process_timeout", 10)
         require_envvar = kwargs.pop("require_envvar", False)
-
+        token_credentials_env = os.environ.get(EnvironmentVariables.AZURE_TOKEN_CREDENTIALS, "").strip().lower()
+        if require_envvar and not token_credentials_env:
             raise ValueError(
                 "AZURE_TOKEN_CREDENTIALS environment variable is required but is not set or is empty. "
                 "Set it to 'dev', 'prod', or a specific credential name."

@@ -235,18 +236,16 @@ class DefaultAzureCredential(ChainedTokenCredential):
                 ManagedIdentityCredential(
                     client_id=managed_identity_client_id,
                     _exclude_workload_identity_credential=exclude_workload_identity_credential,
+                    _enable_imds_probe=token_credentials_env != "managedidentitycredential",
                     **kwargs,
                 )
             )
         if not exclude_shared_token_cache_credential and SharedTokenCacheCredential.supported():
-
-
-
-
-
-                credentials.append(shared_cache)
-            except Exception as ex:  # pylint:disable=broad-except
-                _LOGGER.info("Shared token cache is unavailable: '%s'", ex)
+            # username and/or tenant_id are only required when the cache contains tokens for multiple identities
+            shared_cache = SharedTokenCacheCredential(
+                username=shared_cache_username, tenant_id=shared_cache_tenant_id, authority=authority, **kwargs
+            )
+            credentials.append(shared_cache)
         if not exclude_visual_studio_code_credential:
             credentials.append(VisualStudioCodeCredential(tenant_id=vscode_tenant_id))
         if not exclude_cli_credential:
py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/imds.py
CHANGED
@@ -45,6 +45,10 @@ class ImdsCredential(AsyncContextManager, GetTokenMixin):
     def __init__(self, **kwargs: Any) -> None:
         super().__init__()

+        # If set to True/False, _enable_imds_probe forces whether or not the credential
+        # probes for the IMDS endpoint before attempting to get a token. If None (the default),
+        # the credential probes only if it's part of a ChainedTokenCredential chain.
+        self._enable_imds_probe = kwargs.pop("_enable_imds_probe", None)
         kwargs["retry_policy_class"] = AsyncImdsRetryPolicy
         self._client = AsyncManagedIdentityClient(_get_request, **dict(PIPELINE_SETTINGS, **kwargs))
         if EnvironmentVariables.AZURE_POD_IDENTITY_AUTHORITY_HOST in os.environ:

@@ -65,9 +69,9 @@ class ImdsCredential(AsyncContextManager, GetTokenMixin):

     async def _request_token(self, *scopes: str, **kwargs: Any) -> AccessTokenInfo:

-        if
-
-        # is available to avoid hanging for a long time if
+        do_probe = self._enable_imds_probe if self._enable_imds_probe is not None else within_credential_chain.get()
+        if do_probe and not self._endpoint_available:
+            # Probe to see if the IMDS endpoint is available to avoid hanging for a long time if it's not.
             try:
                 await self._client.request_token(*scopes, connection_timeout=1, retry_total=0)
                 self._endpoint_available = True
py2docfx/venv/venv1/Lib/site-packages/azure/identity/aio/_credentials/managed_identity.py
CHANGED
@@ -49,6 +49,7 @@ class ManagedIdentityCredential(AsyncContextManager):
         user_identity_info = validate_identity_config(client_id, identity_config)
         self._credential: Optional[AsyncSupportsTokenInfo] = None
         exclude_workload_identity = kwargs.pop("_exclude_workload_identity_credential", False)
+        self._enable_imds_probe = kwargs.pop("_enable_imds_probe", None)
         managed_identity_type = None
         if os.environ.get(EnvironmentVariables.IDENTITY_ENDPOINT):
             if os.environ.get(EnvironmentVariables.IDENTITY_HEADER):

@@ -108,7 +109,12 @@ class ManagedIdentityCredential(AsyncContextManager):
             managed_identity_type = "IMDS"
             from .imds import ImdsCredential

-            self._credential = ImdsCredential(
+            self._credential = ImdsCredential(
+                client_id=client_id,
+                identity_config=identity_config,
+                _enable_imds_probe=self._enable_imds_probe,
+                **kwargs,
+            )

         if managed_identity_type:
             log_msg = f"{self.__class__.__name__} will use {managed_identity_type}"
|