py2docfx 0.1.18.dev2146624__py3-none-any.whl → 0.1.18rc2160234__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. py2docfx/docfx_yaml/build_finished.py +13 -3
  2. py2docfx/venv/basevenv/Lib/site-packages/certifi/__init__.py +1 -1
  3. py2docfx/venv/venv1/Lib/site-packages/azure/core/_version.py +1 -1
  4. py2docfx/venv/venv1/Lib/site-packages/azure/core/credentials.py +6 -2
  5. py2docfx/venv/venv1/Lib/site-packages/azure/core/credentials_async.py +10 -2
  6. py2docfx/venv/venv1/Lib/site-packages/azure/core/paging.py +13 -0
  7. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/__init__.py +5 -5
  8. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_authentication.py +11 -3
  9. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_authentication_async.py +1 -3
  10. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_distributed_tracing.py +19 -2
  11. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_retry.py +1 -0
  12. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/policies/_universal.py +21 -4
  13. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_aiohttp.py +17 -3
  14. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_base.py +2 -0
  15. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_asyncio.py +6 -6
  16. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_basic.py +7 -5
  17. py2docfx/venv/venv1/Lib/site-packages/azure/core/pipeline/transport/_requests_trio.py +6 -6
  18. py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/_async_poller.py +57 -2
  19. py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/_poller.py +74 -2
  20. py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/async_base_polling.py +3 -3
  21. py2docfx/venv/venv1/Lib/site-packages/azure/core/polling/base_polling.py +24 -7
  22. py2docfx/venv/venv1/Lib/site-packages/azure/core/serialization.py +156 -2
  23. py2docfx/venv/venv1/Lib/site-packages/azure/core/tracing/opentelemetry.py +24 -2
  24. py2docfx/venv/venv1/Lib/site-packages/certifi/__init__.py +1 -1
  25. py2docfx/venv/venv1/Lib/site-packages/cryptography/__about__.py +1 -1
  26. py2docfx/venv/venv1/Lib/site-packages/typing_extensions.py +49 -43
  27. {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18rc2160234.dist-info}/METADATA +1 -1
  28. {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18rc2160234.dist-info}/RECORD +30 -30
  29. {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18rc2160234.dist-info}/WHEEL +0 -0
  30. {py2docfx-0.1.18.dev2146624.dist-info → py2docfx-0.1.18rc2160234.dist-info}/top_level.txt +0 -0
@@ -51,24 +51,57 @@ class PollingMethod(Generic[PollingReturnType_co]):
         raise NotImplementedError("This method needs to be implemented")

     def run(self) -> None:
+        """Run the polling method.
+        This method should be implemented to perform the actual polling logic.
+
+        :return: None
+        :rtype: None
+        """
         raise NotImplementedError("This method needs to be implemented")

     def status(self) -> str:
+        """Return the current status of the polling operation.
+
+        :rtype: str
+        :return: The current status of the polling operation.
+        """
         raise NotImplementedError("This method needs to be implemented")

     def finished(self) -> bool:
+        """Check if the polling operation is finished.
+
+        :rtype: bool
+        :return: True if the polling operation is finished, False otherwise.
+        """
         raise NotImplementedError("This method needs to be implemented")

     def resource(self) -> PollingReturnType_co:
+        """Return the resource built by the polling operation.
+
+        :rtype: any
+        :return: The resource built by the polling operation.
+        """
         raise NotImplementedError("This method needs to be implemented")

     def get_continuation_token(self) -> str:
+        """Return a continuation token that allows to restart the poller later.
+
+        :rtype: str
+        :return: An opaque continuation token.
+        """
         raise TypeError("Polling method '{}' doesn't support get_continuation_token".format(self.__class__.__name__))

     @classmethod
     def from_continuation_token(
         cls, continuation_token: str, **kwargs: Any
     ) -> Tuple[Any, Any, DeserializationCallbackType]:
+        """Recreate the poller from a continuation token.
+
+        :param continuation_token: The continuation token to recreate the poller from.
+        :type continuation_token: str
+        :rtype: Tuple[Any, Any, DeserializationCallbackType]
+        :return: A tuple containing the client, initial response, and deserialization callback.
+        """
         raise TypeError("Polling method '{}' doesn't support from_continuation_token".format(cls.__name__))


@@ -85,6 +118,17 @@ class _SansIONoPolling(Generic[PollingReturnType_co]):
         initial_response: Any,
         deserialization_callback: Callable[[Any], PollingReturnType_co],
     ) -> None:
+        """Initialize the poller with the initial response and deserialization callback.
+
+        :param _: The client, not used in this polling method.
+        :type _: Any
+        :param initial_response: The initial response from the long-running operation.
+        :type initial_response: Any
+        :param deserialization_callback: A callback that takes a response and returns a deserialized object.
+        :type deserialization_callback: Callable[[Any], PollingReturnType_co]
+        :return: None
+        :rtype: None
+        """
         self._initial_response = initial_response
         self._deserialization_callback = deserialization_callback

@@ -105,9 +149,19 @@ class _SansIONoPolling(Generic[PollingReturnType_co]):
         return True

     def resource(self) -> PollingReturnType_co:
+        """Return the built resource.
+
+        :rtype: any
+        :return: The built resource.
+        """
         return self._deserialization_callback(self._initial_response)

     def get_continuation_token(self) -> str:
+        """Return a continuation token that allows to restart the poller later.
+
+        :rtype: str
+        :return: An opaque continuation token
+        """
         import pickle

         return base64.b64encode(pickle.dumps(self._initial_response)).decode("ascii")
@@ -116,6 +170,14 @@ class _SansIONoPolling(Generic[PollingReturnType_co]):
     def from_continuation_token(
         cls, continuation_token: str, **kwargs: Any
     ) -> Tuple[Any, Any, Callable[[Any], PollingReturnType_co]]:
+        """Recreate the poller from a continuation token.
+
+        :param continuation_token: The continuation token to recreate the poller from.
+        :type continuation_token: str
+        :rtype: Tuple[Any, Any, Callable[[Any], PollingReturnType_co]]
+        :return: A tuple containing the client, initial response, and deserialization callback.
+        :raises ValueError: If 'deserialization_callback' is not provided in kwargs.
+        """
         try:
             deserialization_callback = kwargs["deserialization_callback"]
         except KeyError:
@@ -190,8 +252,8 @@ class LROPoller(Generic[PollingReturnType_co]):
             if not error.continuation_token:
                 try:
                     error.continuation_token = self.continuation_token()
-                except Exception as err:  # pylint: disable=broad-except
-                    _LOGGER.warning("Unable to retrieve continuation token: %s", err)
+                except Exception:  # pylint: disable=broad-except
+                    _LOGGER.warning("Unable to retrieve continuation token.")
                     error.continuation_token = None

             self._exception = error
@@ -227,6 +289,16 @@ class LROPoller(Generic[PollingReturnType_co]):
     def from_continuation_token(
         cls, polling_method: PollingMethod[PollingReturnType_co], continuation_token: str, **kwargs: Any
     ) -> "LROPoller[PollingReturnType_co]":
+        """Create a poller from a continuation token.
+
+        :param polling_method: The polling strategy to adopt
+        :type polling_method: ~azure.core.polling.PollingMethod
+        :param continuation_token: An opaque continuation token
+        :type continuation_token: str
+        :return: An instance of LROPoller
+        :rtype: ~azure.core.polling.LROPoller
+        :raises ~azure.core.exceptions.HttpResponseError: If the continuation token is invalid.
+        """
         (
             client,
             initial_response,
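
The docstring added above documents rebuilding a poller from an opaque continuation token. A minimal usage sketch, assuming a hypothetical `client` object and `begin_create_thing` operation (neither comes from this diff) together with the `LROBasePolling` strategy touched later in this changeset:

    from azure.core.polling import LROPoller
    from azure.core.polling.base_polling import LROBasePolling

    # Start a long-running operation and capture an opaque token (hypothetical operation name).
    poller = client.begin_create_thing(name="example")
    token = poller.continuation_token()

    # Later, possibly in another process, rebuild the poller from the token and block for the result.
    resumed: LROPoller = LROPoller.from_continuation_token(
        polling_method=LROBasePolling(),
        continuation_token=token,
        client=client,  # assumption: the same pipeline client used to start the operation
        deserialization_callback=lambda pipeline_response: pipeline_response,  # assumption: identity callback
    )
    result = resumed.result()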
@@ -109,9 +109,9 @@ class AsyncLROBasePolling(
         """Poll status of operation so long as operation is incomplete and
         we have an endpoint to query.

-        :raises: OperationFailed if operation status 'Failed' or 'Canceled'.
-        :raises: BadStatus if response status invalid.
-        :raises: BadResponse if response invalid.
+        :raises ~azure.core.polling.base_polling.OperationFailed: If operation status 'Failed' or 'Canceled'.
+        :raises ~azure.core.polling.base_polling.BadStatus: If response status invalid.
+        :raises ~azure.core.polling.base_polling.BadResponse: If response invalid.
         """
         if not self.finished():
             await self.update_status()
@@ -133,7 +133,7 @@ def _as_json(response: AllHttpResponseType) -> Dict[str, Any]:
     :type response: any
     :return: The content of this response as dict.
     :rtype: dict
-    :raises: DecodeError if response body contains invalid json data.
+    :raises DecodeError: If response body contains invalid json data.
     """
     try:
         return json.loads(response.text())
@@ -148,7 +148,7 @@ def _raise_if_bad_http_status_and_method(response: AllHttpResponseType) -> None:

     :param response: The response object.
     :type response: any
-    :raises: BadStatus if invalid status.
+    :raises ~azure.core.polling.base_polling.BadStatus: If invalid status.
     """
     code = response.status_code
     if code in {200, 201, 202, 204}:
@@ -382,7 +382,7 @@ class OperationResourcePolling(LongRunningOperation[HttpRequestTypeVar, AllHttpR
         :type pipeline_response: ~azure.core.pipeline.PipelineResponse
         :return: The status string.
         :rtype: str
-        :raises: BadResponse if response has no body, or body does not contain status.
+        :raises ~azure.core.polling.base_polling.BadResponse: if response has no body, or body does not contain status.
         """
         response = pipeline_response.http_response
         if _is_empty(response):
@@ -617,7 +617,7 @@ class _SansIOLROBasePolling(
         :type initial_response: ~azure.core.pipeline.PipelineResponse
         :param deserialization_callback: A callback function to deserialize the final response.
         :type deserialization_callback: callable
-        :raises: HttpResponseError if initial status is incorrect LRO state
+        :raises ~azure.core.HttpResponseError: If initial status is incorrect LRO state
         """
         self._client = client
         self._pipeline_response = (  # pylint: disable=attribute-defined-outside-init
@@ -646,6 +646,14 @@ class _SansIOLROBasePolling(
             raise HttpResponseError(response=initial_response.http_response, error=err) from err

     def get_continuation_token(self) -> str:
+        """Get a continuation token that can be used to recreate this poller.
+        The continuation token is a base64 encoded string that contains the initial response
+        serialized with pickle.
+
+        :rtype: str
+        :return: The continuation token.
+        :raises ValueError: If the initial response is not set.
+        """
         import pickle

         return base64.b64encode(pickle.dumps(self._initial_response)).decode("ascii")
@@ -654,6 +662,15 @@ class _SansIOLROBasePolling(
     def from_continuation_token(
         cls, continuation_token: str, **kwargs: Any
     ) -> Tuple[Any, Any, Callable[[Any], PollingReturnType_co]]:
+        """Recreate the poller from a continuation token.
+
+        :param continuation_token: The continuation token to recreate the poller.
+        :type continuation_token: str
+        :return: A tuple containing the client, the initial response, and the deserialization callback.
+        :rtype: tuple[~azure.core.PipelineClient, ~azure.core.pipeline.PipelineResponse, callable]
+        :raises ValueError: If the continuation token is invalid or if 'client' or
+            'deserialization_callback' are not provided.
+        """
         try:
             client = kwargs["client"]
         except KeyError:
@@ -806,9 +823,9 @@ class LROBasePolling(
         """Poll status of operation so long as operation is incomplete and
         we have an endpoint to query.

-        :raises: OperationFailed if operation status 'Failed' or 'Canceled'.
-        :raises: BadStatus if response status invalid.
-        :raises: BadResponse if response invalid.
+        :raises ~azure.core.polling.base_polling.OperationFailed: If operation status 'Failed' or 'Canceled'.
+        :raises ~azure.core.polling.base_polling.BadStatus: If response status invalid.
+        :raises ~azure.core.polling.base_polling.BadResponse: If response invalid.
         """
         if not self.finished():
             self.update_status()
@@ -6,12 +6,12 @@
 # --------------------------------------------------------------------------
 import base64
 from json import JSONEncoder
-from typing import Union, cast, Any
+from typing import Dict, List, Optional, Union, cast, Any
 from datetime import datetime, date, time, timedelta
 from datetime import timezone


-__all__ = ["NULL", "AzureJSONEncoder"]
+__all__ = ["NULL", "AzureJSONEncoder", "is_generated_model", "as_attribute_dict", "attribute_list"]
 TZ_UTC = timezone.utc


@@ -116,6 +116,12 @@ class AzureJSONEncoder(JSONEncoder):
     """A JSON encoder that's capable of serializing datetime objects and bytes."""

     def default(self, o: Any) -> Any:
+        """Override the default method to handle datetime and bytes serialization.
+        :param o: The object to serialize.
+        :type o: any
+        :return: A JSON-serializable representation of the object.
+        :rtype: any
+        """
         if isinstance(o, (bytes, bytearray)):
             return base64.b64encode(o).decode()
         try:
@@ -123,3 +129,151 @@ class AzureJSONEncoder(JSONEncoder):
         except AttributeError:
             pass
         return super(AzureJSONEncoder, self).default(o)
+
+
+def is_generated_model(obj: Any) -> bool:
+    """Check if the object is a generated SDK model.
+
+    :param obj: The object to check.
+    :type obj: any
+    :return: True if the object is a generated SDK model, False otherwise.
+    :rtype: bool
+    """
+    return bool(getattr(obj, "_is_model", False) or hasattr(obj, "_attribute_map"))
+
+
+def _is_readonly(p: Any) -> bool:
+    """Check if an attribute is readonly.
+
+    :param any p: The property to check.
+    :return: True if the property is readonly, False otherwise.
+    :rtype: bool
+    """
+    try:
+        return p._visibility == ["read"]  # pylint: disable=protected-access
+    except AttributeError:
+        return False
+
+
+def _as_attribute_dict_value(v: Any, *, exclude_readonly: bool = False) -> Any:
+    if v is None or isinstance(v, _Null):
+        return None
+    if isinstance(v, (list, tuple, set)):
+        return type(v)(_as_attribute_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
+    if isinstance(v, dict):
+        return {dk: _as_attribute_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
+    return as_attribute_dict(v, exclude_readonly=exclude_readonly) if is_generated_model(v) else v
+
+
+def _get_flattened_attribute(obj: Any) -> Optional[str]:
+    """Get the name of the flattened attribute in a generated TypeSpec model if one exists.
+
+    :param any obj: The object to check.
+    :return: The name of the flattened attribute if it exists, otherwise None.
+    :rtype: Optional[str]
+    """
+    flattened_items = None
+    try:
+        flattened_items = getattr(obj, next(a for a in dir(obj) if "__flattened_items" in a), None)
+    except StopIteration:
+        return None
+
+    if flattened_items is None:
+        return None
+
+    for k, v in obj._attr_to_rest_field.items():  # pylint: disable=protected-access
+        try:
+            if set(v._class_type._attr_to_rest_field.keys()).intersection(  # pylint: disable=protected-access
+                set(flattened_items)
+            ):
+                return k
+        except AttributeError:
+            # if the attribute does not have _class_type, it is not a typespec generated model
+            continue
+    return None
+
+
+def attribute_list(obj: Any) -> List[str]:
+    """Get a list of attribute names for a generated SDK model.
+
+    :param obj: The object to get attributes from.
+    :type obj: any
+    :return: A list of attribute names.
+    :rtype: List[str]
+    """
+    if not is_generated_model(obj):
+        raise TypeError("Object is not a generated SDK model.")
+    if hasattr(obj, "_attribute_map"):
+        # msrest model
+        return list(obj._attribute_map.keys())  # pylint: disable=protected-access
+    flattened_attribute = _get_flattened_attribute(obj)
+    retval: List[str] = []
+    for attr_name, rest_field in obj._attr_to_rest_field.items():  # pylint: disable=protected-access
+        if flattened_attribute == attr_name:
+            retval.extend(attribute_list(rest_field._class_type))  # pylint: disable=protected-access
+        else:
+            retval.append(attr_name)
+    return retval
+
+
+def as_attribute_dict(obj: Any, *, exclude_readonly: bool = False) -> Dict[str, Any]:
+    """Convert an object to a dictionary of its attributes.
+
+    Made solely for backcompatibility with the legacy `.as_dict()` on msrest models.
+
+    .. deprecated::1.35.0
+        This function is added for backcompat purposes only.
+
+    :param any obj: The object to convert to a dictionary
+    :keyword bool exclude_readonly: Whether to exclude readonly properties
+    :return: A dictionary containing the object's attributes
+    :rtype: dict[str, any]
+    :raises TypeError: If the object is not a generated model instance
+    """
+    if not is_generated_model(obj):
+        raise TypeError("Object must be a generated model instance.")
+    if hasattr(obj, "_attribute_map"):
+        # msrest generated model
+        return obj.as_dict(keep_readonly=not exclude_readonly)
+    try:
+        # now we're a typespec generated model
+        result = {}
+        readonly_props = set()
+
+        # create a reverse mapping from rest field name to attribute name
+        rest_to_attr = {}
+        flattened_attribute = _get_flattened_attribute(obj)
+        for attr_name, rest_field in obj._attr_to_rest_field.items():  # pylint: disable=protected-access
+
+            if exclude_readonly and _is_readonly(rest_field):
+                # if we're excluding readonly properties, we need to track them
+                readonly_props.add(rest_field._rest_name)  # pylint: disable=protected-access
+            if flattened_attribute == attr_name:
+                for fk, fv in rest_field._class_type._attr_to_rest_field.items():  # pylint: disable=protected-access
+                    rest_to_attr[fv._rest_name] = fk  # pylint: disable=protected-access
+            else:
+                rest_to_attr[rest_field._rest_name] = attr_name  # pylint: disable=protected-access
+        for k, v in obj.items():
+            if exclude_readonly and k in readonly_props:  # pyright: ignore
+                continue
+            if k == flattened_attribute:
+                for fk, fv in v.items():
+                    result[rest_to_attr.get(fk, fk)] = _as_attribute_dict_value(fv, exclude_readonly=exclude_readonly)
+            else:
+                is_multipart_file_input = False
+                try:
+                    is_multipart_file_input = next(  # pylint: disable=protected-access
+                        rf
+                        for rf in obj._attr_to_rest_field.values()  # pylint: disable=protected-access
+                        if rf._rest_name == k  # pylint: disable=protected-access
+                    )._is_multipart_file_input
+                except StopIteration:
+                    pass
+
+                result[rest_to_attr.get(k, k)] = (
+                    v if is_multipart_file_input else _as_attribute_dict_value(v, exclude_readonly=exclude_readonly)
+                )
+        return result
+    except AttributeError as exc:
+        # not a typespec generated model
+        raise TypeError("Object must be a generated model instance.") from exc
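
The new `is_generated_model`, `attribute_list`, and `as_attribute_dict` helpers are backcompat shims for the legacy msrest `.as_dict()` behavior. A short sketch of how they compose, assuming `model` is some TypeSpec- or msrest-generated model instance (the variable name is illustrative, not from this diff):

    from azure.core.serialization import as_attribute_dict, attribute_list, is_generated_model

    if is_generated_model(model):
        names = attribute_list(model)  # attribute names, with flattened properties expanded
        data = as_attribute_dict(model, exclude_readonly=True)  # plain dict keyed by attribute names
    else:
        raise TypeError("as_attribute_dict only accepts generated SDK models")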
@@ -5,7 +5,7 @@
 from __future__ import annotations
 from contextlib import contextmanager
 from contextvars import Token
-from typing import Optional, Dict, Sequence, cast, Callable, Iterator, TYPE_CHECKING
+from typing import Any, Optional, Dict, Sequence, cast, Callable, Iterator, TYPE_CHECKING

 from opentelemetry import context as otel_context_module, trace
 from opentelemetry.trace import (
@@ -81,6 +81,8 @@ class OpenTelemetryTracer:
         kind: SpanKind = _SpanKind.INTERNAL,
         attributes: Optional[Attributes] = None,
         links: Optional[Sequence[Link]] = None,
+        start_time: Optional[int] = None,
+        context: Optional[Dict[str, Any]] = None,
     ) -> Span:
         """Starts a span without setting it as the current span in the context.

@@ -92,17 +94,28 @@ class OpenTelemetryTracer:
         :paramtype attributes: Mapping[str, AttributeValue]
         :keyword links: Links to add to the span.
         :paramtype links: list[~azure.core.tracing.Link]
+        :keyword start_time: The start time of the span in nanoseconds since the epoch.
+        :paramtype start_time: Optional[int]
+        :keyword context: A dictionary of context values corresponding to the parent span. If not provided,
+            the current global context will be used.
+        :paramtype context: Optional[Dict[str, any]]
         :return: The span that was started
         :rtype: ~opentelemetry.trace.Span
         """
         otel_kind = _KIND_MAPPINGS.get(kind, OpenTelemetrySpanKind.INTERNAL)
         otel_links = self._parse_links(links)

+        otel_context = None
+        if context:
+            otel_context = extract(context)
+
         otel_span = self._tracer.start_span(
             name,
+            context=otel_context,
             kind=otel_kind,
             attributes=attributes,
             links=otel_links,
+            start_time=start_time,
             record_exception=False,
         )

@@ -116,6 +129,8 @@ class OpenTelemetryTracer:
         kind: SpanKind = _SpanKind.INTERNAL,
         attributes: Optional[Attributes] = None,
         links: Optional[Sequence[Link]] = None,
+        start_time: Optional[int] = None,
+        context: Optional[Dict[str, Any]] = None,
         end_on_exit: bool = True,
     ) -> Iterator[Span]:
         """Context manager that starts a span and sets it as the current span in the context.
@@ -134,12 +149,19 @@ class OpenTelemetryTracer:
         :paramtype attributes: Optional[Attributes]
         :keyword links: Links to add to the span.
         :paramtype links: Optional[Sequence[Link]]
+        :keyword start_time: The start time of the span in nanoseconds since the epoch.
+        :paramtype start_time: Optional[int]
+        :keyword context: A dictionary of context values corresponding to the parent span. If not provided,
+            the current global context will be used.
+        :paramtype context: Optional[Dict[str, any]]
         :keyword end_on_exit: Whether to end the span when exiting the context manager. Defaults to True.
         :paramtype end_on_exit: bool
         :return: The span that was started
         :rtype: Iterator[~opentelemetry.trace.Span]
         """
-        span = self.start_span(name, kind=kind, attributes=attributes, links=links)
+        span = self.start_span(
+            name, kind=kind, attributes=attributes, links=links, start_time=start_time, context=context
+        )
         with trace.use_span(  # pylint: disable=not-context-manager
             span, record_exception=False, end_on_exit=end_on_exit
         ) as span:
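
Together these hunks let a caller parent a span on propagated trace context and backdate its start. A sketch under stated assumptions: `OpenTelemetryTracer()` accepts default construction, and the carrier dict holds whatever W3C `traceparent` header arrived with the request (the header value below is illustrative):

    from azure.core.tracing.opentelemetry import OpenTelemetryTracer

    tracer = OpenTelemetryTracer()  # assumption: default construction

    # Incoming W3C trace-context headers from an upstream caller (illustrative value).
    carrier = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}

    # start_span() extracts the parent context from the carrier and accepts an explicit
    # start time in nanoseconds since the epoch, per the keywords added above.
    span = tracer.start_span(
        "process-request",
        context=carrier,
        start_time=1_700_000_000_000_000_000,
    )
    try:
        span.set_attribute("messaging.operation", "process")
    finally:
        span.end()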
@@ -1,4 +1,4 @@
 from .core import contents, where

 __all__ = ["contents", "where"]
-__version__ = "2025.06.15"
+__version__ = "2025.07.09"
@@ -10,7 +10,7 @@ __all__ = [
     "__version__",
 ]

-__version__ = "45.0.4"
+__version__ = "45.0.5"


 __author__ = "The Python Cryptographic Authority and individual contributors"
@@ -221,7 +221,55 @@ else:

     ClassVar = typing.ClassVar

+    # Vendored from cpython typing._SpecialFrom
+    # Having a separate class means that instances will not be rejected by
+    # typing._type_check.
+    class _SpecialForm(typing._Final, _root=True):
+        __slots__ = ('_name', '__doc__', '_getitem')
+
+        def __init__(self, getitem):
+            self._getitem = getitem
+            self._name = getitem.__name__
+            self.__doc__ = getitem.__doc__
+
+        def __getattr__(self, item):
+            if item in {'__name__', '__qualname__'}:
+                return self._name
+
+            raise AttributeError(item)
+
+        def __mro_entries__(self, bases):
+            raise TypeError(f"Cannot subclass {self!r}")
+
+        def __repr__(self):
+            return f'typing_extensions.{self._name}'
+
+        def __reduce__(self):
+            return self._name
+
+        def __call__(self, *args, **kwds):
+            raise TypeError(f"Cannot instantiate {self!r}")
+
+        def __or__(self, other):
+            return typing.Union[self, other]
+
+        def __ror__(self, other):
+            return typing.Union[other, self]
+
+        def __instancecheck__(self, obj):
+            raise TypeError(f"{self} cannot be used with isinstance()")
+
+        def __subclasscheck__(self, cls):
+            raise TypeError(f"{self} cannot be used with issubclass()")
+
+        @typing._tp_cache
+        def __getitem__(self, parameters):
+            return self._getitem(self, parameters)
+

+    # Note that inheriting from this class means that the object will be
+    # rejected by typing._type_check, so do not use it if the special form
+    # is arguably valid as a type by itself.
     class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
         def __repr__(self):
             return 'typing_extensions.' + self._name
@@ -1223,7 +1271,7 @@ else:
         td.__orig_bases__ = (TypedDict,)
         return td

-    class _TypedDictSpecialForm(_ExtensionsSpecialForm, _root=True):
+    class _TypedDictSpecialForm(_SpecialForm, _root=True):
         def __call__(
             self,
             typename,
@@ -2201,48 +2249,6 @@ else:
             return typing._GenericAlias(self, (item,))


-    # Vendored from cpython typing._SpecialFrom
-    class _SpecialForm(typing._Final, _root=True):
-        __slots__ = ('_name', '__doc__', '_getitem')
-
-        def __init__(self, getitem):
-            self._getitem = getitem
-            self._name = getitem.__name__
-            self.__doc__ = getitem.__doc__
-
-        def __getattr__(self, item):
-            if item in {'__name__', '__qualname__'}:
-                return self._name
-
-            raise AttributeError(item)
-
-        def __mro_entries__(self, bases):
-            raise TypeError(f"Cannot subclass {self!r}")
-
-        def __repr__(self):
-            return f'typing_extensions.{self._name}'
-
-        def __reduce__(self):
-            return self._name
-
-        def __call__(self, *args, **kwds):
-            raise TypeError(f"Cannot instantiate {self!r}")
-
-        def __or__(self, other):
-            return typing.Union[self, other]
-
-        def __ror__(self, other):
-            return typing.Union[other, self]
-
-        def __instancecheck__(self, obj):
-            raise TypeError(f"{self} cannot be used with isinstance()")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError(f"{self} cannot be used with issubclass()")
-
-        @typing._tp_cache
-        def __getitem__(self, parameters):
-            return self._getitem(self, parameters)


 if hasattr(typing, "LiteralString"):  # 3.11+
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: py2docfx
-Version: 0.1.18.dev2146624
+Version: 0.1.18rc2160234
 Summary: A package built based on Sphinx which download source code package and generate yaml files supported by docfx.
 Author: Microsoft Corporation
 License: MIT License