@autorest/python 6.17.1 → 6.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -28,6 +28,20 @@ KEY_TYPE = "Key"
 _LOGGER = logging.getLogger(__name__)
 
 
+def get_item_type(yaml_data: Dict[str, Any], item_name: str, enable_exception: bool = True) -> Optional[Dict[str, Any]]:
+    try:
+        return next(p["type"] for p in yaml_data.get("properties", []) if p["wireName"] == item_name)
+    except StopIteration:
+        pass
+    for parent in yaml_data.get("parents", []):
+        result = get_item_type(parent, item_name, False)
+        if result:
+            return result
+    if enable_exception:
+        raise StopIteration(f"Could not find item type {item_name} from type {yaml_data['name']}")
+    return None
+
+
 def is_body(yaml_data: Dict[str, Any]) -> bool:
     """Return true if passed in parameter is a body param"""
     return yaml_data["protocol"]["http"]["in"] == "body"
@@ -544,9 +558,7 @@ class M4Reformatter(YamlUpdatePluginAutorest): # pylint: disable=too-many-publi
         if self.version_tolerant:
             # if we're in version tolerant, hide the paging model
             returned_response_object["type"]["internal"] = True
-        operation["itemType"] = next(
-            p["type"] for p in returned_response_object["type"]["properties"] if p["wireName"] == operation["itemName"]
-        )
+        operation["itemType"] = get_item_type(returned_response_object["type"], operation["itemName"])
         if yaml_data["language"]["default"]["paging"].get("nextLinkOperation"):
             operation["nextOperation"] = self.update_operation(
                 group_name=group_name,
@@ -30,6 +30,7 @@ from .primitive_types import (
     UnixTimeType,
     SdkCoreType,
     DecimalType,
+    MultiPartFileType,
 )
 from .enum_type import EnumType, EnumValue
 from .base import BaseType
@@ -149,6 +150,7 @@ TYPE_TO_OBJECT = {
     "unixtime": UnixTimeType,
     "credential": StringType,
     "sdkcore": SdkCoreType,
+    "multipartfile": MultiPartFileType,
 }
 _LOGGER = logging.getLogger(__name__)
 
@@ -55,7 +55,7 @@ class BaseType(BaseModel, ABC): # pylint: disable=too-many-public-methods
         return self.imports(**kwargs)
 
     def imports_for_sample(self) -> FileImport:
-        return self.imports()
+        return FileImport(self.code_model)
 
     @staticmethod
     def serialize_sample_value(value: Any) -> str:
@@ -7,7 +7,7 @@ from typing import List, Dict, Any, Set, Union, Literal
 
 from .base import BaseType
 from .enum_type import EnumType
-from .model_type import ModelType
+from .model_type import ModelType, UsageFlags
 from .combined_type import CombinedType
 from .client import Client
 from .request_builder import RequestBuilder, OverloadedRequestBuilder
@@ -162,9 +162,7 @@ class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-in
         """All of the model types in this class"""
         if not self._model_types:
            self._model_types = [
-                t
-                for t in self.types_map.values()
-                if isinstance(t, ModelType) and not (self.options["models_mode"] == "dpg" and t.page_result_model)
+                t for t in self.types_map.values() if isinstance(t, ModelType) and t.usage != UsageFlags.Default.value
            ]
         return self._model_types
 
@@ -84,9 +84,8 @@ class ModelType( # pylint: disable=abstract-method
         self._got_polymorphic_subtypes = False
         self.internal: bool = self.yaml_data.get("internal", False)
         self.snake_case_name: str = self.yaml_data["snakeCaseName"]
-        self.page_result_model: bool = self.yaml_data.get("pageResultModel", False)
         self.cross_language_definition_id: Optional[str] = self.yaml_data.get("crossLanguageDefinitionId")
-        self.usage: int = self.yaml_data.get("usage", 0)
+        self.usage: int = self.yaml_data.get("usage", UsageFlags.Input.value | UsageFlags.Output.value)
 
     @property
     def is_usage_output(self) -> bool:
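
Model filtering now keys off usage flags instead of the removed `page_result_model` attribute: `usage` defaults to `Input | Output`, and `CodeModel.model_types` drops anything still marked `Default`, which is how hidden paging models are excluded from generated models. A rough sketch of the idea, assuming a flag-style `UsageFlags` enum with `Default`/`Input`/`Output` members; the concrete values below are illustrative and not taken from this diff:

```python
from enum import IntFlag

class UsageFlags(IntFlag):  # stand-in; only the member names appear in this diff
    Default = 1
    Input = 2
    Output = 4

# ModelType.usage falls back to Input | Output when the yaml has no "usage" key...
usage = {"name": "Widget"}.get("usage", UsageFlags.Input.value | UsageFlags.Output.value)

# ...and model_types keeps only models whose usage differs from Default.
models = [("Widget", usage), ("HiddenPageResult", UsageFlags.Default.value)]
print([name for name, u in models if u != UsageFlags.Default.value])  # ['Widget']
```
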
@@ -418,8 +418,19 @@ class OperationBase( # pylint: disable=too-many-public-methods,too-many-instanc
         file_import.merge(self.get_request_builder_import(self.request_builder, async_mode))
         if self.overloads:
             file_import.add_submodule_import("typing", "overload", ImportType.STDLIB)
-        if self.non_default_errors and self.code_model.options["models_mode"] == "dpg":
-            file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL)
+        if self.code_model.options["models_mode"] == "dpg":
+            if self.parameters.has_body:
+                if self.has_form_data_body:
+                    file_import.add_submodule_import(relative_path, "_model_base", ImportType.LOCAL)
+                else:
+                    file_import.add_submodule_import(
+                        f"{relative_path}_model_base",
+                        "SdkJSONEncoder",
+                        ImportType.LOCAL,
+                    )
+                    file_import.add_import("json", ImportType.STDLIB)
+            if (self.default_error_deserialization or any(r.type for r in self.responses)) or self.non_default_errors:
+                file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL)
         return file_import
 
     def get_response_from_status(self, status_code: Optional[Union[str, int]]) -> ResponseType:
@@ -492,20 +503,6 @@ class Operation(OperationBase[Response]):
             )
         if self.has_response_body and not self.has_optional_return_type and not self.code_model.options["models_mode"]:
             file_import.add_submodule_import("typing", "cast", ImportType.STDLIB)
-        relative_path = "..." if async_mode else ".."
-        if self.code_model.options["models_mode"] == "dpg":
-            if self.parameters.has_body:
-                if self.has_form_data_body:
-                    file_import.add_submodule_import(relative_path, "_model_base", ImportType.LOCAL)
-                else:
-                    file_import.add_submodule_import(
-                        f"{relative_path}_model_base",
-                        "SdkJSONEncoder",
-                        ImportType.LOCAL,
-                    )
-                    file_import.add_import("json", ImportType.STDLIB)
-            if self.default_error_deserialization or any(r.type for r in self.responses):
-                file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL)
 
         return file_import
 
@@ -381,6 +381,12 @@ class ClientParameter(Parameter):
         ):
             # this means i am the base url
             return ParameterMethodLocation.KEYWORD_ONLY
+        if (
+            self.client_default_value is not None
+            and self.code_model.options["from_typespec"]
+            and not self.code_model.options["azure_arm"]
+        ):
+            return ParameterMethodLocation.KEYWORD_ONLY
         return ParameterMethodLocation.POSITIONAL
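
For clients generated from TypeSpec (and not Azure ARM), a client parameter that carries a default value is now emitted as keyword-only rather than positional. A hedged sketch of the intended effect on a generated client, using a hypothetical `api_version` parameter; the signatures are illustrative, not copied from generated code:

```python
from typing import Any

class ClientBefore:  # api_version used to be positional
    def __init__(self, endpoint: str, api_version: str = "2024-05-01", **kwargs: Any) -> None:
        self.endpoint, self.api_version = endpoint, api_version

class ClientAfter:  # api_version is now keyword-only
    def __init__(self, endpoint: str, *, api_version: str = "2024-05-01", **kwargs: Any) -> None:
        self.endpoint, self.api_version = endpoint, api_version

ClientAfter("https://example.contoso.com", api_version="2024-05-01")  # must be passed by keyword
```
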
@@ -624,3 +624,29 @@ class SdkCoreType(PrimitiveType):
     @property
     def serialization_type(self) -> str:
         return self.name
+
+
+class MultiPartFileType(PrimitiveType):
+    def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None:
+        super().__init__(yaml_data=yaml_data, code_model=code_model)
+        self.name = "FileType"
+
+    def type_annotation(self, **kwargs: Any) -> str:
+        return self.name
+
+    def docstring_type(self, **kwargs: Any) -> str:
+        return f"~{self.code_model.namespace}._vendor.{self.name}"
+
+    def imports(self, **kwargs: Any) -> FileImport:
+        file_import = super().imports(**kwargs)
+        relative_path = "..." if kwargs.get("async_mode") else ".."
+        file_import.add_submodule_import(f"{relative_path}_vendor", self.name, ImportType.LOCAL)
+        return file_import
+
+    @property
+    def default_template_representation_declaration(self) -> str:
+        return '"filetype"' if self.code_model.for_test else "filetype"
+
+    @property
+    def instance_check_template(self) -> str:
+        return f"isinstance({{}}, {self.name})"
@@ -97,12 +97,9 @@ class Property(BaseModel): # pylint: disable=too-many-instance-attributes
         return self.is_discriminator and self.is_polymorphic and cast(ConstantType, self.type).value is None
 
     def type_annotation(self, *, is_operation_file: bool = False) -> str:
-        types_type_annotation = self.type.type_annotation(is_operation_file=is_operation_file)
-        if self.is_multipart_file_input:
-            # we only support FileType or list of FileType
-            types_type_annotation = types_type_annotation.replace("bytes", "FileType")
         if self.is_base_discriminator:
             return "str"
+        types_type_annotation = self.type.type_annotation(is_operation_file=is_operation_file)
         if self.optional and self.client_default_value is None:
             return f"Optional[{types_type_annotation}]"
         return types_type_annotation
@@ -115,9 +112,6 @@ class Property(BaseModel): # pylint: disable=too-many-instance-attributes
         *,
         client_default_value_declaration: Optional[str] = None,
     ) -> Any:
-        if self.is_multipart_file_input:
-            file_type_str = '"filetype"' if self.code_model.for_test else "filetype"
-            return f"[{file_type_str}]" if self.type.type == "list" else file_type_str
         if self.client_default_value:
             client_default_value_declaration = self.get_declaration(self.client_default_value)
         # make sure there is no \n otherwise the json template will be invalid
@@ -156,8 +150,6 @@ class Property(BaseModel): # pylint: disable=too-many-instance-attributes
             "rest_discriminator" if self.is_discriminator else "rest_field",
             ImportType.LOCAL,
         )
-        if self.is_multipart_file_input:
-            file_import.add_submodule_import(".._vendor", "FileType", ImportType.LOCAL)
         return file_import
 
     @classmethod
@@ -516,7 +516,7 @@ class JinjaSerializer(ReaderAndWriter): # pylint: disable=abstract-method
                 and operation.api_versions[0] != self.code_model.options["default_api_version"]
             ):
                 continue
-            samples = operation.yaml_data["samples"]
+            samples = operation.yaml_data.get("samples")
             if not samples or operation.name.startswith("_"):
                 continue
             for value in samples.values():
@@ -535,9 +535,9 @@ class _OperationSerializer(_BuilderBaseSerializer[OperationType]): # pylint: di
         return "response"
 
     def example_template(self, builder: OperationType) -> List[str]:
+        if self.code_model.options["models_mode"] in ("msrest", "dpg"):
+            return []
         retval = super().example_template(builder)
-        if self.code_model.options["models_mode"] == "msrest":
-            return retval
         for response in builder.responses:
             polymorphic_subtypes: List[ModelType] = []
             if not response.type:
@@ -124,7 +124,6 @@ class GeneralSerializer(BaseSerializer):
         file_import.add_submodule_import("typing", "Union", ImportType.STDLIB)
         file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB)
         file_import.add_submodule_import("typing", "Mapping", ImportType.STDLIB)
-        file_import.add_submodule_import("typing", "Sequence", ImportType.STDLIB)
         file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB)
         file_import.add_submodule_import("typing", "Any", ImportType.STDLIB)
         file_import.add_submodule_import("typing", "List", ImportType.STDLIB)
@@ -194,6 +194,8 @@ class DpgModelSerializer(_ModelSerializer):
         )
 
         for model in self.code_model.model_types:
+            if model.base == "json":
+                continue
             file_import.merge(model.imports(is_operation_file=False))
             for prop in model.properties:
                 file_import.merge(prop.imports())
@@ -42,7 +42,7 @@ class SampleSerializer(BaseSerializer):
         self.operation = operation
         self.sample = sample
         self.file_name = file_name
-        self.sample_params = {to_snake_case(k): v for k, v in sample.get("parameters", {}).items()}
+        self.sample_params = sample.get("parameters", {})
 
     def _imports(self) -> FileImportSerializer:
         imports = FileImport(self.code_model)
@@ -66,8 +66,8 @@ class SampleSerializer(BaseSerializer):
                 "AzureKeyCredential",
                 ImportType.SDKCORE,
             )
-        for param in self.operation.parameters.positional:
-            if not param.client_default_value and not param.optional and param.client_name in self.sample_params:
+        for param in self.operation.parameters.positional + self.operation.parameters.keyword_only:
+            if not param.client_default_value and not param.optional and param.wire_name in self.sample_params:
                 imports.merge(param.type.imports_for_sample())
         return FileImportSerializer(imports, True)
 
@@ -80,15 +80,19 @@ class SampleSerializer(BaseSerializer):
         elif isinstance(credential_type, KeyCredentialType):
             special_param.update({"credential": 'AzureKeyCredential(key=os.getenv("AZURE_KEY"))'})
 
-        params_positional = [
-            p for p in self.code_model.clients[0].parameters.positional if not (p.optional or p.client_default_value)
+        params = [
+            p
+            for p in (
+                self.code_model.clients[0].parameters.positional + self.code_model.clients[0].parameters.keyword_only
+            )
+            if not (p.optional or p.client_default_value)
         ]
         client_params = {
             p.client_name: special_param.get(
                 p.client_name,
-                f'"{self.sample_params.get(p.client_name) or p.client_name.upper()}"',
+                f'"{self.sample_params.get(p.wire_name) or p.client_name.upper()}"',
             )
-            for p in params_positional
+            for p in params
         }
 
         return client_params
@@ -103,15 +107,18 @@ class SampleSerializer(BaseSerializer):
 
     # prepare operation parameters
     def _operation_params(self) -> Dict[str, Any]:
-        params_positional = [p for p in self.operation.parameters.positional if not p.client_default_value]
+        params = [
+            p
+            for p in (self.operation.parameters.positional + self.operation.parameters.keyword_only)
+            if not p.client_default_value
+        ]
         failure_info = "fail to find required param named {}"
         operation_params = {}
-        for param in params_positional:
-            name = param.client_name
-            param_value = self.sample_params.get(name)
+        for param in params:
             if not param.optional:
+                param_value = self.sample_params.get(param.wire_name)
                 if not param_value:
-                    raise Exception(failure_info.format(name))  # pylint: disable=broad-exception-raised
+                    raise Exception(failure_info.format(param.client_name))  # pylint: disable=broad-exception-raised
                 operation_params[param.client_name] = self.handle_param(param, param_value)
         return operation_params
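
Sample handling now works with wire names end to end: `sample_params` keeps the sample's parameters as-is (the `to_snake_case` pass is gone), keyword-only parameters are considered alongside positional ones, required values are looked up via each parameter's `wire_name`, and the emitted keyword argument still uses `client_name`. A small self-contained sketch of that mapping, with a hypothetical parameter class:

```python
from dataclasses import dataclass

@dataclass
class Param:  # hypothetical stand-in for the generator's parameter objects
    client_name: str
    wire_name: str
    optional: bool = False

# Sample parameters stay keyed by wire name...
sample_params = {"resourceGroupName": "my-rg"}

# ...lookups use wire_name, while the emitted kwarg still uses client_name.
operation_params = {}
for param in [Param("resource_group_name", "resourceGroupName")]:
    if not param.optional:
        value = sample_params.get(param.wire_name)
        if not value:
            raise Exception(f"fail to find required param named {param.client_name}")
        operation_params[param.client_name] = value

print(operation_params)  # {'resource_group_name': 'my-rg'}
```
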
@@ -489,6 +489,9 @@ def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typin
 
 class Model(_MyMutableMapping):
     _is_model = True
+    # label whether current class's _attr_to_rest_field has been calculated
+    # could not see _attr_to_rest_field directly because subclass inherits it from parent class
+    _calculated: typing.Set[str] = set()
 
     def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
         class_name = self.__class__.__name__
@@ -521,24 +524,27 @@ class Model(_MyMutableMapping):
         return Model(self.__dict__)
 
     def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:  # pylint: disable=unused-argument
-        # we know the last three classes in mro are going to be 'Model', 'dict', and 'object'
-        mros = cls.__mro__[:-3][::-1]  # ignore model, dict, and object parents, and reverse the mro order
-        attr_to_rest_field: typing.Dict[str, _RestField] = {  # map attribute name to rest_field property
-            k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
-        }
-        annotations = {
-            k: v
-            for mro_class in mros
-            if hasattr(mro_class, "__annotations__")  # pylint: disable=no-member
-            for k, v in mro_class.__annotations__.items()  # pylint: disable=no-member
-        }
-        for attr, rf in attr_to_rest_field.items():
-            rf._module = cls.__module__
-            if not rf._type:
-                rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
-            if not rf._rest_name_input:
-                rf._rest_name_input = attr
-        cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+        if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
+            # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
+            # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
+            mros = cls.__mro__[:-9][::-1]  # ignore parents, and reverse the mro order
+            attr_to_rest_field: typing.Dict[str, _RestField] = {  # map attribute name to rest_field property
+                k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
+            }
+            annotations = {
+                k: v
+                for mro_class in mros
+                if hasattr(mro_class, "__annotations__")  # pylint: disable=no-member
+                for k, v in mro_class.__annotations__.items()  # pylint: disable=no-member
+            }
+            for attr, rf in attr_to_rest_field.items():
+                rf._module = cls.__module__
+                if not rf._type:
+                    rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
+                if not rf._rest_name_input:
+                    rf._rest_name_input = attr
+            cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+            cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
 
         return super().__new__(cls)  # pylint: disable=no-value-for-parameter
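
`Model.__new__` now computes `_attr_to_rest_field` only once per class, tracking completion in a `_calculated` set keyed by `module.qualname` so that a subclass does not mistake the value it inherits from its parent for its own. The MRO slice also changes from `[:-3]` to `[:-9]` because, per the updated comment, `Model` now sits on the `MutableMapping` ABC chain rather than on `dict`. A stripped-down sketch of the same per-class memoization pattern:

```python
import typing

class Base:
    # Shared on the base class; keyed by qualified name so each subclass
    # recomputes for itself even though it inherits the attribute.
    _calculated: typing.Set[str] = set()

    def __new__(cls, *args: typing.Any, **kwargs: typing.Any):
        key = f"{cls.__module__}.{cls.__qualname__}"
        if key not in cls._calculated:
            cls._expensive = f"computed for {cls.__qualname__}"  # stand-in for _attr_to_rest_field
            cls._calculated.add(key)
        return super().__new__(cls)

class Child(Base):
    pass

Base(); Child()
print(Base._expensive)   # computed for Base
print(Child._expensive)  # computed for Child, not reused from Base
```
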
 
@@ -70,8 +70,6 @@ FileType = Union[
     Tuple[Optional[str], FileContent, Optional[str]],
 ]
 
-FilesType = Union[Mapping[str, FileType], Sequence[Tuple[str, FileType]]]
-
 def serialize_multipart_data_entry(data_entry: Any) -> Any:
     if isinstance(data_entry, (list, tuple, dict, Model)):
         return json.dumps(data_entry, cls=SdkJSONEncoder, exclude_readonly=True)

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@autorest/python",
-  "version": "6.17.1",
+  "version": "6.19.0",
   "description": "The Python extension for generators in AutoRest.",
   "main": "index.js",
   "repository": {
@@ -20,12 +20,13 @@
   "homepage": "https://github.com/Azure/autorest.python/blob/main/README.md",
   "dependencies": {
     "@autorest/system-requirements": "~1.0.2",
-    "fs-extra": "~11.2.0"
+    "fs-extra": "~11.2.0",
+    "tsx": "4.17.0"
   },
   "devDependencies": {
     "@microsoft.azure/autorest.testserver": "^3.3.46",
     "typescript": "~5.1.3",
-    "@azure-tools/typespec-python": "^0.27.1"
+    "@azure-tools/typespec-python": "^0.29.0"
   },
   "files": [
     "autorest/**/*.py",
@@ -39,6 +40,7 @@
     "start": "node ./scripts/run-python3.js ./scripts/start.py",
     "build": "node ./scripts/copy-generator.js --force",
     "install": "node ./scripts/copy-generator.js && node ./scripts/run-python3.js ./scripts/install.py",
-    "debug": "node ./scripts/run-python3.js ./scripts/start.py --debug"
+    "debug": "node ./scripts/run-python3.js ./scripts/start.py --debug",
+    "test": "tsx ./scripts/run-tests.ts"
   }
 }

@@ -0,0 +1,81 @@
+/* eslint-disable no-console */
+import { execSync } from "child_process";
+import { readFileSync } from "fs";
+import { join } from "path";
+import yargs from "yargs";
+import { hideBin } from "yargs/helpers";
+
+interface Arguments {
+  folder?: string;
+  command?: string;
+}
+
+const validFolders = [
+  "azure/legacy",
+  "azure/version-tolerant",
+  "vanilla/legacy",
+  "vanilla/version-tolerant",
+  "dpg/version-tolerant",
+];
+
+const validCommands = ["ci", "lint", "mypy", "pyright", "apiview"];
+
+// Parse command-line arguments using yargs
+const argv = yargs(hideBin(process.argv))
+  .option("folder", {
+    alias: "f",
+    describe: "Specify the folder to use",
+    choices: validFolders,
+    type: "string",
+  })
+  .option("command", {
+    alias: "c",
+    describe: "Specify the command to run",
+    choices: validCommands,
+    type: "string",
+  }).argv as Arguments;
+
+const foldersToProcess = argv.folder ? [argv.folder] : validFolders;
+
+const commandToRun = argv.command || "all";
+
+function getCommand(command: string, folder: string) {
+  if (!validCommands.includes(command)) throw new Error(`Unknown command '${command}'.`);
+  return `FOLDER=${folder} tox -c ./test/${folder}/tox.ini -e ${command}`;
+}
+
+function sectionExistsInToxIni(command: string, folder: string): boolean {
+  const toxIniPath = join(__dirname, `../test/${folder}/tox.ini`);
+  const toxIniContent = readFileSync(toxIniPath, "utf-8");
+  const sectionHeader = `[testenv:${command}]`;
+  return toxIniContent.includes(sectionHeader);
+}
+
+function myExecSync(command: string, folder: string): void {
+  if (!sectionExistsInToxIni(command, folder)) {
+    console.log(`No section for ${command} in tox.ini for folder ${folder}. Skipping...`);
+    return;
+  }
+  execSync(getCommand(command, folder), { stdio: "inherit" });
+}
+
+foldersToProcess.forEach((folder) => {
+  try {
+    if (commandToRun === "all") {
+      for (const key of validCommands) {
+        console.log(`Running ${key} for folder ${folder}...`);
+        myExecSync(key, folder);
+      }
+    } else if (getCommand(commandToRun, folder)) {
+      console.log(`Running ${commandToRun} for folder ${folder}...`);
+      myExecSync(commandToRun, folder);
+    } else {
+      console.error(`Error: Unknown command '${commandToRun}'.`);
+      process.exit(1);
+    }
+  } catch (error) {
+    console.error((error as Error).message);
+    console.error(`Error executing command for folder ${folder}: ${(error as Error).message}`);
+    process.exit(1);
+  }
+});