@azure/synapse-artifacts 1.0.0-beta.4 → 1.0.0-beta.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/README.md +6 -4
- package/dist/index.js +18462 -16154
- package/dist/index.js.map +1 -1
- package/dist-esm/src/artifactsClient.js +20 -15
- package/dist-esm/src/artifactsClient.js.map +1 -1
- package/dist-esm/src/artifactsClientContext.js +14 -37
- package/dist-esm/src/artifactsClientContext.js.map +1 -1
- package/dist-esm/src/index.js +1 -2
- package/dist-esm/src/index.js.map +1 -1
- package/dist-esm/src/lroImpl.js +25 -0
- package/dist-esm/src/lroImpl.js.map +1 -0
- package/dist-esm/src/models/index.js +767 -1
- package/dist-esm/src/models/index.js.map +1 -1
- package/dist-esm/src/models/mappers.js +8083 -7446
- package/dist-esm/src/models/mappers.js.map +1 -1
- package/dist-esm/src/models/parameters.js +259 -165
- package/dist-esm/src/models/parameters.js.map +1 -1
- package/dist-esm/src/operations/bigDataPools.js +38 -50
- package/dist-esm/src/operations/bigDataPools.js.map +1 -1
- package/dist-esm/src/operations/dataFlowDebugSession.js +165 -153
- package/dist-esm/src/operations/dataFlowDebugSession.js.map +1 -1
- package/dist-esm/src/operations/dataFlowOperations.js +433 -0
- package/dist-esm/src/operations/dataFlowOperations.js.map +1 -0
- package/dist-esm/src/operations/datasetOperations.js +433 -0
- package/dist-esm/src/operations/datasetOperations.js.map +1 -0
- package/dist-esm/src/operations/index.js +19 -14
- package/dist-esm/src/operations/index.js.map +1 -1
- package/dist-esm/src/operations/integrationRuntimes.js +38 -50
- package/dist-esm/src/operations/integrationRuntimes.js.map +1 -1
- package/dist-esm/src/operations/kqlScriptOperations.js +303 -0
- package/dist-esm/src/operations/kqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operations/kqlScripts.js +150 -0
- package/dist-esm/src/operations/kqlScripts.js.map +1 -0
- package/dist-esm/src/operations/library.js +222 -194
- package/dist-esm/src/operations/library.js.map +1 -1
- package/dist-esm/src/operations/linkedServiceOperations.js +434 -0
- package/dist-esm/src/operations/linkedServiceOperations.js.map +1 -0
- package/dist-esm/src/operations/metastore.js +182 -0
- package/dist-esm/src/operations/metastore.js.map +1 -0
- package/dist-esm/src/operations/notebookOperationResult.js +64 -0
- package/dist-esm/src/operations/notebookOperationResult.js.map +1 -0
- package/dist-esm/src/operations/notebookOperations.js +558 -0
- package/dist-esm/src/operations/notebookOperations.js.map +1 -0
- package/dist-esm/src/operations/pipelineOperations.js +479 -0
- package/dist-esm/src/operations/pipelineOperations.js.map +1 -0
- package/dist-esm/src/operations/pipelineRunOperations.js +185 -0
- package/dist-esm/src/operations/pipelineRunOperations.js.map +1 -0
- package/dist-esm/src/operations/sparkConfigurationOperations.js +434 -0
- package/dist-esm/src/operations/sparkConfigurationOperations.js.map +1 -0
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js +602 -0
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js.map +1 -0
- package/dist-esm/src/operations/sqlPools.js +38 -50
- package/dist-esm/src/operations/sqlPools.js.map +1 -1
- package/dist-esm/src/operations/sqlScriptOperations.js +434 -0
- package/dist-esm/src/operations/sqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operations/triggerOperations.js +705 -0
- package/dist-esm/src/operations/triggerOperations.js.map +1 -0
- package/dist-esm/src/operations/triggerRunOperations.js +147 -0
- package/dist-esm/src/operations/triggerRunOperations.js.map +1 -0
- package/dist-esm/src/operations/workspaceGitRepoManagement.js +26 -29
- package/dist-esm/src/operations/workspaceGitRepoManagement.js.map +1 -1
- package/dist-esm/src/operations/workspaceOperations.js +62 -0
- package/dist-esm/src/operations/workspaceOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/bigDataPools.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/dataFlowDebugSession.js +1 -1
- package/dist-esm/src/operationsInterfaces/dataFlowDebugSession.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/{dataset.js → dataFlowOperations.js} +2 -2
- package/dist-esm/src/operationsInterfaces/dataFlowOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/{pipelineRun.js → datasetOperations.js} +1 -1
- package/dist-esm/src/operationsInterfaces/datasetOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/index.js +19 -14
- package/dist-esm/src/operationsInterfaces/index.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/integrationRuntimes.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/kqlScriptOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/kqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/{triggerRun.js → kqlScripts.js} +1 -1
- package/dist-esm/src/operationsInterfaces/kqlScripts.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/library.js +1 -1
- package/dist-esm/src/operationsInterfaces/library.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/linkedServiceOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/linkedServiceOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/{workspace.js → metastore.js} +1 -1
- package/dist-esm/src/operationsInterfaces/metastore.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/notebookOperationResult.js +9 -0
- package/dist-esm/src/operationsInterfaces/notebookOperationResult.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/notebookOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/notebookOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/pipelineOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/pipelineOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/pipelineRunOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/pipelineRunOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/{dataFlow.js → sparkConfigurationOperations.js} +2 -2
- package/dist-esm/src/operationsInterfaces/sparkConfigurationOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sparkJobDefinitionOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/sparkJobDefinitionOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sqlPools.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/sqlScriptOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/sqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/triggerOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/triggerOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/triggerRunOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/triggerRunOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/workspaceOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/workspaceOperations.js.map +1 -0
- package/package.json +20 -30
- package/types/synapse-artifacts.d.ts +2037 -1319
- package/dist-esm/src/lro/azureAsyncOperationStrategy.js +0 -149
- package/dist-esm/src/lro/azureAsyncOperationStrategy.js.map +0 -1
- package/dist-esm/src/lro/bodyPollingStrategy.js +0 -45
- package/dist-esm/src/lro/bodyPollingStrategy.js.map +0 -1
- package/dist-esm/src/lro/constants.js +0 -9
- package/dist-esm/src/lro/constants.js.map +0 -1
- package/dist-esm/src/lro/index.js +0 -15
- package/dist-esm/src/lro/index.js.map +0 -1
- package/dist-esm/src/lro/locationStrategy.js +0 -54
- package/dist-esm/src/lro/locationStrategy.js.map +0 -1
- package/dist-esm/src/lro/lroPolicy.js +0 -32
- package/dist-esm/src/lro/lroPolicy.js.map +0 -1
- package/dist-esm/src/lro/lroPoller.js +0 -74
- package/dist-esm/src/lro/lroPoller.js.map +0 -1
- package/dist-esm/src/lro/models.js +0 -9
- package/dist-esm/src/lro/models.js.map +0 -1
- package/dist-esm/src/lro/operation.js +0 -72
- package/dist-esm/src/lro/operation.js.map +0 -1
- package/dist-esm/src/lro/passthroughStrategy.js +0 -27
- package/dist-esm/src/lro/passthroughStrategy.js.map +0 -1
- package/dist-esm/src/lro/requestUtils.js +0 -82
- package/dist-esm/src/lro/requestUtils.js.map +0 -1
- package/dist-esm/src/operations/dataFlow.js +0 -391
- package/dist-esm/src/operations/dataFlow.js.map +0 -1
- package/dist-esm/src/operations/dataset.js +0 -391
- package/dist-esm/src/operations/dataset.js.map +0 -1
- package/dist-esm/src/operations/linkedService.js +0 -392
- package/dist-esm/src/operations/linkedService.js.map +0 -1
- package/dist-esm/src/operations/notebook.js +0 -527
- package/dist-esm/src/operations/notebook.js.map +0 -1
- package/dist-esm/src/operations/pipeline.js +0 -443
- package/dist-esm/src/operations/pipeline.js.map +0 -1
- package/dist-esm/src/operations/pipelineRun.js +0 -208
- package/dist-esm/src/operations/pipelineRun.js.map +0 -1
- package/dist-esm/src/operations/sparkJobDefinition.js +0 -520
- package/dist-esm/src/operations/sparkJobDefinition.js.map +0 -1
- package/dist-esm/src/operations/sqlScript.js +0 -392
- package/dist-esm/src/operations/sqlScript.js.map +0 -1
- package/dist-esm/src/operations/trigger.js +0 -609
- package/dist-esm/src/operations/trigger.js.map +0 -1
- package/dist-esm/src/operations/triggerRun.js +0 -160
- package/dist-esm/src/operations/triggerRun.js.map +0 -1
- package/dist-esm/src/operations/workspace.js +0 -68
- package/dist-esm/src/operations/workspace.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/dataFlow.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/dataset.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/linkedService.js +0 -9
- package/dist-esm/src/operationsInterfaces/linkedService.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/notebook.js +0 -9
- package/dist-esm/src/operationsInterfaces/notebook.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/pipeline.js +0 -9
- package/dist-esm/src/operationsInterfaces/pipeline.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/pipelineRun.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/sparkJobDefinition.js +0 -9
- package/dist-esm/src/operationsInterfaces/sparkJobDefinition.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/sqlScript.js +0 -9
- package/dist-esm/src/operationsInterfaces/sqlScript.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/trigger.js +0 -9
- package/dist-esm/src/operationsInterfaces/trigger.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/triggerRun.js.map +0 -1
- package/dist-esm/src/operationsInterfaces/workspace.js.map +0 -1
- package/dist-esm/src/utils/constants.js +0 -9
- package/dist-esm/src/utils/constants.js.map +0 -1
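The listing above shows the overall shape of the change: each operation group is reworked under a new file with an `Operations` suffix (operations/dataset.js is removed while operations/datasetOperations.js is added, operations/sparkJobDefinition.js is removed while operations/sparkJobDefinitionOperations.js is added), and the custom `lro/` strategy files are replaced by a single `lroImpl.js`. The hunk below is the removed beta.4 implementation of the Spark Job Definition group. As a rough, hedged sketch of what that rename can mean for calling code — the client property name is an assumption inferred from the renamed files, not confirmed by this diff; check `package/types/synapse-artifacts.d.ts` before relying on it:

```ts
// Sketch only: `sparkJobDefinitionOperations` is an assumed property name inferred
// from the renamed operations files; in 1.0.0-beta.4 the group likely hung off a
// property named after the old file (sparkJobDefinition). Verify against the .d.ts.
import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://<workspace-name>.dev.azuresynapse.net" // placeholder workspace endpoint
);

async function listSparkJobDefinitions(): Promise<void> {
  // The public paging method name matches the one defined in the removed file below.
  for await (const definition of client.sparkJobDefinitionOperations.listSparkJobDefinitionsByWorkspace()) {
    console.log(definition.name);
  }
}

listSparkJobDefinitions().catch(console.error);
```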
@@ -1,520 +0,0 @@
-/*
- * Copyright (c) Microsoft Corporation.
- * Licensed under the MIT License.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-import { __asyncDelegator, __asyncGenerator, __asyncValues, __await, __awaiter } from "tslib";
-/// <reference lib="esnext.asynciterable" />
-import { SpanStatusCode } from "@azure/core-tracing";
-import { createSpan } from "../tracing";
-import "@azure/core-paging";
-import * as coreHttp from "@azure/core-http";
-import * as Mappers from "../models/mappers";
-import * as Parameters from "../models/parameters";
-import { LROPoller, shouldDeserializeLRO } from "../lro";
-/** Class representing a SparkJobDefinition. */
-export class SparkJobDefinitionImpl {
-    /**
-     * Initialize a new instance of the class SparkJobDefinition class.
-     * @param client Reference to the service client
-     */
-    constructor(client) {
-        this.client = client;
-    }
-    /**
-     * Lists spark job definitions.
-     * @param options The options parameters.
-     */
-    listSparkJobDefinitionsByWorkspace(options) {
-        const iter = this.getSparkJobDefinitionsByWorkspacePagingAll(options);
-        return {
-            next() {
-                return iter.next();
-            },
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-            byPage: () => {
-                return this.getSparkJobDefinitionsByWorkspacePagingPage(options);
-            }
-        };
-    }
-    getSparkJobDefinitionsByWorkspacePagingPage(options) {
-        return __asyncGenerator(this, arguments, function* getSparkJobDefinitionsByWorkspacePagingPage_1() {
-            let result = yield __await(this._getSparkJobDefinitionsByWorkspace(options));
-            yield yield __await(result.value || []);
-            let continuationToken = result.nextLink;
-            while (continuationToken) {
-                result = yield __await(this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options));
-                continuationToken = result.nextLink;
-                yield yield __await(result.value || []);
-            }
-        });
-    }
-    getSparkJobDefinitionsByWorkspacePagingAll(options) {
-        return __asyncGenerator(this, arguments, function* getSparkJobDefinitionsByWorkspacePagingAll_1() {
-            var e_1, _a;
-            try {
-                for (var _b = __asyncValues(this.getSparkJobDefinitionsByWorkspacePagingPage(options)), _c; _c = yield __await(_b.next()), !_c.done;) {
-                    const page = _c.value;
-                    yield __await(yield* __asyncDelegator(__asyncValues(page)));
-                }
-            }
-            catch (e_1_1) { e_1 = { error: e_1_1 }; }
-            finally {
-                try {
-                    if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
-                }
-                finally { if (e_1) throw e_1.error; }
-            }
-        });
-    }
-    /**
-     * Lists spark job definitions.
-     * @param options The options parameters.
-     */
-    _getSparkJobDefinitionsByWorkspace(options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-_getSparkJobDefinitionsByWorkspace", options || {});
-            const operationArguments = {
-                options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {})
-            };
-            try {
-                const result = yield this.client.sendOperationRequest(operationArguments, getSparkJobDefinitionsByWorkspaceOperationSpec);
-                return result;
-            }
-            catch (error) {
-                span.setStatus({
-                    code: SpanStatusCode.ERROR,
-                    message: error.message
-                });
-                throw error;
-            }
-            finally {
-                span.end();
-            }
-        });
-    }
-    /**
-     * Creates or updates a Spark Job Definition.
-     * @param sparkJobDefinitionName The spark job definition name.
-     * @param sparkJobDefinition Spark Job Definition resource definition.
-     * @param options The options parameters.
-     */
-    createOrUpdateSparkJobDefinition(sparkJobDefinitionName, sparkJobDefinition, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionName,
-                sparkJobDefinition,
-                options: this.getOperationOptions(updatedOptions, "undefined")
-            };
-            const sendOperation = (args, spec) => __awaiter(this, void 0, void 0, function* () {
-                try {
-                    const result = yield this.client.sendOperationRequest(args, spec);
-                    return result;
-                }
-                catch (error) {
-                    span.setStatus({
-                        code: SpanStatusCode.ERROR,
-                        message: error.message
-                    });
-                    throw error;
-                }
-                finally {
-                    span.end();
-                }
-            });
-            const initialOperationResult = yield sendOperation(operationArguments, createOrUpdateSparkJobDefinitionOperationSpec);
-            return new LROPoller({
-                initialOperationArguments: operationArguments,
-                initialOperationSpec: createOrUpdateSparkJobDefinitionOperationSpec,
-                initialOperationResult,
-                sendOperation
-            });
-        });
-    }
-    /**
-     * Gets a Spark Job Definition.
-     * @param sparkJobDefinitionName The spark job definition name.
-     * @param options The options parameters.
-     */
-    getSparkJobDefinition(sparkJobDefinitionName, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-getSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionName,
-                options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {})
-            };
-            try {
-                const result = yield this.client.sendOperationRequest(operationArguments, getSparkJobDefinitionOperationSpec);
-                return result;
-            }
-            catch (error) {
-                span.setStatus({
-                    code: SpanStatusCode.ERROR,
-                    message: error.message
-                });
-                throw error;
-            }
-            finally {
-                span.end();
-            }
-        });
-    }
-    /**
-     * Deletes a Spark Job Definition.
-     * @param sparkJobDefinitionName The spark job definition name.
-     * @param options The options parameters.
-     */
-    deleteSparkJobDefinition(sparkJobDefinitionName, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-deleteSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionName,
-                options: this.getOperationOptions(updatedOptions, "undefined")
-            };
-            const sendOperation = (args, spec) => __awaiter(this, void 0, void 0, function* () {
-                try {
-                    const result = yield this.client.sendOperationRequest(args, spec);
-                    return result;
-                }
-                catch (error) {
-                    span.setStatus({
-                        code: SpanStatusCode.ERROR,
-                        message: error.message
-                    });
-                    throw error;
-                }
-                finally {
-                    span.end();
-                }
-            });
-            const initialOperationResult = yield sendOperation(operationArguments, deleteSparkJobDefinitionOperationSpec);
-            return new LROPoller({
-                initialOperationArguments: operationArguments,
-                initialOperationSpec: deleteSparkJobDefinitionOperationSpec,
-                initialOperationResult,
-                sendOperation
-            });
-        });
-    }
-    /**
-     * Executes the spark job definition.
-     * @param sparkJobDefinitionName The spark job definition name.
-     * @param options The options parameters.
-     */
-    executeSparkJobDefinition(sparkJobDefinitionName, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-executeSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionName,
-                options: this.getOperationOptions(updatedOptions, "location")
-            };
-            const sendOperation = (args, spec) => __awaiter(this, void 0, void 0, function* () {
-                try {
-                    const result = yield this.client.sendOperationRequest(args, spec);
-                    return result;
-                }
-                catch (error) {
-                    span.setStatus({
-                        code: SpanStatusCode.ERROR,
-                        message: error.message
-                    });
-                    throw error;
-                }
-                finally {
-                    span.end();
-                }
-            });
-            const initialOperationResult = yield sendOperation(operationArguments, executeSparkJobDefinitionOperationSpec);
-            return new LROPoller({
-                initialOperationArguments: operationArguments,
-                initialOperationSpec: executeSparkJobDefinitionOperationSpec,
-                initialOperationResult,
-                sendOperation,
-                finalStateVia: "location"
-            });
-        });
-    }
-    /**
-     * Renames a sparkJobDefinition.
-     * @param sparkJobDefinitionName The spark job definition name.
-     * @param request proposed new name.
-     * @param options The options parameters.
-     */
-    renameSparkJobDefinition(sparkJobDefinitionName, request, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-renameSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionName,
-                request,
-                options: this.getOperationOptions(updatedOptions, "undefined")
-            };
-            const sendOperation = (args, spec) => __awaiter(this, void 0, void 0, function* () {
-                try {
-                    const result = yield this.client.sendOperationRequest(args, spec);
-                    return result;
-                }
-                catch (error) {
-                    span.setStatus({
-                        code: SpanStatusCode.ERROR,
-                        message: error.message
-                    });
-                    throw error;
-                }
-                finally {
-                    span.end();
-                }
-            });
-            const initialOperationResult = yield sendOperation(operationArguments, renameSparkJobDefinitionOperationSpec);
-            return new LROPoller({
-                initialOperationArguments: operationArguments,
-                initialOperationSpec: renameSparkJobDefinitionOperationSpec,
-                initialOperationResult,
-                sendOperation
-            });
-        });
-    }
-    /**
-     * Debug the spark job definition.
-     * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition.
-     * @param options The options parameters.
-     */
-    debugSparkJobDefinition(sparkJobDefinitionAzureResource, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-debugSparkJobDefinition", options || {});
-            const operationArguments = {
-                sparkJobDefinitionAzureResource,
-                options: this.getOperationOptions(updatedOptions, "location")
-            };
-            const sendOperation = (args, spec) => __awaiter(this, void 0, void 0, function* () {
-                try {
-                    const result = yield this.client.sendOperationRequest(args, spec);
-                    return result;
-                }
-                catch (error) {
-                    span.setStatus({
-                        code: SpanStatusCode.ERROR,
-                        message: error.message
-                    });
-                    throw error;
-                }
-                finally {
-                    span.end();
-                }
-            });
-            const initialOperationResult = yield sendOperation(operationArguments, debugSparkJobDefinitionOperationSpec);
-            return new LROPoller({
-                initialOperationArguments: operationArguments,
-                initialOperationSpec: debugSparkJobDefinitionOperationSpec,
-                initialOperationResult,
-                sendOperation,
-                finalStateVia: "location"
-            });
-        });
-    }
-    /**
-     * GetSparkJobDefinitionsByWorkspaceNext
-     * @param nextLink The nextLink from the previous successful call to the
-     * GetSparkJobDefinitionsByWorkspace method.
-     * @param options The options parameters.
-     */
-    _getSparkJobDefinitionsByWorkspaceNext(nextLink, options) {
-        return __awaiter(this, void 0, void 0, function* () {
-            const { span, updatedOptions } = createSpan("ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", options || {});
-            const operationArguments = {
-                nextLink,
-                options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {})
-            };
-            try {
-                const result = yield this.client.sendOperationRequest(operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec);
-                return result;
-            }
-            catch (error) {
-                span.setStatus({
-                    code: SpanStatusCode.ERROR,
-                    message: error.message
-                });
-                throw error;
-            }
-            finally {
-                span.end();
-            }
-        });
-    }
-    getOperationOptions(options, finalStateVia) {
-        const operationOptions = options || {};
-        operationOptions.requestOptions = Object.assign(Object.assign({}, operationOptions.requestOptions), { shouldDeserialize: shouldDeserializeLRO(finalStateVia) });
-        return coreHttp.operationOptionsToRequestOptionsBase(operationOptions);
-    }
-}
-// Operation Specifications
-const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
-const getSparkJobDefinitionsByWorkspaceOperationSpec = {
-    path: "/sparkJobDefinitions",
-    httpMethod: "GET",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkJobDefinitionsListResponse
-        },
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint],
-    headerParameters: [Parameters.accept],
-    serializer
-};
-const createOrUpdateSparkJobDefinitionOperationSpec = {
-    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
-    httpMethod: "PUT",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkJobDefinitionResource
-        },
-        201: {
-            bodyMapper: Mappers.SparkJobDefinitionResource
-        },
-        202: {
-            bodyMapper: Mappers.SparkJobDefinitionResource
-        },
-        204: {
-            bodyMapper: Mappers.SparkJobDefinitionResource
-        },
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    requestBody: Parameters.sparkJobDefinition,
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
-    headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch],
-    mediaType: "json",
-    serializer
-};
-const getSparkJobDefinitionOperationSpec = {
-    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
-    httpMethod: "GET",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkJobDefinitionResource
-        },
-        304: {},
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
-    headerParameters: [Parameters.accept, Parameters.ifNoneMatch],
-    serializer
-};
-const deleteSparkJobDefinitionOperationSpec = {
-    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
-    httpMethod: "DELETE",
-    responses: {
-        200: {},
-        201: {},
-        202: {},
-        204: {},
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
-    headerParameters: [Parameters.accept],
-    serializer
-};
-const executeSparkJobDefinitionOperationSpec = {
-    path: "/sparkJobDefinitions/{sparkJobDefinitionName}/execute",
-    httpMethod: "POST",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        201: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        202: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        204: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
-    headerParameters: [Parameters.accept],
-    serializer
-};
-const renameSparkJobDefinitionOperationSpec = {
-    path: "/sparkJobDefinitions/{sparkJobDefinitionName}/rename",
-    httpMethod: "POST",
-    responses: {
-        200: {},
-        201: {},
-        202: {},
-        204: {},
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    requestBody: Parameters.request,
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
-    headerParameters: [Parameters.accept, Parameters.contentType],
-    mediaType: "json",
-    serializer
-};
-const debugSparkJobDefinitionOperationSpec = {
-    path: "/debugSparkJobDefinition",
-    httpMethod: "POST",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        201: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        202: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        204: {
-            bodyMapper: Mappers.SparkBatchJob
-        },
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    requestBody: Parameters.sparkJobDefinitionAzureResource,
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint],
-    headerParameters: [Parameters.accept, Parameters.contentType],
-    mediaType: "json",
-    serializer
-};
-const getSparkJobDefinitionsByWorkspaceNextOperationSpec = {
-    path: "{nextLink}",
-    httpMethod: "GET",
-    responses: {
-        200: {
-            bodyMapper: Mappers.SparkJobDefinitionsListResponse
-        },
-        default: {
-            bodyMapper: Mappers.CloudError
-        }
-    },
-    queryParameters: [Parameters.apiVersion],
-    urlParameters: [Parameters.endpoint, Parameters.nextLink],
-    headerParameters: [Parameters.accept],
-    serializer
-};
-//# sourceMappingURL=sparkJobDefinition.js.map
@@ -1 +0,0 @@
-{"version":3,"file":"sparkJobDefinition.js","sourceRoot":"","sources":["../../../src/operations/sparkJobDefinition.ts"],"names":[],"mappings":"…","sourcesContent":["…"]}