@azure/synapse-spark 1.0.0-beta.3 → 1.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207)
  1. package/LICENSE +21 -0
  2. package/README.md +14 -13
  3. package/dist/browser/index.d.ts +4 -0
  4. package/dist/browser/index.d.ts.map +1 -0
  5. package/dist/browser/index.js +11 -0
  6. package/dist/browser/index.js.map +1 -0
  7. package/{types/synapse-spark.d.ts → dist/browser/models/index.d.ts} +548 -621
  8. package/dist/browser/models/index.d.ts.map +1 -0
  9. package/dist/browser/models/index.js +134 -0
  10. package/dist/browser/models/index.js.map +1 -0
  11. package/dist/browser/models/mappers.d.ts +19 -0
  12. package/dist/browser/models/mappers.d.ts.map +1 -0
  13. package/dist/browser/models/mappers.js +1151 -0
  14. package/dist/browser/models/mappers.js.map +1 -0
  15. package/dist/browser/models/parameters.d.ts +16 -0
  16. package/dist/browser/models/parameters.d.ts.map +1 -0
  17. package/dist/browser/models/parameters.js +133 -0
  18. package/dist/browser/models/parameters.js.map +1 -0
  19. package/dist/browser/operations/index.d.ts +3 -0
  20. package/dist/browser/operations/index.d.ts.map +1 -0
  21. package/dist/browser/operations/index.js +10 -0
  22. package/dist/browser/operations/index.js.map +1 -0
  23. package/dist/browser/operations/sparkBatch.d.ts +36 -0
  24. package/dist/browser/operations/sparkBatch.d.ts.map +1 -0
  25. package/dist/browser/operations/sparkBatch.js +122 -0
  26. package/dist/browser/operations/sparkBatch.js.map +1 -0
  27. package/dist/browser/operations/sparkSessionOperations.d.ts +69 -0
  28. package/dist/browser/operations/sparkSessionOperations.d.ts.map +1 -0
  29. package/dist/browser/operations/sparkSessionOperations.js +259 -0
  30. package/dist/browser/operations/sparkSessionOperations.js.map +1 -0
  31. package/dist/browser/operationsInterfaces/index.d.ts +3 -0
  32. package/dist/browser/operationsInterfaces/index.d.ts.map +1 -0
  33. package/dist/browser/operationsInterfaces/index.js +10 -0
  34. package/dist/browser/operationsInterfaces/index.js.map +1 -0
  35. package/dist/browser/operationsInterfaces/sparkBatch.d.ts +28 -0
  36. package/dist/browser/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  37. package/dist/browser/operationsInterfaces/sparkBatch.js +9 -0
  38. package/dist/browser/operationsInterfaces/sparkBatch.js.map +1 -0
  39. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  40. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  41. package/dist/browser/operationsInterfaces/sparkSessionOperations.js +9 -0
  42. package/dist/browser/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  43. package/dist/browser/package.json +3 -0
  44. package/dist/browser/sparkClient.d.ts +24 -0
  45. package/dist/browser/sparkClient.d.ts.map +1 -0
  46. package/dist/browser/sparkClient.js +86 -0
  47. package/dist/browser/sparkClient.js.map +1 -0
  48. package/dist/browser/tracing.d.ts +2 -0
  49. package/dist/browser/tracing.d.ts.map +1 -0
  50. package/dist/browser/tracing.js +14 -0
  51. package/dist/browser/tracing.js.map +1 -0
  52. package/dist/commonjs/index.d.ts +4 -0
  53. package/dist/commonjs/index.d.ts.map +1 -0
  54. package/dist/commonjs/index.js +16 -0
  55. package/dist/commonjs/index.js.map +1 -0
  56. package/dist/commonjs/models/index.d.ts +548 -0
  57. package/dist/commonjs/models/index.d.ts.map +1 -0
  58. package/dist/commonjs/models/index.js +137 -0
  59. package/dist/commonjs/models/index.js.map +1 -0
  60. package/dist/commonjs/models/mappers.d.ts +19 -0
  61. package/dist/commonjs/models/mappers.d.ts.map +1 -0
  62. package/dist/commonjs/models/mappers.js +1154 -0
  63. package/dist/commonjs/models/mappers.js.map +1 -0
  64. package/dist/commonjs/models/parameters.d.ts +16 -0
  65. package/dist/commonjs/models/parameters.d.ts.map +1 -0
  66. package/dist/commonjs/models/parameters.js +136 -0
  67. package/dist/commonjs/models/parameters.js.map +1 -0
  68. package/dist/commonjs/operations/index.d.ts +3 -0
  69. package/dist/commonjs/operations/index.d.ts.map +1 -0
  70. package/dist/commonjs/operations/index.js +13 -0
  71. package/dist/commonjs/operations/index.js.map +1 -0
  72. package/dist/commonjs/operations/sparkBatch.d.ts +36 -0
  73. package/dist/commonjs/operations/sparkBatch.d.ts.map +1 -0
  74. package/dist/commonjs/operations/sparkBatch.js +127 -0
  75. package/dist/commonjs/operations/sparkBatch.js.map +1 -0
  76. package/dist/commonjs/operations/sparkSessionOperations.d.ts +69 -0
  77. package/dist/commonjs/operations/sparkSessionOperations.d.ts.map +1 -0
  78. package/dist/commonjs/operations/sparkSessionOperations.js +264 -0
  79. package/dist/commonjs/operations/sparkSessionOperations.js.map +1 -0
  80. package/dist/commonjs/operationsInterfaces/index.d.ts +3 -0
  81. package/dist/commonjs/operationsInterfaces/index.d.ts.map +1 -0
  82. package/dist/commonjs/operationsInterfaces/index.js +13 -0
  83. package/dist/commonjs/operationsInterfaces/index.js.map +1 -0
  84. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts +28 -0
  85. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  86. package/dist/commonjs/operationsInterfaces/sparkBatch.js +10 -0
  87. package/dist/commonjs/operationsInterfaces/sparkBatch.js.map +1 -0
  88. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  89. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  90. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js +10 -0
  91. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  92. package/dist/commonjs/package.json +3 -0
  93. package/dist/commonjs/sparkClient.d.ts +24 -0
  94. package/dist/commonjs/sparkClient.d.ts.map +1 -0
  95. package/dist/commonjs/sparkClient.js +91 -0
  96. package/dist/commonjs/sparkClient.js.map +1 -0
  97. package/dist/commonjs/tracing.d.ts +2 -0
  98. package/dist/commonjs/tracing.d.ts.map +1 -0
  99. package/dist/commonjs/tracing.js +17 -0
  100. package/dist/commonjs/tracing.js.map +1 -0
  101. package/dist/commonjs/tsdoc-metadata.json +11 -0
  102. package/dist/esm/index.d.ts +4 -0
  103. package/dist/esm/index.d.ts.map +1 -0
  104. package/dist/esm/index.js +11 -0
  105. package/dist/esm/index.js.map +1 -0
  106. package/dist/esm/models/index.d.ts +548 -0
  107. package/dist/esm/models/index.d.ts.map +1 -0
  108. package/dist/esm/models/index.js +134 -0
  109. package/dist/esm/models/index.js.map +1 -0
  110. package/dist/esm/models/mappers.d.ts +19 -0
  111. package/dist/esm/models/mappers.d.ts.map +1 -0
  112. package/dist/esm/models/mappers.js +1151 -0
  113. package/dist/esm/models/mappers.js.map +1 -0
  114. package/dist/esm/models/parameters.d.ts +16 -0
  115. package/dist/esm/models/parameters.d.ts.map +1 -0
  116. package/dist/esm/models/parameters.js +133 -0
  117. package/dist/esm/models/parameters.js.map +1 -0
  118. package/dist/esm/operations/index.d.ts +3 -0
  119. package/dist/esm/operations/index.d.ts.map +1 -0
  120. package/dist/esm/operations/index.js +10 -0
  121. package/dist/esm/operations/index.js.map +1 -0
  122. package/dist/esm/operations/sparkBatch.d.ts +36 -0
  123. package/dist/esm/operations/sparkBatch.d.ts.map +1 -0
  124. package/dist/esm/operations/sparkBatch.js +122 -0
  125. package/dist/esm/operations/sparkBatch.js.map +1 -0
  126. package/dist/esm/operations/sparkSessionOperations.d.ts +69 -0
  127. package/dist/esm/operations/sparkSessionOperations.d.ts.map +1 -0
  128. package/dist/esm/operations/sparkSessionOperations.js +259 -0
  129. package/dist/esm/operations/sparkSessionOperations.js.map +1 -0
  130. package/dist/esm/operationsInterfaces/index.d.ts +3 -0
  131. package/dist/esm/operationsInterfaces/index.d.ts.map +1 -0
  132. package/dist/esm/operationsInterfaces/index.js +10 -0
  133. package/dist/esm/operationsInterfaces/index.js.map +1 -0
  134. package/dist/esm/operationsInterfaces/sparkBatch.d.ts +28 -0
  135. package/dist/esm/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  136. package/dist/esm/operationsInterfaces/sparkBatch.js +9 -0
  137. package/dist/esm/operationsInterfaces/sparkBatch.js.map +1 -0
  138. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  139. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  140. package/dist/esm/operationsInterfaces/sparkSessionOperations.js +9 -0
  141. package/dist/esm/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  142. package/dist/esm/package.json +3 -0
  143. package/dist/esm/sparkClient.d.ts +24 -0
  144. package/dist/esm/sparkClient.d.ts.map +1 -0
  145. package/dist/esm/sparkClient.js +86 -0
  146. package/dist/esm/sparkClient.js.map +1 -0
  147. package/dist/esm/tracing.d.ts +2 -0
  148. package/dist/esm/tracing.d.ts.map +1 -0
  149. package/dist/esm/tracing.js +14 -0
  150. package/dist/esm/tracing.js.map +1 -0
  151. package/dist/react-native/index.d.ts +4 -0
  152. package/dist/react-native/index.d.ts.map +1 -0
  153. package/dist/react-native/index.js +11 -0
  154. package/dist/react-native/index.js.map +1 -0
  155. package/dist/react-native/models/index.d.ts +548 -0
  156. package/dist/react-native/models/index.d.ts.map +1 -0
  157. package/dist/react-native/models/index.js +134 -0
  158. package/dist/react-native/models/index.js.map +1 -0
  159. package/dist/react-native/models/mappers.d.ts +19 -0
  160. package/dist/react-native/models/mappers.d.ts.map +1 -0
  161. package/dist/react-native/models/mappers.js +1151 -0
  162. package/dist/react-native/models/mappers.js.map +1 -0
  163. package/dist/react-native/models/parameters.d.ts +16 -0
  164. package/dist/react-native/models/parameters.d.ts.map +1 -0
  165. package/dist/react-native/models/parameters.js +133 -0
  166. package/dist/react-native/models/parameters.js.map +1 -0
  167. package/dist/react-native/operations/index.d.ts +3 -0
  168. package/dist/react-native/operations/index.d.ts.map +1 -0
  169. package/dist/react-native/operations/index.js +10 -0
  170. package/dist/react-native/operations/index.js.map +1 -0
  171. package/dist/react-native/operations/sparkBatch.d.ts +36 -0
  172. package/dist/react-native/operations/sparkBatch.d.ts.map +1 -0
  173. package/dist/react-native/operations/sparkBatch.js +122 -0
  174. package/dist/react-native/operations/sparkBatch.js.map +1 -0
  175. package/dist/react-native/operations/sparkSessionOperations.d.ts +69 -0
  176. package/dist/react-native/operations/sparkSessionOperations.d.ts.map +1 -0
  177. package/dist/react-native/operations/sparkSessionOperations.js +259 -0
  178. package/dist/react-native/operations/sparkSessionOperations.js.map +1 -0
  179. package/dist/react-native/operationsInterfaces/index.d.ts +3 -0
  180. package/dist/react-native/operationsInterfaces/index.d.ts.map +1 -0
  181. package/dist/react-native/operationsInterfaces/index.js +10 -0
  182. package/dist/react-native/operationsInterfaces/index.js.map +1 -0
  183. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts +28 -0
  184. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  185. package/dist/react-native/operationsInterfaces/sparkBatch.js +9 -0
  186. package/dist/react-native/operationsInterfaces/sparkBatch.js.map +1 -0
  187. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  188. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  189. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js +9 -0
  190. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  191. package/dist/react-native/package.json +3 -0
  192. package/dist/react-native/sparkClient.d.ts +24 -0
  193. package/dist/react-native/sparkClient.d.ts.map +1 -0
  194. package/dist/react-native/sparkClient.js +86 -0
  195. package/dist/react-native/sparkClient.js.map +1 -0
  196. package/dist/react-native/tracing.d.ts +2 -0
  197. package/dist/react-native/tracing.d.ts.map +1 -0
  198. package/dist/react-native/tracing.js +14 -0
  199. package/dist/react-native/tracing.js.map +1 -0
  200. package/package.json +83 -79
  201. package/CHANGELOG.md +0 -17
  202. package/dist/index.js +0 -1995
  203. package/dist/index.js.map +0 -1
  204. package/dist/index.min.js +0 -1
  205. package/dist/index.min.js.map +0 -1
  206. package/rollup.config.js +0 -3
  207. package/tsconfig.json +0 -19
package/dist/index.js DELETED
@@ -1,1995 +0,0 @@
1
- 'use strict';
2
-
3
- Object.defineProperty(exports, '__esModule', { value: true });
4
-
5
- var coreTracing = require('@azure/core-tracing');
6
- var coreClient = require('@azure/core-client');
7
-
8
- /*
9
- * Copyright (c) Microsoft Corporation.
10
- * Licensed under the MIT License.
11
- *
12
- * Code generated by Microsoft (R) AutoRest Code Generator.
13
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
14
- */
15
- (function (KnownSparkJobType) {
16
- KnownSparkJobType["SparkBatch"] = "SparkBatch";
17
- KnownSparkJobType["SparkSession"] = "SparkSession";
18
- })(exports.KnownSparkJobType || (exports.KnownSparkJobType = {}));
19
- (function (KnownSparkBatchJobResultType) {
20
- KnownSparkBatchJobResultType["Uncertain"] = "Uncertain";
21
- KnownSparkBatchJobResultType["Succeeded"] = "Succeeded";
22
- KnownSparkBatchJobResultType["Failed"] = "Failed";
23
- KnownSparkBatchJobResultType["Cancelled"] = "Cancelled";
24
- })(exports.KnownSparkBatchJobResultType || (exports.KnownSparkBatchJobResultType = {}));
25
- (function (KnownSchedulerCurrentState) {
26
- KnownSchedulerCurrentState["Queued"] = "Queued";
27
- KnownSchedulerCurrentState["Scheduled"] = "Scheduled";
28
- KnownSchedulerCurrentState["Ended"] = "Ended";
29
- })(exports.KnownSchedulerCurrentState || (exports.KnownSchedulerCurrentState = {}));
30
- (function (KnownPluginCurrentState) {
31
- KnownPluginCurrentState["Preparation"] = "Preparation";
32
- KnownPluginCurrentState["ResourceAcquisition"] = "ResourceAcquisition";
33
- KnownPluginCurrentState["Queued"] = "Queued";
34
- KnownPluginCurrentState["Submission"] = "Submission";
35
- KnownPluginCurrentState["Monitoring"] = "Monitoring";
36
- KnownPluginCurrentState["Cleanup"] = "Cleanup";
37
- KnownPluginCurrentState["Ended"] = "Ended";
38
- })(exports.KnownPluginCurrentState || (exports.KnownPluginCurrentState = {}));
39
- (function (KnownSparkErrorSource) {
40
- KnownSparkErrorSource["System"] = "System";
41
- KnownSparkErrorSource["User"] = "User";
42
- KnownSparkErrorSource["Unknown"] = "Unknown";
43
- KnownSparkErrorSource["Dependency"] = "Dependency";
44
- })(exports.KnownSparkErrorSource || (exports.KnownSparkErrorSource = {}));
45
- (function (KnownSparkSessionResultType) {
46
- KnownSparkSessionResultType["Uncertain"] = "Uncertain";
47
- KnownSparkSessionResultType["Succeeded"] = "Succeeded";
48
- KnownSparkSessionResultType["Failed"] = "Failed";
49
- KnownSparkSessionResultType["Cancelled"] = "Cancelled";
50
- })(exports.KnownSparkSessionResultType || (exports.KnownSparkSessionResultType = {}));
51
- (function (KnownSparkStatementLanguageType) {
52
- KnownSparkStatementLanguageType["Spark"] = "spark";
53
- KnownSparkStatementLanguageType["PySpark"] = "pyspark";
54
- KnownSparkStatementLanguageType["DotNetSpark"] = "dotnetspark";
55
- KnownSparkStatementLanguageType["Sql"] = "sql";
56
- })(exports.KnownSparkStatementLanguageType || (exports.KnownSparkStatementLanguageType = {}));
57
-
58
- /*
59
- * Copyright (c) Microsoft Corporation.
60
- * Licensed under the MIT License.
61
- *
62
- * Code generated by Microsoft (R) AutoRest Code Generator.
63
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
64
- */
65
- const createSpan = coreTracing.createSpanFunction({
66
- namespace: "Azure.Synapse.Spark",
67
- packagePrefix: "Microsoft.Synapse"
68
- });
69
-
70
- /*
71
- * Copyright (c) Microsoft Corporation.
72
- * Licensed under the MIT License.
73
- *
74
- * Code generated by Microsoft (R) AutoRest Code Generator.
75
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
76
- */
77
- const SparkBatchJobCollection = {
78
- type: {
79
- name: "Composite",
80
- className: "SparkBatchJobCollection",
81
- modelProperties: {
82
- from: {
83
- serializedName: "from",
84
- required: true,
85
- type: {
86
- name: "Number"
87
- }
88
- },
89
- total: {
90
- serializedName: "total",
91
- required: true,
92
- type: {
93
- name: "Number"
94
- }
95
- },
96
- sessions: {
97
- serializedName: "sessions",
98
- type: {
99
- name: "Sequence",
100
- element: {
101
- type: {
102
- name: "Composite",
103
- className: "SparkBatchJob"
104
- }
105
- }
106
- }
107
- }
108
- }
109
- }
110
- };
111
- const SparkBatchJob = {
112
- type: {
113
- name: "Composite",
114
- className: "SparkBatchJob",
115
- modelProperties: {
116
- livyInfo: {
117
- serializedName: "livyInfo",
118
- type: {
119
- name: "Composite",
120
- className: "SparkBatchJobState"
121
- }
122
- },
123
- name: {
124
- serializedName: "name",
125
- type: {
126
- name: "String"
127
- }
128
- },
129
- workspaceName: {
130
- serializedName: "workspaceName",
131
- type: {
132
- name: "String"
133
- }
134
- },
135
- sparkPoolName: {
136
- serializedName: "sparkPoolName",
137
- type: {
138
- name: "String"
139
- }
140
- },
141
- submitterName: {
142
- serializedName: "submitterName",
143
- type: {
144
- name: "String"
145
- }
146
- },
147
- submitterId: {
148
- serializedName: "submitterId",
149
- type: {
150
- name: "String"
151
- }
152
- },
153
- artifactId: {
154
- serializedName: "artifactId",
155
- type: {
156
- name: "String"
157
- }
158
- },
159
- jobType: {
160
- serializedName: "jobType",
161
- type: {
162
- name: "String"
163
- }
164
- },
165
- result: {
166
- serializedName: "result",
167
- type: {
168
- name: "String"
169
- }
170
- },
171
- scheduler: {
172
- serializedName: "schedulerInfo",
173
- type: {
174
- name: "Composite",
175
- className: "SparkScheduler"
176
- }
177
- },
178
- plugin: {
179
- serializedName: "pluginInfo",
180
- type: {
181
- name: "Composite",
182
- className: "SparkServicePlugin"
183
- }
184
- },
185
- errors: {
186
- serializedName: "errorInfo",
187
- type: {
188
- name: "Sequence",
189
- element: {
190
- type: {
191
- name: "Composite",
192
- className: "SparkServiceError"
193
- }
194
- }
195
- }
196
- },
197
- tags: {
198
- serializedName: "tags",
199
- type: {
200
- name: "Dictionary",
201
- value: { type: { name: "String" } }
202
- }
203
- },
204
- id: {
205
- serializedName: "id",
206
- required: true,
207
- type: {
208
- name: "Number"
209
- }
210
- },
211
- appId: {
212
- serializedName: "appId",
213
- nullable: true,
214
- type: {
215
- name: "String"
216
- }
217
- },
218
- appInfo: {
219
- serializedName: "appInfo",
220
- nullable: true,
221
- type: {
222
- name: "Dictionary",
223
- value: { type: { name: "String" } }
224
- }
225
- },
226
- state: {
227
- serializedName: "state",
228
- type: {
229
- name: "String"
230
- }
231
- },
232
- logLines: {
233
- serializedName: "log",
234
- nullable: true,
235
- type: {
236
- name: "Sequence",
237
- element: {
238
- type: {
239
- name: "String"
240
- }
241
- }
242
- }
243
- }
244
- }
245
- }
246
- };
247
- const SparkBatchJobState = {
248
- type: {
249
- name: "Composite",
250
- className: "SparkBatchJobState",
251
- modelProperties: {
252
- notStartedAt: {
253
- serializedName: "notStartedAt",
254
- nullable: true,
255
- type: {
256
- name: "DateTime"
257
- }
258
- },
259
- startingAt: {
260
- serializedName: "startingAt",
261
- nullable: true,
262
- type: {
263
- name: "DateTime"
264
- }
265
- },
266
- runningAt: {
267
- serializedName: "runningAt",
268
- nullable: true,
269
- type: {
270
- name: "DateTime"
271
- }
272
- },
273
- deadAt: {
274
- serializedName: "deadAt",
275
- nullable: true,
276
- type: {
277
- name: "DateTime"
278
- }
279
- },
280
- successAt: {
281
- serializedName: "successAt",
282
- nullable: true,
283
- type: {
284
- name: "DateTime"
285
- }
286
- },
287
- terminatedAt: {
288
- serializedName: "killedAt",
289
- nullable: true,
290
- type: {
291
- name: "DateTime"
292
- }
293
- },
294
- recoveringAt: {
295
- serializedName: "recoveringAt",
296
- nullable: true,
297
- type: {
298
- name: "DateTime"
299
- }
300
- },
301
- currentState: {
302
- serializedName: "currentState",
303
- type: {
304
- name: "String"
305
- }
306
- },
307
- jobCreationRequest: {
308
- serializedName: "jobCreationRequest",
309
- type: {
310
- name: "Composite",
311
- className: "SparkRequest"
312
- }
313
- }
314
- }
315
- }
316
- };
317
- const SparkRequest = {
318
- type: {
319
- name: "Composite",
320
- className: "SparkRequest",
321
- modelProperties: {
322
- name: {
323
- serializedName: "name",
324
- type: {
325
- name: "String"
326
- }
327
- },
328
- file: {
329
- serializedName: "file",
330
- type: {
331
- name: "String"
332
- }
333
- },
334
- className: {
335
- serializedName: "className",
336
- type: {
337
- name: "String"
338
- }
339
- },
340
- arguments: {
341
- serializedName: "args",
342
- type: {
343
- name: "Sequence",
344
- element: {
345
- type: {
346
- name: "String"
347
- }
348
- }
349
- }
350
- },
351
- jars: {
352
- serializedName: "jars",
353
- type: {
354
- name: "Sequence",
355
- element: {
356
- type: {
357
- name: "String"
358
- }
359
- }
360
- }
361
- },
362
- pythonFiles: {
363
- serializedName: "pyFiles",
364
- type: {
365
- name: "Sequence",
366
- element: {
367
- type: {
368
- name: "String"
369
- }
370
- }
371
- }
372
- },
373
- files: {
374
- serializedName: "files",
375
- type: {
376
- name: "Sequence",
377
- element: {
378
- type: {
379
- name: "String"
380
- }
381
- }
382
- }
383
- },
384
- archives: {
385
- serializedName: "archives",
386
- type: {
387
- name: "Sequence",
388
- element: {
389
- type: {
390
- name: "String"
391
- }
392
- }
393
- }
394
- },
395
- configuration: {
396
- serializedName: "conf",
397
- type: {
398
- name: "Dictionary",
399
- value: { type: { name: "String" } }
400
- }
401
- },
402
- driverMemory: {
403
- serializedName: "driverMemory",
404
- type: {
405
- name: "String"
406
- }
407
- },
408
- driverCores: {
409
- serializedName: "driverCores",
410
- type: {
411
- name: "Number"
412
- }
413
- },
414
- executorMemory: {
415
- serializedName: "executorMemory",
416
- type: {
417
- name: "String"
418
- }
419
- },
420
- executorCores: {
421
- serializedName: "executorCores",
422
- type: {
423
- name: "Number"
424
- }
425
- },
426
- executorCount: {
427
- serializedName: "numExecutors",
428
- type: {
429
- name: "Number"
430
- }
431
- }
432
- }
433
- }
434
- };
435
- const SparkScheduler = {
436
- type: {
437
- name: "Composite",
438
- className: "SparkScheduler",
439
- modelProperties: {
440
- submittedAt: {
441
- serializedName: "submittedAt",
442
- nullable: true,
443
- type: {
444
- name: "DateTime"
445
- }
446
- },
447
- scheduledAt: {
448
- serializedName: "scheduledAt",
449
- nullable: true,
450
- type: {
451
- name: "DateTime"
452
- }
453
- },
454
- endedAt: {
455
- serializedName: "endedAt",
456
- nullable: true,
457
- type: {
458
- name: "DateTime"
459
- }
460
- },
461
- cancellationRequestedAt: {
462
- serializedName: "cancellationRequestedAt",
463
- nullable: true,
464
- type: {
465
- name: "DateTime"
466
- }
467
- },
468
- currentState: {
469
- serializedName: "currentState",
470
- type: {
471
- name: "String"
472
- }
473
- }
474
- }
475
- }
476
- };
477
- const SparkServicePlugin = {
478
- type: {
479
- name: "Composite",
480
- className: "SparkServicePlugin",
481
- modelProperties: {
482
- preparationStartedAt: {
483
- serializedName: "preparationStartedAt",
484
- nullable: true,
485
- type: {
486
- name: "DateTime"
487
- }
488
- },
489
- resourceAcquisitionStartedAt: {
490
- serializedName: "resourceAcquisitionStartedAt",
491
- nullable: true,
492
- type: {
493
- name: "DateTime"
494
- }
495
- },
496
- submissionStartedAt: {
497
- serializedName: "submissionStartedAt",
498
- nullable: true,
499
- type: {
500
- name: "DateTime"
501
- }
502
- },
503
- monitoringStartedAt: {
504
- serializedName: "monitoringStartedAt",
505
- nullable: true,
506
- type: {
507
- name: "DateTime"
508
- }
509
- },
510
- cleanupStartedAt: {
511
- serializedName: "cleanupStartedAt",
512
- nullable: true,
513
- type: {
514
- name: "DateTime"
515
- }
516
- },
517
- currentState: {
518
- serializedName: "currentState",
519
- type: {
520
- name: "String"
521
- }
522
- }
523
- }
524
- }
525
- };
526
- const SparkServiceError = {
527
- type: {
528
- name: "Composite",
529
- className: "SparkServiceError",
530
- modelProperties: {
531
- message: {
532
- serializedName: "message",
533
- type: {
534
- name: "String"
535
- }
536
- },
537
- errorCode: {
538
- serializedName: "errorCode",
539
- type: {
540
- name: "String"
541
- }
542
- },
543
- source: {
544
- serializedName: "source",
545
- type: {
546
- name: "String"
547
- }
548
- }
549
- }
550
- }
551
- };
552
- const SparkBatchJobOptions = {
553
- type: {
554
- name: "Composite",
555
- className: "SparkBatchJobOptions",
556
- modelProperties: {
557
- tags: {
558
- serializedName: "tags",
559
- type: {
560
- name: "Dictionary",
561
- value: { type: { name: "String" } }
562
- }
563
- },
564
- artifactId: {
565
- serializedName: "artifactId",
566
- type: {
567
- name: "String"
568
- }
569
- },
570
- name: {
571
- serializedName: "name",
572
- required: true,
573
- type: {
574
- name: "String"
575
- }
576
- },
577
- file: {
578
- serializedName: "file",
579
- required: true,
580
- type: {
581
- name: "String"
582
- }
583
- },
584
- className: {
585
- serializedName: "className",
586
- type: {
587
- name: "String"
588
- }
589
- },
590
- arguments: {
591
- serializedName: "args",
592
- type: {
593
- name: "Sequence",
594
- element: {
595
- type: {
596
- name: "String"
597
- }
598
- }
599
- }
600
- },
601
- jars: {
602
- serializedName: "jars",
603
- type: {
604
- name: "Sequence",
605
- element: {
606
- type: {
607
- name: "String"
608
- }
609
- }
610
- }
611
- },
612
- pythonFiles: {
613
- serializedName: "pyFiles",
614
- type: {
615
- name: "Sequence",
616
- element: {
617
- type: {
618
- name: "String"
619
- }
620
- }
621
- }
622
- },
623
- files: {
624
- serializedName: "files",
625
- type: {
626
- name: "Sequence",
627
- element: {
628
- type: {
629
- name: "String"
630
- }
631
- }
632
- }
633
- },
634
- archives: {
635
- serializedName: "archives",
636
- type: {
637
- name: "Sequence",
638
- element: {
639
- type: {
640
- name: "String"
641
- }
642
- }
643
- }
644
- },
645
- configuration: {
646
- serializedName: "conf",
647
- type: {
648
- name: "Dictionary",
649
- value: { type: { name: "String" } }
650
- }
651
- },
652
- driverMemory: {
653
- serializedName: "driverMemory",
654
- type: {
655
- name: "String"
656
- }
657
- },
658
- driverCores: {
659
- serializedName: "driverCores",
660
- type: {
661
- name: "Number"
662
- }
663
- },
664
- executorMemory: {
665
- serializedName: "executorMemory",
666
- type: {
667
- name: "String"
668
- }
669
- },
670
- executorCores: {
671
- serializedName: "executorCores",
672
- type: {
673
- name: "Number"
674
- }
675
- },
676
- executorCount: {
677
- serializedName: "numExecutors",
678
- type: {
679
- name: "Number"
680
- }
681
- }
682
- }
683
- }
684
- };
685
- const SparkSessionCollection = {
686
- type: {
687
- name: "Composite",
688
- className: "SparkSessionCollection",
689
- modelProperties: {
690
- from: {
691
- serializedName: "from",
692
- required: true,
693
- type: {
694
- name: "Number"
695
- }
696
- },
697
- total: {
698
- serializedName: "total",
699
- required: true,
700
- type: {
701
- name: "Number"
702
- }
703
- },
704
- sessions: {
705
- serializedName: "sessions",
706
- type: {
707
- name: "Sequence",
708
- element: {
709
- type: {
710
- name: "Composite",
711
- className: "SparkSession"
712
- }
713
- }
714
- }
715
- }
716
- }
717
- }
718
- };
719
- const SparkSession = {
720
- type: {
721
- name: "Composite",
722
- className: "SparkSession",
723
- modelProperties: {
724
- livyInfo: {
725
- serializedName: "livyInfo",
726
- type: {
727
- name: "Composite",
728
- className: "SparkSessionState"
729
- }
730
- },
731
- name: {
732
- serializedName: "name",
733
- type: {
734
- name: "String"
735
- }
736
- },
737
- workspaceName: {
738
- serializedName: "workspaceName",
739
- type: {
740
- name: "String"
741
- }
742
- },
743
- sparkPoolName: {
744
- serializedName: "sparkPoolName",
745
- type: {
746
- name: "String"
747
- }
748
- },
749
- submitterName: {
750
- serializedName: "submitterName",
751
- type: {
752
- name: "String"
753
- }
754
- },
755
- submitterId: {
756
- serializedName: "submitterId",
757
- type: {
758
- name: "String"
759
- }
760
- },
761
- artifactId: {
762
- serializedName: "artifactId",
763
- type: {
764
- name: "String"
765
- }
766
- },
767
- jobType: {
768
- serializedName: "jobType",
769
- type: {
770
- name: "String"
771
- }
772
- },
773
- result: {
774
- serializedName: "result",
775
- type: {
776
- name: "String"
777
- }
778
- },
779
- scheduler: {
780
- serializedName: "schedulerInfo",
781
- type: {
782
- name: "Composite",
783
- className: "SparkScheduler"
784
- }
785
- },
786
- plugin: {
787
- serializedName: "pluginInfo",
788
- type: {
789
- name: "Composite",
790
- className: "SparkServicePlugin"
791
- }
792
- },
793
- errors: {
794
- serializedName: "errorInfo",
795
- type: {
796
- name: "Sequence",
797
- element: {
798
- type: {
799
- name: "Composite",
800
- className: "SparkServiceError"
801
- }
802
- }
803
- }
804
- },
805
- tags: {
806
- serializedName: "tags",
807
- type: {
808
- name: "Dictionary",
809
- value: { type: { name: "String" } }
810
- }
811
- },
812
- id: {
813
- serializedName: "id",
814
- required: true,
815
- type: {
816
- name: "Number"
817
- }
818
- },
819
- appId: {
820
- serializedName: "appId",
821
- nullable: true,
822
- type: {
823
- name: "String"
824
- }
825
- },
826
- appInfo: {
827
- serializedName: "appInfo",
828
- nullable: true,
829
- type: {
830
- name: "Dictionary",
831
- value: { type: { name: "String" } }
832
- }
833
- },
834
- state: {
835
- serializedName: "state",
836
- type: {
837
- name: "String"
838
- }
839
- },
840
- logLines: {
841
- serializedName: "log",
842
- nullable: true,
843
- type: {
844
- name: "Sequence",
845
- element: {
846
- type: {
847
- name: "String"
848
- }
849
- }
850
- }
851
- }
852
- }
853
- }
854
- };
855
/**
 * Serialization mapper for SparkSessionState: the livy session lifecycle
 * timestamps plus the current state and the originating request.
 */
const SparkSessionState = {
    type: {
        name: "Composite",
        className: "SparkSessionState",
        modelProperties: {
            notStartedAt: { serializedName: "notStartedAt", nullable: true, type: { name: "DateTime" } },
            startingAt: { serializedName: "startingAt", nullable: true, type: { name: "DateTime" } },
            idleAt: { serializedName: "idleAt", nullable: true, type: { name: "DateTime" } },
            deadAt: { serializedName: "deadAt", nullable: true, type: { name: "DateTime" } },
            shuttingDownAt: { serializedName: "shuttingDownAt", nullable: true, type: { name: "DateTime" } },
            // Wire name intentionally differs from the client-side property name.
            terminatedAt: { serializedName: "killedAt", nullable: true, type: { name: "DateTime" } },
            recoveringAt: { serializedName: "recoveringAt", nullable: true, type: { name: "DateTime" } },
            busyAt: { serializedName: "busyAt", nullable: true, type: { name: "DateTime" } },
            errorAt: { serializedName: "errorAt", nullable: true, type: { name: "DateTime" } },
            currentState: { serializedName: "currentState", type: { name: "String" } },
            jobCreationRequest: {
                serializedName: "jobCreationRequest",
                type: { name: "Composite", className: "SparkRequest" }
            }
        }
    }
};
939
/**
 * Serialization mapper for SparkSessionOptions: the livy-compatible payload
 * used when creating a new Spark session.
 */
const SparkSessionOptions = {
    type: {
        name: "Composite",
        className: "SparkSessionOptions",
        modelProperties: {
            tags: { serializedName: "tags", type: { name: "Dictionary", value: { type: { name: "String" } } } },
            artifactId: { serializedName: "artifactId", type: { name: "String" } },
            name: { serializedName: "name", required: true, type: { name: "String" } },
            file: { serializedName: "file", type: { name: "String" } },
            className: { serializedName: "className", type: { name: "String" } },
            // The following four client names map onto livy's shorter wire names.
            arguments: { serializedName: "args", type: { name: "Sequence", element: { type: { name: "String" } } } },
            jars: { serializedName: "jars", type: { name: "Sequence", element: { type: { name: "String" } } } },
            pythonFiles: { serializedName: "pyFiles", type: { name: "Sequence", element: { type: { name: "String" } } } },
            files: { serializedName: "files", type: { name: "Sequence", element: { type: { name: "String" } } } },
            archives: { serializedName: "archives", type: { name: "Sequence", element: { type: { name: "String" } } } },
            configuration: { serializedName: "conf", type: { name: "Dictionary", value: { type: { name: "String" } } } },
            driverMemory: { serializedName: "driverMemory", type: { name: "String" } },
            driverCores: { serializedName: "driverCores", type: { name: "Number" } },
            executorMemory: { serializedName: "executorMemory", type: { name: "String" } },
            executorCores: { serializedName: "executorCores", type: { name: "Number" } },
            executorCount: { serializedName: "numExecutors", type: { name: "Number" } }
        }
    }
};
1071
/** Serialization mapper for a page of statements within a Spark session. */
const SparkStatementCollection = {
    type: {
        name: "Composite",
        className: "SparkStatementCollection",
        modelProperties: {
            total: { serializedName: "total_statements", required: true, type: { name: "Number" } },
            statements: {
                serializedName: "statements",
                type: {
                    name: "Sequence",
                    element: { type: { name: "Composite", className: "SparkStatement" } }
                }
            }
        }
    }
};
1098
/** Serialization mapper for a single statement executed inside a Spark session. */
const SparkStatement = {
    type: {
        name: "Composite",
        className: "SparkStatement",
        modelProperties: {
            id: { serializedName: "id", required: true, type: { name: "Number" } },
            code: { serializedName: "code", type: { name: "String" } },
            state: { serializedName: "state", type: { name: "String" } },
            output: {
                serializedName: "output",
                type: { name: "Composite", className: "SparkStatementOutput" }
            }
        }
    }
};
1132
/**
 * Serialization mapper for the output of a statement; the error fields use
 * livy's Jupyter-style wire names (ename/evalue/traceback).
 */
const SparkStatementOutput = {
    type: {
        name: "Composite",
        className: "SparkStatementOutput",
        modelProperties: {
            status: { serializedName: "status", type: { name: "String" } },
            executionCount: { serializedName: "execution_count", required: true, type: { name: "Number" } },
            data: { serializedName: "data", type: { name: "Dictionary", value: { type: { name: "any" } } } },
            errorName: { serializedName: "ename", nullable: true, type: { name: "String" } },
            errorValue: { serializedName: "evalue", nullable: true, type: { name: "String" } },
            traceback: {
                serializedName: "traceback",
                nullable: true,
                type: { name: "Sequence", element: { type: { name: "String" } } }
            }
        }
    }
};
1186
/** Serialization mapper for the payload used to submit a new statement. */
const SparkStatementOptions = {
    type: {
        name: "Composite",
        className: "SparkStatementOptions",
        modelProperties: {
            code: { serializedName: "code", type: { name: "String" } },
            kind: { serializedName: "kind", type: { name: "String" } }
        }
    }
};
1206
/** Serialization mapper for the response of a statement-cancel call. */
const SparkStatementCancellationResult = {
    type: {
        name: "Composite",
        className: "SparkStatementCancellationResult",
        modelProperties: {
            message: { serializedName: "msg", type: { name: "String" } }
        }
    }
};
1220
-
1221
// Frozen, null-prototype namespace bundling every generated mapper above.
// It is handed to coreClient.createSerializer(...) so operation specs can
// resolve composite types (e.g. "SparkStatement") by className at runtime.
var Mappers = /*#__PURE__*/Object.freeze({
    __proto__: null,
    SparkBatchJobCollection: SparkBatchJobCollection,
    SparkBatchJob: SparkBatchJob,
    SparkBatchJobState: SparkBatchJobState,
    SparkRequest: SparkRequest,
    SparkScheduler: SparkScheduler,
    SparkServicePlugin: SparkServicePlugin,
    SparkServiceError: SparkServiceError,
    SparkBatchJobOptions: SparkBatchJobOptions,
    SparkSessionCollection: SparkSessionCollection,
    SparkSession: SparkSession,
    SparkSessionState: SparkSessionState,
    SparkSessionOptions: SparkSessionOptions,
    SparkStatementCollection: SparkStatementCollection,
    SparkStatement: SparkStatement,
    SparkStatementOutput: SparkStatementOutput,
    SparkStatementOptions: SparkStatementOptions,
    SparkStatementCancellationResult: SparkStatementCancellationResult
});
1241
-
1242
- /*
1243
- * Copyright (c) Microsoft Corporation.
1244
- * Licensed under the MIT License.
1245
- *
1246
- * Code generated by Microsoft (R) AutoRest Code Generator.
1247
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1248
- */
1249
/** Constant Accept header, always "application/json". */
const accept = {
    parameterPath: "accept",
    mapper: {
        defaultValue: "application/json",
        isConstant: true,
        serializedName: "Accept",
        type: { name: "String" }
    }
};
/** Workspace endpoint URL segment; pre-encoded, substituted verbatim. */
const endpoint = {
    parameterPath: "endpoint",
    mapper: { serializedName: "endpoint", required: true, type: { name: "String" } },
    skipEncoding: true
};
/** Livy API version path segment. */
const livyApiVersion = {
    parameterPath: "livyApiVersion",
    mapper: { serializedName: "livyApiVersion", required: true, type: { name: "String" } },
    skipEncoding: true
};
/** Target Spark pool name path segment. */
const sparkPoolName = {
    parameterPath: "sparkPoolName",
    mapper: { serializedName: "sparkPoolName", required: true, type: { name: "String" } },
    skipEncoding: true
};
/** Optional paging offset ("from" is reserved, hence the client name). */
const fromParam = {
    parameterPath: ["options", "fromParam"],
    mapper: { serializedName: "from", type: { name: "Number" } }
};
/** Optional paging size. */
const size = {
    parameterPath: ["options", "size"],
    mapper: { serializedName: "size", type: { name: "Number" } }
};
/** Optional flag to request detailed job/session responses. */
const detailed = {
    parameterPath: ["options", "detailed"],
    mapper: { serializedName: "detailed", type: { name: "Boolean" } }
};
/** Constant Content-Type header for JSON request bodies. */
const contentType = {
    parameterPath: ["options", "contentType"],
    mapper: {
        defaultValue: "application/json",
        isConstant: true,
        serializedName: "Content-Type",
        type: { name: "String" }
    }
};
1331
/** Request body: livy-compatible batch job creation payload. */
const sparkBatchJobOptions = {
    parameterPath: "sparkBatchJobOptions",
    mapper: SparkBatchJobOptions
};
/** Path parameter identifying a batch job. */
const batchId = {
    parameterPath: "batchId",
    mapper: { serializedName: "batchId", required: true, type: { name: "Number" } }
};
/** Request body: livy-compatible session creation payload. */
const sparkSessionOptions = {
    parameterPath: "sparkSessionOptions",
    mapper: SparkSessionOptions
};
/** Path parameter identifying a session. */
const sessionId = {
    parameterPath: "sessionId",
    mapper: { serializedName: "sessionId", required: true, type: { name: "Number" } }
};
/** Request body: statement submission payload. */
const sparkStatementOptions = {
    parameterPath: "sparkStatementOptions",
    mapper: SparkStatementOptions
};
/** Path parameter identifying a statement within a session. */
const statementId = {
    parameterPath: "statementId",
    mapper: { serializedName: "statementId", required: true, type: { name: "Number" } }
};
1373
-
1374
- /*
1375
- * Copyright (c) Microsoft Corporation.
1376
- * Licensed under the MIT License.
1377
- *
1378
- * Code generated by Microsoft (R) AutoRest Code Generator.
1379
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1380
- */
1381
/** Class representing a SparkBatch. */
class SparkBatchImpl {
    /**
     * Initialize a new instance of the class SparkBatch class.
     * @param client Reference to the service client
     */
    constructor(client) {
        this.client = client;
    }
    /**
     * List all spark batch jobs which are running under a particular spark pool.
     * @param options The options parameters.
     */
    async getSparkBatchJobs(options) {
        const { span } = createSpan("SparkClient-getSparkBatchJobs", options || {});
        try {
            const result = await this.client.sendOperationRequest({ options }, getSparkBatchJobsOperationSpec);
            return result;
        }
        catch (error) {
            // An exception must surface as an ERROR span status; the previous
            // UNSET status hid failures from distributed traces.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Create new spark batch job.
     * @param sparkBatchJobOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkBatchJob(sparkBatchJobOptions, options) {
        const { span } = createSpan("SparkClient-createSparkBatchJob", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sparkBatchJobOptions, options }, createSparkBatchJobOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Gets a single spark batch job.
     * @param batchId Identifier for the batch job.
     * @param options The options parameters.
     */
    async getSparkBatchJob(batchId, options) {
        const { span } = createSpan("SparkClient-getSparkBatchJob", options || {});
        try {
            const result = await this.client.sendOperationRequest({ batchId, options }, getSparkBatchJobOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Cancels a running spark batch job.
     * @param batchId Identifier for the batch job.
     * @param options The options parameters.
     */
    async cancelSparkBatchJob(batchId, options) {
        const { span } = createSpan("SparkClient-cancelSparkBatchJob", options || {});
        try {
            const result = await this.client.sendOperationRequest({ batchId, options }, cancelSparkBatchJobOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
}
1478
// Operation Specifications
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
/** GET .../batches — list batch jobs, optionally paged via from/size. */
const getSparkBatchJobsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkBatchJobCollection } },
    queryParameters: [fromParam, size, detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept],
    serializer
};
/** POST .../batches — submit a new batch job. */
const createSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkBatchJob } },
    requestBody: sparkBatchJobOptions,
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer
};
/** GET .../batches/{batchId} — fetch a single batch job. */
const getSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkBatchJob } },
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, batchId],
    headerParameters: [accept],
    serializer
};
/** DELETE .../batches/{batchId} — cancel a running batch job (empty body). */
const cancelSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}",
    httpMethod: "DELETE",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, batchId],
    serializer
};
1546
-
1547
- /*
1548
- * Copyright (c) Microsoft Corporation.
1549
- * Licensed under the MIT License.
1550
- *
1551
- * Code generated by Microsoft (R) AutoRest Code Generator.
1552
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1553
- */
1554
/** Class representing a SparkSessionOperations. */
class SparkSessionOperationsImpl {
    /**
     * Initialize a new instance of the class SparkSessionOperations class.
     * @param client Reference to the service client
     */
    constructor(client) {
        this.client = client;
    }
    /**
     * List all spark sessions which are running under a particular spark pool.
     * @param options The options parameters.
     */
    async getSparkSessions(options) {
        const { span } = createSpan("SparkClient-getSparkSessions", options || {});
        try {
            const result = await this.client.sendOperationRequest({ options }, getSparkSessionsOperationSpec);
            return result;
        }
        catch (error) {
            // An exception must surface as an ERROR span status; the previous
            // UNSET status hid failures from distributed traces.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Create new spark session.
     * @param sparkSessionOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkSession(sparkSessionOptions, options) {
        const { span } = createSpan("SparkClient-createSparkSession", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sparkSessionOptions, options }, createSparkSessionOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Gets a single spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async getSparkSession(sessionId, options) {
        const { span } = createSpan("SparkClient-getSparkSession", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, options }, getSparkSessionOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Cancels a running spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async cancelSparkSession(sessionId, options) {
        const { span } = createSpan("SparkClient-cancelSparkSession", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, options }, cancelSparkSessionOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Sends a keep alive call to the current session to reset the session timeout.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async resetSparkSessionTimeout(sessionId, options) {
        const { span } = createSpan("SparkClient-resetSparkSessionTimeout", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, options }, resetSparkSessionTimeoutOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Gets a list of statements within a spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async getSparkStatements(sessionId, options) {
        const { span } = createSpan("SparkClient-getSparkStatements", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, options }, getSparkStatementsOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Create statement within a spark session.
     * @param sessionId Identifier for the session.
     * @param sparkStatementOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkStatement(sessionId, sparkStatementOptions, options) {
        const { span } = createSpan("SparkClient-createSparkStatement", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, sparkStatementOptions, options }, createSparkStatementOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Gets a single statement within a spark session.
     * @param sessionId Identifier for the session.
     * @param statementId Identifier for the statement.
     * @param options The options parameters.
     */
    async getSparkStatement(sessionId, statementId, options) {
        const { span } = createSpan("SparkClient-getSparkStatement", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, statementId, options }, getSparkStatementOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * Kill a statement within a session.
     * @param sessionId Identifier for the session.
     * @param statementId Identifier for the statement.
     * @param options The options parameters.
     */
    async cancelSparkStatement(sessionId, statementId, options) {
        const { span } = createSpan("SparkClient-cancelSparkStatement", options || {});
        try {
            const result = await this.client.sendOperationRequest({ sessionId, statementId, options }, cancelSparkStatementOperationSpec);
            return result;
        }
        catch (error) {
            // Report failures with ERROR status so traces reflect the fault.
            span.setStatus({
                code: coreTracing.SpanStatusCode.ERROR,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
}
1764
// Operation Specifications
const serializer$1 = coreClient.createSerializer(Mappers, /* isXml */ false);
/** GET .../sessions — list sessions, optionally paged via from/size. */
const getSparkSessionsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkSessionCollection } },
    queryParameters: [fromParam, size, detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept],
    serializer: serializer$1
};
/** POST .../sessions — create a new session. */
const createSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkSession } },
    requestBody: sparkSessionOptions,
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer: serializer$1
};
/** GET .../sessions/{sessionId} — fetch a single session. */
const getSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkSession } },
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept],
    serializer: serializer$1
};
/** DELETE .../sessions/{sessionId} — cancel a session (empty body). */
const cancelSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
    httpMethod: "DELETE",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    serializer: serializer$1
};
/** PUT .../reset-timeout — keep-alive for an idle session (empty body). */
const resetSparkSessionTimeoutOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout",
    httpMethod: "PUT",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    serializer: serializer$1
};
/** GET .../statements — list statements within a session. */
const getSparkStatementsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkStatementCollection } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept],
    serializer: serializer$1
};
/** POST .../statements — submit a statement to a session. */
const createSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkStatement } },
    requestBody: sparkStatementOptions,
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer: serializer$1
};
/** GET .../statements/{statementId} — fetch a single statement. */
const getSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkStatement } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId, statementId],
    headerParameters: [accept],
    serializer: serializer$1
};
/** POST .../statements/{statementId}/cancel — kill a running statement. */
const cancelSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkStatementCancellationResult } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId, statementId],
    headerParameters: [accept],
    serializer: serializer$1
};
1916
-
1917
- /*
1918
- * Copyright (c) Microsoft Corporation.
1919
- * Licensed under the MIT License.
1920
- *
1921
- * Code generated by Microsoft (R) AutoRest Code Generator.
1922
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1923
- */
1924
class SparkClientContext extends coreClient.ServiceClient {
    /**
     * Initializes a new instance of the SparkClientContext class.
     * @param credentials Subscription credentials which uniquely identify client subscription.
     * @param endpoint The workspace development endpoint, for example
     * https://myworkspace.dev.azuresynapse.net.
     * @param sparkPoolName Name of the spark pool.
     * @param options The parameter options
     */
    constructor(credentials, endpoint, sparkPoolName, options) {
        // Validate the required positional arguments before doing anything else.
        const requiredArgs = { credentials, endpoint, sparkPoolName };
        for (const argName of ["credentials", "endpoint", "sparkPoolName"]) {
            if (requiredArgs[argName] === undefined) {
                throw new Error(`'${argName}' cannot be null`);
            }
        }
        // Initializing default values for options
        if (!options) {
            options = {};
        }
        const packageDetails = `azsdk-js-synapse-spark/1.0.0-beta.3`;
        const callerPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix;
        const userAgentPrefix = callerPrefix
            ? `${callerPrefix} ${packageDetails}`
            : `${packageDetails}`;
        if (!options.credentialScopes) {
            options.credentialScopes = ["https://dev.azuresynapse.net/.default"];
        }
        // Caller options win over defaults; the userAgent/baseUri overrides win last.
        const optionsWithDefaults = Object.assign({}, {
            requestContentType: "application/json; charset=utf-8",
            credential: credentials
        }, options, {
            userAgentOptions: { userAgentPrefix },
            baseUri: options.endpoint || "{endpoint}"
        });
        super(optionsWithDefaults);
        // Parameter assignments
        this.endpoint = endpoint;
        this.sparkPoolName = sparkPoolName;
        // Assigning values to Constant parameters
        this.livyApiVersion = options.livyApiVersion || "2019-11-01-preview";
    }
}
1969
-
1970
- /*
1971
- * Copyright (c) Microsoft Corporation.
1972
- * Licensed under the MIT License.
1973
- *
1974
- * Code generated by Microsoft (R) AutoRest Code Generator.
1975
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1976
- */
1977
/** Client entry point exposing the Spark batch and session operation groups. */
class SparkClient extends SparkClientContext {
    /**
     * Initializes a new instance of the SparkClient class.
     * @param credentials Subscription credentials which uniquely identify client subscription.
     * @param endpoint The workspace development endpoint, for example
     * https://myworkspace.dev.azuresynapse.net.
     * @param sparkPoolName Name of the spark pool.
     * @param options The parameter options
     */
    constructor(credentials, endpoint, sparkPoolName, options) {
        super(credentials, endpoint, sparkPoolName, options);
        // Operation groups, each holding a back-reference to this client.
        this.sparkBatch = new SparkBatchImpl(this);
        this.sparkSessionOperations = new SparkSessionOperationsImpl(this);
    }
}
1992
-
1993
// Public CommonJS surface of this bundle.
Object.assign(exports, { SparkClient, SparkClientContext });
//# sourceMappingURL=index.js.map