@azure/synapse-spark 1.0.0-beta.4 → 1.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +14 -13
  3. package/dist/browser/index.d.ts +4 -0
  4. package/dist/browser/index.d.ts.map +1 -0
  5. package/dist/browser/index.js +11 -0
  6. package/dist/browser/index.js.map +1 -0
  7. package/{types/synapse-spark.d.ts → dist/browser/models/index.d.ts} +548 -680
  8. package/dist/browser/models/index.d.ts.map +1 -0
  9. package/dist/browser/models/index.js +134 -0
  10. package/dist/browser/models/index.js.map +1 -0
  11. package/dist/browser/models/mappers.d.ts +19 -0
  12. package/dist/browser/models/mappers.d.ts.map +1 -0
  13. package/dist/browser/models/mappers.js +1151 -0
  14. package/dist/browser/models/mappers.js.map +1 -0
  15. package/dist/browser/models/parameters.d.ts +16 -0
  16. package/dist/browser/models/parameters.d.ts.map +1 -0
  17. package/dist/browser/models/parameters.js +133 -0
  18. package/dist/browser/models/parameters.js.map +1 -0
  19. package/dist/browser/operations/index.d.ts +3 -0
  20. package/dist/browser/operations/index.d.ts.map +1 -0
  21. package/dist/browser/operations/index.js +10 -0
  22. package/dist/browser/operations/index.js.map +1 -0
  23. package/dist/browser/operations/sparkBatch.d.ts +36 -0
  24. package/dist/browser/operations/sparkBatch.d.ts.map +1 -0
  25. package/dist/browser/operations/sparkBatch.js +122 -0
  26. package/dist/browser/operations/sparkBatch.js.map +1 -0
  27. package/dist/browser/operations/sparkSessionOperations.d.ts +69 -0
  28. package/dist/browser/operations/sparkSessionOperations.d.ts.map +1 -0
  29. package/dist/browser/operations/sparkSessionOperations.js +259 -0
  30. package/dist/browser/operations/sparkSessionOperations.js.map +1 -0
  31. package/dist/browser/operationsInterfaces/index.d.ts +3 -0
  32. package/dist/browser/operationsInterfaces/index.d.ts.map +1 -0
  33. package/dist/browser/operationsInterfaces/index.js +10 -0
  34. package/dist/browser/operationsInterfaces/index.js.map +1 -0
  35. package/dist/browser/operationsInterfaces/sparkBatch.d.ts +28 -0
  36. package/dist/browser/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  37. package/dist/browser/operationsInterfaces/sparkBatch.js +9 -0
  38. package/dist/browser/operationsInterfaces/sparkBatch.js.map +1 -0
  39. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  40. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  41. package/dist/browser/operationsInterfaces/sparkSessionOperations.js +9 -0
  42. package/dist/browser/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  43. package/dist/browser/package.json +3 -0
  44. package/dist/browser/sparkClient.d.ts +24 -0
  45. package/dist/browser/sparkClient.d.ts.map +1 -0
  46. package/dist/browser/sparkClient.js +86 -0
  47. package/dist/browser/sparkClient.js.map +1 -0
  48. package/dist/browser/tracing.d.ts +2 -0
  49. package/dist/browser/tracing.d.ts.map +1 -0
  50. package/dist/browser/tracing.js +14 -0
  51. package/dist/browser/tracing.js.map +1 -0
  52. package/dist/commonjs/index.d.ts +4 -0
  53. package/dist/commonjs/index.d.ts.map +1 -0
  54. package/dist/commonjs/index.js +16 -0
  55. package/dist/commonjs/index.js.map +1 -0
  56. package/dist/commonjs/models/index.d.ts +548 -0
  57. package/dist/commonjs/models/index.d.ts.map +1 -0
  58. package/dist/commonjs/models/index.js +137 -0
  59. package/dist/commonjs/models/index.js.map +1 -0
  60. package/dist/commonjs/models/mappers.d.ts +19 -0
  61. package/dist/commonjs/models/mappers.d.ts.map +1 -0
  62. package/dist/commonjs/models/mappers.js +1154 -0
  63. package/dist/commonjs/models/mappers.js.map +1 -0
  64. package/dist/commonjs/models/parameters.d.ts +16 -0
  65. package/dist/commonjs/models/parameters.d.ts.map +1 -0
  66. package/dist/commonjs/models/parameters.js +136 -0
  67. package/dist/commonjs/models/parameters.js.map +1 -0
  68. package/dist/commonjs/operations/index.d.ts +3 -0
  69. package/dist/commonjs/operations/index.d.ts.map +1 -0
  70. package/dist/commonjs/operations/index.js +13 -0
  71. package/dist/commonjs/operations/index.js.map +1 -0
  72. package/dist/commonjs/operations/sparkBatch.d.ts +36 -0
  73. package/dist/commonjs/operations/sparkBatch.d.ts.map +1 -0
  74. package/dist/commonjs/operations/sparkBatch.js +127 -0
  75. package/dist/commonjs/operations/sparkBatch.js.map +1 -0
  76. package/dist/commonjs/operations/sparkSessionOperations.d.ts +69 -0
  77. package/dist/commonjs/operations/sparkSessionOperations.d.ts.map +1 -0
  78. package/dist/commonjs/operations/sparkSessionOperations.js +264 -0
  79. package/dist/commonjs/operations/sparkSessionOperations.js.map +1 -0
  80. package/dist/commonjs/operationsInterfaces/index.d.ts +3 -0
  81. package/dist/commonjs/operationsInterfaces/index.d.ts.map +1 -0
  82. package/dist/commonjs/operationsInterfaces/index.js +13 -0
  83. package/dist/commonjs/operationsInterfaces/index.js.map +1 -0
  84. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts +28 -0
  85. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  86. package/dist/commonjs/operationsInterfaces/sparkBatch.js +10 -0
  87. package/dist/commonjs/operationsInterfaces/sparkBatch.js.map +1 -0
  88. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  89. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  90. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js +10 -0
  91. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  92. package/dist/commonjs/package.json +3 -0
  93. package/dist/commonjs/sparkClient.d.ts +24 -0
  94. package/dist/commonjs/sparkClient.d.ts.map +1 -0
  95. package/dist/commonjs/sparkClient.js +91 -0
  96. package/dist/commonjs/sparkClient.js.map +1 -0
  97. package/dist/commonjs/tracing.d.ts +2 -0
  98. package/dist/commonjs/tracing.d.ts.map +1 -0
  99. package/dist/commonjs/tracing.js +17 -0
  100. package/dist/commonjs/tracing.js.map +1 -0
  101. package/dist/commonjs/tsdoc-metadata.json +11 -0
  102. package/dist/esm/index.d.ts +4 -0
  103. package/dist/esm/index.d.ts.map +1 -0
  104. package/dist/esm/index.js +11 -0
  105. package/dist/esm/index.js.map +1 -0
  106. package/dist/esm/models/index.d.ts +548 -0
  107. package/dist/esm/models/index.d.ts.map +1 -0
  108. package/dist/esm/models/index.js +134 -0
  109. package/dist/esm/models/index.js.map +1 -0
  110. package/dist/esm/models/mappers.d.ts +19 -0
  111. package/dist/esm/models/mappers.d.ts.map +1 -0
  112. package/dist/esm/models/mappers.js +1151 -0
  113. package/dist/esm/models/mappers.js.map +1 -0
  114. package/dist/esm/models/parameters.d.ts +16 -0
  115. package/dist/esm/models/parameters.d.ts.map +1 -0
  116. package/dist/esm/models/parameters.js +133 -0
  117. package/dist/esm/models/parameters.js.map +1 -0
  118. package/dist/esm/operations/index.d.ts +3 -0
  119. package/dist/esm/operations/index.d.ts.map +1 -0
  120. package/dist/esm/operations/index.js +10 -0
  121. package/dist/esm/operations/index.js.map +1 -0
  122. package/dist/esm/operations/sparkBatch.d.ts +36 -0
  123. package/dist/esm/operations/sparkBatch.d.ts.map +1 -0
  124. package/dist/esm/operations/sparkBatch.js +122 -0
  125. package/dist/esm/operations/sparkBatch.js.map +1 -0
  126. package/dist/esm/operations/sparkSessionOperations.d.ts +69 -0
  127. package/dist/esm/operations/sparkSessionOperations.d.ts.map +1 -0
  128. package/dist/esm/operations/sparkSessionOperations.js +259 -0
  129. package/dist/esm/operations/sparkSessionOperations.js.map +1 -0
  130. package/dist/esm/operationsInterfaces/index.d.ts +3 -0
  131. package/dist/esm/operationsInterfaces/index.d.ts.map +1 -0
  132. package/dist/esm/operationsInterfaces/index.js +10 -0
  133. package/dist/esm/operationsInterfaces/index.js.map +1 -0
  134. package/dist/esm/operationsInterfaces/sparkBatch.d.ts +28 -0
  135. package/dist/esm/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  136. package/dist/esm/operationsInterfaces/sparkBatch.js +9 -0
  137. package/dist/esm/operationsInterfaces/sparkBatch.js.map +1 -0
  138. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  139. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  140. package/dist/esm/operationsInterfaces/sparkSessionOperations.js +9 -0
  141. package/dist/esm/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  142. package/dist/esm/package.json +3 -0
  143. package/dist/esm/sparkClient.d.ts +24 -0
  144. package/dist/esm/sparkClient.d.ts.map +1 -0
  145. package/dist/esm/sparkClient.js +86 -0
  146. package/dist/esm/sparkClient.js.map +1 -0
  147. package/dist/esm/tracing.d.ts +2 -0
  148. package/dist/esm/tracing.d.ts.map +1 -0
  149. package/dist/esm/tracing.js +14 -0
  150. package/dist/esm/tracing.js.map +1 -0
  151. package/dist/react-native/index.d.ts +4 -0
  152. package/dist/react-native/index.d.ts.map +1 -0
  153. package/dist/react-native/index.js +11 -0
  154. package/dist/react-native/index.js.map +1 -0
  155. package/dist/react-native/models/index.d.ts +548 -0
  156. package/dist/react-native/models/index.d.ts.map +1 -0
  157. package/dist/react-native/models/index.js +134 -0
  158. package/dist/react-native/models/index.js.map +1 -0
  159. package/dist/react-native/models/mappers.d.ts +19 -0
  160. package/dist/react-native/models/mappers.d.ts.map +1 -0
  161. package/dist/react-native/models/mappers.js +1151 -0
  162. package/dist/react-native/models/mappers.js.map +1 -0
  163. package/dist/react-native/models/parameters.d.ts +16 -0
  164. package/dist/react-native/models/parameters.d.ts.map +1 -0
  165. package/dist/react-native/models/parameters.js +133 -0
  166. package/dist/react-native/models/parameters.js.map +1 -0
  167. package/dist/react-native/operations/index.d.ts +3 -0
  168. package/dist/react-native/operations/index.d.ts.map +1 -0
  169. package/dist/react-native/operations/index.js +10 -0
  170. package/dist/react-native/operations/index.js.map +1 -0
  171. package/dist/react-native/operations/sparkBatch.d.ts +36 -0
  172. package/dist/react-native/operations/sparkBatch.d.ts.map +1 -0
  173. package/dist/react-native/operations/sparkBatch.js +122 -0
  174. package/dist/react-native/operations/sparkBatch.js.map +1 -0
  175. package/dist/react-native/operations/sparkSessionOperations.d.ts +69 -0
  176. package/dist/react-native/operations/sparkSessionOperations.d.ts.map +1 -0
  177. package/dist/react-native/operations/sparkSessionOperations.js +259 -0
  178. package/dist/react-native/operations/sparkSessionOperations.js.map +1 -0
  179. package/dist/react-native/operationsInterfaces/index.d.ts +3 -0
  180. package/dist/react-native/operationsInterfaces/index.d.ts.map +1 -0
  181. package/dist/react-native/operationsInterfaces/index.js +10 -0
  182. package/dist/react-native/operationsInterfaces/index.js.map +1 -0
  183. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts +28 -0
  184. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  185. package/dist/react-native/operationsInterfaces/sparkBatch.js +9 -0
  186. package/dist/react-native/operationsInterfaces/sparkBatch.js.map +1 -0
  187. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  188. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  189. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js +9 -0
  190. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  191. package/dist/react-native/package.json +3 -0
  192. package/dist/react-native/sparkClient.d.ts +24 -0
  193. package/dist/react-native/sparkClient.d.ts.map +1 -0
  194. package/dist/react-native/sparkClient.js +86 -0
  195. package/dist/react-native/sparkClient.js.map +1 -0
  196. package/dist/react-native/tracing.d.ts +2 -0
  197. package/dist/react-native/tracing.d.ts.map +1 -0
  198. package/dist/react-native/tracing.js +14 -0
  199. package/dist/react-native/tracing.js.map +1 -0
  200. package/package.json +82 -78
  201. package/CHANGELOG.md +0 -24
  202. package/dist/index.js +0 -2016
  203. package/dist/index.js.map +0 -1
  204. package/dist/index.min.js +0 -1
  205. package/dist/index.min.js.map +0 -1
  206. package/rollup.config.js +0 -3
  207. package/tsconfig.json +0 -19
package/dist/index.js DELETED
@@ -1,2016 +0,0 @@
1
- 'use strict';
2
-
3
- Object.defineProperty(exports, '__esModule', { value: true });
4
-
5
- var coreTracing = require('@azure/core-tracing');
6
- var coreClient = require('@azure/core-client');
7
-
8
- /*
9
- * Copyright (c) Microsoft Corporation.
10
- * Licensed under the MIT License.
11
- *
12
- * Code generated by Microsoft (R) AutoRest Code Generator.
13
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
14
- */
15
- (function (KnownSparkJobType) {
16
- KnownSparkJobType["SparkBatch"] = "SparkBatch";
17
- KnownSparkJobType["SparkSession"] = "SparkSession";
18
- })(exports.KnownSparkJobType || (exports.KnownSparkJobType = {}));
19
- (function (KnownSparkBatchJobResultType) {
20
- KnownSparkBatchJobResultType["Uncertain"] = "Uncertain";
21
- KnownSparkBatchJobResultType["Succeeded"] = "Succeeded";
22
- KnownSparkBatchJobResultType["Failed"] = "Failed";
23
- KnownSparkBatchJobResultType["Cancelled"] = "Cancelled";
24
- })(exports.KnownSparkBatchJobResultType || (exports.KnownSparkBatchJobResultType = {}));
25
- (function (KnownSchedulerCurrentState) {
26
- KnownSchedulerCurrentState["Queued"] = "Queued";
27
- KnownSchedulerCurrentState["Scheduled"] = "Scheduled";
28
- KnownSchedulerCurrentState["Ended"] = "Ended";
29
- })(exports.KnownSchedulerCurrentState || (exports.KnownSchedulerCurrentState = {}));
30
- (function (KnownPluginCurrentState) {
31
- KnownPluginCurrentState["Preparation"] = "Preparation";
32
- KnownPluginCurrentState["ResourceAcquisition"] = "ResourceAcquisition";
33
- KnownPluginCurrentState["Queued"] = "Queued";
34
- KnownPluginCurrentState["Submission"] = "Submission";
35
- KnownPluginCurrentState["Monitoring"] = "Monitoring";
36
- KnownPluginCurrentState["Cleanup"] = "Cleanup";
37
- KnownPluginCurrentState["Ended"] = "Ended";
38
- })(exports.KnownPluginCurrentState || (exports.KnownPluginCurrentState = {}));
39
- (function (KnownSparkErrorSource) {
40
- KnownSparkErrorSource["System"] = "System";
41
- KnownSparkErrorSource["User"] = "User";
42
- KnownSparkErrorSource["Unknown"] = "Unknown";
43
- KnownSparkErrorSource["Dependency"] = "Dependency";
44
- })(exports.KnownSparkErrorSource || (exports.KnownSparkErrorSource = {}));
45
- (function (KnownLivyStates) {
46
- KnownLivyStates["NotStarted"] = "not_started";
47
- KnownLivyStates["Starting"] = "starting";
48
- KnownLivyStates["Idle"] = "idle";
49
- KnownLivyStates["Busy"] = "busy";
50
- KnownLivyStates["ShuttingDown"] = "shutting_down";
51
- KnownLivyStates["Error"] = "error";
52
- KnownLivyStates["Dead"] = "dead";
53
- KnownLivyStates["Killed"] = "killed";
54
- KnownLivyStates["Success"] = "success";
55
- KnownLivyStates["Running"] = "running";
56
- KnownLivyStates["Recovering"] = "recovering";
57
- })(exports.KnownLivyStates || (exports.KnownLivyStates = {}));
58
- (function (KnownSparkSessionResultType) {
59
- KnownSparkSessionResultType["Uncertain"] = "Uncertain";
60
- KnownSparkSessionResultType["Succeeded"] = "Succeeded";
61
- KnownSparkSessionResultType["Failed"] = "Failed";
62
- KnownSparkSessionResultType["Cancelled"] = "Cancelled";
63
- })(exports.KnownSparkSessionResultType || (exports.KnownSparkSessionResultType = {}));
64
- (function (KnownLivyStatementStates) {
65
- KnownLivyStatementStates["Waiting"] = "waiting";
66
- KnownLivyStatementStates["Running"] = "running";
67
- KnownLivyStatementStates["Available"] = "available";
68
- KnownLivyStatementStates["Error"] = "error";
69
- KnownLivyStatementStates["Cancelling"] = "cancelling";
70
- KnownLivyStatementStates["Cancelled"] = "cancelled";
71
- })(exports.KnownLivyStatementStates || (exports.KnownLivyStatementStates = {}));
72
- (function (KnownSparkStatementLanguageType) {
73
- KnownSparkStatementLanguageType["Spark"] = "spark";
74
- KnownSparkStatementLanguageType["PySpark"] = "pyspark";
75
- KnownSparkStatementLanguageType["DotNetSpark"] = "dotnetspark";
76
- KnownSparkStatementLanguageType["Sql"] = "sql";
77
- })(exports.KnownSparkStatementLanguageType || (exports.KnownSparkStatementLanguageType = {}));
78
-
79
- /*
80
- * Copyright (c) Microsoft Corporation.
81
- * Licensed under the MIT License.
82
- *
83
- * Code generated by Microsoft (R) AutoRest Code Generator.
84
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
85
- */
86
- const createSpan = coreTracing.createSpanFunction({
87
- namespace: "Azure.Synapse.Spark",
88
- packagePrefix: "Microsoft.Synapse"
89
- });
90
-
91
- /*
92
- * Copyright (c) Microsoft Corporation.
93
- * Licensed under the MIT License.
94
- *
95
- * Code generated by Microsoft (R) AutoRest Code Generator.
96
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
97
- */
98
- const SparkBatchJobCollection = {
99
- type: {
100
- name: "Composite",
101
- className: "SparkBatchJobCollection",
102
- modelProperties: {
103
- from: {
104
- serializedName: "from",
105
- required: true,
106
- type: {
107
- name: "Number"
108
- }
109
- },
110
- total: {
111
- serializedName: "total",
112
- required: true,
113
- type: {
114
- name: "Number"
115
- }
116
- },
117
- sessions: {
118
- serializedName: "sessions",
119
- type: {
120
- name: "Sequence",
121
- element: {
122
- type: {
123
- name: "Composite",
124
- className: "SparkBatchJob"
125
- }
126
- }
127
- }
128
- }
129
- }
130
- }
131
- };
132
- const SparkBatchJob = {
133
- type: {
134
- name: "Composite",
135
- className: "SparkBatchJob",
136
- modelProperties: {
137
- livyInfo: {
138
- serializedName: "livyInfo",
139
- type: {
140
- name: "Composite",
141
- className: "SparkBatchJobState"
142
- }
143
- },
144
- name: {
145
- serializedName: "name",
146
- type: {
147
- name: "String"
148
- }
149
- },
150
- workspaceName: {
151
- serializedName: "workspaceName",
152
- type: {
153
- name: "String"
154
- }
155
- },
156
- sparkPoolName: {
157
- serializedName: "sparkPoolName",
158
- type: {
159
- name: "String"
160
- }
161
- },
162
- submitterName: {
163
- serializedName: "submitterName",
164
- type: {
165
- name: "String"
166
- }
167
- },
168
- submitterId: {
169
- serializedName: "submitterId",
170
- type: {
171
- name: "String"
172
- }
173
- },
174
- artifactId: {
175
- serializedName: "artifactId",
176
- type: {
177
- name: "String"
178
- }
179
- },
180
- jobType: {
181
- serializedName: "jobType",
182
- type: {
183
- name: "String"
184
- }
185
- },
186
- result: {
187
- serializedName: "result",
188
- type: {
189
- name: "String"
190
- }
191
- },
192
- scheduler: {
193
- serializedName: "schedulerInfo",
194
- type: {
195
- name: "Composite",
196
- className: "SparkScheduler"
197
- }
198
- },
199
- plugin: {
200
- serializedName: "pluginInfo",
201
- type: {
202
- name: "Composite",
203
- className: "SparkServicePlugin"
204
- }
205
- },
206
- errors: {
207
- serializedName: "errorInfo",
208
- type: {
209
- name: "Sequence",
210
- element: {
211
- type: {
212
- name: "Composite",
213
- className: "SparkServiceError"
214
- }
215
- }
216
- }
217
- },
218
- tags: {
219
- serializedName: "tags",
220
- type: {
221
- name: "Dictionary",
222
- value: { type: { name: "String" } }
223
- }
224
- },
225
- id: {
226
- serializedName: "id",
227
- required: true,
228
- type: {
229
- name: "Number"
230
- }
231
- },
232
- appId: {
233
- serializedName: "appId",
234
- nullable: true,
235
- type: {
236
- name: "String"
237
- }
238
- },
239
- appInfo: {
240
- serializedName: "appInfo",
241
- nullable: true,
242
- type: {
243
- name: "Dictionary",
244
- value: { type: { name: "String" } }
245
- }
246
- },
247
- state: {
248
- serializedName: "state",
249
- type: {
250
- name: "String"
251
- }
252
- },
253
- logLines: {
254
- serializedName: "log",
255
- nullable: true,
256
- type: {
257
- name: "Sequence",
258
- element: {
259
- type: {
260
- name: "String"
261
- }
262
- }
263
- }
264
- }
265
- }
266
- }
267
- };
268
- const SparkBatchJobState = {
269
- type: {
270
- name: "Composite",
271
- className: "SparkBatchJobState",
272
- modelProperties: {
273
- notStartedAt: {
274
- serializedName: "notStartedAt",
275
- nullable: true,
276
- type: {
277
- name: "DateTime"
278
- }
279
- },
280
- startingAt: {
281
- serializedName: "startingAt",
282
- nullable: true,
283
- type: {
284
- name: "DateTime"
285
- }
286
- },
287
- runningAt: {
288
- serializedName: "runningAt",
289
- nullable: true,
290
- type: {
291
- name: "DateTime"
292
- }
293
- },
294
- deadAt: {
295
- serializedName: "deadAt",
296
- nullable: true,
297
- type: {
298
- name: "DateTime"
299
- }
300
- },
301
- successAt: {
302
- serializedName: "successAt",
303
- nullable: true,
304
- type: {
305
- name: "DateTime"
306
- }
307
- },
308
- terminatedAt: {
309
- serializedName: "killedAt",
310
- nullable: true,
311
- type: {
312
- name: "DateTime"
313
- }
314
- },
315
- recoveringAt: {
316
- serializedName: "recoveringAt",
317
- nullable: true,
318
- type: {
319
- name: "DateTime"
320
- }
321
- },
322
- currentState: {
323
- serializedName: "currentState",
324
- type: {
325
- name: "String"
326
- }
327
- },
328
- jobCreationRequest: {
329
- serializedName: "jobCreationRequest",
330
- type: {
331
- name: "Composite",
332
- className: "SparkRequest"
333
- }
334
- }
335
- }
336
- }
337
- };
338
- const SparkRequest = {
339
- type: {
340
- name: "Composite",
341
- className: "SparkRequest",
342
- modelProperties: {
343
- name: {
344
- serializedName: "name",
345
- type: {
346
- name: "String"
347
- }
348
- },
349
- file: {
350
- serializedName: "file",
351
- type: {
352
- name: "String"
353
- }
354
- },
355
- className: {
356
- serializedName: "className",
357
- type: {
358
- name: "String"
359
- }
360
- },
361
- arguments: {
362
- serializedName: "args",
363
- type: {
364
- name: "Sequence",
365
- element: {
366
- type: {
367
- name: "String"
368
- }
369
- }
370
- }
371
- },
372
- jars: {
373
- serializedName: "jars",
374
- type: {
375
- name: "Sequence",
376
- element: {
377
- type: {
378
- name: "String"
379
- }
380
- }
381
- }
382
- },
383
- pythonFiles: {
384
- serializedName: "pyFiles",
385
- type: {
386
- name: "Sequence",
387
- element: {
388
- type: {
389
- name: "String"
390
- }
391
- }
392
- }
393
- },
394
- files: {
395
- serializedName: "files",
396
- type: {
397
- name: "Sequence",
398
- element: {
399
- type: {
400
- name: "String"
401
- }
402
- }
403
- }
404
- },
405
- archives: {
406
- serializedName: "archives",
407
- type: {
408
- name: "Sequence",
409
- element: {
410
- type: {
411
- name: "String"
412
- }
413
- }
414
- }
415
- },
416
- configuration: {
417
- serializedName: "conf",
418
- type: {
419
- name: "Dictionary",
420
- value: { type: { name: "String" } }
421
- }
422
- },
423
- driverMemory: {
424
- serializedName: "driverMemory",
425
- type: {
426
- name: "String"
427
- }
428
- },
429
- driverCores: {
430
- serializedName: "driverCores",
431
- type: {
432
- name: "Number"
433
- }
434
- },
435
- executorMemory: {
436
- serializedName: "executorMemory",
437
- type: {
438
- name: "String"
439
- }
440
- },
441
- executorCores: {
442
- serializedName: "executorCores",
443
- type: {
444
- name: "Number"
445
- }
446
- },
447
- executorCount: {
448
- serializedName: "numExecutors",
449
- type: {
450
- name: "Number"
451
- }
452
- }
453
- }
454
- }
455
- };
456
- const SparkScheduler = {
457
- type: {
458
- name: "Composite",
459
- className: "SparkScheduler",
460
- modelProperties: {
461
- submittedAt: {
462
- serializedName: "submittedAt",
463
- nullable: true,
464
- type: {
465
- name: "DateTime"
466
- }
467
- },
468
- scheduledAt: {
469
- serializedName: "scheduledAt",
470
- nullable: true,
471
- type: {
472
- name: "DateTime"
473
- }
474
- },
475
- endedAt: {
476
- serializedName: "endedAt",
477
- nullable: true,
478
- type: {
479
- name: "DateTime"
480
- }
481
- },
482
- cancellationRequestedAt: {
483
- serializedName: "cancellationRequestedAt",
484
- nullable: true,
485
- type: {
486
- name: "DateTime"
487
- }
488
- },
489
- currentState: {
490
- serializedName: "currentState",
491
- type: {
492
- name: "String"
493
- }
494
- }
495
- }
496
- }
497
- };
498
- const SparkServicePlugin = {
499
- type: {
500
- name: "Composite",
501
- className: "SparkServicePlugin",
502
- modelProperties: {
503
- preparationStartedAt: {
504
- serializedName: "preparationStartedAt",
505
- nullable: true,
506
- type: {
507
- name: "DateTime"
508
- }
509
- },
510
- resourceAcquisitionStartedAt: {
511
- serializedName: "resourceAcquisitionStartedAt",
512
- nullable: true,
513
- type: {
514
- name: "DateTime"
515
- }
516
- },
517
- submissionStartedAt: {
518
- serializedName: "submissionStartedAt",
519
- nullable: true,
520
- type: {
521
- name: "DateTime"
522
- }
523
- },
524
- monitoringStartedAt: {
525
- serializedName: "monitoringStartedAt",
526
- nullable: true,
527
- type: {
528
- name: "DateTime"
529
- }
530
- },
531
- cleanupStartedAt: {
532
- serializedName: "cleanupStartedAt",
533
- nullable: true,
534
- type: {
535
- name: "DateTime"
536
- }
537
- },
538
- currentState: {
539
- serializedName: "currentState",
540
- type: {
541
- name: "String"
542
- }
543
- }
544
- }
545
- }
546
- };
547
- const SparkServiceError = {
548
- type: {
549
- name: "Composite",
550
- className: "SparkServiceError",
551
- modelProperties: {
552
- message: {
553
- serializedName: "message",
554
- type: {
555
- name: "String"
556
- }
557
- },
558
- errorCode: {
559
- serializedName: "errorCode",
560
- type: {
561
- name: "String"
562
- }
563
- },
564
- source: {
565
- serializedName: "source",
566
- type: {
567
- name: "String"
568
- }
569
- }
570
- }
571
- }
572
- };
573
- const SparkBatchJobOptions = {
574
- type: {
575
- name: "Composite",
576
- className: "SparkBatchJobOptions",
577
- modelProperties: {
578
- tags: {
579
- serializedName: "tags",
580
- type: {
581
- name: "Dictionary",
582
- value: { type: { name: "String" } }
583
- }
584
- },
585
- artifactId: {
586
- serializedName: "artifactId",
587
- type: {
588
- name: "String"
589
- }
590
- },
591
- name: {
592
- serializedName: "name",
593
- required: true,
594
- type: {
595
- name: "String"
596
- }
597
- },
598
- file: {
599
- serializedName: "file",
600
- required: true,
601
- type: {
602
- name: "String"
603
- }
604
- },
605
- className: {
606
- serializedName: "className",
607
- type: {
608
- name: "String"
609
- }
610
- },
611
- arguments: {
612
- serializedName: "args",
613
- type: {
614
- name: "Sequence",
615
- element: {
616
- type: {
617
- name: "String"
618
- }
619
- }
620
- }
621
- },
622
- jars: {
623
- serializedName: "jars",
624
- type: {
625
- name: "Sequence",
626
- element: {
627
- type: {
628
- name: "String"
629
- }
630
- }
631
- }
632
- },
633
- pythonFiles: {
634
- serializedName: "pyFiles",
635
- type: {
636
- name: "Sequence",
637
- element: {
638
- type: {
639
- name: "String"
640
- }
641
- }
642
- }
643
- },
644
- files: {
645
- serializedName: "files",
646
- type: {
647
- name: "Sequence",
648
- element: {
649
- type: {
650
- name: "String"
651
- }
652
- }
653
- }
654
- },
655
- archives: {
656
- serializedName: "archives",
657
- type: {
658
- name: "Sequence",
659
- element: {
660
- type: {
661
- name: "String"
662
- }
663
- }
664
- }
665
- },
666
- configuration: {
667
- serializedName: "conf",
668
- type: {
669
- name: "Dictionary",
670
- value: { type: { name: "String" } }
671
- }
672
- },
673
- driverMemory: {
674
- serializedName: "driverMemory",
675
- type: {
676
- name: "String"
677
- }
678
- },
679
- driverCores: {
680
- serializedName: "driverCores",
681
- type: {
682
- name: "Number"
683
- }
684
- },
685
- executorMemory: {
686
- serializedName: "executorMemory",
687
- type: {
688
- name: "String"
689
- }
690
- },
691
- executorCores: {
692
- serializedName: "executorCores",
693
- type: {
694
- name: "Number"
695
- }
696
- },
697
- executorCount: {
698
- serializedName: "numExecutors",
699
- type: {
700
- name: "Number"
701
- }
702
- }
703
- }
704
- }
705
- };
706
- const SparkSessionCollection = {
707
- type: {
708
- name: "Composite",
709
- className: "SparkSessionCollection",
710
- modelProperties: {
711
- from: {
712
- serializedName: "from",
713
- required: true,
714
- type: {
715
- name: "Number"
716
- }
717
- },
718
- total: {
719
- serializedName: "total",
720
- required: true,
721
- type: {
722
- name: "Number"
723
- }
724
- },
725
- sessions: {
726
- serializedName: "sessions",
727
- type: {
728
- name: "Sequence",
729
- element: {
730
- type: {
731
- name: "Composite",
732
- className: "SparkSession"
733
- }
734
- }
735
- }
736
- }
737
- }
738
- }
739
- };
740
- const SparkSession = {
741
- type: {
742
- name: "Composite",
743
- className: "SparkSession",
744
- modelProperties: {
745
- livyInfo: {
746
- serializedName: "livyInfo",
747
- type: {
748
- name: "Composite",
749
- className: "SparkSessionState"
750
- }
751
- },
752
- name: {
753
- serializedName: "name",
754
- type: {
755
- name: "String"
756
- }
757
- },
758
- workspaceName: {
759
- serializedName: "workspaceName",
760
- type: {
761
- name: "String"
762
- }
763
- },
764
- sparkPoolName: {
765
- serializedName: "sparkPoolName",
766
- type: {
767
- name: "String"
768
- }
769
- },
770
- submitterName: {
771
- serializedName: "submitterName",
772
- type: {
773
- name: "String"
774
- }
775
- },
776
- submitterId: {
777
- serializedName: "submitterId",
778
- type: {
779
- name: "String"
780
- }
781
- },
782
- artifactId: {
783
- serializedName: "artifactId",
784
- type: {
785
- name: "String"
786
- }
787
- },
788
- jobType: {
789
- serializedName: "jobType",
790
- type: {
791
- name: "String"
792
- }
793
- },
794
- result: {
795
- serializedName: "result",
796
- type: {
797
- name: "String"
798
- }
799
- },
800
- scheduler: {
801
- serializedName: "schedulerInfo",
802
- type: {
803
- name: "Composite",
804
- className: "SparkScheduler"
805
- }
806
- },
807
- plugin: {
808
- serializedName: "pluginInfo",
809
- type: {
810
- name: "Composite",
811
- className: "SparkServicePlugin"
812
- }
813
- },
814
- errors: {
815
- serializedName: "errorInfo",
816
- type: {
817
- name: "Sequence",
818
- element: {
819
- type: {
820
- name: "Composite",
821
- className: "SparkServiceError"
822
- }
823
- }
824
- }
825
- },
826
- tags: {
827
- serializedName: "tags",
828
- type: {
829
- name: "Dictionary",
830
- value: { type: { name: "String" } }
831
- }
832
- },
833
- id: {
834
- serializedName: "id",
835
- required: true,
836
- type: {
837
- name: "Number"
838
- }
839
- },
840
- appId: {
841
- serializedName: "appId",
842
- nullable: true,
843
- type: {
844
- name: "String"
845
- }
846
- },
847
- appInfo: {
848
- serializedName: "appInfo",
849
- nullable: true,
850
- type: {
851
- name: "Dictionary",
852
- value: { type: { name: "String" } }
853
- }
854
- },
855
- state: {
856
- serializedName: "state",
857
- type: {
858
- name: "String"
859
- }
860
- },
861
- logLines: {
862
- serializedName: "log",
863
- nullable: true,
864
- type: {
865
- name: "Sequence",
866
- element: {
867
- type: {
868
- name: "String"
869
- }
870
- }
871
- }
872
- }
873
- }
874
- }
875
- };
876
// Mapper for the SparkSessionState model: records, per client property, the
// wire name (serializedName), nullability, and serializer type.
const SparkSessionState = {
  type: {
    name: "Composite",
    className: "SparkSessionState",
    modelProperties: {
      notStartedAt: { serializedName: "notStartedAt", nullable: true, type: { name: "DateTime" } },
      startingAt: { serializedName: "startingAt", nullable: true, type: { name: "DateTime" } },
      idleAt: { serializedName: "idleAt", nullable: true, type: { name: "DateTime" } },
      deadAt: { serializedName: "deadAt", nullable: true, type: { name: "DateTime" } },
      shuttingDownAt: { serializedName: "shuttingDownAt", nullable: true, type: { name: "DateTime" } },
      // Client name differs from the wire name here: "terminatedAt" <- "killedAt".
      terminatedAt: { serializedName: "killedAt", nullable: true, type: { name: "DateTime" } },
      recoveringAt: { serializedName: "recoveringAt", nullable: true, type: { name: "DateTime" } },
      busyAt: { serializedName: "busyAt", nullable: true, type: { name: "DateTime" } },
      errorAt: { serializedName: "errorAt", nullable: true, type: { name: "DateTime" } },
      currentState: { serializedName: "currentState", type: { name: "String" } },
      jobCreationRequest: {
        serializedName: "jobCreationRequest",
        type: { name: "Composite", className: "SparkRequest" }
      }
    }
  }
};
960
// Mapper for the SparkSessionOptions request payload (Livy-compatible session
// creation options). Several client names differ from the wire names:
// arguments<-args, pythonFiles<-pyFiles, configuration<-conf,
// executorCount<-numExecutors.
const SparkSessionOptions = {
  type: {
    name: "Composite",
    className: "SparkSessionOptions",
    modelProperties: {
      tags: { serializedName: "tags", type: { name: "Dictionary", value: { type: { name: "String" } } } },
      artifactId: { serializedName: "artifactId", type: { name: "String" } },
      // Only "name" is mandatory on this payload.
      name: { serializedName: "name", required: true, type: { name: "String" } },
      file: { serializedName: "file", type: { name: "String" } },
      className: { serializedName: "className", type: { name: "String" } },
      arguments: { serializedName: "args", type: { name: "Sequence", element: { type: { name: "String" } } } },
      jars: { serializedName: "jars", type: { name: "Sequence", element: { type: { name: "String" } } } },
      pythonFiles: { serializedName: "pyFiles", type: { name: "Sequence", element: { type: { name: "String" } } } },
      files: { serializedName: "files", type: { name: "Sequence", element: { type: { name: "String" } } } },
      archives: { serializedName: "archives", type: { name: "Sequence", element: { type: { name: "String" } } } },
      configuration: { serializedName: "conf", type: { name: "Dictionary", value: { type: { name: "String" } } } },
      driverMemory: { serializedName: "driverMemory", type: { name: "String" } },
      driverCores: { serializedName: "driverCores", type: { name: "Number" } },
      executorMemory: { serializedName: "executorMemory", type: { name: "String" } },
      executorCores: { serializedName: "executorCores", type: { name: "Number" } },
      executorCount: { serializedName: "numExecutors", type: { name: "Number" } }
    }
  }
};
1092
// Mapper for a page of statements: required total count plus the statement list.
const SparkStatementCollection = {
  type: {
    name: "Composite",
    className: "SparkStatementCollection",
    modelProperties: {
      total: { serializedName: "total_statements", required: true, type: { name: "Number" } },
      statements: {
        serializedName: "statements",
        type: {
          name: "Sequence",
          element: { type: { name: "Composite", className: "SparkStatement" } }
        }
      }
    }
  }
};
1119
// Mapper for a single Livy statement: id is required; code/state/output optional.
const SparkStatement = {
  type: {
    name: "Composite",
    className: "SparkStatement",
    modelProperties: {
      id: { serializedName: "id", required: true, type: { name: "Number" } },
      code: { serializedName: "code", type: { name: "String" } },
      state: { serializedName: "state", type: { name: "String" } },
      output: {
        serializedName: "output",
        type: { name: "Composite", className: "SparkStatementOutput" }
      }
    }
  }
};
1153
// Mapper for a statement's output. Error fields use Livy's short wire names:
// errorName<-ename, errorValue<-evalue; executionCount<-execution_count.
const SparkStatementOutput = {
  type: {
    name: "Composite",
    className: "SparkStatementOutput",
    modelProperties: {
      status: { serializedName: "status", type: { name: "String" } },
      executionCount: { serializedName: "execution_count", required: true, type: { name: "Number" } },
      // Free-form payload: values are passed through untyped.
      data: { serializedName: "data", type: { name: "Dictionary", value: { type: { name: "any" } } } },
      errorName: { serializedName: "ename", nullable: true, type: { name: "String" } },
      errorValue: { serializedName: "evalue", nullable: true, type: { name: "String" } },
      traceback: {
        serializedName: "traceback",
        nullable: true,
        type: { name: "Sequence", element: { type: { name: "String" } } }
      }
    }
  }
};
1207
// Mapper for the statement-creation payload: the code to run and its kind.
const SparkStatementOptions = {
  type: {
    name: "Composite",
    className: "SparkStatementOptions",
    modelProperties: {
      code: { serializedName: "code", type: { name: "String" } },
      kind: { serializedName: "kind", type: { name: "String" } }
    }
  }
};
1227
// Mapper for the cancel-statement response; the wire name is the short "msg".
const SparkStatementCancellationResult = {
  type: {
    name: "Composite",
    className: "SparkStatementCancellationResult",
    modelProperties: {
      message: { serializedName: "msg", type: { name: "String" } }
    }
  }
};
1241
-
1242
// Frozen namespace of every model mapper, handed to createSerializer below.
// Null prototype avoids accidental Object.prototype lookups by key name.
var Mappers = /*#__PURE__*/ Object.freeze({
  __proto__: null,
  SparkBatchJobCollection,
  SparkBatchJob,
  SparkBatchJobState,
  SparkRequest,
  SparkScheduler,
  SparkServicePlugin,
  SparkServiceError,
  SparkBatchJobOptions,
  SparkSessionCollection,
  SparkSession,
  SparkSessionState,
  SparkSessionOptions,
  SparkStatementCollection,
  SparkStatement,
  SparkStatementOutput,
  SparkStatementOptions,
  SparkStatementCancellationResult
});
1262
-
1263
- /*
1264
- * Copyright (c) Microsoft Corporation.
1265
- * Licensed under the MIT License.
1266
- *
1267
- * Code generated by Microsoft (R) AutoRest Code Generator.
1268
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1269
- */
1270
// --- Operation parameter definitions (header / URL / query) ---

// Constant Accept header for every operation.
const accept = {
  parameterPath: "accept",
  mapper: {
    defaultValue: "application/json",
    isConstant: true,
    serializedName: "Accept",
    type: { name: "String" }
  }
};
// URL pieces are substituted verbatim (skipEncoding) into the path template.
const endpoint = {
  parameterPath: "endpoint",
  mapper: { serializedName: "endpoint", required: true, type: { name: "String" } },
  skipEncoding: true
};
const livyApiVersion = {
  parameterPath: "livyApiVersion",
  mapper: { serializedName: "livyApiVersion", required: true, type: { name: "String" } },
  skipEncoding: true
};
const sparkPoolName = {
  parameterPath: "sparkPoolName",
  mapper: { serializedName: "sparkPoolName", required: true, type: { name: "String" } },
  skipEncoding: true
};
// Optional paging/detail query parameters, read from the options bag.
// Client option "fromParam" maps to the "from" query key.
const fromParam = {
  parameterPath: ["options", "fromParam"],
  mapper: { serializedName: "from", type: { name: "Number" } }
};
const size = {
  parameterPath: ["options", "size"],
  mapper: { serializedName: "size", type: { name: "Number" } }
};
const detailed = {
  parameterPath: ["options", "detailed"],
  mapper: { serializedName: "detailed", type: { name: "Boolean" } }
};
// Constant Content-Type header for operations with a JSON body.
const contentType = {
  parameterPath: ["options", "contentType"],
  mapper: {
    defaultValue: "application/json",
    isConstant: true,
    serializedName: "Content-Type",
    type: { name: "String" }
  }
};
1352
// --- Request-body and path-identifier parameters ---

// Request bodies reuse the corresponding model mappers directly.
const sparkBatchJobOptions = { parameterPath: "sparkBatchJobOptions", mapper: SparkBatchJobOptions };
// Numeric identifier of a batch job in the URL path.
const batchId = {
  parameterPath: "batchId",
  mapper: { serializedName: "batchId", required: true, type: { name: "Number" } }
};
const sparkSessionOptions = { parameterPath: "sparkSessionOptions", mapper: SparkSessionOptions };
// Numeric identifier of a session in the URL path.
const sessionId = {
  parameterPath: "sessionId",
  mapper: { serializedName: "sessionId", required: true, type: { name: "Number" } }
};
const sparkStatementOptions = { parameterPath: "sparkStatementOptions", mapper: SparkStatementOptions };
// Numeric identifier of a statement in the URL path.
const statementId = {
  parameterPath: "statementId",
  mapper: { serializedName: "statementId", required: true, type: { name: "Number" } }
};
1394
-
1395
- /*
1396
- * Copyright (c) Microsoft Corporation.
1397
- * Licensed under the MIT License.
1398
- *
1399
- * Code generated by Microsoft (R) AutoRest Code Generator.
1400
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1401
- */
1402
/** Class containing SparkBatch operations. */
class SparkBatchImpl {
    /**
     * Initialize a new instance of the class SparkBatch class.
     * @param client Reference to the service client
     */
    constructor(client) {
        this.client = client;
    }
    /**
     * Runs one operation inside a tracing span. On failure the span status is
     * set to UNSET (matching the generated per-method code) and the error is
     * rethrown; the span always ends.
     */
    async #run(spanName, operationArguments, operationSpec) {
        const { span } = createSpan(spanName, operationArguments.options || {});
        try {
            return await this.client.sendOperationRequest(operationArguments, operationSpec);
        }
        catch (error) {
            span.setStatus({
                code: coreTracing.SpanStatusCode.UNSET,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * List all spark batch jobs which are running under a particular spark pool.
     * @param options The options parameters.
     */
    async getSparkBatchJobs(options) {
        return this.#run("SparkClient-getSparkBatchJobs", { options }, getSparkBatchJobsOperationSpec);
    }
    /**
     * Create new spark batch job.
     * @param sparkBatchJobOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkBatchJob(sparkBatchJobOptions, options) {
        return this.#run("SparkClient-createSparkBatchJob", { sparkBatchJobOptions, options }, createSparkBatchJobOperationSpec);
    }
    /**
     * Gets a single spark batch job.
     * @param batchId Identifier for the batch job.
     * @param options The options parameters.
     */
    async getSparkBatchJob(batchId, options) {
        return this.#run("SparkClient-getSparkBatchJob", { batchId, options }, getSparkBatchJobOperationSpec);
    }
    /**
     * Cancels a running spark batch job.
     * @param batchId Identifier for the batch job.
     * @param options The options parameters.
     */
    async cancelSparkBatchJob(batchId, options) {
        return this.#run("SparkClient-cancelSparkBatchJob", { batchId, options }, cancelSparkBatchJobOperationSpec);
    }
}
1499
// Operation Specifications
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
// GET list of batch jobs; supports from/size paging and the detailed flag.
const getSparkBatchJobsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkBatchJobCollection } },
    queryParameters: [fromParam, size, detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept],
    serializer
};
// POST a new batch job with a JSON body.
const createSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkBatchJob } },
    requestBody: sparkBatchJobOptions,
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer
};
// GET a single batch job by id.
const getSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkBatchJob } },
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, batchId],
    headerParameters: [accept],
    serializer
};
// DELETE cancels a running batch job; the 200 response has no body.
const cancelSparkBatchJobOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}",
    httpMethod: "DELETE",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, batchId],
    serializer
};
1567
-
1568
- /*
1569
- * Copyright (c) Microsoft Corporation.
1570
- * Licensed under the MIT License.
1571
- *
1572
- * Code generated by Microsoft (R) AutoRest Code Generator.
1573
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1574
- */
1575
/** Class containing SparkSessionOperations operations. */
class SparkSessionOperationsImpl {
    /**
     * Initialize a new instance of the class SparkSessionOperations class.
     * @param client Reference to the service client
     */
    constructor(client) {
        this.client = client;
    }
    /**
     * Runs one operation inside a tracing span. On failure the span status is
     * set to UNSET (matching the generated per-method code) and the error is
     * rethrown; the span always ends.
     */
    async #run(spanName, operationArguments, operationSpec) {
        const { span } = createSpan(spanName, operationArguments.options || {});
        try {
            return await this.client.sendOperationRequest(operationArguments, operationSpec);
        }
        catch (error) {
            span.setStatus({
                code: coreTracing.SpanStatusCode.UNSET,
                message: error.message
            });
            throw error;
        }
        finally {
            span.end();
        }
    }
    /**
     * List all spark sessions which are running under a particular spark pool.
     * @param options The options parameters.
     */
    async getSparkSessions(options) {
        return this.#run("SparkClient-getSparkSessions", { options }, getSparkSessionsOperationSpec);
    }
    /**
     * Create new spark session.
     * @param sparkSessionOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkSession(sparkSessionOptions, options) {
        return this.#run("SparkClient-createSparkSession", { sparkSessionOptions, options }, createSparkSessionOperationSpec);
    }
    /**
     * Gets a single spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async getSparkSession(sessionId, options) {
        return this.#run("SparkClient-getSparkSession", { sessionId, options }, getSparkSessionOperationSpec);
    }
    /**
     * Cancels a running spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async cancelSparkSession(sessionId, options) {
        return this.#run("SparkClient-cancelSparkSession", { sessionId, options }, cancelSparkSessionOperationSpec);
    }
    /**
     * Sends a keep alive call to the current session to reset the session timeout.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async resetSparkSessionTimeout(sessionId, options) {
        return this.#run("SparkClient-resetSparkSessionTimeout", { sessionId, options }, resetSparkSessionTimeoutOperationSpec);
    }
    /**
     * Gets a list of statements within a spark session.
     * @param sessionId Identifier for the session.
     * @param options The options parameters.
     */
    async getSparkStatements(sessionId, options) {
        return this.#run("SparkClient-getSparkStatements", { sessionId, options }, getSparkStatementsOperationSpec);
    }
    /**
     * Create statement within a spark session.
     * @param sessionId Identifier for the session.
     * @param sparkStatementOptions Livy compatible batch job request payload.
     * @param options The options parameters.
     */
    async createSparkStatement(sessionId, sparkStatementOptions, options) {
        return this.#run("SparkClient-createSparkStatement", { sessionId, sparkStatementOptions, options }, createSparkStatementOperationSpec);
    }
    /**
     * Gets a single statement within a spark session.
     * @param sessionId Identifier for the session.
     * @param statementId Identifier for the statement.
     * @param options The options parameters.
     */
    async getSparkStatement(sessionId, statementId, options) {
        return this.#run("SparkClient-getSparkStatement", { sessionId, statementId, options }, getSparkStatementOperationSpec);
    }
    /**
     * Kill a statement within a session.
     * @param sessionId Identifier for the session.
     * @param statementId Identifier for the statement.
     * @param options The options parameters.
     */
    async cancelSparkStatement(sessionId, statementId, options) {
        return this.#run("SparkClient-cancelSparkStatement", { sessionId, statementId, options }, cancelSparkStatementOperationSpec);
    }
}
1785
// Operation Specifications
const serializer$1 = coreClient.createSerializer(Mappers, /* isXml */ false);
// GET list of sessions; supports from/size paging and the detailed flag.
const getSparkSessionsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkSessionCollection } },
    queryParameters: [fromParam, size, detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept],
    serializer: serializer$1
};
// POST a new session with a JSON body.
const createSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkSession } },
    requestBody: sparkSessionOptions,
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer: serializer$1
};
// GET a single session by id.
const getSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkSession } },
    queryParameters: [detailed],
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept],
    serializer: serializer$1
};
// DELETE cancels a running session; the 200 response has no body.
const cancelSparkSessionOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
    httpMethod: "DELETE",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    serializer: serializer$1
};
// PUT keep-alive to reset the session timeout; no body either way.
const resetSparkSessionTimeoutOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout",
    httpMethod: "PUT",
    responses: { 200: {} },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    serializer: serializer$1
};
// GET all statements of a session.
const getSparkStatementsOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkStatementCollection } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept],
    serializer: serializer$1
};
// POST a new statement with a JSON body.
const createSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkStatement } },
    requestBody: sparkStatementOptions,
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId],
    headerParameters: [accept, contentType],
    mediaType: "json",
    serializer: serializer$1
};
// GET a single statement by id.
const getSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}",
    httpMethod: "GET",
    responses: { 200: { bodyMapper: SparkStatement } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId, statementId],
    headerParameters: [accept],
    serializer: serializer$1
};
// POST to the /cancel action of a statement.
const cancelSparkStatementOperationSpec = {
    path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel",
    httpMethod: "POST",
    responses: { 200: { bodyMapper: SparkStatementCancellationResult } },
    urlParameters: [endpoint, livyApiVersion, sparkPoolName, sessionId, statementId],
    headerParameters: [accept],
    serializer: serializer$1
};
1937
-
1938
- /*
1939
- * Copyright (c) Microsoft Corporation.
1940
- * Licensed under the MIT License.
1941
- *
1942
- * Code generated by Microsoft (R) AutoRest Code Generator.
1943
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1944
- */
1945
class SparkClientContext extends coreClient.ServiceClient {
    /**
     * Initializes a new instance of the SparkClientContext class.
     * @param credentials Subscription credentials which uniquely identify client subscription.
     * @param endpoint The workspace development endpoint, for example
     *                 https://myworkspace.dev.azuresynapse.net.
     * @param sparkPoolName Name of the spark pool.
     * @param options The parameter options
     */
    constructor(credentials, endpoint, sparkPoolName, options) {
        // Guard clauses for the required constructor arguments.
        if (credentials === undefined) {
            throw new Error("'credentials' cannot be null");
        }
        if (endpoint === undefined) {
            throw new Error("'endpoint' cannot be null");
        }
        if (sparkPoolName === undefined) {
            throw new Error("'sparkPoolName' cannot be null");
        }
        // Fall back to an empty options bag; a caller-supplied bag is reused
        // (and may be mutated below) exactly as in the generated code.
        const opts = options || {};
        const packageDetails = `azsdk-js-synapse-spark/1.0.0-beta.4`;
        // Prepend any caller-supplied user-agent prefix to the SDK identifier.
        const userAgentPrefix = opts.userAgentOptions && opts.userAgentOptions.userAgentPrefix
            ? `${opts.userAgentOptions.userAgentPrefix} ${packageDetails}`
            : `${packageDetails}`;
        if (!opts.credentialScopes) {
            opts.credentialScopes = ["https://dev.azuresynapse.net/.default"];
        }
        // Caller options override the defaults; userAgentOptions/baseUri win last.
        super({
            requestContentType: "application/json; charset=utf-8",
            credential: credentials,
            ...opts,
            userAgentOptions: { userAgentPrefix },
            baseUri: opts.endpoint || "{endpoint}"
        });
        // Parameter assignments
        this.endpoint = endpoint;
        this.sparkPoolName = sparkPoolName;
        // Assigning values to Constant parameters
        this.livyApiVersion = opts.livyApiVersion || "2019-11-01-preview";
    }
}
1990
-
1991
- /*
1992
- * Copyright (c) Microsoft Corporation.
1993
- * Licensed under the MIT License.
1994
- *
1995
- * Code generated by Microsoft (R) AutoRest Code Generator.
1996
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
1997
- */
1998
class SparkClient extends SparkClientContext {
    /**
     * Initializes a new instance of the SparkClient class.
     * @param credentials Subscription credentials which uniquely identify client subscription.
     * @param endpoint The workspace development endpoint, for example
     *                 https://myworkspace.dev.azuresynapse.net.
     * @param sparkPoolName Name of the spark pool.
     * @param options The parameter options
     */
    constructor(credentials, endpoint, sparkPoolName, options) {
        super(credentials, endpoint, sparkPoolName, options);
        // Attach the generated operation groups to this client instance.
        this.sparkBatch = new SparkBatchImpl(this);
        this.sparkSessionOperations = new SparkSessionOperationsImpl(this);
    }
}
2013
-
2014
// CommonJS public surface of the bundle.
exports.SparkClient = SparkClient;
exports.SparkClientContext = SparkClientContext;
2016
- //# sourceMappingURL=index.js.map