@azure/synapse-spark 1.0.0-beta.3 → 1.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207)
  1. package/LICENSE +21 -0
  2. package/README.md +14 -13
  3. package/dist/browser/index.d.ts +4 -0
  4. package/dist/browser/index.d.ts.map +1 -0
  5. package/dist/browser/index.js +11 -0
  6. package/dist/browser/index.js.map +1 -0
  7. package/{types/synapse-spark.d.ts → dist/browser/models/index.d.ts} +548 -621
  8. package/dist/browser/models/index.d.ts.map +1 -0
  9. package/dist/browser/models/index.js +134 -0
  10. package/dist/browser/models/index.js.map +1 -0
  11. package/dist/browser/models/mappers.d.ts +19 -0
  12. package/dist/browser/models/mappers.d.ts.map +1 -0
  13. package/dist/browser/models/mappers.js +1151 -0
  14. package/dist/browser/models/mappers.js.map +1 -0
  15. package/dist/browser/models/parameters.d.ts +16 -0
  16. package/dist/browser/models/parameters.d.ts.map +1 -0
  17. package/dist/browser/models/parameters.js +133 -0
  18. package/dist/browser/models/parameters.js.map +1 -0
  19. package/dist/browser/operations/index.d.ts +3 -0
  20. package/dist/browser/operations/index.d.ts.map +1 -0
  21. package/dist/browser/operations/index.js +10 -0
  22. package/dist/browser/operations/index.js.map +1 -0
  23. package/dist/browser/operations/sparkBatch.d.ts +36 -0
  24. package/dist/browser/operations/sparkBatch.d.ts.map +1 -0
  25. package/dist/browser/operations/sparkBatch.js +122 -0
  26. package/dist/browser/operations/sparkBatch.js.map +1 -0
  27. package/dist/browser/operations/sparkSessionOperations.d.ts +69 -0
  28. package/dist/browser/operations/sparkSessionOperations.d.ts.map +1 -0
  29. package/dist/browser/operations/sparkSessionOperations.js +259 -0
  30. package/dist/browser/operations/sparkSessionOperations.js.map +1 -0
  31. package/dist/browser/operationsInterfaces/index.d.ts +3 -0
  32. package/dist/browser/operationsInterfaces/index.d.ts.map +1 -0
  33. package/dist/browser/operationsInterfaces/index.js +10 -0
  34. package/dist/browser/operationsInterfaces/index.js.map +1 -0
  35. package/dist/browser/operationsInterfaces/sparkBatch.d.ts +28 -0
  36. package/dist/browser/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  37. package/dist/browser/operationsInterfaces/sparkBatch.js +9 -0
  38. package/dist/browser/operationsInterfaces/sparkBatch.js.map +1 -0
  39. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  40. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  41. package/dist/browser/operationsInterfaces/sparkSessionOperations.js +9 -0
  42. package/dist/browser/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  43. package/dist/browser/package.json +3 -0
  44. package/dist/browser/sparkClient.d.ts +24 -0
  45. package/dist/browser/sparkClient.d.ts.map +1 -0
  46. package/dist/browser/sparkClient.js +86 -0
  47. package/dist/browser/sparkClient.js.map +1 -0
  48. package/dist/browser/tracing.d.ts +2 -0
  49. package/dist/browser/tracing.d.ts.map +1 -0
  50. package/dist/browser/tracing.js +14 -0
  51. package/dist/browser/tracing.js.map +1 -0
  52. package/dist/commonjs/index.d.ts +4 -0
  53. package/dist/commonjs/index.d.ts.map +1 -0
  54. package/dist/commonjs/index.js +16 -0
  55. package/dist/commonjs/index.js.map +1 -0
  56. package/dist/commonjs/models/index.d.ts +548 -0
  57. package/dist/commonjs/models/index.d.ts.map +1 -0
  58. package/dist/commonjs/models/index.js +137 -0
  59. package/dist/commonjs/models/index.js.map +1 -0
  60. package/dist/commonjs/models/mappers.d.ts +19 -0
  61. package/dist/commonjs/models/mappers.d.ts.map +1 -0
  62. package/dist/commonjs/models/mappers.js +1154 -0
  63. package/dist/commonjs/models/mappers.js.map +1 -0
  64. package/dist/commonjs/models/parameters.d.ts +16 -0
  65. package/dist/commonjs/models/parameters.d.ts.map +1 -0
  66. package/dist/commonjs/models/parameters.js +136 -0
  67. package/dist/commonjs/models/parameters.js.map +1 -0
  68. package/dist/commonjs/operations/index.d.ts +3 -0
  69. package/dist/commonjs/operations/index.d.ts.map +1 -0
  70. package/dist/commonjs/operations/index.js +13 -0
  71. package/dist/commonjs/operations/index.js.map +1 -0
  72. package/dist/commonjs/operations/sparkBatch.d.ts +36 -0
  73. package/dist/commonjs/operations/sparkBatch.d.ts.map +1 -0
  74. package/dist/commonjs/operations/sparkBatch.js +127 -0
  75. package/dist/commonjs/operations/sparkBatch.js.map +1 -0
  76. package/dist/commonjs/operations/sparkSessionOperations.d.ts +69 -0
  77. package/dist/commonjs/operations/sparkSessionOperations.d.ts.map +1 -0
  78. package/dist/commonjs/operations/sparkSessionOperations.js +264 -0
  79. package/dist/commonjs/operations/sparkSessionOperations.js.map +1 -0
  80. package/dist/commonjs/operationsInterfaces/index.d.ts +3 -0
  81. package/dist/commonjs/operationsInterfaces/index.d.ts.map +1 -0
  82. package/dist/commonjs/operationsInterfaces/index.js +13 -0
  83. package/dist/commonjs/operationsInterfaces/index.js.map +1 -0
  84. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts +28 -0
  85. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  86. package/dist/commonjs/operationsInterfaces/sparkBatch.js +10 -0
  87. package/dist/commonjs/operationsInterfaces/sparkBatch.js.map +1 -0
  88. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  89. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  90. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js +10 -0
  91. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  92. package/dist/commonjs/package.json +3 -0
  93. package/dist/commonjs/sparkClient.d.ts +24 -0
  94. package/dist/commonjs/sparkClient.d.ts.map +1 -0
  95. package/dist/commonjs/sparkClient.js +91 -0
  96. package/dist/commonjs/sparkClient.js.map +1 -0
  97. package/dist/commonjs/tracing.d.ts +2 -0
  98. package/dist/commonjs/tracing.d.ts.map +1 -0
  99. package/dist/commonjs/tracing.js +17 -0
  100. package/dist/commonjs/tracing.js.map +1 -0
  101. package/dist/commonjs/tsdoc-metadata.json +11 -0
  102. package/dist/esm/index.d.ts +4 -0
  103. package/dist/esm/index.d.ts.map +1 -0
  104. package/dist/esm/index.js +11 -0
  105. package/dist/esm/index.js.map +1 -0
  106. package/dist/esm/models/index.d.ts +548 -0
  107. package/dist/esm/models/index.d.ts.map +1 -0
  108. package/dist/esm/models/index.js +134 -0
  109. package/dist/esm/models/index.js.map +1 -0
  110. package/dist/esm/models/mappers.d.ts +19 -0
  111. package/dist/esm/models/mappers.d.ts.map +1 -0
  112. package/dist/esm/models/mappers.js +1151 -0
  113. package/dist/esm/models/mappers.js.map +1 -0
  114. package/dist/esm/models/parameters.d.ts +16 -0
  115. package/dist/esm/models/parameters.d.ts.map +1 -0
  116. package/dist/esm/models/parameters.js +133 -0
  117. package/dist/esm/models/parameters.js.map +1 -0
  118. package/dist/esm/operations/index.d.ts +3 -0
  119. package/dist/esm/operations/index.d.ts.map +1 -0
  120. package/dist/esm/operations/index.js +10 -0
  121. package/dist/esm/operations/index.js.map +1 -0
  122. package/dist/esm/operations/sparkBatch.d.ts +36 -0
  123. package/dist/esm/operations/sparkBatch.d.ts.map +1 -0
  124. package/dist/esm/operations/sparkBatch.js +122 -0
  125. package/dist/esm/operations/sparkBatch.js.map +1 -0
  126. package/dist/esm/operations/sparkSessionOperations.d.ts +69 -0
  127. package/dist/esm/operations/sparkSessionOperations.d.ts.map +1 -0
  128. package/dist/esm/operations/sparkSessionOperations.js +259 -0
  129. package/dist/esm/operations/sparkSessionOperations.js.map +1 -0
  130. package/dist/esm/operationsInterfaces/index.d.ts +3 -0
  131. package/dist/esm/operationsInterfaces/index.d.ts.map +1 -0
  132. package/dist/esm/operationsInterfaces/index.js +10 -0
  133. package/dist/esm/operationsInterfaces/index.js.map +1 -0
  134. package/dist/esm/operationsInterfaces/sparkBatch.d.ts +28 -0
  135. package/dist/esm/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  136. package/dist/esm/operationsInterfaces/sparkBatch.js +9 -0
  137. package/dist/esm/operationsInterfaces/sparkBatch.js.map +1 -0
  138. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  139. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  140. package/dist/esm/operationsInterfaces/sparkSessionOperations.js +9 -0
  141. package/dist/esm/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  142. package/dist/esm/package.json +3 -0
  143. package/dist/esm/sparkClient.d.ts +24 -0
  144. package/dist/esm/sparkClient.d.ts.map +1 -0
  145. package/dist/esm/sparkClient.js +86 -0
  146. package/dist/esm/sparkClient.js.map +1 -0
  147. package/dist/esm/tracing.d.ts +2 -0
  148. package/dist/esm/tracing.d.ts.map +1 -0
  149. package/dist/esm/tracing.js +14 -0
  150. package/dist/esm/tracing.js.map +1 -0
  151. package/dist/react-native/index.d.ts +4 -0
  152. package/dist/react-native/index.d.ts.map +1 -0
  153. package/dist/react-native/index.js +11 -0
  154. package/dist/react-native/index.js.map +1 -0
  155. package/dist/react-native/models/index.d.ts +548 -0
  156. package/dist/react-native/models/index.d.ts.map +1 -0
  157. package/dist/react-native/models/index.js +134 -0
  158. package/dist/react-native/models/index.js.map +1 -0
  159. package/dist/react-native/models/mappers.d.ts +19 -0
  160. package/dist/react-native/models/mappers.d.ts.map +1 -0
  161. package/dist/react-native/models/mappers.js +1151 -0
  162. package/dist/react-native/models/mappers.js.map +1 -0
  163. package/dist/react-native/models/parameters.d.ts +16 -0
  164. package/dist/react-native/models/parameters.d.ts.map +1 -0
  165. package/dist/react-native/models/parameters.js +133 -0
  166. package/dist/react-native/models/parameters.js.map +1 -0
  167. package/dist/react-native/operations/index.d.ts +3 -0
  168. package/dist/react-native/operations/index.d.ts.map +1 -0
  169. package/dist/react-native/operations/index.js +10 -0
  170. package/dist/react-native/operations/index.js.map +1 -0
  171. package/dist/react-native/operations/sparkBatch.d.ts +36 -0
  172. package/dist/react-native/operations/sparkBatch.d.ts.map +1 -0
  173. package/dist/react-native/operations/sparkBatch.js +122 -0
  174. package/dist/react-native/operations/sparkBatch.js.map +1 -0
  175. package/dist/react-native/operations/sparkSessionOperations.d.ts +69 -0
  176. package/dist/react-native/operations/sparkSessionOperations.d.ts.map +1 -0
  177. package/dist/react-native/operations/sparkSessionOperations.js +259 -0
  178. package/dist/react-native/operations/sparkSessionOperations.js.map +1 -0
  179. package/dist/react-native/operationsInterfaces/index.d.ts +3 -0
  180. package/dist/react-native/operationsInterfaces/index.d.ts.map +1 -0
  181. package/dist/react-native/operationsInterfaces/index.js +10 -0
  182. package/dist/react-native/operationsInterfaces/index.js.map +1 -0
  183. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts +28 -0
  184. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  185. package/dist/react-native/operationsInterfaces/sparkBatch.js +9 -0
  186. package/dist/react-native/operationsInterfaces/sparkBatch.js.map +1 -0
  187. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  188. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  189. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js +9 -0
  190. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  191. package/dist/react-native/package.json +3 -0
  192. package/dist/react-native/sparkClient.d.ts +24 -0
  193. package/dist/react-native/sparkClient.d.ts.map +1 -0
  194. package/dist/react-native/sparkClient.js +86 -0
  195. package/dist/react-native/sparkClient.js.map +1 -0
  196. package/dist/react-native/tracing.d.ts +2 -0
  197. package/dist/react-native/tracing.d.ts.map +1 -0
  198. package/dist/react-native/tracing.js +14 -0
  199. package/dist/react-native/tracing.js.map +1 -0
  200. package/package.json +83 -79
  201. package/CHANGELOG.md +0 -17
  202. package/dist/index.js +0 -1995
  203. package/dist/index.js.map +0 -1
  204. package/dist/index.min.js +0 -1
  205. package/dist/index.min.js.map +0 -1
  206. package/rollup.config.js +0 -3
  207. package/tsconfig.json +0 -19
@@ -0,0 +1,259 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ import { tracingClient } from "../tracing.js";
9
+ import * as coreClient from "@azure/core-client";
10
+ import * as Mappers from "../models/mappers.js";
11
+ import * as Parameters from "../models/parameters.js";
12
+ // Operation Specifications
13
+ const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
14
+ const getSparkSessionsOperationSpec = {
15
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
16
+ httpMethod: "GET",
17
+ responses: {
18
+ 200: {
19
+ bodyMapper: Mappers.SparkSessionCollection,
20
+ },
21
+ },
22
+ queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed],
23
+ urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
24
+ headerParameters: [Parameters.accept],
25
+ serializer,
26
+ };
27
+ const createSparkSessionOperationSpec = {
28
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions",
29
+ httpMethod: "POST",
30
+ responses: {
31
+ 200: {
32
+ bodyMapper: Mappers.SparkSession,
33
+ },
34
+ },
35
+ requestBody: Parameters.sparkSessionOptions,
36
+ queryParameters: [Parameters.detailed],
37
+ urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
38
+ headerParameters: [Parameters.accept, Parameters.contentType],
39
+ mediaType: "json",
40
+ serializer,
41
+ };
42
+ const getSparkSessionOperationSpec = {
43
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
44
+ httpMethod: "GET",
45
+ responses: {
46
+ 200: {
47
+ bodyMapper: Mappers.SparkSession,
48
+ },
49
+ },
50
+ queryParameters: [Parameters.detailed],
51
+ urlParameters: [
52
+ Parameters.endpoint,
53
+ Parameters.livyApiVersion,
54
+ Parameters.sparkPoolName,
55
+ Parameters.sessionId,
56
+ ],
57
+ headerParameters: [Parameters.accept],
58
+ serializer,
59
+ };
60
+ const cancelSparkSessionOperationSpec = {
61
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}",
62
+ httpMethod: "DELETE",
63
+ responses: { 200: {} },
64
+ urlParameters: [
65
+ Parameters.endpoint,
66
+ Parameters.livyApiVersion,
67
+ Parameters.sparkPoolName,
68
+ Parameters.sessionId,
69
+ ],
70
+ serializer,
71
+ };
72
+ const resetSparkSessionTimeoutOperationSpec = {
73
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout",
74
+ httpMethod: "PUT",
75
+ responses: { 200: {} },
76
+ urlParameters: [
77
+ Parameters.endpoint,
78
+ Parameters.livyApiVersion,
79
+ Parameters.sparkPoolName,
80
+ Parameters.sessionId,
81
+ ],
82
+ serializer,
83
+ };
84
+ const getSparkStatementsOperationSpec = {
85
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
86
+ httpMethod: "GET",
87
+ responses: {
88
+ 200: {
89
+ bodyMapper: Mappers.SparkStatementCollection,
90
+ },
91
+ },
92
+ urlParameters: [
93
+ Parameters.endpoint,
94
+ Parameters.livyApiVersion,
95
+ Parameters.sparkPoolName,
96
+ Parameters.sessionId,
97
+ ],
98
+ headerParameters: [Parameters.accept],
99
+ serializer,
100
+ };
101
+ const createSparkStatementOperationSpec = {
102
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements",
103
+ httpMethod: "POST",
104
+ responses: {
105
+ 200: {
106
+ bodyMapper: Mappers.SparkStatement,
107
+ },
108
+ },
109
+ requestBody: Parameters.sparkStatementOptions,
110
+ urlParameters: [
111
+ Parameters.endpoint,
112
+ Parameters.livyApiVersion,
113
+ Parameters.sparkPoolName,
114
+ Parameters.sessionId,
115
+ ],
116
+ headerParameters: [Parameters.accept, Parameters.contentType],
117
+ mediaType: "json",
118
+ serializer,
119
+ };
120
+ const getSparkStatementOperationSpec = {
121
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}",
122
+ httpMethod: "GET",
123
+ responses: {
124
+ 200: {
125
+ bodyMapper: Mappers.SparkStatement,
126
+ },
127
+ },
128
+ urlParameters: [
129
+ Parameters.endpoint,
130
+ Parameters.livyApiVersion,
131
+ Parameters.sparkPoolName,
132
+ Parameters.sessionId,
133
+ Parameters.statementId,
134
+ ],
135
+ headerParameters: [Parameters.accept],
136
+ serializer,
137
+ };
138
+ const cancelSparkStatementOperationSpec = {
139
+ path: "/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel",
140
+ httpMethod: "POST",
141
+ responses: {
142
+ 200: {
143
+ bodyMapper: Mappers.SparkStatementCancellationResult,
144
+ },
145
+ },
146
+ urlParameters: [
147
+ Parameters.endpoint,
148
+ Parameters.livyApiVersion,
149
+ Parameters.sparkPoolName,
150
+ Parameters.sessionId,
151
+ Parameters.statementId,
152
+ ],
153
+ headerParameters: [Parameters.accept],
154
+ serializer,
155
+ };
156
/** Class containing SparkSessionOperations operations. */
export class SparkSessionOperationsImpl {
  client;

  /**
   * Initialize a new instance of the class SparkSessionOperations class.
   * @param client - Reference to the service client
   */
  constructor(client) {
    this.client = client;
  }

  /**
   * List all spark sessions which are running under a particular spark pool.
   * @param options - The options parameters.
   */
  async getSparkSessions(options) {
    return tracingClient.withSpan(
      "SparkClient.getSparkSessions",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest({ updatedOptions }, getSparkSessionsOperationSpec),
    );
  }

  /**
   * Create new spark session.
   * @param sparkSessionOptions - Livy compatible batch job request payload.
   * @param options - The options parameters.
   */
  async createSparkSession(sparkSessionOptions, options) {
    return tracingClient.withSpan(
      "SparkClient.createSparkSession",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sparkSessionOptions, updatedOptions },
          createSparkSessionOperationSpec,
        ),
    );
  }

  /**
   * Gets a single spark session.
   * @param sessionId - Identifier for the session.
   * @param options - The options parameters.
   */
  async getSparkSession(sessionId, options) {
    return tracingClient.withSpan(
      "SparkClient.getSparkSession",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, updatedOptions },
          getSparkSessionOperationSpec,
        ),
    );
  }

  /**
   * Cancels a running spark session.
   * @param sessionId - Identifier for the session.
   * @param options - The options parameters.
   */
  async cancelSparkSession(sessionId, options) {
    return tracingClient.withSpan(
      "SparkClient.cancelSparkSession",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, updatedOptions },
          cancelSparkSessionOperationSpec,
        ),
    );
  }

  /**
   * Sends a keep alive call to the current session to reset the session timeout.
   * @param sessionId - Identifier for the session.
   * @param options - The options parameters.
   */
  async resetSparkSessionTimeout(sessionId, options) {
    return tracingClient.withSpan(
      "SparkClient.resetSparkSessionTimeout",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, updatedOptions },
          resetSparkSessionTimeoutOperationSpec,
        ),
    );
  }

  /**
   * Gets a list of statements within a spark session.
   * @param sessionId - Identifier for the session.
   * @param options - The options parameters.
   */
  async getSparkStatements(sessionId, options) {
    return tracingClient.withSpan(
      "SparkClient.getSparkStatements",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, updatedOptions },
          getSparkStatementsOperationSpec,
        ),
    );
  }

  /**
   * Create statement within a spark session.
   * @param sessionId - Identifier for the session.
   * @param sparkStatementOptions - Livy compatible batch job request payload.
   * @param options - The options parameters.
   */
  async createSparkStatement(sessionId, sparkStatementOptions, options) {
    return tracingClient.withSpan(
      "SparkClient.createSparkStatement",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, sparkStatementOptions, updatedOptions },
          createSparkStatementOperationSpec,
        ),
    );
  }

  /**
   * Gets a single statement within a spark session.
   * @param sessionId - Identifier for the session.
   * @param statementId - Identifier for the statement.
   * @param options - The options parameters.
   */
  async getSparkStatement(sessionId, statementId, options) {
    return tracingClient.withSpan(
      "SparkClient.getSparkStatement",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, statementId, updatedOptions },
          getSparkStatementOperationSpec,
        ),
    );
  }

  /**
   * Kill a statement within a session.
   * @param sessionId - Identifier for the session.
   * @param statementId - Identifier for the statement.
   * @param options - The options parameters.
   */
  async cancelSparkStatement(sessionId, statementId, options) {
    return tracingClient.withSpan(
      "SparkClient.cancelSparkStatement",
      options ?? {},
      (updatedOptions) =>
        this.client.sendOperationRequest(
          { sessionId, statementId, updatedOptions },
          cancelSparkStatementOperationSpec,
        ),
    );
  }
}
259
+ //# sourceMappingURL=sparkSessionOperations.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkSessionOperations.js","sourceRoot":"","sources":["../../../src/operations/sparkSessionOperations.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,eAAe,CAAC;AAE9C,OAAO,KAAK,UAAU,MAAM,oBAAoB,CAAC;AACjD,OAAO,KAAK,OAAO,MAAM,sBAAsB,CAAC;AAChD,OAAO,KAAK,UAAU,MAAM,yBAAyB,CAAC;AAuBtD,2BAA2B;AAC3B,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAE3E,MAAM,6BAA6B,GAA6B;IAC9D,IAAI,EAAE,wEAAwE;IAC9E,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,sBAAsB;SAC3C;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,SAAS,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IAC7E,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,cAAc,EAAE,UAAU,CAAC,aAAa,CAAC;IACzF,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA6B;IAChE,IAAI,EAAE,wEAAwE;IAC9E,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,YAAY;SACjC;KACF;IACD,WAAW,EAAE,UAAU,CAAC,mBAAmB;IAC3C,eAAe,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;IACtC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,cAAc,EAAE,UAAU,CAAC,aAAa,CAAC;IACzF,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC,WAAW,CAAC;IAC7D,SAAS,EAAE,MAAM;IACjB,UAAU;CACX,CAAC;AACF,MAAM,4BAA4B,GAA6B;IAC7D,IAAI,EAAE,oFAAoF;IAC1F,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,YAAY;SACjC;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;IACtC,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA6B;IAChE,IAAI,EAAE,oFAAoF;IAC1F,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE;IACtB,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,UAAU;CACX,CAAC;AACF,MAAM,qCAAqC,GAA6B;IACtE,IAAI,EAAE,kGAAkG;IACxG,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE;IACtB,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,UAAU;CACX,CAAC;AACF,MAA
M,+BAA+B,GAA6B;IAChE,IAAI,EAAE,+FAA+F;IACrG,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,wBAAwB;SAC7C;KACF;IACD,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,iCAAiC,GAA6B;IAClE,IAAI,EAAE,+FAA+F;IACrG,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,cAAc;SACnC;KACF;IACD,WAAW,EAAE,UAAU,CAAC,qBAAqB;IAC7C,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC,WAAW,CAAC;IAC7D,SAAS,EAAE,MAAM;IACjB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA6B;IAC/D,IAAI,EAAE,6GAA6G;IACnH,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,cAAc;SACnC;KACF;IACD,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,WAAW;KACvB;IACD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,iCAAiC,GAA6B;IAClE,IAAI,EAAE,oHAAoH;IAC1H,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,gCAAgC;SACrD;KACF;IACD,aAAa,EAAE;QACb,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,WAAW;KACvB;IACD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AAEF,0DAA0D;AAC1D,MAAM,OAAO,0BAA0B;IACpB,MAAM,CAAc;IAErC;;;OAGG;IACH,YAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,gBAAgB,CACpB,OAAoD;QAEpD,OAAO,aAAa,CAAC,QAAQ,CAC3B,8BAA8B,EAC9B,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,cAAc,EAAE,EAClB,6BAA6B,CACmB,CAAC;QACrD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,kBAAkB,CACtB,mBAAwC,EACxC,OAAsD;QAEtD,OAAO,aAAa,CAAC,QAAQ,CAC3B,gCAAgC,EAChC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,mBAAmB,EAAE,cAAc,EAAE,EACvC,+BAA+B,CACmB,CAAC;QACvD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;OAI
G;IACH,KAAK,CAAC,eAAe,CACnB,SAAiB,EACjB,OAAmD;QAEnD,OAAO,aAAa,CAAC,QAAQ,CAC3B,6BAA6B,EAC7B,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,cAAc,EAAE,EAC7B,4BAA4B,CACmB,CAAC;QACpD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,kBAAkB,CACtB,SAAiB,EACjB,OAAsD;QAEtD,OAAO,aAAa,CAAC,QAAQ,CAC3B,gCAAgC,EAChC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,cAAc,EAAE,EAC7B,+BAA+B,CACf,CAAC;QACrB,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,wBAAwB,CAC5B,SAAiB,EACjB,OAA4D;QAE5D,OAAO,aAAa,CAAC,QAAQ,CAC3B,sCAAsC,EACtC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,cAAc,EAAE,EAC7B,qCAAqC,CACrB,CAAC;QACrB,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,kBAAkB,CACtB,SAAiB,EACjB,OAAsD;QAEtD,OAAO,aAAa,CAAC,QAAQ,CAC3B,gCAAgC,EAChC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,cAAc,EAAE,EAC7B,+BAA+B,CACmB,CAAC;QACvD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oBAAoB,CACxB,SAAiB,EACjB,qBAA4C,EAC5C,OAAwD;QAExD,OAAO,aAAa,CAAC,QAAQ,CAC3B,kCAAkC,EAClC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,qBAAqB,EAAE,cAAc,EAAE,EACpD,iCAAiC,CACmB,CAAC;QACzD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,iBAAiB,CACrB,SAAiB,EACjB,WAAmB,EACnB,OAAqD;QAErD,OAAO,aAAa,CAAC,QAAQ,CAC3B,+BAA+B,EAC/B,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,WAAW,EAAE,cAAc,EAAE,EAC1C,8BAA8B,CACmB,CAAC;QACtD,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oBAAoB,CACxB,SAAiB,EACjB,WAAmB,EACnB,OAAwD;QAExD,OAAO,aAAa,CAAC,QAAQ,CAC3B,kCAAkC,EAClC,OAAO,IAAI,EAAE,EACb,KAAK,EAAE,cAAc,EAAE,EAAE;YACvB,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,EAAE,SAAS,EAAE,WAAW,EAAE,cAAc,EAAE,EAC1C,iCAAiC,CACmB,CAAC;QACzD,CAAC,CACF,CAAC;IACJ,CAAC;CACF","sourcesContent":["/*\n 
* Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport { tracingClient } from \"../tracing.js\";\nimport type { SparkSessionOperations } from \"../operationsInterfaces/index.js\";\nimport * as coreClient from \"@azure/core-client\";\nimport * as Mappers from \"../models/mappers.js\";\nimport * as Parameters from \"../models/parameters.js\";\nimport type { SparkClient } from \"../sparkClient.js\";\nimport type {\n SparkSessionGetSparkSessionsOptionalParams,\n SparkSessionGetSparkSessionsResponse,\n SparkSessionOptions,\n SparkSessionCreateSparkSessionOptionalParams,\n SparkSessionCreateSparkSessionResponse,\n SparkSessionGetSparkSessionOptionalParams,\n SparkSessionGetSparkSessionResponse,\n SparkSessionCancelSparkSessionOptionalParams,\n SparkSessionResetSparkSessionTimeoutOptionalParams,\n SparkSessionGetSparkStatementsOptionalParams,\n SparkSessionGetSparkStatementsResponse,\n SparkStatementOptions,\n SparkSessionCreateSparkStatementOptionalParams,\n SparkSessionCreateSparkStatementResponse,\n SparkSessionGetSparkStatementOptionalParams,\n SparkSessionGetSparkStatementResponse,\n SparkSessionCancelSparkStatementOptionalParams,\n SparkSessionCancelSparkStatementResponse,\n} from \"../models/index.js\";\n\n// Operation Specifications\nconst serializer = coreClient.createSerializer(Mappers, /* isXml */ false);\n\nconst getSparkSessionsOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkSessionCollection,\n },\n },\n queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed],\n urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],\n headerParameters: [Parameters.accept],\n 
serializer,\n};\nconst createSparkSessionOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkSession,\n },\n },\n requestBody: Parameters.sparkSessionOptions,\n queryParameters: [Parameters.detailed],\n urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],\n headerParameters: [Parameters.accept, Parameters.contentType],\n mediaType: \"json\",\n serializer,\n};\nconst getSparkSessionOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkSession,\n },\n },\n queryParameters: [Parameters.detailed],\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n ],\n headerParameters: [Parameters.accept],\n serializer,\n};\nconst cancelSparkSessionOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}\",\n httpMethod: \"DELETE\",\n responses: { 200: {} },\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n ],\n serializer,\n};\nconst resetSparkSessionTimeoutOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout\",\n httpMethod: \"PUT\",\n responses: { 200: {} },\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n ],\n serializer,\n};\nconst getSparkStatementsOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements\",\n httpMethod: \"GET\",\n responses: {\n 200: 
{\n bodyMapper: Mappers.SparkStatementCollection,\n },\n },\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n ],\n headerParameters: [Parameters.accept],\n serializer,\n};\nconst createSparkStatementOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkStatement,\n },\n },\n requestBody: Parameters.sparkStatementOptions,\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n ],\n headerParameters: [Parameters.accept, Parameters.contentType],\n mediaType: \"json\",\n serializer,\n};\nconst getSparkStatementOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkStatement,\n },\n },\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n Parameters.statementId,\n ],\n headerParameters: [Parameters.accept],\n serializer,\n};\nconst cancelSparkStatementOperationSpec: coreClient.OperationSpec = {\n path: \"/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkStatementCancellationResult,\n },\n },\n urlParameters: [\n Parameters.endpoint,\n Parameters.livyApiVersion,\n Parameters.sparkPoolName,\n Parameters.sessionId,\n Parameters.statementId,\n ],\n headerParameters: [Parameters.accept],\n serializer,\n};\n\n/** Class containing SparkSessionOperations operations. 
*/\nexport class SparkSessionOperationsImpl implements SparkSessionOperations {\n private readonly client: SparkClient;\n\n /**\n * Initialize a new instance of the class SparkSessionOperations class.\n * @param client - Reference to the service client\n */\n constructor(client: SparkClient) {\n this.client = client;\n }\n\n /**\n * List all spark sessions which are running under a particular spark pool.\n * @param options - The options parameters.\n */\n async getSparkSessions(\n options?: SparkSessionGetSparkSessionsOptionalParams,\n ): Promise<SparkSessionGetSparkSessionsResponse> {\n return tracingClient.withSpan(\n \"SparkClient.getSparkSessions\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { updatedOptions },\n getSparkSessionsOperationSpec,\n ) as Promise<SparkSessionGetSparkSessionsResponse>;\n },\n );\n }\n\n /**\n * Create new spark session.\n * @param sparkSessionOptions - Livy compatible batch job request payload.\n * @param options - The options parameters.\n */\n async createSparkSession(\n sparkSessionOptions: SparkSessionOptions,\n options?: SparkSessionCreateSparkSessionOptionalParams,\n ): Promise<SparkSessionCreateSparkSessionResponse> {\n return tracingClient.withSpan(\n \"SparkClient.createSparkSession\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sparkSessionOptions, updatedOptions },\n createSparkSessionOperationSpec,\n ) as Promise<SparkSessionCreateSparkSessionResponse>;\n },\n );\n }\n\n /**\n * Gets a single spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n async getSparkSession(\n sessionId: number,\n options?: SparkSessionGetSparkSessionOptionalParams,\n ): Promise<SparkSessionGetSparkSessionResponse> {\n return tracingClient.withSpan(\n \"SparkClient.getSparkSession\",\n options ?? 
{},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, updatedOptions },\n getSparkSessionOperationSpec,\n ) as Promise<SparkSessionGetSparkSessionResponse>;\n },\n );\n }\n\n /**\n * Cancels a running spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n async cancelSparkSession(\n sessionId: number,\n options?: SparkSessionCancelSparkSessionOptionalParams,\n ): Promise<void> {\n return tracingClient.withSpan(\n \"SparkClient.cancelSparkSession\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, updatedOptions },\n cancelSparkSessionOperationSpec,\n ) as Promise<void>;\n },\n );\n }\n\n /**\n * Sends a keep alive call to the current session to reset the session timeout.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n async resetSparkSessionTimeout(\n sessionId: number,\n options?: SparkSessionResetSparkSessionTimeoutOptionalParams,\n ): Promise<void> {\n return tracingClient.withSpan(\n \"SparkClient.resetSparkSessionTimeout\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, updatedOptions },\n resetSparkSessionTimeoutOperationSpec,\n ) as Promise<void>;\n },\n );\n }\n\n /**\n * Gets a list of statements within a spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n async getSparkStatements(\n sessionId: number,\n options?: SparkSessionGetSparkStatementsOptionalParams,\n ): Promise<SparkSessionGetSparkStatementsResponse> {\n return tracingClient.withSpan(\n \"SparkClient.getSparkStatements\",\n options ?? 
{},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, updatedOptions },\n getSparkStatementsOperationSpec,\n ) as Promise<SparkSessionGetSparkStatementsResponse>;\n },\n );\n }\n\n /**\n * Create statement within a spark session.\n * @param sessionId - Identifier for the session.\n * @param sparkStatementOptions - Livy compatible batch job request payload.\n * @param options - The options parameters.\n */\n async createSparkStatement(\n sessionId: number,\n sparkStatementOptions: SparkStatementOptions,\n options?: SparkSessionCreateSparkStatementOptionalParams,\n ): Promise<SparkSessionCreateSparkStatementResponse> {\n return tracingClient.withSpan(\n \"SparkClient.createSparkStatement\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, sparkStatementOptions, updatedOptions },\n createSparkStatementOperationSpec,\n ) as Promise<SparkSessionCreateSparkStatementResponse>;\n },\n );\n }\n\n /**\n * Gets a single statement within a spark session.\n * @param sessionId - Identifier for the session.\n * @param statementId - Identifier for the statement.\n * @param options - The options parameters.\n */\n async getSparkStatement(\n sessionId: number,\n statementId: number,\n options?: SparkSessionGetSparkStatementOptionalParams,\n ): Promise<SparkSessionGetSparkStatementResponse> {\n return tracingClient.withSpan(\n \"SparkClient.getSparkStatement\",\n options ?? 
{},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, statementId, updatedOptions },\n getSparkStatementOperationSpec,\n ) as Promise<SparkSessionGetSparkStatementResponse>;\n },\n );\n }\n\n /**\n * Kill a statement within a session.\n * @param sessionId - Identifier for the session.\n * @param statementId - Identifier for the statement.\n * @param options - The options parameters.\n */\n async cancelSparkStatement(\n sessionId: number,\n statementId: number,\n options?: SparkSessionCancelSparkStatementOptionalParams,\n ): Promise<SparkSessionCancelSparkStatementResponse> {\n return tracingClient.withSpan(\n \"SparkClient.cancelSparkStatement\",\n options ?? {},\n async (updatedOptions) => {\n return this.client.sendOperationRequest(\n { sessionId, statementId, updatedOptions },\n cancelSparkStatementOperationSpec,\n ) as Promise<SparkSessionCancelSparkStatementResponse>;\n },\n );\n }\n}\n"]}
@@ -0,0 +1,3 @@
1
+ export * from "./sparkBatch.js";
2
+ export * from "./sparkSessionOperations.js";
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/operationsInterfaces/index.ts"],"names":[],"mappings":"AAQA,cAAc,iBAAiB,CAAC;AAChC,cAAc,6BAA6B,CAAC"}
@@ -0,0 +1,10 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ export * from "./sparkBatch.js";
9
+ export * from "./sparkSessionOperations.js";
10
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/operationsInterfaces/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,iBAAiB,CAAC;AAChC,cAAc,6BAA6B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./sparkBatch.js\";\nexport * from \"./sparkSessionOperations.js\";\n"]}
@@ -0,0 +1,28 @@
1
+ import type { SparkBatchGetSparkBatchJobsOptionalParams, SparkBatchGetSparkBatchJobsResponse, SparkBatchJobOptions, SparkBatchCreateSparkBatchJobOptionalParams, SparkBatchCreateSparkBatchJobResponse, SparkBatchGetSparkBatchJobOptionalParams, SparkBatchGetSparkBatchJobResponse, SparkBatchCancelSparkBatchJobOptionalParams } from "../models/index.js";
2
+ /** Interface representing a SparkBatch. */
3
+ export interface SparkBatch {
4
+ /**
5
+ * List all spark batch jobs which are running under a particular spark pool.
6
+ * @param options - The options parameters.
7
+ */
8
+ getSparkBatchJobs(options?: SparkBatchGetSparkBatchJobsOptionalParams): Promise<SparkBatchGetSparkBatchJobsResponse>;
9
+ /**
10
+ * Create new spark batch job.
11
+ * @param sparkBatchJobOptions - Livy compatible batch job request payload.
12
+ * @param options - The options parameters.
13
+ */
14
+ createSparkBatchJob(sparkBatchJobOptions: SparkBatchJobOptions, options?: SparkBatchCreateSparkBatchJobOptionalParams): Promise<SparkBatchCreateSparkBatchJobResponse>;
15
+ /**
16
+ * Gets a single spark batch job.
17
+ * @param batchId - Identifier for the batch job.
18
+ * @param options - The options parameters.
19
+ */
20
+ getSparkBatchJob(batchId: number, options?: SparkBatchGetSparkBatchJobOptionalParams): Promise<SparkBatchGetSparkBatchJobResponse>;
21
+ /**
22
+ * Cancels a running spark batch job.
23
+ * @param batchId - Identifier for the batch job.
24
+ * @param options - The options parameters.
25
+ */
26
+ cancelSparkBatchJob(batchId: number, options?: SparkBatchCancelSparkBatchJobOptionalParams): Promise<void>;
27
+ }
28
+ //# sourceMappingURL=sparkBatch.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkBatch.d.ts","sourceRoot":"","sources":["../../../src/operationsInterfaces/sparkBatch.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EACV,yCAAyC,EACzC,mCAAmC,EACnC,oBAAoB,EACpB,2CAA2C,EAC3C,qCAAqC,EACrC,wCAAwC,EACxC,kCAAkC,EAClC,2CAA2C,EAC5C,MAAM,oBAAoB,CAAC;AAE5B,2CAA2C;AAC3C,MAAM,WAAW,UAAU;IACzB;;;OAGG;IACH,iBAAiB,CACf,OAAO,CAAC,EAAE,yCAAyC,GAClD,OAAO,CAAC,mCAAmC,CAAC,CAAC;IAChD;;;;OAIG;IACH,mBAAmB,CACjB,oBAAoB,EAAE,oBAAoB,EAC1C,OAAO,CAAC,EAAE,2CAA2C,GACpD,OAAO,CAAC,qCAAqC,CAAC,CAAC;IAClD;;;;OAIG;IACH,gBAAgB,CACd,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,wCAAwC,GACjD,OAAO,CAAC,kCAAkC,CAAC,CAAC;IAC/C;;;;OAIG;IACH,mBAAmB,CACjB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,2CAA2C,GACpD,OAAO,CAAC,IAAI,CAAC,CAAC;CAClB"}
@@ -0,0 +1,9 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ export {};
9
+ //# sourceMappingURL=sparkBatch.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkBatch.js","sourceRoot":"","sources":["../../../src/operationsInterfaces/sparkBatch.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport type {\n SparkBatchGetSparkBatchJobsOptionalParams,\n SparkBatchGetSparkBatchJobsResponse,\n SparkBatchJobOptions,\n SparkBatchCreateSparkBatchJobOptionalParams,\n SparkBatchCreateSparkBatchJobResponse,\n SparkBatchGetSparkBatchJobOptionalParams,\n SparkBatchGetSparkBatchJobResponse,\n SparkBatchCancelSparkBatchJobOptionalParams,\n} from \"../models/index.js\";\n\n/** Interface representing a SparkBatch. */\nexport interface SparkBatch {\n /**\n * List all spark batch jobs which are running under a particular spark pool.\n * @param options - The options parameters.\n */\n getSparkBatchJobs(\n options?: SparkBatchGetSparkBatchJobsOptionalParams,\n ): Promise<SparkBatchGetSparkBatchJobsResponse>;\n /**\n * Create new spark batch job.\n * @param sparkBatchJobOptions - Livy compatible batch job request payload.\n * @param options - The options parameters.\n */\n createSparkBatchJob(\n sparkBatchJobOptions: SparkBatchJobOptions,\n options?: SparkBatchCreateSparkBatchJobOptionalParams,\n ): Promise<SparkBatchCreateSparkBatchJobResponse>;\n /**\n * Gets a single spark batch job.\n * @param batchId - Identifier for the batch job.\n * @param options - The options parameters.\n */\n getSparkBatchJob(\n batchId: number,\n options?: SparkBatchGetSparkBatchJobOptionalParams,\n ): Promise<SparkBatchGetSparkBatchJobResponse>;\n /**\n * Cancels a running spark batch job.\n * @param batchId - Identifier for the batch job.\n * @param options - The options parameters.\n */\n cancelSparkBatchJob(\n batchId: number,\n options?: SparkBatchCancelSparkBatchJobOptionalParams,\n 
): Promise<void>;\n}\n"]}
@@ -0,0 +1,61 @@
1
+ import type { SparkSessionGetSparkSessionsOptionalParams, SparkSessionGetSparkSessionsResponse, SparkSessionOptions, SparkSessionCreateSparkSessionOptionalParams, SparkSessionCreateSparkSessionResponse, SparkSessionGetSparkSessionOptionalParams, SparkSessionGetSparkSessionResponse, SparkSessionCancelSparkSessionOptionalParams, SparkSessionResetSparkSessionTimeoutOptionalParams, SparkSessionGetSparkStatementsOptionalParams, SparkSessionGetSparkStatementsResponse, SparkStatementOptions, SparkSessionCreateSparkStatementOptionalParams, SparkSessionCreateSparkStatementResponse, SparkSessionGetSparkStatementOptionalParams, SparkSessionGetSparkStatementResponse, SparkSessionCancelSparkStatementOptionalParams, SparkSessionCancelSparkStatementResponse } from "../models/index.js";
2
+ /** Interface representing a SparkSessionOperations. */
3
+ export interface SparkSessionOperations {
4
+ /**
5
+ * List all spark sessions which are running under a particular spark pool.
6
+ * @param options - The options parameters.
7
+ */
8
+ getSparkSessions(options?: SparkSessionGetSparkSessionsOptionalParams): Promise<SparkSessionGetSparkSessionsResponse>;
9
+ /**
10
+ * Create new spark session.
11
+ * @param sparkSessionOptions - Livy compatible batch job request payload.
12
+ * @param options - The options parameters.
13
+ */
14
+ createSparkSession(sparkSessionOptions: SparkSessionOptions, options?: SparkSessionCreateSparkSessionOptionalParams): Promise<SparkSessionCreateSparkSessionResponse>;
15
+ /**
16
+ * Gets a single spark session.
17
+ * @param sessionId - Identifier for the session.
18
+ * @param options - The options parameters.
19
+ */
20
+ getSparkSession(sessionId: number, options?: SparkSessionGetSparkSessionOptionalParams): Promise<SparkSessionGetSparkSessionResponse>;
21
+ /**
22
+ * Cancels a running spark session.
23
+ * @param sessionId - Identifier for the session.
24
+ * @param options - The options parameters.
25
+ */
26
+ cancelSparkSession(sessionId: number, options?: SparkSessionCancelSparkSessionOptionalParams): Promise<void>;
27
+ /**
28
+ * Sends a keep alive call to the current session to reset the session timeout.
29
+ * @param sessionId - Identifier for the session.
30
+ * @param options - The options parameters.
31
+ */
32
+ resetSparkSessionTimeout(sessionId: number, options?: SparkSessionResetSparkSessionTimeoutOptionalParams): Promise<void>;
33
+ /**
34
+ * Gets a list of statements within a spark session.
35
+ * @param sessionId - Identifier for the session.
36
+ * @param options - The options parameters.
37
+ */
38
+ getSparkStatements(sessionId: number, options?: SparkSessionGetSparkStatementsOptionalParams): Promise<SparkSessionGetSparkStatementsResponse>;
39
+ /**
40
+ * Create statement within a spark session.
41
+ * @param sessionId - Identifier for the session.
42
+ * @param sparkStatementOptions - Livy compatible batch job request payload.
43
+ * @param options - The options parameters.
44
+ */
45
+ createSparkStatement(sessionId: number, sparkStatementOptions: SparkStatementOptions, options?: SparkSessionCreateSparkStatementOptionalParams): Promise<SparkSessionCreateSparkStatementResponse>;
46
+ /**
47
+ * Gets a single statement within a spark session.
48
+ * @param sessionId - Identifier for the session.
49
+ * @param statementId - Identifier for the statement.
50
+ * @param options - The options parameters.
51
+ */
52
+ getSparkStatement(sessionId: number, statementId: number, options?: SparkSessionGetSparkStatementOptionalParams): Promise<SparkSessionGetSparkStatementResponse>;
53
+ /**
54
+ * Kill a statement within a session.
55
+ * @param sessionId - Identifier for the session.
56
+ * @param statementId - Identifier for the statement.
57
+ * @param options - The options parameters.
58
+ */
59
+ cancelSparkStatement(sessionId: number, statementId: number, options?: SparkSessionCancelSparkStatementOptionalParams): Promise<SparkSessionCancelSparkStatementResponse>;
60
+ }
61
+ //# sourceMappingURL=sparkSessionOperations.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkSessionOperations.d.ts","sourceRoot":"","sources":["../../../src/operationsInterfaces/sparkSessionOperations.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EACV,0CAA0C,EAC1C,oCAAoC,EACpC,mBAAmB,EACnB,4CAA4C,EAC5C,sCAAsC,EACtC,yCAAyC,EACzC,mCAAmC,EACnC,4CAA4C,EAC5C,kDAAkD,EAClD,4CAA4C,EAC5C,sCAAsC,EACtC,qBAAqB,EACrB,8CAA8C,EAC9C,wCAAwC,EACxC,2CAA2C,EAC3C,qCAAqC,EACrC,8CAA8C,EAC9C,wCAAwC,EACzC,MAAM,oBAAoB,CAAC;AAE5B,uDAAuD;AACvD,MAAM,WAAW,sBAAsB;IACrC;;;OAGG;IACH,gBAAgB,CACd,OAAO,CAAC,EAAE,0CAA0C,GACnD,OAAO,CAAC,oCAAoC,CAAC,CAAC;IACjD;;;;OAIG;IACH,kBAAkB,CAChB,mBAAmB,EAAE,mBAAmB,EACxC,OAAO,CAAC,EAAE,4CAA4C,GACrD,OAAO,CAAC,sCAAsC,CAAC,CAAC;IACnD;;;;OAIG;IACH,eAAe,CACb,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE,yCAAyC,GAClD,OAAO,CAAC,mCAAmC,CAAC,CAAC;IAChD;;;;OAIG;IACH,kBAAkB,CAChB,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE,4CAA4C,GACrD,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB;;;;OAIG;IACH,wBAAwB,CACtB,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE,kDAAkD,GAC3D,OAAO,CAAC,IAAI,CAAC,CAAC;IACjB;;;;OAIG;IACH,kBAAkB,CAChB,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE,4CAA4C,GACrD,OAAO,CAAC,sCAAsC,CAAC,CAAC;IACnD;;;;;OAKG;IACH,oBAAoB,CAClB,SAAS,EAAE,MAAM,EACjB,qBAAqB,EAAE,qBAAqB,EAC5C,OAAO,CAAC,EAAE,8CAA8C,GACvD,OAAO,CAAC,wCAAwC,CAAC,CAAC;IACrD;;;;;OAKG;IACH,iBAAiB,CACf,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,2CAA2C,GACpD,OAAO,CAAC,qCAAqC,CAAC,CAAC;IAClD;;;;;OAKG;IACH,oBAAoB,CAClB,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,8CAA8C,GACvD,OAAO,CAAC,wCAAwC,CAAC,CAAC;CACtD"}
@@ -0,0 +1,9 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ export {};
9
+ //# sourceMappingURL=sparkSessionOperations.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkSessionOperations.js","sourceRoot":"","sources":["../../../src/operationsInterfaces/sparkSessionOperations.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport type {\n SparkSessionGetSparkSessionsOptionalParams,\n SparkSessionGetSparkSessionsResponse,\n SparkSessionOptions,\n SparkSessionCreateSparkSessionOptionalParams,\n SparkSessionCreateSparkSessionResponse,\n SparkSessionGetSparkSessionOptionalParams,\n SparkSessionGetSparkSessionResponse,\n SparkSessionCancelSparkSessionOptionalParams,\n SparkSessionResetSparkSessionTimeoutOptionalParams,\n SparkSessionGetSparkStatementsOptionalParams,\n SparkSessionGetSparkStatementsResponse,\n SparkStatementOptions,\n SparkSessionCreateSparkStatementOptionalParams,\n SparkSessionCreateSparkStatementResponse,\n SparkSessionGetSparkStatementOptionalParams,\n SparkSessionGetSparkStatementResponse,\n SparkSessionCancelSparkStatementOptionalParams,\n SparkSessionCancelSparkStatementResponse,\n} from \"../models/index.js\";\n\n/** Interface representing a SparkSessionOperations. 
*/\nexport interface SparkSessionOperations {\n /**\n * List all spark sessions which are running under a particular spark pool.\n * @param options - The options parameters.\n */\n getSparkSessions(\n options?: SparkSessionGetSparkSessionsOptionalParams,\n ): Promise<SparkSessionGetSparkSessionsResponse>;\n /**\n * Create new spark session.\n * @param sparkSessionOptions - Livy compatible batch job request payload.\n * @param options - The options parameters.\n */\n createSparkSession(\n sparkSessionOptions: SparkSessionOptions,\n options?: SparkSessionCreateSparkSessionOptionalParams,\n ): Promise<SparkSessionCreateSparkSessionResponse>;\n /**\n * Gets a single spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n getSparkSession(\n sessionId: number,\n options?: SparkSessionGetSparkSessionOptionalParams,\n ): Promise<SparkSessionGetSparkSessionResponse>;\n /**\n * Cancels a running spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n cancelSparkSession(\n sessionId: number,\n options?: SparkSessionCancelSparkSessionOptionalParams,\n ): Promise<void>;\n /**\n * Sends a keep alive call to the current session to reset the session timeout.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n resetSparkSessionTimeout(\n sessionId: number,\n options?: SparkSessionResetSparkSessionTimeoutOptionalParams,\n ): Promise<void>;\n /**\n * Gets a list of statements within a spark session.\n * @param sessionId - Identifier for the session.\n * @param options - The options parameters.\n */\n getSparkStatements(\n sessionId: number,\n options?: SparkSessionGetSparkStatementsOptionalParams,\n ): Promise<SparkSessionGetSparkStatementsResponse>;\n /**\n * Create statement within a spark session.\n * @param sessionId - Identifier for the session.\n * @param sparkStatementOptions - Livy compatible batch job 
request payload.\n * @param options - The options parameters.\n */\n createSparkStatement(\n sessionId: number,\n sparkStatementOptions: SparkStatementOptions,\n options?: SparkSessionCreateSparkStatementOptionalParams,\n ): Promise<SparkSessionCreateSparkStatementResponse>;\n /**\n * Gets a single statement within a spark session.\n * @param sessionId - Identifier for the session.\n * @param statementId - Identifier for the statement.\n * @param options - The options parameters.\n */\n getSparkStatement(\n sessionId: number,\n statementId: number,\n options?: SparkSessionGetSparkStatementOptionalParams,\n ): Promise<SparkSessionGetSparkStatementResponse>;\n /**\n * Kill a statement within a session.\n * @param sessionId - Identifier for the session.\n * @param statementId - Identifier for the statement.\n * @param options - The options parameters.\n */\n cancelSparkStatement(\n sessionId: number,\n statementId: number,\n options?: SparkSessionCancelSparkStatementOptionalParams,\n ): Promise<SparkSessionCancelSparkStatementResponse>;\n}\n"]}
@@ -0,0 +1,3 @@
1
+ {
2
+ "type": "module"
3
+ }
@@ -0,0 +1,24 @@
1
+ import * as coreClient from "@azure/core-client";
2
+ import type * as coreAuth from "@azure/core-auth";
3
+ import type { SparkBatch, SparkSessionOperations } from "./operationsInterfaces/index.js";
4
+ import type { SparkClientOptionalParams } from "./models/index.js";
5
+ /**
6
+ * Represents the Synapse Spark client operations.
7
+ */
8
+ export declare class SparkClient extends coreClient.ServiceClient {
9
+ endpoint: string;
10
+ livyApiVersion: string;
11
+ sparkPoolName: string;
12
+ /**
13
+ * Initializes a new instance of the SparkClient class.
14
+ * @param credentials - Subscription credentials which uniquely identify client subscription.
15
+ * @param endpoint - The workspace development endpoint, for example
16
+ * https://myworkspace.dev.azuresynapse.net.
17
+ * @param sparkPoolName - Name of the spark pool.
18
+ * @param options - The parameter options
19
+ */
20
+ constructor(credentials: coreAuth.TokenCredential, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams);
21
+ sparkBatch: SparkBatch;
22
+ sparkSessionOperations: SparkSessionOperations;
23
+ }
24
+ //# sourceMappingURL=sparkClient.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkClient.d.ts","sourceRoot":"","sources":["../../src/sparkClient.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,UAAU,MAAM,oBAAoB,CAAC;AAEjD,OAAO,KAAK,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAElD,OAAO,KAAK,EAAE,UAAU,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAC1F,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,mBAAmB,CAAC;AAEnE;;GAEG;AACH,qBAAa,WAAY,SAAQ,UAAU,CAAC,aAAa;IACvD,QAAQ,EAAE,MAAM,CAAC;IACjB,cAAc,EAAE,MAAM,CAAC;IACvB,aAAa,EAAE,MAAM,CAAC;IAEtB;;;;;;;OAOG;gBAED,WAAW,EAAE,QAAQ,CAAC,eAAe,EACrC,QAAQ,EAAE,MAAM,EAChB,aAAa,EAAE,MAAM,EACrB,OAAO,CAAC,EAAE,yBAAyB;IAuErC,UAAU,EAAE,UAAU,CAAC;IACvB,sBAAsB,EAAE,sBAAsB,CAAC;CAChD"}
@@ -0,0 +1,86 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ import * as coreClient from "@azure/core-client";
9
+ import * as coreRestPipeline from "@azure/core-rest-pipeline";
10
+ import { SparkBatchImpl, SparkSessionOperationsImpl } from "./operations/index.js";
11
+ /**
12
+ * Represents the Synapse Spark client operations.
13
+ */
14
+ export class SparkClient extends coreClient.ServiceClient {
15
+ endpoint;
16
+ livyApiVersion;
17
+ sparkPoolName;
18
+ /**
19
+ * Initializes a new instance of the SparkClient class.
20
+ * @param credentials - Subscription credentials which uniquely identify client subscription.
21
+ * @param endpoint - The workspace development endpoint, for example
22
+ * https://myworkspace.dev.azuresynapse.net.
23
+ * @param sparkPoolName - Name of the spark pool.
24
+ * @param options - The parameter options
25
+ */
26
+ constructor(credentials, endpoint, sparkPoolName, options) {
27
+ if (credentials === undefined) {
28
+ throw new Error("'credentials' cannot be null");
29
+ }
30
+ if (endpoint === undefined) {
31
+ throw new Error("'endpoint' cannot be null");
32
+ }
33
+ if (sparkPoolName === undefined) {
34
+ throw new Error("'sparkPoolName' cannot be null");
35
+ }
36
+ // Initializing default values for options
37
+ if (!options) {
38
+ options = {};
39
+ }
40
+ const defaults = {
41
+ requestContentType: "application/json; charset=utf-8",
42
+ credential: credentials,
43
+ };
44
+ const packageDetails = `azsdk-js-synapse-spark/1.0.0-beta.6`;
45
+ const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix
46
+ ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`
47
+ : `${packageDetails}`;
48
+ if (!options.credentialScopes) {
49
+ options.credentialScopes = ["https://dev.azuresynapse.net/.default"];
50
+ }
51
+ const optionsWithDefaults = {
52
+ ...defaults,
53
+ ...options,
54
+ userAgentOptions: {
55
+ userAgentPrefix,
56
+ },
57
+ baseUri: options.endpoint ?? options.baseUri ?? "{endpoint}",
58
+ };
59
+ super(optionsWithDefaults);
60
+ if (options?.pipeline && options.pipeline.getOrderedPolicies().length > 0) {
61
+ const pipelinePolicies = options.pipeline.getOrderedPolicies();
62
+ const bearerTokenAuthenticationPolicyFound = pipelinePolicies.some((pipelinePolicy) => pipelinePolicy.name === coreRestPipeline.bearerTokenAuthenticationPolicyName);
63
+ if (!bearerTokenAuthenticationPolicyFound) {
64
+ this.pipeline.removePolicy({
65
+ name: coreRestPipeline.bearerTokenAuthenticationPolicyName,
66
+ });
67
+ this.pipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({
68
+ scopes: `${optionsWithDefaults.baseUri}/.default`,
69
+ challengeCallbacks: {
70
+ authorizeRequestOnChallenge: coreClient.authorizeRequestOnClaimChallenge,
71
+ },
72
+ }));
73
+ }
74
+ }
75
+ // Parameter assignments
76
+ this.endpoint = endpoint;
77
+ this.sparkPoolName = sparkPoolName;
78
+ // Assigning values to Constant parameters
79
+ this.livyApiVersion = options.livyApiVersion || "2019-11-01-preview";
80
+ this.sparkBatch = new SparkBatchImpl(this);
81
+ this.sparkSessionOperations = new SparkSessionOperationsImpl(this);
82
+ }
83
+ sparkBatch;
84
+ sparkSessionOperations;
85
+ }
86
+ //# sourceMappingURL=sparkClient.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sparkClient.js","sourceRoot":"","sources":["../../src/sparkClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,UAAU,MAAM,oBAAoB,CAAC;AACjD,OAAO,KAAK,gBAAgB,MAAM,2BAA2B,CAAC;AAE9D,OAAO,EAAE,cAAc,EAAE,0BAA0B,EAAE,MAAM,uBAAuB,CAAC;AAInF;;GAEG;AACH,MAAM,OAAO,WAAY,SAAQ,UAAU,CAAC,aAAa;IACvD,QAAQ,CAAS;IACjB,cAAc,CAAS;IACvB,aAAa,CAAS;IAEtB;;;;;;;OAOG;IACH,YACE,WAAqC,EACrC,QAAgB,EAChB,aAAqB,EACrB,OAAmC;QAEnC,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;QAClD,CAAC;QACD,IAAI,QAAQ,KAAK,SAAS,EAAE,CAAC;YAC3B,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;QAC/C,CAAC;QACD,IAAI,aAAa,KAAK,SAAS,EAAE,CAAC;YAChC,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACpD,CAAC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO,GAAG,EAAE,CAAC;QACf,CAAC;QACD,MAAM,QAAQ,GAA8B;YAC1C,kBAAkB,EAAE,iCAAiC;YACrD,UAAU,EAAE,WAAW;SACxB,CAAC;QAEF,MAAM,cAAc,GAAG,qCAAqC,CAAC;QAC7D,MAAM,eAAe,GACnB,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,CAAC,eAAe;YAClE,CAAC,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC,eAAe,IAAI,cAAc,EAAE;YACjE,CAAC,CAAC,GAAG,cAAc,EAAE,CAAC;QAE1B,IAAI,CAAC,OAAO,CAAC,gBAAgB,EAAE,CAAC;YAC9B,OAAO,CAAC,gBAAgB,GAAG,CAAC,uCAAuC,CAAC,CAAC;QACvE,CAAC;QACD,MAAM,mBAAmB,GAAG;YAC1B,GAAG,QAAQ;YACX,GAAG,OAAO;YACV,gBAAgB,EAAE;gBAChB,eAAe;aAChB;YACD,OAAO,EAAE,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC,OAAO,IAAI,YAAY;SAC7D,CAAC;QACF,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAE3B,IAAI,OAAO,EAAE,QAAQ,IAAI,OAAO,CAAC,QAAQ,CAAC,kBAAkB,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC1E,MAAM,gBAAgB,GACpB,OAAO,CAAC,QAAQ,CAAC,kBAAkB,EAAE,CAAC;YACxC,MAAM,oCAAoC,GAAG,gBAAgB,CAAC,IAAI,CAChE,CAAC,cAAc,EAAE,EAAE,CACjB,cAAc,CAAC,IAAI,KAAK,gBAAgB,CAAC,mCAAmC,CAC/E,CAAC;YACF,IAAI,CAAC,oCAAoC,EAAE,CAAC;gBAC1C,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC;oBACzB,IAAI,EAAE,gBAAgB,CAAC,mCAAmC;iBAC3D,CAAC,CAAC;gBACH,IAAI,CAAC,QAAQ,CAAC,SAAS,CACrB,gBAAgB,CAAC,+BAA+B,CAAC;oBAC/C,MAAM,EAAE,GAAG,mBAAmB,CAAC,OAAO,WAAW;oBACjD,kBAAkB,EAAE;wBAClB,2BAA2B,EAAE,UAAU,CAAC,gCAAgC;qBACzE;iBACF,CAAC,CACH,CAAC;YACJ,CAAC;QACH,CAAC;QACD,wBAAwB;QACxB,IAAI,CAAC,QAAQ,GAAG,QA
AQ,CAAC;QACzB,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QAEnC,0CAA0C;QAC1C,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,IAAI,oBAAoB,CAAC;QACrE,IAAI,CAAC,UAAU,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC;QAC3C,IAAI,CAAC,sBAAsB,GAAG,IAAI,0BAA0B,CAAC,IAAI,CAAC,CAAC;IACrE,CAAC;IAED,UAAU,CAAa;IACvB,sBAAsB,CAAyB;CAChD","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreClient from \"@azure/core-client\";\nimport * as coreRestPipeline from \"@azure/core-rest-pipeline\";\nimport type * as coreAuth from \"@azure/core-auth\";\nimport { SparkBatchImpl, SparkSessionOperationsImpl } from \"./operations/index.js\";\nimport type { SparkBatch, SparkSessionOperations } from \"./operationsInterfaces/index.js\";\nimport type { SparkClientOptionalParams } from \"./models/index.js\";\n\n/**\n * Represents the Synapse Spark client operations.\n */\nexport class SparkClient extends coreClient.ServiceClient {\n endpoint: string;\n livyApiVersion: string;\n sparkPoolName: string;\n\n /**\n * Initializes a new instance of the SparkClient class.\n * @param credentials - Subscription credentials which uniquely identify client subscription.\n * @param endpoint - The workspace development endpoint, for example\n * https://myworkspace.dev.azuresynapse.net.\n * @param sparkPoolName - Name of the spark pool.\n * @param options - The parameter options\n */\n constructor(\n credentials: coreAuth.TokenCredential,\n endpoint: string,\n sparkPoolName: string,\n options?: SparkClientOptionalParams,\n ) {\n if (credentials === undefined) {\n throw new Error(\"'credentials' cannot be null\");\n }\n if (endpoint === undefined) {\n throw new Error(\"'endpoint' cannot be null\");\n }\n if (sparkPoolName === undefined) {\n throw new Error(\"'sparkPoolName' cannot be null\");\n }\n\n // Initializing default values for 
options\n if (!options) {\n options = {};\n }\n const defaults: SparkClientOptionalParams = {\n requestContentType: \"application/json; charset=utf-8\",\n credential: credentials,\n };\n\n const packageDetails = `azsdk-js-synapse-spark/1.0.0-beta.6`;\n const userAgentPrefix =\n options.userAgentOptions && options.userAgentOptions.userAgentPrefix\n ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`\n : `${packageDetails}`;\n\n if (!options.credentialScopes) {\n options.credentialScopes = [\"https://dev.azuresynapse.net/.default\"];\n }\n const optionsWithDefaults = {\n ...defaults,\n ...options,\n userAgentOptions: {\n userAgentPrefix,\n },\n baseUri: options.endpoint ?? options.baseUri ?? \"{endpoint}\",\n };\n super(optionsWithDefaults);\n\n if (options?.pipeline && options.pipeline.getOrderedPolicies().length > 0) {\n const pipelinePolicies: coreRestPipeline.PipelinePolicy[] =\n options.pipeline.getOrderedPolicies();\n const bearerTokenAuthenticationPolicyFound = pipelinePolicies.some(\n (pipelinePolicy) =>\n pipelinePolicy.name === coreRestPipeline.bearerTokenAuthenticationPolicyName,\n );\n if (!bearerTokenAuthenticationPolicyFound) {\n this.pipeline.removePolicy({\n name: coreRestPipeline.bearerTokenAuthenticationPolicyName,\n });\n this.pipeline.addPolicy(\n coreRestPipeline.bearerTokenAuthenticationPolicy({\n scopes: `${optionsWithDefaults.baseUri}/.default`,\n challengeCallbacks: {\n authorizeRequestOnChallenge: coreClient.authorizeRequestOnClaimChallenge,\n },\n }),\n );\n }\n }\n // Parameter assignments\n this.endpoint = endpoint;\n this.sparkPoolName = sparkPoolName;\n\n // Assigning values to Constant parameters\n this.livyApiVersion = options.livyApiVersion || \"2019-11-01-preview\";\n this.sparkBatch = new SparkBatchImpl(this);\n this.sparkSessionOperations = new SparkSessionOperationsImpl(this);\n }\n\n sparkBatch: SparkBatch;\n sparkSessionOperations: SparkSessionOperations;\n}\n"]}
@@ -0,0 +1,2 @@
1
+ export declare const tracingClient: import("@azure/core-tracing").TracingClient;
2
+ //# sourceMappingURL=tracing.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tracing.d.ts","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAUA,eAAO,MAAM,aAAa,6CAIxB,CAAC"}
@@ -0,0 +1,14 @@
1
+ /*
2
+ * Copyright (c) Microsoft Corporation.
3
+ * Licensed under the MIT License.
4
+ *
5
+ * Code generated by Microsoft (R) AutoRest Code Generator.
6
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
7
+ */
8
+ import { createTracingClient } from "@azure/core-tracing";
9
+ export const tracingClient = createTracingClient({
10
+ namespace: "Azure.Synapse.Spark",
11
+ packageName: "@azure/synapse-spark",
12
+ packageVersion: "1.0.0-beta.6",
13
+ });
14
+ //# sourceMappingURL=tracing.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAE1D,MAAM,CAAC,MAAM,aAAa,GAAG,mBAAmB,CAAC;IAC/C,SAAS,EAAE,qBAAqB;IAChC,WAAW,EAAE,sBAAsB;IACnC,cAAc,EAAE,cAAc;CAC/B,CAAC,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport { createTracingClient } from \"@azure/core-tracing\";\n\nexport const tracingClient = createTracingClient({\n namespace: \"Azure.Synapse.Spark\",\n packageName: \"@azure/synapse-spark\",\n packageVersion: \"1.0.0-beta.6\",\n});\n"]}