@azure/synapse-spark 1.0.0-beta.3 → 1.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207)
  1. package/LICENSE +21 -0
  2. package/README.md +14 -13
  3. package/dist/browser/index.d.ts +4 -0
  4. package/dist/browser/index.d.ts.map +1 -0
  5. package/dist/browser/index.js +11 -0
  6. package/dist/browser/index.js.map +1 -0
  7. package/{types/synapse-spark.d.ts → dist/browser/models/index.d.ts} +548 -621
  8. package/dist/browser/models/index.d.ts.map +1 -0
  9. package/dist/browser/models/index.js +134 -0
  10. package/dist/browser/models/index.js.map +1 -0
  11. package/dist/browser/models/mappers.d.ts +19 -0
  12. package/dist/browser/models/mappers.d.ts.map +1 -0
  13. package/dist/browser/models/mappers.js +1151 -0
  14. package/dist/browser/models/mappers.js.map +1 -0
  15. package/dist/browser/models/parameters.d.ts +16 -0
  16. package/dist/browser/models/parameters.d.ts.map +1 -0
  17. package/dist/browser/models/parameters.js +133 -0
  18. package/dist/browser/models/parameters.js.map +1 -0
  19. package/dist/browser/operations/index.d.ts +3 -0
  20. package/dist/browser/operations/index.d.ts.map +1 -0
  21. package/dist/browser/operations/index.js +10 -0
  22. package/dist/browser/operations/index.js.map +1 -0
  23. package/dist/browser/operations/sparkBatch.d.ts +36 -0
  24. package/dist/browser/operations/sparkBatch.d.ts.map +1 -0
  25. package/dist/browser/operations/sparkBatch.js +122 -0
  26. package/dist/browser/operations/sparkBatch.js.map +1 -0
  27. package/dist/browser/operations/sparkSessionOperations.d.ts +69 -0
  28. package/dist/browser/operations/sparkSessionOperations.d.ts.map +1 -0
  29. package/dist/browser/operations/sparkSessionOperations.js +259 -0
  30. package/dist/browser/operations/sparkSessionOperations.js.map +1 -0
  31. package/dist/browser/operationsInterfaces/index.d.ts +3 -0
  32. package/dist/browser/operationsInterfaces/index.d.ts.map +1 -0
  33. package/dist/browser/operationsInterfaces/index.js +10 -0
  34. package/dist/browser/operationsInterfaces/index.js.map +1 -0
  35. package/dist/browser/operationsInterfaces/sparkBatch.d.ts +28 -0
  36. package/dist/browser/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  37. package/dist/browser/operationsInterfaces/sparkBatch.js +9 -0
  38. package/dist/browser/operationsInterfaces/sparkBatch.js.map +1 -0
  39. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  40. package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  41. package/dist/browser/operationsInterfaces/sparkSessionOperations.js +9 -0
  42. package/dist/browser/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  43. package/dist/browser/package.json +3 -0
  44. package/dist/browser/sparkClient.d.ts +24 -0
  45. package/dist/browser/sparkClient.d.ts.map +1 -0
  46. package/dist/browser/sparkClient.js +86 -0
  47. package/dist/browser/sparkClient.js.map +1 -0
  48. package/dist/browser/tracing.d.ts +2 -0
  49. package/dist/browser/tracing.d.ts.map +1 -0
  50. package/dist/browser/tracing.js +14 -0
  51. package/dist/browser/tracing.js.map +1 -0
  52. package/dist/commonjs/index.d.ts +4 -0
  53. package/dist/commonjs/index.d.ts.map +1 -0
  54. package/dist/commonjs/index.js +16 -0
  55. package/dist/commonjs/index.js.map +1 -0
  56. package/dist/commonjs/models/index.d.ts +548 -0
  57. package/dist/commonjs/models/index.d.ts.map +1 -0
  58. package/dist/commonjs/models/index.js +137 -0
  59. package/dist/commonjs/models/index.js.map +1 -0
  60. package/dist/commonjs/models/mappers.d.ts +19 -0
  61. package/dist/commonjs/models/mappers.d.ts.map +1 -0
  62. package/dist/commonjs/models/mappers.js +1154 -0
  63. package/dist/commonjs/models/mappers.js.map +1 -0
  64. package/dist/commonjs/models/parameters.d.ts +16 -0
  65. package/dist/commonjs/models/parameters.d.ts.map +1 -0
  66. package/dist/commonjs/models/parameters.js +136 -0
  67. package/dist/commonjs/models/parameters.js.map +1 -0
  68. package/dist/commonjs/operations/index.d.ts +3 -0
  69. package/dist/commonjs/operations/index.d.ts.map +1 -0
  70. package/dist/commonjs/operations/index.js +13 -0
  71. package/dist/commonjs/operations/index.js.map +1 -0
  72. package/dist/commonjs/operations/sparkBatch.d.ts +36 -0
  73. package/dist/commonjs/operations/sparkBatch.d.ts.map +1 -0
  74. package/dist/commonjs/operations/sparkBatch.js +127 -0
  75. package/dist/commonjs/operations/sparkBatch.js.map +1 -0
  76. package/dist/commonjs/operations/sparkSessionOperations.d.ts +69 -0
  77. package/dist/commonjs/operations/sparkSessionOperations.d.ts.map +1 -0
  78. package/dist/commonjs/operations/sparkSessionOperations.js +264 -0
  79. package/dist/commonjs/operations/sparkSessionOperations.js.map +1 -0
  80. package/dist/commonjs/operationsInterfaces/index.d.ts +3 -0
  81. package/dist/commonjs/operationsInterfaces/index.d.ts.map +1 -0
  82. package/dist/commonjs/operationsInterfaces/index.js +13 -0
  83. package/dist/commonjs/operationsInterfaces/index.js.map +1 -0
  84. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts +28 -0
  85. package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  86. package/dist/commonjs/operationsInterfaces/sparkBatch.js +10 -0
  87. package/dist/commonjs/operationsInterfaces/sparkBatch.js.map +1 -0
  88. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  89. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  90. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js +10 -0
  91. package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  92. package/dist/commonjs/package.json +3 -0
  93. package/dist/commonjs/sparkClient.d.ts +24 -0
  94. package/dist/commonjs/sparkClient.d.ts.map +1 -0
  95. package/dist/commonjs/sparkClient.js +91 -0
  96. package/dist/commonjs/sparkClient.js.map +1 -0
  97. package/dist/commonjs/tracing.d.ts +2 -0
  98. package/dist/commonjs/tracing.d.ts.map +1 -0
  99. package/dist/commonjs/tracing.js +17 -0
  100. package/dist/commonjs/tracing.js.map +1 -0
  101. package/dist/commonjs/tsdoc-metadata.json +11 -0
  102. package/dist/esm/index.d.ts +4 -0
  103. package/dist/esm/index.d.ts.map +1 -0
  104. package/dist/esm/index.js +11 -0
  105. package/dist/esm/index.js.map +1 -0
  106. package/dist/esm/models/index.d.ts +548 -0
  107. package/dist/esm/models/index.d.ts.map +1 -0
  108. package/dist/esm/models/index.js +134 -0
  109. package/dist/esm/models/index.js.map +1 -0
  110. package/dist/esm/models/mappers.d.ts +19 -0
  111. package/dist/esm/models/mappers.d.ts.map +1 -0
  112. package/dist/esm/models/mappers.js +1151 -0
  113. package/dist/esm/models/mappers.js.map +1 -0
  114. package/dist/esm/models/parameters.d.ts +16 -0
  115. package/dist/esm/models/parameters.d.ts.map +1 -0
  116. package/dist/esm/models/parameters.js +133 -0
  117. package/dist/esm/models/parameters.js.map +1 -0
  118. package/dist/esm/operations/index.d.ts +3 -0
  119. package/dist/esm/operations/index.d.ts.map +1 -0
  120. package/dist/esm/operations/index.js +10 -0
  121. package/dist/esm/operations/index.js.map +1 -0
  122. package/dist/esm/operations/sparkBatch.d.ts +36 -0
  123. package/dist/esm/operations/sparkBatch.d.ts.map +1 -0
  124. package/dist/esm/operations/sparkBatch.js +122 -0
  125. package/dist/esm/operations/sparkBatch.js.map +1 -0
  126. package/dist/esm/operations/sparkSessionOperations.d.ts +69 -0
  127. package/dist/esm/operations/sparkSessionOperations.d.ts.map +1 -0
  128. package/dist/esm/operations/sparkSessionOperations.js +259 -0
  129. package/dist/esm/operations/sparkSessionOperations.js.map +1 -0
  130. package/dist/esm/operationsInterfaces/index.d.ts +3 -0
  131. package/dist/esm/operationsInterfaces/index.d.ts.map +1 -0
  132. package/dist/esm/operationsInterfaces/index.js +10 -0
  133. package/dist/esm/operationsInterfaces/index.js.map +1 -0
  134. package/dist/esm/operationsInterfaces/sparkBatch.d.ts +28 -0
  135. package/dist/esm/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  136. package/dist/esm/operationsInterfaces/sparkBatch.js +9 -0
  137. package/dist/esm/operationsInterfaces/sparkBatch.js.map +1 -0
  138. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  139. package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  140. package/dist/esm/operationsInterfaces/sparkSessionOperations.js +9 -0
  141. package/dist/esm/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  142. package/dist/esm/package.json +3 -0
  143. package/dist/esm/sparkClient.d.ts +24 -0
  144. package/dist/esm/sparkClient.d.ts.map +1 -0
  145. package/dist/esm/sparkClient.js +86 -0
  146. package/dist/esm/sparkClient.js.map +1 -0
  147. package/dist/esm/tracing.d.ts +2 -0
  148. package/dist/esm/tracing.d.ts.map +1 -0
  149. package/dist/esm/tracing.js +14 -0
  150. package/dist/esm/tracing.js.map +1 -0
  151. package/dist/react-native/index.d.ts +4 -0
  152. package/dist/react-native/index.d.ts.map +1 -0
  153. package/dist/react-native/index.js +11 -0
  154. package/dist/react-native/index.js.map +1 -0
  155. package/dist/react-native/models/index.d.ts +548 -0
  156. package/dist/react-native/models/index.d.ts.map +1 -0
  157. package/dist/react-native/models/index.js +134 -0
  158. package/dist/react-native/models/index.js.map +1 -0
  159. package/dist/react-native/models/mappers.d.ts +19 -0
  160. package/dist/react-native/models/mappers.d.ts.map +1 -0
  161. package/dist/react-native/models/mappers.js +1151 -0
  162. package/dist/react-native/models/mappers.js.map +1 -0
  163. package/dist/react-native/models/parameters.d.ts +16 -0
  164. package/dist/react-native/models/parameters.d.ts.map +1 -0
  165. package/dist/react-native/models/parameters.js +133 -0
  166. package/dist/react-native/models/parameters.js.map +1 -0
  167. package/dist/react-native/operations/index.d.ts +3 -0
  168. package/dist/react-native/operations/index.d.ts.map +1 -0
  169. package/dist/react-native/operations/index.js +10 -0
  170. package/dist/react-native/operations/index.js.map +1 -0
  171. package/dist/react-native/operations/sparkBatch.d.ts +36 -0
  172. package/dist/react-native/operations/sparkBatch.d.ts.map +1 -0
  173. package/dist/react-native/operations/sparkBatch.js +122 -0
  174. package/dist/react-native/operations/sparkBatch.js.map +1 -0
  175. package/dist/react-native/operations/sparkSessionOperations.d.ts +69 -0
  176. package/dist/react-native/operations/sparkSessionOperations.d.ts.map +1 -0
  177. package/dist/react-native/operations/sparkSessionOperations.js +259 -0
  178. package/dist/react-native/operations/sparkSessionOperations.js.map +1 -0
  179. package/dist/react-native/operationsInterfaces/index.d.ts +3 -0
  180. package/dist/react-native/operationsInterfaces/index.d.ts.map +1 -0
  181. package/dist/react-native/operationsInterfaces/index.js +10 -0
  182. package/dist/react-native/operationsInterfaces/index.js.map +1 -0
  183. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts +28 -0
  184. package/dist/react-native/operationsInterfaces/sparkBatch.d.ts.map +1 -0
  185. package/dist/react-native/operationsInterfaces/sparkBatch.js +9 -0
  186. package/dist/react-native/operationsInterfaces/sparkBatch.js.map +1 -0
  187. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
  188. package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
  189. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js +9 -0
  190. package/dist/react-native/operationsInterfaces/sparkSessionOperations.js.map +1 -0
  191. package/dist/react-native/package.json +3 -0
  192. package/dist/react-native/sparkClient.d.ts +24 -0
  193. package/dist/react-native/sparkClient.d.ts.map +1 -0
  194. package/dist/react-native/sparkClient.js +86 -0
  195. package/dist/react-native/sparkClient.js.map +1 -0
  196. package/dist/react-native/tracing.d.ts +2 -0
  197. package/dist/react-native/tracing.d.ts.map +1 -0
  198. package/dist/react-native/tracing.js +14 -0
  199. package/dist/react-native/tracing.js.map +1 -0
  200. package/package.json +83 -79
  201. package/CHANGELOG.md +0 -17
  202. package/dist/index.js +0 -1995
  203. package/dist/index.js.map +0 -1
  204. package/dist/index.min.js +0 -1
  205. package/dist/index.min.js.map +0 -1
  206. package/rollup.config.js +0 -3
  207. package/tsconfig.json +0 -19
@@ -1,621 +1,548 @@
1
- import * as coreAuth from '@azure/core-auth';
2
- import * as coreClient from '@azure/core-client';
3
-
4
- /** Known values of {@link PluginCurrentState} that the service accepts. */
5
- export declare enum KnownPluginCurrentState {
6
- Preparation = "Preparation",
7
- ResourceAcquisition = "ResourceAcquisition",
8
- Queued = "Queued",
9
- Submission = "Submission",
10
- Monitoring = "Monitoring",
11
- Cleanup = "Cleanup",
12
- Ended = "Ended"
13
- }
14
-
15
- /** Known values of {@link SchedulerCurrentState} that the service accepts. */
16
- export declare enum KnownSchedulerCurrentState {
17
- Queued = "Queued",
18
- Scheduled = "Scheduled",
19
- Ended = "Ended"
20
- }
21
-
22
- /** Known values of {@link SparkBatchJobResultType} that the service accepts. */
23
- export declare enum KnownSparkBatchJobResultType {
24
- Uncertain = "Uncertain",
25
- Succeeded = "Succeeded",
26
- Failed = "Failed",
27
- Cancelled = "Cancelled"
28
- }
29
-
30
- /** Known values of {@link SparkErrorSource} that the service accepts. */
31
- export declare enum KnownSparkErrorSource {
32
- System = "System",
33
- User = "User",
34
- Unknown = "Unknown",
35
- Dependency = "Dependency"
36
- }
37
-
38
- /** Known values of {@link SparkJobType} that the service accepts. */
39
- export declare enum KnownSparkJobType {
40
- SparkBatch = "SparkBatch",
41
- SparkSession = "SparkSession"
42
- }
43
-
44
- /** Known values of {@link SparkSessionResultType} that the service accepts. */
45
- export declare enum KnownSparkSessionResultType {
46
- Uncertain = "Uncertain",
47
- Succeeded = "Succeeded",
48
- Failed = "Failed",
49
- Cancelled = "Cancelled"
50
- }
51
-
52
- /** Known values of {@link SparkStatementLanguageType} that the service accepts. */
53
- export declare enum KnownSparkStatementLanguageType {
54
- Spark = "spark",
55
- PySpark = "pyspark",
56
- DotNetSpark = "dotnetspark",
57
- Sql = "sql"
58
- }
59
-
60
- /**
61
- * Defines values for PluginCurrentState. \
62
- * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState,
63
- * this enum contains the known values that the service supports.
64
- * ### Known values supported by the service
65
- * **Preparation** \
66
- * **ResourceAcquisition** \
67
- * **Queued** \
68
- * **Submission** \
69
- * **Monitoring** \
70
- * **Cleanup** \
71
- * **Ended**
72
- */
73
- export declare type PluginCurrentState = string;
74
-
75
- /**
76
- * Defines values for SchedulerCurrentState. \
77
- * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState,
78
- * this enum contains the known values that the service supports.
79
- * ### Known values supported by the service
80
- * **Queued** \
81
- * **Scheduled** \
82
- * **Ended**
83
- */
84
- export declare type SchedulerCurrentState = string;
85
-
86
- /** Interface representing a SparkBatch. */
87
- export declare interface SparkBatch {
88
- /**
89
- * List all spark batch jobs which are running under a particular spark pool.
90
- * @param options The options parameters.
91
- */
92
- getSparkBatchJobs(options?: SparkBatchGetSparkBatchJobsOptionalParams): Promise<SparkBatchGetSparkBatchJobsResponse>;
93
- /**
94
- * Create new spark batch job.
95
- * @param sparkBatchJobOptions Livy compatible batch job request payload.
96
- * @param options The options parameters.
97
- */
98
- createSparkBatchJob(sparkBatchJobOptions: SparkBatchJobOptions, options?: SparkBatchCreateSparkBatchJobOptionalParams): Promise<SparkBatchCreateSparkBatchJobResponse>;
99
- /**
100
- * Gets a single spark batch job.
101
- * @param batchId Identifier for the batch job.
102
- * @param options The options parameters.
103
- */
104
- getSparkBatchJob(batchId: number, options?: SparkBatchGetSparkBatchJobOptionalParams): Promise<SparkBatchGetSparkBatchJobResponse>;
105
- /**
106
- * Cancels a running spark batch job.
107
- * @param batchId Identifier for the batch job.
108
- * @param options The options parameters.
109
- */
110
- cancelSparkBatchJob(batchId: number, options?: SparkBatchCancelSparkBatchJobOptionalParams): Promise<void>;
111
- }
112
-
113
- /** Optional parameters. */
114
- export declare interface SparkBatchCancelSparkBatchJobOptionalParams extends coreClient.OperationOptions {
115
- }
116
-
117
- /** Optional parameters. */
118
- export declare interface SparkBatchCreateSparkBatchJobOptionalParams extends coreClient.OperationOptions {
119
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
120
- detailed?: boolean;
121
- }
122
-
123
- /** Contains response data for the createSparkBatchJob operation. */
124
- export declare type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob;
125
-
126
- /** Optional parameters. */
127
- export declare interface SparkBatchGetSparkBatchJobOptionalParams extends coreClient.OperationOptions {
128
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
129
- detailed?: boolean;
130
- }
131
-
132
- /** Contains response data for the getSparkBatchJob operation. */
133
- export declare type SparkBatchGetSparkBatchJobResponse = SparkBatchJob;
134
-
135
- /** Optional parameters. */
136
- export declare interface SparkBatchGetSparkBatchJobsOptionalParams extends coreClient.OperationOptions {
137
- /** Optional param specifying which index the list should begin from. */
138
- fromParam?: number;
139
- /**
140
- * Optional param specifying the size of the returned list.
141
- * By default it is 20 and that is the maximum.
142
- */
143
- size?: number;
144
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
145
- detailed?: boolean;
146
- }
147
-
148
- /** Contains response data for the getSparkBatchJobs operation. */
149
- export declare type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection;
150
-
151
- export declare interface SparkBatchJob {
152
- livyInfo?: SparkBatchJobState;
153
- /** The batch name. */
154
- name?: string;
155
- /** The workspace name. */
156
- workspaceName?: string;
157
- /** The Spark pool name. */
158
- sparkPoolName?: string;
159
- /** The submitter name. */
160
- submitterName?: string;
161
- /** The submitter identifier. */
162
- submitterId?: string;
163
- /** The artifact identifier. */
164
- artifactId?: string;
165
- /** The job type. */
166
- jobType?: SparkJobType;
167
- /** The Spark batch job result. */
168
- result?: SparkBatchJobResultType;
169
- /** The scheduler information. */
170
- scheduler?: SparkScheduler;
171
- /** The plugin information. */
172
- plugin?: SparkServicePlugin;
173
- /** The error information. */
174
- errors?: SparkServiceError[];
175
- /** The tags. */
176
- tags?: {
177
- [propertyName: string]: string;
178
- };
179
- /** The session Id. */
180
- id: number;
181
- /** The application id of this session */
182
- appId?: string;
183
- /** The detailed application info. */
184
- appInfo?: {
185
- [propertyName: string]: string;
186
- };
187
- /** The batch state */
188
- state?: string;
189
- /** The log lines. */
190
- logLines?: string[];
191
- }
192
-
193
- /** Response for batch list operation. */
194
- export declare interface SparkBatchJobCollection {
195
- /** The start index of fetched sessions. */
196
- from: number;
197
- /** Number of sessions fetched. */
198
- total: number;
199
- /** Batch list */
200
- sessions?: SparkBatchJob[];
201
- }
202
-
203
- export declare interface SparkBatchJobOptions {
204
- /** Dictionary of <string> */
205
- tags?: {
206
- [propertyName: string]: string;
207
- };
208
- artifactId?: string;
209
- name: string;
210
- file: string;
211
- className?: string;
212
- arguments?: string[];
213
- jars?: string[];
214
- pythonFiles?: string[];
215
- files?: string[];
216
- archives?: string[];
217
- /** Dictionary of <string> */
218
- configuration?: {
219
- [propertyName: string]: string;
220
- };
221
- driverMemory?: string;
222
- driverCores?: number;
223
- executorMemory?: string;
224
- executorCores?: number;
225
- executorCount?: number;
226
- }
227
-
228
- /**
229
- * Defines values for SparkBatchJobResultType. \
230
- * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType,
231
- * this enum contains the known values that the service supports.
232
- * ### Known values supported by the service
233
- * **Uncertain** \
234
- * **Succeeded** \
235
- * **Failed** \
236
- * **Cancelled**
237
- */
238
- export declare type SparkBatchJobResultType = string;
239
-
240
- export declare interface SparkBatchJobState {
241
- /** the time that at which "not_started" livy state was first seen. */
242
- notStartedAt?: Date;
243
- /** the time that at which "starting" livy state was first seen. */
244
- startingAt?: Date;
245
- /** the time that at which "running" livy state was first seen. */
246
- runningAt?: Date;
247
- /** time that at which "dead" livy state was first seen. */
248
- deadAt?: Date;
249
- /** the time that at which "success" livy state was first seen. */
250
- successAt?: Date;
251
- /** the time that at which "killed" livy state was first seen. */
252
- terminatedAt?: Date;
253
- /** the time that at which "recovering" livy state was first seen. */
254
- recoveringAt?: Date;
255
- /** the Spark job state. */
256
- currentState?: string;
257
- jobCreationRequest?: SparkRequest;
258
- }
259
-
260
- export declare class SparkClient extends SparkClientContext {
261
- /**
262
- * Initializes a new instance of the SparkClient class.
263
- * @param credentials Subscription credentials which uniquely identify client subscription.
264
- * @param endpoint The workspace development endpoint, for example
265
- * https://myworkspace.dev.azuresynapse.net.
266
- * @param sparkPoolName Name of the spark pool.
267
- * @param options The parameter options
268
- */
269
- constructor(credentials: coreAuth.TokenCredential, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams);
270
- sparkBatch: SparkBatch;
271
- sparkSessionOperations: SparkSessionOperations;
272
- }
273
-
274
- export declare class SparkClientContext extends coreClient.ServiceClient {
275
- endpoint: string;
276
- livyApiVersion: string;
277
- sparkPoolName: string;
278
- /**
279
- * Initializes a new instance of the SparkClientContext class.
280
- * @param credentials Subscription credentials which uniquely identify client subscription.
281
- * @param endpoint The workspace development endpoint, for example
282
- * https://myworkspace.dev.azuresynapse.net.
283
- * @param sparkPoolName Name of the spark pool.
284
- * @param options The parameter options
285
- */
286
- constructor(credentials: coreAuth.TokenCredential, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams);
287
- }
288
-
289
- /** Optional parameters. */
290
- export declare interface SparkClientOptionalParams extends coreClient.ServiceClientOptions {
291
- /** Valid api-version for the request. */
292
- livyApiVersion?: string;
293
- /** Overrides client endpoint. */
294
- endpoint?: string;
295
- }
296
-
297
- /**
298
- * Defines values for SparkErrorSource. \
299
- * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource,
300
- * this enum contains the known values that the service supports.
301
- * ### Known values supported by the service
302
- * **System** \
303
- * **User** \
304
- * **Unknown** \
305
- * **Dependency**
306
- */
307
- export declare type SparkErrorSource = string;
308
-
309
- /**
310
- * Defines values for SparkJobType. \
311
- * {@link KnownSparkJobType} can be used interchangeably with SparkJobType,
312
- * this enum contains the known values that the service supports.
313
- * ### Known values supported by the service
314
- * **SparkBatch** \
315
- * **SparkSession**
316
- */
317
- export declare type SparkJobType = string;
318
-
319
- export declare interface SparkRequest {
320
- name?: string;
321
- file?: string;
322
- className?: string;
323
- arguments?: string[];
324
- jars?: string[];
325
- pythonFiles?: string[];
326
- files?: string[];
327
- archives?: string[];
328
- /** Dictionary of <string> */
329
- configuration?: {
330
- [propertyName: string]: string;
331
- };
332
- driverMemory?: string;
333
- driverCores?: number;
334
- executorMemory?: string;
335
- executorCores?: number;
336
- executorCount?: number;
337
- }
338
-
339
- export declare interface SparkScheduler {
340
- submittedAt?: Date;
341
- scheduledAt?: Date;
342
- endedAt?: Date;
343
- cancellationRequestedAt?: Date;
344
- currentState?: SchedulerCurrentState;
345
- }
346
-
347
- export declare interface SparkServiceError {
348
- message?: string;
349
- errorCode?: string;
350
- source?: SparkErrorSource;
351
- }
352
-
353
- export declare interface SparkServicePlugin {
354
- preparationStartedAt?: Date;
355
- resourceAcquisitionStartedAt?: Date;
356
- submissionStartedAt?: Date;
357
- monitoringStartedAt?: Date;
358
- cleanupStartedAt?: Date;
359
- currentState?: PluginCurrentState;
360
- }
361
-
362
- export declare interface SparkSession {
363
- livyInfo?: SparkSessionState;
364
- name?: string;
365
- workspaceName?: string;
366
- sparkPoolName?: string;
367
- submitterName?: string;
368
- submitterId?: string;
369
- artifactId?: string;
370
- /** The job type. */
371
- jobType?: SparkJobType;
372
- result?: SparkSessionResultType;
373
- scheduler?: SparkScheduler;
374
- plugin?: SparkServicePlugin;
375
- errors?: SparkServiceError[];
376
- /** Dictionary of <string> */
377
- tags?: {
378
- [propertyName: string]: string;
379
- };
380
- id: number;
381
- appId?: string;
382
- /** Dictionary of <string> */
383
- appInfo?: {
384
- [propertyName: string]: string;
385
- };
386
- state?: string;
387
- logLines?: string[];
388
- }
389
-
390
- export declare interface SparkSessionCollection {
391
- from: number;
392
- total: number;
393
- sessions?: SparkSession[];
394
- }
395
-
396
- /** Interface representing a SparkSessionOperations. */
397
- export declare interface SparkSessionOperations {
398
- /**
399
- * List all spark sessions which are running under a particular spark pool.
400
- * @param options The options parameters.
401
- */
402
- getSparkSessions(options?: SparkSessionOperationsGetSparkSessionsOptionalParams): Promise<SparkSessionOperationsGetSparkSessionsResponse>;
403
- /**
404
- * Create new spark session.
405
- * @param sparkSessionOptions Livy compatible batch job request payload.
406
- * @param options The options parameters.
407
- */
408
- createSparkSession(sparkSessionOptions: SparkSessionOptions, options?: SparkSessionOperationsCreateSparkSessionOptionalParams): Promise<SparkSessionOperationsCreateSparkSessionResponse>;
409
- /**
410
- * Gets a single spark session.
411
- * @param sessionId Identifier for the session.
412
- * @param options The options parameters.
413
- */
414
- getSparkSession(sessionId: number, options?: SparkSessionOperationsGetSparkSessionOptionalParams): Promise<SparkSessionOperationsGetSparkSessionResponse>;
415
- /**
416
- * Cancels a running spark session.
417
- * @param sessionId Identifier for the session.
418
- * @param options The options parameters.
419
- */
420
- cancelSparkSession(sessionId: number, options?: SparkSessionOperationsCancelSparkSessionOptionalParams): Promise<void>;
421
- /**
422
- * Sends a keep alive call to the current session to reset the session timeout.
423
- * @param sessionId Identifier for the session.
424
- * @param options The options parameters.
425
- */
426
- resetSparkSessionTimeout(sessionId: number, options?: SparkSessionOperationsResetSparkSessionTimeoutOptionalParams): Promise<void>;
427
- /**
428
- * Gets a list of statements within a spark session.
429
- * @param sessionId Identifier for the session.
430
- * @param options The options parameters.
431
- */
432
- getSparkStatements(sessionId: number, options?: SparkSessionOperationsGetSparkStatementsOptionalParams): Promise<SparkSessionOperationsGetSparkStatementsResponse>;
433
- /**
434
- * Create statement within a spark session.
435
- * @param sessionId Identifier for the session.
436
- * @param sparkStatementOptions Livy compatible batch job request payload.
437
- * @param options The options parameters.
438
- */
439
- createSparkStatement(sessionId: number, sparkStatementOptions: SparkStatementOptions, options?: SparkSessionOperationsCreateSparkStatementOptionalParams): Promise<SparkSessionOperationsCreateSparkStatementResponse>;
440
- /**
441
- * Gets a single statement within a spark session.
442
- * @param sessionId Identifier for the session.
443
- * @param statementId Identifier for the statement.
444
- * @param options The options parameters.
445
- */
446
- getSparkStatement(sessionId: number, statementId: number, options?: SparkSessionOperationsGetSparkStatementOptionalParams): Promise<SparkSessionOperationsGetSparkStatementResponse>;
447
- /**
448
- * Kill a statement within a session.
449
- * @param sessionId Identifier for the session.
450
- * @param statementId Identifier for the statement.
451
- * @param options The options parameters.
452
- */
453
- cancelSparkStatement(sessionId: number, statementId: number, options?: SparkSessionOperationsCancelSparkStatementOptionalParams): Promise<SparkSessionOperationsCancelSparkStatementResponse>;
454
- }
455
-
456
- /** Optional parameters. */
457
- export declare interface SparkSessionOperationsCancelSparkSessionOptionalParams extends coreClient.OperationOptions {
458
- }
459
-
460
- /** Optional parameters. */
461
- export declare interface SparkSessionOperationsCancelSparkStatementOptionalParams extends coreClient.OperationOptions {
462
- }
463
-
464
- /** Contains response data for the cancelSparkStatement operation. */
465
- export declare type SparkSessionOperationsCancelSparkStatementResponse = SparkStatementCancellationResult;
466
-
467
- /** Optional parameters. */
468
- export declare interface SparkSessionOperationsCreateSparkSessionOptionalParams extends coreClient.OperationOptions {
469
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
470
- detailed?: boolean;
471
- }
472
-
473
- /** Contains response data for the createSparkSession operation. */
474
- export declare type SparkSessionOperationsCreateSparkSessionResponse = SparkSession;
475
-
476
- /** Optional parameters. */
477
- export declare interface SparkSessionOperationsCreateSparkStatementOptionalParams extends coreClient.OperationOptions {
478
- }
479
-
480
- /** Contains response data for the createSparkStatement operation. */
481
- export declare type SparkSessionOperationsCreateSparkStatementResponse = SparkStatement;
482
-
483
- /** Optional parameters. */
484
- export declare interface SparkSessionOperationsGetSparkSessionOptionalParams extends coreClient.OperationOptions {
485
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
486
- detailed?: boolean;
487
- }
488
-
489
- /** Contains response data for the getSparkSession operation. */
490
- export declare type SparkSessionOperationsGetSparkSessionResponse = SparkSession;
491
-
492
- /** Optional parameters. */
493
- export declare interface SparkSessionOperationsGetSparkSessionsOptionalParams extends coreClient.OperationOptions {
494
- /** Optional param specifying which index the list should begin from. */
495
- fromParam?: number;
496
- /**
497
- * Optional param specifying the size of the returned list.
498
- * By default it is 20 and that is the maximum.
499
- */
500
- size?: number;
501
- /** Optional query param specifying whether detailed response is returned beyond plain livy. */
502
- detailed?: boolean;
503
- }
504
-
505
- /** Contains response data for the getSparkSessions operation. */
506
- export declare type SparkSessionOperationsGetSparkSessionsResponse = SparkSessionCollection;
507
-
508
- /** Optional parameters. */
509
- export declare interface SparkSessionOperationsGetSparkStatementOptionalParams extends coreClient.OperationOptions {
510
- }
511
-
512
- /** Contains response data for the getSparkStatement operation. */
513
- export declare type SparkSessionOperationsGetSparkStatementResponse = SparkStatement;
514
-
515
- /** Optional parameters. */
516
- export declare interface SparkSessionOperationsGetSparkStatementsOptionalParams extends coreClient.OperationOptions {
517
- }
518
-
519
- /** Contains response data for the getSparkStatements operation. */
520
- export declare type SparkSessionOperationsGetSparkStatementsResponse = SparkStatementCollection;
521
-
522
- /** Optional parameters. */
523
- export declare interface SparkSessionOperationsResetSparkSessionTimeoutOptionalParams extends coreClient.OperationOptions {
524
- }
525
-
526
- export declare interface SparkSessionOptions {
527
- /** Dictionary of <string> */
528
- tags?: {
529
- [propertyName: string]: string;
530
- };
531
- artifactId?: string;
532
- name: string;
533
- file?: string;
534
- className?: string;
535
- arguments?: string[];
536
- jars?: string[];
537
- pythonFiles?: string[];
538
- files?: string[];
539
- archives?: string[];
540
- /** Dictionary of <string> */
541
- configuration?: {
542
- [propertyName: string]: string;
543
- };
544
- driverMemory?: string;
545
- driverCores?: number;
546
- executorMemory?: string;
547
- executorCores?: number;
548
- executorCount?: number;
549
- }
550
-
551
- /**
552
- * Defines values for SparkSessionResultType. \
553
- * {@link KnownSparkSessionResultType} can be used interchangeably with SparkSessionResultType,
554
- * this enum contains the known values that the service supports.
555
- * ### Known values supported by the service
556
- * **Uncertain** \
557
- * **Succeeded** \
558
- * **Failed** \
559
- * **Cancelled**
560
- */
561
- export declare type SparkSessionResultType = string;
562
-
563
- export declare interface SparkSessionState {
564
- notStartedAt?: Date;
565
- startingAt?: Date;
566
- idleAt?: Date;
567
- deadAt?: Date;
568
- shuttingDownAt?: Date;
569
- terminatedAt?: Date;
570
- recoveringAt?: Date;
571
- busyAt?: Date;
572
- errorAt?: Date;
573
- currentState?: string;
574
- jobCreationRequest?: SparkRequest;
575
- }
576
-
577
- export declare interface SparkStatement {
578
- id: number;
579
- code?: string;
580
- state?: string;
581
- output?: SparkStatementOutput;
582
- }
583
-
584
- export declare interface SparkStatementCancellationResult {
585
- /** The msg property from the Livy API. The value is always "canceled". */
586
- message?: string;
587
- }
588
-
589
- export declare interface SparkStatementCollection {
590
- total: number;
591
- statements?: SparkStatement[];
592
- }
593
-
594
- /**
595
- * Defines values for SparkStatementLanguageType. \
596
- * {@link KnownSparkStatementLanguageType} can be used interchangeably with SparkStatementLanguageType,
597
- * this enum contains the known values that the service supports.
598
- * ### Known values supported by the service
599
- * **spark** \
600
- * **pyspark** \
601
- * **dotnetspark** \
602
- * **sql**
603
- */
604
- export declare type SparkStatementLanguageType = string;
605
-
606
- export declare interface SparkStatementOptions {
607
- code?: string;
608
- kind?: SparkStatementLanguageType;
609
- }
610
-
611
- export declare interface SparkStatementOutput {
612
- status?: string;
613
- executionCount: number;
614
- /** Any object */
615
- data?: Record<string, unknown>;
616
- errorName?: string;
617
- errorValue?: string;
618
- traceback?: string[];
619
- }
620
-
621
- export { }
1
+ import type * as coreClient from "@azure/core-client";
2
+ /** Response for batch list operation. */
3
+ export interface SparkBatchJobCollection {
4
+ /** The start index of fetched sessions. */
5
+ from: number;
6
+ /** Number of sessions fetched. */
7
+ total: number;
8
+ /** Batch list */
9
+ sessions?: SparkBatchJob[];
10
+ }
11
+ export interface SparkBatchJob {
12
+ livyInfo?: SparkBatchJobState;
13
+ /** The batch name. */
14
+ name?: string;
15
+ /** The workspace name. */
16
+ workspaceName?: string;
17
+ /** The Spark pool name. */
18
+ sparkPoolName?: string;
19
+ /** The submitter name. */
20
+ submitterName?: string;
21
+ /** The submitter identifier. */
22
+ submitterId?: string;
23
+ /** The artifact identifier. */
24
+ artifactId?: string;
25
+ /** The job type. */
26
+ jobType?: SparkJobType;
27
+ /** The Spark batch job result. */
28
+ result?: SparkBatchJobResultType;
29
+ /** The scheduler information. */
30
+ scheduler?: SparkScheduler;
31
+ /** The plugin information. */
32
+ plugin?: SparkServicePlugin;
33
+ /** The error information. */
34
+ errors?: SparkServiceError[];
35
+ /** The tags. */
36
+ tags?: {
37
+ [propertyName: string]: string;
38
+ };
39
+ /** The session Id. */
40
+ id: number;
41
+ /** The application id of this session */
42
+ appId?: string;
43
+ /** The detailed application info. */
44
+ appInfo?: {
45
+ [propertyName: string]: string;
46
+ };
47
+ /** The batch state */
48
+ state?: LivyStates;
49
+ /** The log lines. */
50
+ logLines?: string[];
51
+ }
52
+ export interface SparkBatchJobState {
53
+ /** the time that at which "not_started" livy state was first seen. */
54
+ notStartedAt?: Date;
55
+ /** the time that at which "starting" livy state was first seen. */
56
+ startingAt?: Date;
57
+ /** the time that at which "running" livy state was first seen. */
58
+ runningAt?: Date;
59
+ /** time that at which "dead" livy state was first seen. */
60
+ deadAt?: Date;
61
+ /** the time that at which "success" livy state was first seen. */
62
+ successAt?: Date;
63
+ /** the time that at which "killed" livy state was first seen. */
64
+ terminatedAt?: Date;
65
+ /** the time that at which "recovering" livy state was first seen. */
66
+ recoveringAt?: Date;
67
+ /** the Spark job state. */
68
+ currentState?: string;
69
+ jobCreationRequest?: SparkRequest;
70
+ }
71
+ export interface SparkRequest {
72
+ name?: string;
73
+ file?: string;
74
+ className?: string;
75
+ arguments?: string[];
76
+ jars?: string[];
77
+ pythonFiles?: string[];
78
+ files?: string[];
79
+ archives?: string[];
80
+ /** Dictionary of <string> */
81
+ configuration?: {
82
+ [propertyName: string]: string;
83
+ };
84
+ driverMemory?: string;
85
+ driverCores?: number;
86
+ executorMemory?: string;
87
+ executorCores?: number;
88
+ executorCount?: number;
89
+ }
90
+ export interface SparkScheduler {
91
+ submittedAt?: Date;
92
+ scheduledAt?: Date;
93
+ endedAt?: Date;
94
+ cancellationRequestedAt?: Date;
95
+ currentState?: SchedulerCurrentState;
96
+ }
97
+ export interface SparkServicePlugin {
98
+ preparationStartedAt?: Date;
99
+ resourceAcquisitionStartedAt?: Date;
100
+ submissionStartedAt?: Date;
101
+ monitoringStartedAt?: Date;
102
+ cleanupStartedAt?: Date;
103
+ currentState?: PluginCurrentState;
104
+ }
105
+ export interface SparkServiceError {
106
+ message?: string;
107
+ errorCode?: string;
108
+ source?: SparkErrorSource;
109
+ }
110
+ export interface SparkBatchJobOptions {
111
+ /** Dictionary of <string> */
112
+ tags?: {
113
+ [propertyName: string]: string;
114
+ };
115
+ artifactId?: string;
116
+ name: string;
117
+ file: string;
118
+ className?: string;
119
+ arguments?: string[];
120
+ jars?: string[];
121
+ pythonFiles?: string[];
122
+ files?: string[];
123
+ archives?: string[];
124
+ /** Dictionary of <string> */
125
+ configuration?: {
126
+ [propertyName: string]: string;
127
+ };
128
+ driverMemory?: string;
129
+ driverCores?: number;
130
+ executorMemory?: string;
131
+ executorCores?: number;
132
+ executorCount?: number;
133
+ }
134
+ export interface SparkSessionCollection {
135
+ from: number;
136
+ total: number;
137
+ sessions?: SparkSession[];
138
+ }
139
+ export interface SparkSession {
140
+ livyInfo?: SparkSessionState;
141
+ name?: string;
142
+ workspaceName?: string;
143
+ sparkPoolName?: string;
144
+ submitterName?: string;
145
+ submitterId?: string;
146
+ artifactId?: string;
147
+ /** The job type. */
148
+ jobType?: SparkJobType;
149
+ result?: SparkSessionResultType;
150
+ scheduler?: SparkScheduler;
151
+ plugin?: SparkServicePlugin;
152
+ errors?: SparkServiceError[];
153
+ /** Dictionary of <string> */
154
+ tags?: {
155
+ [propertyName: string]: string;
156
+ };
157
+ id: number;
158
+ appId?: string;
159
+ /** Dictionary of <string> */
160
+ appInfo?: {
161
+ [propertyName: string]: string;
162
+ };
163
+ /** The session state. */
164
+ state?: LivyStates;
165
+ logLines?: string[];
166
+ }
167
+ export interface SparkSessionState {
168
+ notStartedAt?: Date;
169
+ startingAt?: Date;
170
+ idleAt?: Date;
171
+ deadAt?: Date;
172
+ shuttingDownAt?: Date;
173
+ terminatedAt?: Date;
174
+ recoveringAt?: Date;
175
+ busyAt?: Date;
176
+ errorAt?: Date;
177
+ currentState?: string;
178
+ jobCreationRequest?: SparkRequest;
179
+ }
180
+ export interface SparkSessionOptions {
181
+ /** Dictionary of <string> */
182
+ tags?: {
183
+ [propertyName: string]: string;
184
+ };
185
+ artifactId?: string;
186
+ name: string;
187
+ file?: string;
188
+ className?: string;
189
+ arguments?: string[];
190
+ jars?: string[];
191
+ pythonFiles?: string[];
192
+ files?: string[];
193
+ archives?: string[];
194
+ /** Dictionary of <string> */
195
+ configuration?: {
196
+ [propertyName: string]: string;
197
+ };
198
+ driverMemory?: string;
199
+ driverCores?: number;
200
+ executorMemory?: string;
201
+ executorCores?: number;
202
+ executorCount?: number;
203
+ }
204
+ export interface SparkStatementCollection {
205
+ total: number;
206
+ statements?: SparkStatement[];
207
+ }
208
+ export interface SparkStatement {
209
+ id: number;
210
+ code?: string;
211
+ state?: LivyStatementStates;
212
+ output?: SparkStatementOutput;
213
+ }
214
+ export interface SparkStatementOutput {
215
+ status?: string;
216
+ executionCount: number;
217
+ /** Any object */
218
+ data?: Record<string, unknown>;
219
+ errorName?: string;
220
+ errorValue?: string;
221
+ traceback?: string[];
222
+ }
223
+ export interface SparkStatementOptions {
224
+ code?: string;
225
+ kind?: SparkStatementLanguageType;
226
+ }
227
+ export interface SparkStatementCancellationResult {
228
+ /** The msg property from the Livy API. The value is always "canceled". */
229
+ message?: string;
230
+ }
231
+ /** Known values of {@link SparkJobType} that the service accepts. */
232
+ export declare enum KnownSparkJobType {
233
+ /** SparkBatch */
234
+ SparkBatch = "SparkBatch",
235
+ /** SparkSession */
236
+ SparkSession = "SparkSession"
237
+ }
238
+ /**
239
+ * Defines values for SparkJobType.
240
+ * {@link KnownSparkJobType} can be used interchangeably with SparkJobType,
241
+ * this enum contains the known values that the service supports.
242
+ * ### Known values supported by the service
243
+ * **SparkBatch**
244
+ * **SparkSession**
245
+ */
246
+ export type SparkJobType = string;
247
+ /** Known values of {@link SparkBatchJobResultType} that the service accepts. */
248
+ export declare enum KnownSparkBatchJobResultType {
249
+ /** Uncertain */
250
+ Uncertain = "Uncertain",
251
+ /** Succeeded */
252
+ Succeeded = "Succeeded",
253
+ /** Failed */
254
+ Failed = "Failed",
255
+ /** Cancelled */
256
+ Cancelled = "Cancelled"
257
+ }
258
+ /**
259
+ * Defines values for SparkBatchJobResultType.
260
+ * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType,
261
+ * this enum contains the known values that the service supports.
262
+ * ### Known values supported by the service
263
+ * **Uncertain**
264
+ * **Succeeded**
265
+ * **Failed**
266
+ * **Cancelled**
267
+ */
268
+ export type SparkBatchJobResultType = string;
269
+ /** Known values of {@link SchedulerCurrentState} that the service accepts. */
270
+ export declare enum KnownSchedulerCurrentState {
271
+ /** Queued */
272
+ Queued = "Queued",
273
+ /** Scheduled */
274
+ Scheduled = "Scheduled",
275
+ /** Ended */
276
+ Ended = "Ended"
277
+ }
278
+ /**
279
+ * Defines values for SchedulerCurrentState.
280
+ * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState,
281
+ * this enum contains the known values that the service supports.
282
+ * ### Known values supported by the service
283
+ * **Queued**
284
+ * **Scheduled**
285
+ * **Ended**
286
+ */
287
+ export type SchedulerCurrentState = string;
288
+ /** Known values of {@link PluginCurrentState} that the service accepts. */
289
+ export declare enum KnownPluginCurrentState {
290
+ /** Preparation */
291
+ Preparation = "Preparation",
292
+ /** ResourceAcquisition */
293
+ ResourceAcquisition = "ResourceAcquisition",
294
+ /** Queued */
295
+ Queued = "Queued",
296
+ /** Submission */
297
+ Submission = "Submission",
298
+ /** Monitoring */
299
+ Monitoring = "Monitoring",
300
+ /** Cleanup */
301
+ Cleanup = "Cleanup",
302
+ /** Ended */
303
+ Ended = "Ended"
304
+ }
305
+ /**
306
+ * Defines values for PluginCurrentState.
307
+ * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState,
308
+ * this enum contains the known values that the service supports.
309
+ * ### Known values supported by the service
310
+ * **Preparation**
311
+ * **ResourceAcquisition**
312
+ * **Queued**
313
+ * **Submission**
314
+ * **Monitoring**
315
+ * **Cleanup**
316
+ * **Ended**
317
+ */
318
+ export type PluginCurrentState = string;
319
+ /** Known values of {@link SparkErrorSource} that the service accepts. */
320
+ export declare enum KnownSparkErrorSource {
321
+ /** System */
322
+ System = "System",
323
+ /** User */
324
+ User = "User",
325
+ /** Unknown */
326
+ Unknown = "Unknown",
327
+ /** Dependency */
328
+ Dependency = "Dependency"
329
+ }
330
+ /**
331
+ * Defines values for SparkErrorSource.
332
+ * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource,
333
+ * this enum contains the known values that the service supports.
334
+ * ### Known values supported by the service
335
+ * **System**
336
+ * **User**
337
+ * **Unknown**
338
+ * **Dependency**
339
+ */
340
+ export type SparkErrorSource = string;
341
+ /** Known values of {@link LivyStates} that the service accepts. */
342
+ export declare enum KnownLivyStates {
343
+ /** NotStarted */
344
+ NotStarted = "not_started",
345
+ /** Starting */
346
+ Starting = "starting",
347
+ /** Idle */
348
+ Idle = "idle",
349
+ /** Busy */
350
+ Busy = "busy",
351
+ /** ShuttingDown */
352
+ ShuttingDown = "shutting_down",
353
+ /** Error */
354
+ Error = "error",
355
+ /** Dead */
356
+ Dead = "dead",
357
+ /** Killed */
358
+ Killed = "killed",
359
+ /** Success */
360
+ Success = "success",
361
+ /** Running */
362
+ Running = "running",
363
+ /** Recovering */
364
+ Recovering = "recovering"
365
+ }
366
+ /**
367
+ * Defines values for LivyStates.
368
+ * {@link KnownLivyStates} can be used interchangeably with LivyStates,
369
+ * this enum contains the known values that the service supports.
370
+ * ### Known values supported by the service
371
+ * **not_started**
372
+ * **starting**
373
+ * **idle**
374
+ * **busy**
375
+ * **shutting_down**
376
+ * **error**
377
+ * **dead**
378
+ * **killed**
379
+ * **success**
380
+ * **running**
381
+ * **recovering**
382
+ */
383
+ export type LivyStates = string;
384
+ /** Known values of {@link SparkSessionResultType} that the service accepts. */
385
+ export declare enum KnownSparkSessionResultType {
386
+ /** Uncertain */
387
+ Uncertain = "Uncertain",
388
+ /** Succeeded */
389
+ Succeeded = "Succeeded",
390
+ /** Failed */
391
+ Failed = "Failed",
392
+ /** Cancelled */
393
+ Cancelled = "Cancelled"
394
+ }
395
+ /**
396
+ * Defines values for SparkSessionResultType.
397
+ * {@link KnownSparkSessionResultType} can be used interchangeably with SparkSessionResultType,
398
+ * this enum contains the known values that the service supports.
399
+ * ### Known values supported by the service
400
+ * **Uncertain**
401
+ * **Succeeded**
402
+ * **Failed**
403
+ * **Cancelled**
404
+ */
405
+ export type SparkSessionResultType = string;
406
+ /** Known values of {@link LivyStatementStates} that the service accepts. */
407
+ export declare enum KnownLivyStatementStates {
408
+ /** Waiting */
409
+ Waiting = "waiting",
410
+ /** Running */
411
+ Running = "running",
412
+ /** Available */
413
+ Available = "available",
414
+ /** Error */
415
+ Error = "error",
416
+ /** Cancelling */
417
+ Cancelling = "cancelling",
418
+ /** Cancelled */
419
+ Cancelled = "cancelled"
420
+ }
421
+ /**
422
+ * Defines values for LivyStatementStates.
423
+ * {@link KnownLivyStatementStates} can be used interchangeably with LivyStatementStates,
424
+ * this enum contains the known values that the service supports.
425
+ * ### Known values supported by the service
426
+ * **waiting**
427
+ * **running**
428
+ * **available**
429
+ * **error**
430
+ * **cancelling**
431
+ * **cancelled**
432
+ */
433
+ export type LivyStatementStates = string;
434
+ /** Known values of {@link SparkStatementLanguageType} that the service accepts. */
435
+ export declare enum KnownSparkStatementLanguageType {
436
+ /** Spark */
437
+ Spark = "spark",
438
+ /** PySpark */
439
+ PySpark = "pyspark",
440
+ /** DotNetSpark */
441
+ DotNetSpark = "dotnetspark",
442
+ /** Sql */
443
+ Sql = "sql"
444
+ }
445
+ /**
446
+ * Defines values for SparkStatementLanguageType.
447
+ * {@link KnownSparkStatementLanguageType} can be used interchangeably with SparkStatementLanguageType,
448
+ * this enum contains the known values that the service supports.
449
+ * ### Known values supported by the service
450
+ * **spark**
451
+ * **pyspark**
452
+ * **dotnetspark**
453
+ * **sql**
454
+ */
455
+ export type SparkStatementLanguageType = string;
456
+ /** Optional parameters. */
457
+ export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreClient.OperationOptions {
458
+ /** Optional param specifying which index the list should begin from. */
459
+ fromParam?: number;
460
+ /**
461
+ * Optional param specifying the size of the returned list.
462
+ * By default it is 20 and that is the maximum.
463
+ */
464
+ size?: number;
465
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
466
+ detailed?: boolean;
467
+ }
468
+ /** Contains response data for the getSparkBatchJobs operation. */
469
+ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection;
470
+ /** Optional parameters. */
471
+ export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreClient.OperationOptions {
472
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
473
+ detailed?: boolean;
474
+ }
475
+ /** Contains response data for the createSparkBatchJob operation. */
476
+ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob;
477
+ /** Optional parameters. */
478
+ export interface SparkBatchGetSparkBatchJobOptionalParams extends coreClient.OperationOptions {
479
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
480
+ detailed?: boolean;
481
+ }
482
+ /** Contains response data for the getSparkBatchJob operation. */
483
+ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob;
484
+ /** Optional parameters. */
485
+ export interface SparkBatchCancelSparkBatchJobOptionalParams extends coreClient.OperationOptions {
486
+ }
487
+ /** Optional parameters. */
488
+ export interface SparkSessionGetSparkSessionsOptionalParams extends coreClient.OperationOptions {
489
+ /** Optional param specifying which index the list should begin from. */
490
+ fromParam?: number;
491
+ /**
492
+ * Optional param specifying the size of the returned list.
493
+ * By default it is 20 and that is the maximum.
494
+ */
495
+ size?: number;
496
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
497
+ detailed?: boolean;
498
+ }
499
+ /** Contains response data for the getSparkSessions operation. */
500
+ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection;
501
+ /** Optional parameters. */
502
+ export interface SparkSessionCreateSparkSessionOptionalParams extends coreClient.OperationOptions {
503
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
504
+ detailed?: boolean;
505
+ }
506
+ /** Contains response data for the createSparkSession operation. */
507
+ export type SparkSessionCreateSparkSessionResponse = SparkSession;
508
+ /** Optional parameters. */
509
+ export interface SparkSessionGetSparkSessionOptionalParams extends coreClient.OperationOptions {
510
+ /** Optional query param specifying whether detailed response is returned beyond plain livy. */
511
+ detailed?: boolean;
512
+ }
513
+ /** Contains response data for the getSparkSession operation. */
514
+ export type SparkSessionGetSparkSessionResponse = SparkSession;
515
+ /** Optional parameters. */
516
+ export interface SparkSessionCancelSparkSessionOptionalParams extends coreClient.OperationOptions {
517
+ }
518
+ /** Optional parameters. */
519
+ export interface SparkSessionResetSparkSessionTimeoutOptionalParams extends coreClient.OperationOptions {
520
+ }
521
+ /** Optional parameters. */
522
+ export interface SparkSessionGetSparkStatementsOptionalParams extends coreClient.OperationOptions {
523
+ }
524
+ /** Contains response data for the getSparkStatements operation. */
525
+ export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection;
526
+ /** Optional parameters. */
527
+ export interface SparkSessionCreateSparkStatementOptionalParams extends coreClient.OperationOptions {
528
+ }
529
+ /** Contains response data for the createSparkStatement operation. */
530
+ export type SparkSessionCreateSparkStatementResponse = SparkStatement;
531
+ /** Optional parameters. */
532
+ export interface SparkSessionGetSparkStatementOptionalParams extends coreClient.OperationOptions {
533
+ }
534
+ /** Contains response data for the getSparkStatement operation. */
535
+ export type SparkSessionGetSparkStatementResponse = SparkStatement;
536
+ /** Optional parameters. */
537
+ export interface SparkSessionCancelSparkStatementOptionalParams extends coreClient.OperationOptions {
538
+ }
539
+ /** Contains response data for the cancelSparkStatement operation. */
540
+ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult;
541
+ /** Optional parameters. */
542
+ export interface SparkClientOptionalParams extends coreClient.ServiceClientOptions {
543
+ /** Valid api-version for the request. */
544
+ livyApiVersion?: string;
545
+ /** Overrides client endpoint. */
546
+ endpoint?: string;
547
+ }
548
+ //# sourceMappingURL=index.d.ts.map