@azure/synapse-spark 1.0.0-beta.4 → 1.0.0-beta.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +14 -13
- package/dist/browser/index.d.ts +4 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.js +11 -0
- package/dist/browser/index.js.map +1 -0
- package/{types/synapse-spark.d.ts → dist/browser/models/index.d.ts} +548 -680
- package/dist/browser/models/index.d.ts.map +1 -0
- package/dist/browser/models/index.js +134 -0
- package/dist/browser/models/index.js.map +1 -0
- package/dist/browser/models/mappers.d.ts +19 -0
- package/dist/browser/models/mappers.d.ts.map +1 -0
- package/dist/browser/models/mappers.js +1151 -0
- package/dist/browser/models/mappers.js.map +1 -0
- package/dist/browser/models/parameters.d.ts +16 -0
- package/dist/browser/models/parameters.d.ts.map +1 -0
- package/dist/browser/models/parameters.js +133 -0
- package/dist/browser/models/parameters.js.map +1 -0
- package/dist/browser/operations/index.d.ts +3 -0
- package/dist/browser/operations/index.d.ts.map +1 -0
- package/dist/browser/operations/index.js +10 -0
- package/dist/browser/operations/index.js.map +1 -0
- package/dist/browser/operations/sparkBatch.d.ts +36 -0
- package/dist/browser/operations/sparkBatch.d.ts.map +1 -0
- package/dist/browser/operations/sparkBatch.js +122 -0
- package/dist/browser/operations/sparkBatch.js.map +1 -0
- package/dist/browser/operations/sparkSessionOperations.d.ts +69 -0
- package/dist/browser/operations/sparkSessionOperations.d.ts.map +1 -0
- package/dist/browser/operations/sparkSessionOperations.js +259 -0
- package/dist/browser/operations/sparkSessionOperations.js.map +1 -0
- package/dist/browser/operationsInterfaces/index.d.ts +3 -0
- package/dist/browser/operationsInterfaces/index.d.ts.map +1 -0
- package/dist/browser/operationsInterfaces/index.js +10 -0
- package/dist/browser/operationsInterfaces/index.js.map +1 -0
- package/dist/browser/operationsInterfaces/sparkBatch.d.ts +28 -0
- package/dist/browser/operationsInterfaces/sparkBatch.d.ts.map +1 -0
- package/dist/browser/operationsInterfaces/sparkBatch.js +9 -0
- package/dist/browser/operationsInterfaces/sparkBatch.js.map +1 -0
- package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
- package/dist/browser/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
- package/dist/browser/operationsInterfaces/sparkSessionOperations.js +9 -0
- package/dist/browser/operationsInterfaces/sparkSessionOperations.js.map +1 -0
- package/dist/browser/package.json +3 -0
- package/dist/browser/sparkClient.d.ts +24 -0
- package/dist/browser/sparkClient.d.ts.map +1 -0
- package/dist/browser/sparkClient.js +86 -0
- package/dist/browser/sparkClient.js.map +1 -0
- package/dist/browser/tracing.d.ts +2 -0
- package/dist/browser/tracing.d.ts.map +1 -0
- package/dist/browser/tracing.js +14 -0
- package/dist/browser/tracing.js.map +1 -0
- package/dist/commonjs/index.d.ts +4 -0
- package/dist/commonjs/index.d.ts.map +1 -0
- package/dist/commonjs/index.js +16 -0
- package/dist/commonjs/index.js.map +1 -0
- package/dist/commonjs/models/index.d.ts +548 -0
- package/dist/commonjs/models/index.d.ts.map +1 -0
- package/dist/commonjs/models/index.js +137 -0
- package/dist/commonjs/models/index.js.map +1 -0
- package/dist/commonjs/models/mappers.d.ts +19 -0
- package/dist/commonjs/models/mappers.d.ts.map +1 -0
- package/dist/commonjs/models/mappers.js +1154 -0
- package/dist/commonjs/models/mappers.js.map +1 -0
- package/dist/commonjs/models/parameters.d.ts +16 -0
- package/dist/commonjs/models/parameters.d.ts.map +1 -0
- package/dist/commonjs/models/parameters.js +136 -0
- package/dist/commonjs/models/parameters.js.map +1 -0
- package/dist/commonjs/operations/index.d.ts +3 -0
- package/dist/commonjs/operations/index.d.ts.map +1 -0
- package/dist/commonjs/operations/index.js +13 -0
- package/dist/commonjs/operations/index.js.map +1 -0
- package/dist/commonjs/operations/sparkBatch.d.ts +36 -0
- package/dist/commonjs/operations/sparkBatch.d.ts.map +1 -0
- package/dist/commonjs/operations/sparkBatch.js +127 -0
- package/dist/commonjs/operations/sparkBatch.js.map +1 -0
- package/dist/commonjs/operations/sparkSessionOperations.d.ts +69 -0
- package/dist/commonjs/operations/sparkSessionOperations.d.ts.map +1 -0
- package/dist/commonjs/operations/sparkSessionOperations.js +264 -0
- package/dist/commonjs/operations/sparkSessionOperations.js.map +1 -0
- package/dist/commonjs/operationsInterfaces/index.d.ts +3 -0
- package/dist/commonjs/operationsInterfaces/index.d.ts.map +1 -0
- package/dist/commonjs/operationsInterfaces/index.js +13 -0
- package/dist/commonjs/operationsInterfaces/index.js.map +1 -0
- package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts +28 -0
- package/dist/commonjs/operationsInterfaces/sparkBatch.d.ts.map +1 -0
- package/dist/commonjs/operationsInterfaces/sparkBatch.js +10 -0
- package/dist/commonjs/operationsInterfaces/sparkBatch.js.map +1 -0
- package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
- package/dist/commonjs/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
- package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js +10 -0
- package/dist/commonjs/operationsInterfaces/sparkSessionOperations.js.map +1 -0
- package/dist/commonjs/package.json +3 -0
- package/dist/commonjs/sparkClient.d.ts +24 -0
- package/dist/commonjs/sparkClient.d.ts.map +1 -0
- package/dist/commonjs/sparkClient.js +91 -0
- package/dist/commonjs/sparkClient.js.map +1 -0
- package/dist/commonjs/tracing.d.ts +2 -0
- package/dist/commonjs/tracing.d.ts.map +1 -0
- package/dist/commonjs/tracing.js +17 -0
- package/dist/commonjs/tracing.js.map +1 -0
- package/dist/commonjs/tsdoc-metadata.json +11 -0
- package/dist/esm/index.d.ts +4 -0
- package/dist/esm/index.d.ts.map +1 -0
- package/dist/esm/index.js +11 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/models/index.d.ts +548 -0
- package/dist/esm/models/index.d.ts.map +1 -0
- package/dist/esm/models/index.js +134 -0
- package/dist/esm/models/index.js.map +1 -0
- package/dist/esm/models/mappers.d.ts +19 -0
- package/dist/esm/models/mappers.d.ts.map +1 -0
- package/dist/esm/models/mappers.js +1151 -0
- package/dist/esm/models/mappers.js.map +1 -0
- package/dist/esm/models/parameters.d.ts +16 -0
- package/dist/esm/models/parameters.d.ts.map +1 -0
- package/dist/esm/models/parameters.js +133 -0
- package/dist/esm/models/parameters.js.map +1 -0
- package/dist/esm/operations/index.d.ts +3 -0
- package/dist/esm/operations/index.d.ts.map +1 -0
- package/dist/esm/operations/index.js +10 -0
- package/dist/esm/operations/index.js.map +1 -0
- package/dist/esm/operations/sparkBatch.d.ts +36 -0
- package/dist/esm/operations/sparkBatch.d.ts.map +1 -0
- package/dist/esm/operations/sparkBatch.js +122 -0
- package/dist/esm/operations/sparkBatch.js.map +1 -0
- package/dist/esm/operations/sparkSessionOperations.d.ts +69 -0
- package/dist/esm/operations/sparkSessionOperations.d.ts.map +1 -0
- package/dist/esm/operations/sparkSessionOperations.js +259 -0
- package/dist/esm/operations/sparkSessionOperations.js.map +1 -0
- package/dist/esm/operationsInterfaces/index.d.ts +3 -0
- package/dist/esm/operationsInterfaces/index.d.ts.map +1 -0
- package/dist/esm/operationsInterfaces/index.js +10 -0
- package/dist/esm/operationsInterfaces/index.js.map +1 -0
- package/dist/esm/operationsInterfaces/sparkBatch.d.ts +28 -0
- package/dist/esm/operationsInterfaces/sparkBatch.d.ts.map +1 -0
- package/dist/esm/operationsInterfaces/sparkBatch.js +9 -0
- package/dist/esm/operationsInterfaces/sparkBatch.js.map +1 -0
- package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
- package/dist/esm/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
- package/dist/esm/operationsInterfaces/sparkSessionOperations.js +9 -0
- package/dist/esm/operationsInterfaces/sparkSessionOperations.js.map +1 -0
- package/dist/esm/package.json +3 -0
- package/dist/esm/sparkClient.d.ts +24 -0
- package/dist/esm/sparkClient.d.ts.map +1 -0
- package/dist/esm/sparkClient.js +86 -0
- package/dist/esm/sparkClient.js.map +1 -0
- package/dist/esm/tracing.d.ts +2 -0
- package/dist/esm/tracing.d.ts.map +1 -0
- package/dist/esm/tracing.js +14 -0
- package/dist/esm/tracing.js.map +1 -0
- package/dist/react-native/index.d.ts +4 -0
- package/dist/react-native/index.d.ts.map +1 -0
- package/dist/react-native/index.js +11 -0
- package/dist/react-native/index.js.map +1 -0
- package/dist/react-native/models/index.d.ts +548 -0
- package/dist/react-native/models/index.d.ts.map +1 -0
- package/dist/react-native/models/index.js +134 -0
- package/dist/react-native/models/index.js.map +1 -0
- package/dist/react-native/models/mappers.d.ts +19 -0
- package/dist/react-native/models/mappers.d.ts.map +1 -0
- package/dist/react-native/models/mappers.js +1151 -0
- package/dist/react-native/models/mappers.js.map +1 -0
- package/dist/react-native/models/parameters.d.ts +16 -0
- package/dist/react-native/models/parameters.d.ts.map +1 -0
- package/dist/react-native/models/parameters.js +133 -0
- package/dist/react-native/models/parameters.js.map +1 -0
- package/dist/react-native/operations/index.d.ts +3 -0
- package/dist/react-native/operations/index.d.ts.map +1 -0
- package/dist/react-native/operations/index.js +10 -0
- package/dist/react-native/operations/index.js.map +1 -0
- package/dist/react-native/operations/sparkBatch.d.ts +36 -0
- package/dist/react-native/operations/sparkBatch.d.ts.map +1 -0
- package/dist/react-native/operations/sparkBatch.js +122 -0
- package/dist/react-native/operations/sparkBatch.js.map +1 -0
- package/dist/react-native/operations/sparkSessionOperations.d.ts +69 -0
- package/dist/react-native/operations/sparkSessionOperations.d.ts.map +1 -0
- package/dist/react-native/operations/sparkSessionOperations.js +259 -0
- package/dist/react-native/operations/sparkSessionOperations.js.map +1 -0
- package/dist/react-native/operationsInterfaces/index.d.ts +3 -0
- package/dist/react-native/operationsInterfaces/index.d.ts.map +1 -0
- package/dist/react-native/operationsInterfaces/index.js +10 -0
- package/dist/react-native/operationsInterfaces/index.js.map +1 -0
- package/dist/react-native/operationsInterfaces/sparkBatch.d.ts +28 -0
- package/dist/react-native/operationsInterfaces/sparkBatch.d.ts.map +1 -0
- package/dist/react-native/operationsInterfaces/sparkBatch.js +9 -0
- package/dist/react-native/operationsInterfaces/sparkBatch.js.map +1 -0
- package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts +61 -0
- package/dist/react-native/operationsInterfaces/sparkSessionOperations.d.ts.map +1 -0
- package/dist/react-native/operationsInterfaces/sparkSessionOperations.js +9 -0
- package/dist/react-native/operationsInterfaces/sparkSessionOperations.js.map +1 -0
- package/dist/react-native/package.json +3 -0
- package/dist/react-native/sparkClient.d.ts +24 -0
- package/dist/react-native/sparkClient.d.ts.map +1 -0
- package/dist/react-native/sparkClient.js +86 -0
- package/dist/react-native/sparkClient.js.map +1 -0
- package/dist/react-native/tracing.d.ts +2 -0
- package/dist/react-native/tracing.d.ts.map +1 -0
- package/dist/react-native/tracing.js +14 -0
- package/dist/react-native/tracing.js.map +1 -0
- package/package.json +82 -78
- package/CHANGELOG.md +0 -24
- package/dist/index.js +0 -2016
- package/dist/index.js.map +0 -1
- package/dist/index.min.js +0 -1
- package/dist/index.min.js.map +0 -1
- package/rollup.config.js +0 -3
- package/tsconfig.json +0 -19
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
/*
|
|
2
|
+
* Copyright (c) Microsoft Corporation.
|
|
3
|
+
* Licensed under the MIT License.
|
|
4
|
+
*
|
|
5
|
+
* Code generated by Microsoft (R) AutoRest Code Generator.
|
|
6
|
+
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
|
7
|
+
*/
|
|
8
|
+
/** Known values of {@link SparkJobType} that the service accepts. */
export var KnownSparkJobType;
(function (values) {
    /** SparkBatch */
    values.SparkBatch = "SparkBatch";
    /** SparkSession */
    values.SparkSession = "SparkSession";
})(KnownSparkJobType || (KnownSparkJobType = {}));
|
|
16
|
+
/** Known values of {@link SparkBatchJobResultType} that the service accepts. */
export var KnownSparkBatchJobResultType;
(function (values) {
    /** Uncertain */
    values.Uncertain = "Uncertain";
    /** Succeeded */
    values.Succeeded = "Succeeded";
    /** Failed */
    values.Failed = "Failed";
    /** Cancelled */
    values.Cancelled = "Cancelled";
})(KnownSparkBatchJobResultType || (KnownSparkBatchJobResultType = {}));
|
|
28
|
+
/** Known values of {@link SchedulerCurrentState} that the service accepts. */
export var KnownSchedulerCurrentState;
(function (values) {
    /** Queued */
    values.Queued = "Queued";
    /** Scheduled */
    values.Scheduled = "Scheduled";
    /** Ended */
    values.Ended = "Ended";
})(KnownSchedulerCurrentState || (KnownSchedulerCurrentState = {}));
|
|
38
|
+
/** Known values of {@link PluginCurrentState} that the service accepts. */
export var KnownPluginCurrentState;
(function (values) {
    /** Preparation */
    values.Preparation = "Preparation";
    /** ResourceAcquisition */
    values.ResourceAcquisition = "ResourceAcquisition";
    /** Queued */
    values.Queued = "Queued";
    /** Submission */
    values.Submission = "Submission";
    /** Monitoring */
    values.Monitoring = "Monitoring";
    /** Cleanup */
    values.Cleanup = "Cleanup";
    /** Ended */
    values.Ended = "Ended";
})(KnownPluginCurrentState || (KnownPluginCurrentState = {}));
|
|
56
|
+
/** Known values of {@link SparkErrorSource} that the service accepts. */
export var KnownSparkErrorSource;
(function (values) {
    /** System */
    values.System = "System";
    /** User */
    values.User = "User";
    /** Unknown */
    values.Unknown = "Unknown";
    /** Dependency */
    values.Dependency = "Dependency";
})(KnownSparkErrorSource || (KnownSparkErrorSource = {}));
|
|
68
|
+
/** Known values of {@link LivyStates} that the service accepts. */
export var KnownLivyStates;
(function (values) {
    /** NotStarted */
    values.NotStarted = "not_started";
    /** Starting */
    values.Starting = "starting";
    /** Idle */
    values.Idle = "idle";
    /** Busy */
    values.Busy = "busy";
    /** ShuttingDown */
    values.ShuttingDown = "shutting_down";
    /** Error */
    values.Error = "error";
    /** Dead */
    values.Dead = "dead";
    /** Killed */
    values.Killed = "killed";
    /** Success */
    values.Success = "success";
    /** Running */
    values.Running = "running";
    /** Recovering */
    values.Recovering = "recovering";
})(KnownLivyStates || (KnownLivyStates = {}));
|
|
94
|
+
/** Known values of {@link SparkSessionResultType} that the service accepts. */
export var KnownSparkSessionResultType;
(function (values) {
    /** Uncertain */
    values.Uncertain = "Uncertain";
    /** Succeeded */
    values.Succeeded = "Succeeded";
    /** Failed */
    values.Failed = "Failed";
    /** Cancelled */
    values.Cancelled = "Cancelled";
})(KnownSparkSessionResultType || (KnownSparkSessionResultType = {}));
|
|
106
|
+
/** Known values of {@link LivyStatementStates} that the service accepts. */
export var KnownLivyStatementStates;
(function (values) {
    /** Waiting */
    values.Waiting = "waiting";
    /** Running */
    values.Running = "running";
    /** Available */
    values.Available = "available";
    /** Error */
    values.Error = "error";
    /** Cancelling */
    values.Cancelling = "cancelling";
    /** Cancelled */
    values.Cancelled = "cancelled";
})(KnownLivyStatementStates || (KnownLivyStatementStates = {}));
|
|
122
|
+
/** Known values of {@link SparkStatementLanguageType} that the service accepts. */
export var KnownSparkStatementLanguageType;
(function (values) {
    /** Spark */
    values.Spark = "spark";
    /** PySpark */
    values.PySpark = "pyspark";
    /** DotNetSpark */
    values.DotNetSpark = "dotnetspark";
    /** Sql */
    values.Sql = "sql";
})(KnownSparkStatementLanguageType || (KnownSparkStatementLanguageType = {}));
|
|
134
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAwOH,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBAKX;AALD,WAAY,iBAAiB;IAC3B,iBAAiB;IACjB,8CAAyB,CAAA;IACzB,mBAAmB;IACnB,kDAA6B,CAAA;AAC/B,CAAC,EALW,iBAAiB,KAAjB,iBAAiB,QAK5B;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,aAAa;IACb,iDAAiB,CAAA;IACjB,gBAAgB;IAChB,uDAAuB,CAAA;AACzB,CAAC,EATW,4BAA4B,KAA5B,4BAA4B,QASvC;AAcD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,aAAa;IACb,+CAAiB,CAAA;IACjB,gBAAgB;IAChB,qDAAuB,CAAA;IACvB,YAAY;IACZ,6CAAe,CAAA;AACjB,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,2EAA2E;AAC3E,MAAM,CAAN,IAAY,uBAeX;AAfD,WAAY,uBAAuB;IACjC,kBAAkB;IAClB,sDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,sEAA2C,CAAA;IAC3C,aAAa;IACb,4CAAiB,CAAA;IACjB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,cAAc;IACd,8CAAmB,CAAA;IACnB,YAAY;IACZ,0CAAe,CAAA;AACjB,CAAC,EAfW,uBAAuB,KAAvB,uBAAuB,QAelC;AAiBD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBASX;AATD,WAAY,qBAAqB;IAC/B,aAAa;IACb,0CAAiB,CAAA;IACjB,WAAW;IACX,sCAAa,CAAA;IACb,cAAc;IACd,4CAAmB,CAAA;IACnB,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EATW,qBAAqB,KAArB,qBAAqB,QAShC;AAcD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eAuBX;AAvBD,WAAY,eAAe;IACzB,iBAAiB;IACjB,6CAA0B,CAAA;IAC1B,eAAe;IACf,wCAAqB,CAAA;IACrB,WAAW;IACX,gCAAa,CAAA;IACb,WAAW;IACX,gCAAa,CAAA;IACb,mBAAmB;IACnB,iDAA8B,CAAA;IAC9B,YAAY;IACZ,kCAAe,CAAA;IACf,WAAW;IACX,gCAAa,CAAA;IACb,aAAa;IACb,oCAAiB,CAAA;IACjB,cAAc;IACd,sCAAmB,CAAA;IACnB,cAAc;IACd,sCAAmB,CAAA;IACnB,iBAAiB;IACjB,4CAAyB,CAAA;AAC3B,CAAC,EAvBW,eAAe,KAAf,eAAe,QAuB1B;AAqBD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BASX;AATD,WAAY,2BAA2B;IACrC,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,aAAa;IACb,gDAAiB,CAAA;IACjB,gBAAgB;IAChB,sDAAuB,CAAA;AACzB,CAAC,EATW,2BAA2B,KAA3B,2BAA2B,QAStC;AAcD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAaX;AAbD,WAAY,wBAAwB;IAClC,cAAc;IACd,+CAAmB,CAAA;IACnB,cAAc;IACd,+CAAmB,CAAA;IACnB,gBAAgB;IAChB,mDAAuB,CAAA;IACvB,YAAY;IACZ,2CAAe,CAA
A;IACf,iBAAiB;IACjB,qDAAyB,CAAA;IACzB,gBAAgB;IAChB,mDAAuB,CAAA;AACzB,CAAC,EAbW,wBAAwB,KAAxB,wBAAwB,QAanC;AAgBD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BASX;AATD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,cAAc;IACd,sDAAmB,CAAA;IACnB,kBAAkB;IAClB,8DAA2B,CAAA;IAC3B,UAAU;IACV,8CAAW,CAAA;AACb,CAAC,EATW,+BAA+B,KAA/B,+BAA+B,QAS1C","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport type * as coreClient from \"@azure/core-client\";\n\n/** Response for batch list operation. */\nexport interface SparkBatchJobCollection {\n /** The start index of fetched sessions. */\n from: number;\n /** Number of sessions fetched. */\n total: number;\n /** Batch list */\n sessions?: SparkBatchJob[];\n}\n\nexport interface SparkBatchJob {\n livyInfo?: SparkBatchJobState;\n /** The batch name. */\n name?: string;\n /** The workspace name. */\n workspaceName?: string;\n /** The Spark pool name. */\n sparkPoolName?: string;\n /** The submitter name. */\n submitterName?: string;\n /** The submitter identifier. */\n submitterId?: string;\n /** The artifact identifier. */\n artifactId?: string;\n /** The job type. */\n jobType?: SparkJobType;\n /** The Spark batch job result. */\n result?: SparkBatchJobResultType;\n /** The scheduler information. */\n scheduler?: SparkScheduler;\n /** The plugin information. */\n plugin?: SparkServicePlugin;\n /** The error information. */\n errors?: SparkServiceError[];\n /** The tags. */\n tags?: { [propertyName: string]: string };\n /** The session Id. */\n id: number;\n /** The application id of this session */\n appId?: string;\n /** The detailed application info. */\n appInfo?: { [propertyName: string]: string };\n /** The batch state */\n state?: LivyStates;\n /** The log lines. 
*/\n logLines?: string[];\n}\n\nexport interface SparkBatchJobState {\n /** the time that at which \"not_started\" livy state was first seen. */\n notStartedAt?: Date;\n /** the time that at which \"starting\" livy state was first seen. */\n startingAt?: Date;\n /** the time that at which \"running\" livy state was first seen. */\n runningAt?: Date;\n /** time that at which \"dead\" livy state was first seen. */\n deadAt?: Date;\n /** the time that at which \"success\" livy state was first seen. */\n successAt?: Date;\n /** the time that at which \"killed\" livy state was first seen. */\n terminatedAt?: Date;\n /** the time that at which \"recovering\" livy state was first seen. */\n recoveringAt?: Date;\n /** the Spark job state. */\n currentState?: string;\n jobCreationRequest?: SparkRequest;\n}\n\nexport interface SparkRequest {\n name?: string;\n file?: string;\n className?: string;\n arguments?: string[];\n jars?: string[];\n pythonFiles?: string[];\n files?: string[];\n archives?: string[];\n /** Dictionary of <string> */\n configuration?: { [propertyName: string]: string };\n driverMemory?: string;\n driverCores?: number;\n executorMemory?: string;\n executorCores?: number;\n executorCount?: number;\n}\n\nexport interface SparkScheduler {\n submittedAt?: Date;\n scheduledAt?: Date;\n endedAt?: Date;\n cancellationRequestedAt?: Date;\n currentState?: SchedulerCurrentState;\n}\n\nexport interface SparkServicePlugin {\n preparationStartedAt?: Date;\n resourceAcquisitionStartedAt?: Date;\n submissionStartedAt?: Date;\n monitoringStartedAt?: Date;\n cleanupStartedAt?: Date;\n currentState?: PluginCurrentState;\n}\n\nexport interface SparkServiceError {\n message?: string;\n errorCode?: string;\n source?: SparkErrorSource;\n}\n\nexport interface SparkBatchJobOptions {\n /** Dictionary of <string> */\n tags?: { [propertyName: string]: string };\n artifactId?: string;\n name: string;\n file: string;\n className?: string;\n arguments?: string[];\n jars?: string[];\n 
pythonFiles?: string[];\n files?: string[];\n archives?: string[];\n /** Dictionary of <string> */\n configuration?: { [propertyName: string]: string };\n driverMemory?: string;\n driverCores?: number;\n executorMemory?: string;\n executorCores?: number;\n executorCount?: number;\n}\n\nexport interface SparkSessionCollection {\n from: number;\n total: number;\n sessions?: SparkSession[];\n}\n\nexport interface SparkSession {\n livyInfo?: SparkSessionState;\n name?: string;\n workspaceName?: string;\n sparkPoolName?: string;\n submitterName?: string;\n submitterId?: string;\n artifactId?: string;\n /** The job type. */\n jobType?: SparkJobType;\n result?: SparkSessionResultType;\n scheduler?: SparkScheduler;\n plugin?: SparkServicePlugin;\n errors?: SparkServiceError[];\n /** Dictionary of <string> */\n tags?: { [propertyName: string]: string };\n id: number;\n appId?: string;\n /** Dictionary of <string> */\n appInfo?: { [propertyName: string]: string };\n /** The session state. */\n state?: LivyStates;\n logLines?: string[];\n}\n\nexport interface SparkSessionState {\n notStartedAt?: Date;\n startingAt?: Date;\n idleAt?: Date;\n deadAt?: Date;\n shuttingDownAt?: Date;\n terminatedAt?: Date;\n recoveringAt?: Date;\n busyAt?: Date;\n errorAt?: Date;\n currentState?: string;\n jobCreationRequest?: SparkRequest;\n}\n\nexport interface SparkSessionOptions {\n /** Dictionary of <string> */\n tags?: { [propertyName: string]: string };\n artifactId?: string;\n name: string;\n file?: string;\n className?: string;\n arguments?: string[];\n jars?: string[];\n pythonFiles?: string[];\n files?: string[];\n archives?: string[];\n /** Dictionary of <string> */\n configuration?: { [propertyName: string]: string };\n driverMemory?: string;\n driverCores?: number;\n executorMemory?: string;\n executorCores?: number;\n executorCount?: number;\n}\n\nexport interface SparkStatementCollection {\n total: number;\n statements?: SparkStatement[];\n}\n\nexport interface SparkStatement {\n 
id: number;\n code?: string;\n state?: LivyStatementStates;\n output?: SparkStatementOutput;\n}\n\nexport interface SparkStatementOutput {\n status?: string;\n executionCount: number;\n /** Any object */\n data?: Record<string, unknown>;\n errorName?: string;\n errorValue?: string;\n traceback?: string[];\n}\n\nexport interface SparkStatementOptions {\n code?: string;\n kind?: SparkStatementLanguageType;\n}\n\nexport interface SparkStatementCancellationResult {\n /** The msg property from the Livy API. The value is always \"canceled\". */\n message?: string;\n}\n\n/** Known values of {@link SparkJobType} that the service accepts. */\nexport enum KnownSparkJobType {\n /** SparkBatch */\n SparkBatch = \"SparkBatch\",\n /** SparkSession */\n SparkSession = \"SparkSession\",\n}\n\n/**\n * Defines values for SparkJobType.\n * {@link KnownSparkJobType} can be used interchangeably with SparkJobType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkBatch**\n * **SparkSession**\n */\nexport type SparkJobType = string;\n\n/** Known values of {@link SparkBatchJobResultType} that the service accepts. */\nexport enum KnownSparkBatchJobResultType {\n /** Uncertain */\n Uncertain = \"Uncertain\",\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Cancelled */\n Cancelled = \"Cancelled\",\n}\n\n/**\n * Defines values for SparkBatchJobResultType.\n * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Uncertain**\n * **Succeeded**\n * **Failed**\n * **Cancelled**\n */\nexport type SparkBatchJobResultType = string;\n\n/** Known values of {@link SchedulerCurrentState} that the service accepts. 
*/\nexport enum KnownSchedulerCurrentState {\n /** Queued */\n Queued = \"Queued\",\n /** Scheduled */\n Scheduled = \"Scheduled\",\n /** Ended */\n Ended = \"Ended\",\n}\n\n/**\n * Defines values for SchedulerCurrentState.\n * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Queued**\n * **Scheduled**\n * **Ended**\n */\nexport type SchedulerCurrentState = string;\n\n/** Known values of {@link PluginCurrentState} that the service accepts. */\nexport enum KnownPluginCurrentState {\n /** Preparation */\n Preparation = \"Preparation\",\n /** ResourceAcquisition */\n ResourceAcquisition = \"ResourceAcquisition\",\n /** Queued */\n Queued = \"Queued\",\n /** Submission */\n Submission = \"Submission\",\n /** Monitoring */\n Monitoring = \"Monitoring\",\n /** Cleanup */\n Cleanup = \"Cleanup\",\n /** Ended */\n Ended = \"Ended\",\n}\n\n/**\n * Defines values for PluginCurrentState.\n * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Preparation**\n * **ResourceAcquisition**\n * **Queued**\n * **Submission**\n * **Monitoring**\n * **Cleanup**\n * **Ended**\n */\nexport type PluginCurrentState = string;\n\n/** Known values of {@link SparkErrorSource} that the service accepts. 
*/\nexport enum KnownSparkErrorSource {\n /** System */\n System = \"System\",\n /** User */\n User = \"User\",\n /** Unknown */\n Unknown = \"Unknown\",\n /** Dependency */\n Dependency = \"Dependency\",\n}\n\n/**\n * Defines values for SparkErrorSource.\n * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **System**\n * **User**\n * **Unknown**\n * **Dependency**\n */\nexport type SparkErrorSource = string;\n\n/** Known values of {@link LivyStates} that the service accepts. */\nexport enum KnownLivyStates {\n /** NotStarted */\n NotStarted = \"not_started\",\n /** Starting */\n Starting = \"starting\",\n /** Idle */\n Idle = \"idle\",\n /** Busy */\n Busy = \"busy\",\n /** ShuttingDown */\n ShuttingDown = \"shutting_down\",\n /** Error */\n Error = \"error\",\n /** Dead */\n Dead = \"dead\",\n /** Killed */\n Killed = \"killed\",\n /** Success */\n Success = \"success\",\n /** Running */\n Running = \"running\",\n /** Recovering */\n Recovering = \"recovering\",\n}\n\n/**\n * Defines values for LivyStates.\n * {@link KnownLivyStates} can be used interchangeably with LivyStates,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **not_started**\n * **starting**\n * **idle**\n * **busy**\n * **shutting_down**\n * **error**\n * **dead**\n * **killed**\n * **success**\n * **running**\n * **recovering**\n */\nexport type LivyStates = string;\n\n/** Known values of {@link SparkSessionResultType} that the service accepts. 
*/\nexport enum KnownSparkSessionResultType {\n /** Uncertain */\n Uncertain = \"Uncertain\",\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Cancelled */\n Cancelled = \"Cancelled\",\n}\n\n/**\n * Defines values for SparkSessionResultType.\n * {@link KnownSparkSessionResultType} can be used interchangeably with SparkSessionResultType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Uncertain**\n * **Succeeded**\n * **Failed**\n * **Cancelled**\n */\nexport type SparkSessionResultType = string;\n\n/** Known values of {@link LivyStatementStates} that the service accepts. */\nexport enum KnownLivyStatementStates {\n /** Waiting */\n Waiting = \"waiting\",\n /** Running */\n Running = \"running\",\n /** Available */\n Available = \"available\",\n /** Error */\n Error = \"error\",\n /** Cancelling */\n Cancelling = \"cancelling\",\n /** Cancelled */\n Cancelled = \"cancelled\",\n}\n\n/**\n * Defines values for LivyStatementStates.\n * {@link KnownLivyStatementStates} can be used interchangeably with LivyStatementStates,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **waiting**\n * **running**\n * **available**\n * **error**\n * **cancelling**\n * **cancelled**\n */\nexport type LivyStatementStates = string;\n\n/** Known values of {@link SparkStatementLanguageType} that the service accepts. 
*/\nexport enum KnownSparkStatementLanguageType {\n /** Spark */\n Spark = \"spark\",\n /** PySpark */\n PySpark = \"pyspark\",\n /** DotNetSpark */\n DotNetSpark = \"dotnetspark\",\n /** Sql */\n Sql = \"sql\",\n}\n\n/**\n * Defines values for SparkStatementLanguageType.\n * {@link KnownSparkStatementLanguageType} can be used interchangeably with SparkStatementLanguageType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **spark**\n * **pyspark**\n * **dotnetspark**\n * **sql**\n */\nexport type SparkStatementLanguageType = string;\n\n/** Optional parameters. */\nexport interface SparkBatchGetSparkBatchJobsOptionalParams extends coreClient.OperationOptions {\n /** Optional param specifying which index the list should begin from. */\n fromParam?: number;\n /**\n * Optional param specifying the size of the returned list.\n * By default it is 20 and that is the maximum.\n */\n size?: number;\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the getSparkBatchJobs operation. */\nexport type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection;\n\n/** Optional parameters. */\nexport interface SparkBatchCreateSparkBatchJobOptionalParams extends coreClient.OperationOptions {\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the createSparkBatchJob operation. */\nexport type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob;\n\n/** Optional parameters. */\nexport interface SparkBatchGetSparkBatchJobOptionalParams extends coreClient.OperationOptions {\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the getSparkBatchJob operation. 
*/\nexport type SparkBatchGetSparkBatchJobResponse = SparkBatchJob;\n\n/** Optional parameters. */\nexport interface SparkBatchCancelSparkBatchJobOptionalParams extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface SparkSessionGetSparkSessionsOptionalParams extends coreClient.OperationOptions {\n /** Optional param specifying which index the list should begin from. */\n fromParam?: number;\n /**\n * Optional param specifying the size of the returned list.\n * By default it is 20 and that is the maximum.\n */\n size?: number;\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the getSparkSessions operation. */\nexport type SparkSessionGetSparkSessionsResponse = SparkSessionCollection;\n\n/** Optional parameters. */\nexport interface SparkSessionCreateSparkSessionOptionalParams extends coreClient.OperationOptions {\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the createSparkSession operation. */\nexport type SparkSessionCreateSparkSessionResponse = SparkSession;\n\n/** Optional parameters. */\nexport interface SparkSessionGetSparkSessionOptionalParams extends coreClient.OperationOptions {\n /** Optional query param specifying whether detailed response is returned beyond plain livy. */\n detailed?: boolean;\n}\n\n/** Contains response data for the getSparkSession operation. */\nexport type SparkSessionGetSparkSessionResponse = SparkSession;\n\n/** Optional parameters. */\nexport interface SparkSessionCancelSparkSessionOptionalParams extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface SparkSessionResetSparkSessionTimeoutOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. 
*/\nexport interface SparkSessionGetSparkStatementsOptionalParams extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkStatements operation. */\nexport type SparkSessionGetSparkStatementsResponse = SparkStatementCollection;\n\n/** Optional parameters. */\nexport interface SparkSessionCreateSparkStatementOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the createSparkStatement operation. */\nexport type SparkSessionCreateSparkStatementResponse = SparkStatement;\n\n/** Optional parameters. */\nexport interface SparkSessionGetSparkStatementOptionalParams extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkStatement operation. */\nexport type SparkSessionGetSparkStatementResponse = SparkStatement;\n\n/** Optional parameters. */\nexport interface SparkSessionCancelSparkStatementOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the cancelSparkStatement operation. */\nexport type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult;\n\n/** Optional parameters. */\nexport interface SparkClientOptionalParams extends coreClient.ServiceClientOptions {\n /** Valid api-version for the request. */\n livyApiVersion?: string;\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type * as coreClient from "@azure/core-client";
|
|
2
|
+
export declare const SparkBatchJobCollection: coreClient.CompositeMapper;
|
|
3
|
+
export declare const SparkBatchJob: coreClient.CompositeMapper;
|
|
4
|
+
export declare const SparkBatchJobState: coreClient.CompositeMapper;
|
|
5
|
+
export declare const SparkRequest: coreClient.CompositeMapper;
|
|
6
|
+
export declare const SparkScheduler: coreClient.CompositeMapper;
|
|
7
|
+
export declare const SparkServicePlugin: coreClient.CompositeMapper;
|
|
8
|
+
export declare const SparkServiceError: coreClient.CompositeMapper;
|
|
9
|
+
export declare const SparkBatchJobOptions: coreClient.CompositeMapper;
|
|
10
|
+
export declare const SparkSessionCollection: coreClient.CompositeMapper;
|
|
11
|
+
export declare const SparkSession: coreClient.CompositeMapper;
|
|
12
|
+
export declare const SparkSessionState: coreClient.CompositeMapper;
|
|
13
|
+
export declare const SparkSessionOptions: coreClient.CompositeMapper;
|
|
14
|
+
export declare const SparkStatementCollection: coreClient.CompositeMapper;
|
|
15
|
+
export declare const SparkStatement: coreClient.CompositeMapper;
|
|
16
|
+
export declare const SparkStatementOutput: coreClient.CompositeMapper;
|
|
17
|
+
export declare const SparkStatementOptions: coreClient.CompositeMapper;
|
|
18
|
+
export declare const SparkStatementCancellationResult: coreClient.CompositeMapper;
|
|
19
|
+
//# sourceMappingURL=mappers.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mappers.d.ts","sourceRoot":"","sources":["../../../src/models/mappers.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,KAAK,UAAU,MAAM,oBAAoB,CAAC;AAEtD,eAAO,MAAM,uBAAuB,EAAE,UAAU,CAAC,eAiChD,CAAC;AAEF,eAAO,MAAM,aAAa,EAAE,UAAU,CAAC,eAuItC,CAAC;AAEF,eAAO,MAAM,kBAAkB,EAAE,UAAU,CAAC,eAqE3C,CAAC;AAEF,eAAO,MAAM,YAAY,EAAE,UAAU,CAAC,eAqHrC,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,eAyCvC,CAAC;AAEF,eAAO,MAAM,kBAAkB,EAAE,UAAU,CAAC,eAgD3C,CAAC;AAEF,eAAO,MAAM,iBAAiB,EAAE,UAAU,CAAC,eAyB1C,CAAC;AAEF,eAAO,MAAM,oBAAoB,EAAE,UAAU,CAAC,eAoI7C,CAAC;AAEF,eAAO,MAAM,sBAAsB,EAAE,UAAU,CAAC,eAiC/C,CAAC;AAEF,eAAO,MAAM,YAAY,EAAE,UAAU,CAAC,eAuIrC,CAAC;AAEF,eAAO,MAAM,iBAAiB,EAAE,UAAU,CAAC,eAmF1C,CAAC;AAEF,eAAO,MAAM,mBAAmB,EAAE,UAAU,CAAC,eAmI5C,CAAC;AAEF,eAAO,MAAM,wBAAwB,EAAE,UAAU,CAAC,eA0BjD,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,eAiCvC,CAAC;AAEF,eAAO,MAAM,oBAAoB,EAAE,UAAU,CAAC,eAqD7C,CAAC;AAEF,eAAO,MAAM,qBAAqB,EAAE,UAAU,CAAC,eAmB9C,CAAC;AAEF,eAAO,MAAM,gCAAgC,EAAE,UAAU,CAAC,eAazD,CAAC"}
|