@azure/synapse-artifacts 1.0.0-beta.1 → 1.0.0-beta.10
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +6 -4
- package/dist/index.js +22546 -19192
- package/dist/index.js.map +1 -1
- package/dist-esm/src/artifactsClient.js +72 -0
- package/dist-esm/src/artifactsClient.js.map +1 -0
- package/dist-esm/src/index.js +12 -0
- package/dist-esm/src/index.js.map +1 -0
- package/dist-esm/src/lroImpl.js +25 -0
- package/dist-esm/src/lroImpl.js.map +1 -0
- package/dist-esm/src/models/index.js +822 -0
- package/dist-esm/src/models/index.js.map +1 -0
- package/dist-esm/src/models/mappers.js +22801 -0
- package/dist-esm/src/models/mappers.js.map +1 -0
- package/dist-esm/src/models/parameters.js +541 -0
- package/dist-esm/src/models/parameters.js.map +1 -0
- package/dist-esm/src/operations/bigDataPools.js +100 -0
- package/dist-esm/src/operations/bigDataPools.js.map +1 -0
- package/dist-esm/src/operations/dataFlowDebugSession.js +403 -0
- package/dist-esm/src/operations/dataFlowDebugSession.js.map +1 -0
- package/dist-esm/src/operations/dataFlowOperations.js +439 -0
- package/dist-esm/src/operations/dataFlowOperations.js.map +1 -0
- package/dist-esm/src/operations/datasetOperations.js +439 -0
- package/dist-esm/src/operations/datasetOperations.js.map +1 -0
- package/dist-esm/src/operations/index.js +31 -0
- package/dist-esm/src/operations/index.js.map +1 -0
- package/dist-esm/src/operations/integrationRuntimes.js +100 -0
- package/dist-esm/src/operations/integrationRuntimes.js.map +1 -0
- package/dist-esm/src/operations/kqlScriptOperations.js +309 -0
- package/dist-esm/src/operations/kqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operations/kqlScripts.js +150 -0
- package/dist-esm/src/operations/kqlScripts.js.map +1 -0
- package/dist-esm/src/operations/library.js +512 -0
- package/dist-esm/src/operations/library.js.map +1 -0
- package/dist-esm/src/operations/linkConnectionOperations.js +534 -0
- package/dist-esm/src/operations/linkConnectionOperations.js.map +1 -0
- package/dist-esm/src/operations/linkedServiceOperations.js +440 -0
- package/dist-esm/src/operations/linkedServiceOperations.js.map +1 -0
- package/dist-esm/src/operations/metastore.js +182 -0
- package/dist-esm/src/operations/metastore.js.map +1 -0
- package/dist-esm/src/operations/notebookOperationResult.js +64 -0
- package/dist-esm/src/operations/notebookOperationResult.js.map +1 -0
- package/dist-esm/src/operations/notebookOperations.js +564 -0
- package/dist-esm/src/operations/notebookOperations.js.map +1 -0
- package/dist-esm/src/operations/pipelineOperations.js +485 -0
- package/dist-esm/src/operations/pipelineOperations.js.map +1 -0
- package/dist-esm/src/operations/pipelineRunOperations.js +185 -0
- package/dist-esm/src/operations/pipelineRunOperations.js.map +1 -0
- package/dist-esm/src/operations/sparkConfigurationOperations.js +440 -0
- package/dist-esm/src/operations/sparkConfigurationOperations.js.map +1 -0
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js +612 -0
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js.map +1 -0
- package/dist-esm/src/operations/sqlPools.js +100 -0
- package/dist-esm/src/operations/sqlPools.js.map +1 -0
- package/dist-esm/src/operations/sqlScriptOperations.js +440 -0
- package/dist-esm/src/operations/sqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operations/triggerOperations.js +717 -0
- package/dist-esm/src/operations/triggerOperations.js.map +1 -0
- package/dist-esm/src/operations/triggerRunOperations.js +147 -0
- package/dist-esm/src/operations/triggerRunOperations.js.map +1 -0
- package/dist-esm/src/operations/workspaceGitRepoManagement.js +66 -0
- package/dist-esm/src/operations/workspaceGitRepoManagement.js.map +1 -0
- package/dist-esm/src/operations/workspaceOperations.js +62 -0
- package/dist-esm/src/operations/workspaceOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/bigDataPools.js +9 -0
- package/dist-esm/src/operationsInterfaces/bigDataPools.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/dataFlowDebugSession.js +9 -0
- package/dist-esm/src/operationsInterfaces/dataFlowDebugSession.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/dataFlowOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/dataFlowOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/datasetOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/datasetOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/index.js +31 -0
- package/dist-esm/src/operationsInterfaces/index.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/integrationRuntimes.js +9 -0
- package/dist-esm/src/operationsInterfaces/integrationRuntimes.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/kqlScriptOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/kqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/kqlScripts.js +9 -0
- package/dist-esm/src/operationsInterfaces/kqlScripts.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/library.js +9 -0
- package/dist-esm/src/operationsInterfaces/library.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/linkConnectionOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/linkConnectionOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/linkedServiceOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/linkedServiceOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/metastore.js +9 -0
- package/dist-esm/src/operationsInterfaces/metastore.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/notebookOperationResult.js +9 -0
- package/dist-esm/src/operationsInterfaces/notebookOperationResult.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/notebookOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/notebookOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/pipelineOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/pipelineOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/pipelineRunOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/pipelineRunOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sparkConfigurationOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/sparkConfigurationOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sparkJobDefinitionOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/sparkJobDefinitionOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sqlPools.js +9 -0
- package/dist-esm/src/operationsInterfaces/sqlPools.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/sqlScriptOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/sqlScriptOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/triggerOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/triggerOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/triggerRunOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/triggerRunOperations.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/workspaceGitRepoManagement.js +9 -0
- package/dist-esm/src/operationsInterfaces/workspaceGitRepoManagement.js.map +1 -0
- package/dist-esm/src/operationsInterfaces/workspaceOperations.js +9 -0
- package/dist-esm/src/operationsInterfaces/workspaceOperations.js.map +1 -0
- package/dist-esm/src/tracing.js +13 -0
- package/dist-esm/src/tracing.js.map +1 -0
- package/package.json +82 -34
- package/types/synapse-artifacts.d.ts +6680 -11132
- package/dist/index.min.js +0 -1
- package/dist/index.min.js.map +0 -1
- package/rollup.config.js +0 -41
- package/tsconfig.json +0 -20
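The listing shows the package moving from a single rollup bundle (dist/index.min.js, rollup.config.js and tsconfig.json are removed) to a dist-esm build with per-operation-group modules, all surfaced through the generated ArtifactsClient. As a rough orientation sketch, not taken from this diff (the endpoint value is a placeholder and constructor options may differ between beta releases), the client is typically constructed like this:

    import { DefaultAzureCredential } from "@azure/identity";
    import { ArtifactsClient } from "@azure/synapse-artifacts";

    // Placeholder Synapse workspace development endpoint.
    const endpoint = "https://<workspace-name>.dev.azuresynapse.net";
    const client = new ArtifactsClient(new DefaultAzureCredential(), endpoint);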
package/dist-esm/src/operations/sparkConfigurationOperations.js.map
@@ -0,0 +1 @@
+
{"version":3,"file":"sparkConfigurationOperations.js","sourceRoot":"","sources":["../../../src/operations/sparkConfigurationOperations.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AAGxC,OAAO,KAAK,UAAU,MAAM,oBAAoB,CAAC;AACjD,OAAO,KAAK,WAAW,MAAM,qBAAqB,CAAC;AACnD,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAEnD,OAAO,EAAkC,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5E,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAgBrC,4CAA4C;AAC5C,gEAAgE;AAChE,MAAM,OAAO,gCAAgC;IAI3C;;;OAGG;IACH,YAAY,MAAuB;QACjC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;OAGG;IACI,kCAAkC,CACvC,OAA2E;QAE3E,MAAM,IAAI,GAAG,IAAI,CAAC,0CAA0C,CAAC,OAAO,CAAC,CAAC;QACtE,OAAO;YACL,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD,MAAM,EAAE,GAAG,EAAE;gBACX,OAAO,IAAI,CAAC,2CAA2C,CAAC,OAAO,CAAC,CAAC;YACnE,CAAC;SACF,CAAC;IACJ,CAAC;IAEc,2CAA2C,CACxD,OAA2E;;YAE3E,IAAI,MAAM,GAAG,cAAM,IAAI,CAAC,kCAAkC,CAAC,OAAO,CAAC,CAAA,CAAC;YACpE,oBAAM,MAAM,CAAC,KAAK,IAAI,EAAE,CAAA,CAAC;YACzB,IAAI,iBAAiB,GAAG,MAAM,CAAC,QAAQ,CAAC;YACxC,OAAO,iBAAiB,EAAE;gBACxB,MAAM,GAAG,cAAM,IAAI,CAAC,sCAAsC,CACxD,iBAAiB,EACjB,OAAO,CACR,CAAA,CAAC;gBACF,iBAAiB,GAAG,MAAM,CAAC,QAAQ,CAAC;gBACpC,oBAAM,MAAM,CAAC,KAAK,IAAI,EAAE,CAAA,CAAC;aAC1B;QACH,CAAC;KAAA;IAEc,0CAA0C,CACvD,OAA2E;;;;gBAE3E,KAAyB,IAAA,KAAA,cAAA,IAAI,CAAC,2CAA2C,CACvE,OAAO,CACR,CAAA,IAAA;oBAFU,MAAM,IAAI,WAAA,CAAA;oBAGnB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,IAAI,CAAA,CAAA,CAAA,CAAC;iBACb;;;;;;;;;QACH,CAAC;KAAA;IAED;;;OAGG;IACK,KAAK,CAAC,kCAAkC,CAC9C,OAA2E;QAE3E,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,oDAAoD,EACpD,OAAO,IAAI,EAAE,CACd,CAAC;QACF,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACnD,EAAE,OAAO,EAAE,EACX,8CAA8C,CAC/C,CAAC;YACF,OAAO,MAAqE,CAAC;SAC9E;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;gBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;aACvB,CAAC,CAAC;YACH,MAAM,KAAK,CAAC;SACb;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,qCAAqC,CACzC,sBAA8B,EAC9B,kBAA8C,EAC9C,OAA0E;QAS1E,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,uDAAuD,EACvD,OAAO,IAAI,EAAE,CACd,CAAC;QACF,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAmC,EACnC,IAA8B,EACuC,EAAE;YACvE,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBAClE,OAAO,MAAoE,CAAC;aAC7E;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;oBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;iBACvB,CAAC,CAAC;gBACH,MAAM,KAAK,CAAC;aACb;oBAAS;gBACR,IAAI,CAAC,GAAG,EAAE,CAAC;aACZ;QACH,CAAC,CAAC;QACF,MAAM,aAAa,GAAG,KAAK,EACzB,IAAmC,EACnC,IAA8B,EAC9B,EAAE;;YACF,IAAI,kBAAkB,GAEN,SAAS,CAAC;YAC1B,MAAM,gBAAgB,GAAG,MAAA,IAAI,CAAC,OAAO,0CAAE,UAAU,CAAC;YAClD,MAAM,QAAQ,GAAmC,CAC/C,WAA6C,EAC7C,YAAqB,EACrB,EAAE;gBACF,kBAAkB,GAAG,WAAW,CAAC;gBACjC,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAG,WAAW,EAAE,YAAY,CAAC,CAAC;YAChD,CAAC,CAAC;YACF,MAAM,WAAW,mCACZ,IAAI,KACP,OAAO,kCACF,IAAI,CAAC,OAAO,KACf,UAAU,EAAE,QAAQ,MAEvB,CAAC;YACF,MAAM,YAAY,GAAG,MAAM,mBAAmB,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;YAClE,OAAO;gBACL,YAAY;gBACZ,WAAW,EAAE;oBACX,UAAU,EAAE,kBAAmB,CAAC,MAAM;oBACtC,IAAI,EAAE,kBAAmB,CAAC,UAAU;oBACpC,OAAO,EAAE,kBAAmB,CAAC,OAAO,CAAC,MAAM,EAAE;iBAC9C;aACF,CAAC;QACJ,CAAC,CAAC;QAEF,MAAM,GAAG,GAAG,IAAI,OAAO,CACrB,aAAa,EACb,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,OAAO,EAAE,EACvD,6CAA6C,CAC9C,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE;YAChC,UAAU,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU;YAC/B,YAAY,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,kBAAkB;SAC1C,CAAC,CAAC;QACH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;
;OAKG;IACH,KAAK,CAAC,4CAA4C,CAChD,sBAA8B,EAC9B,kBAA8C,EAC9C,OAA0E;QAE1E,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qCAAqC,CAC7D,sBAAsB,EACtB,kBAAkB,EAClB,OAAO,CACR,CAAC;QACF,OAAO,MAAM,CAAC,aAAa,EAAE,CAAC;IAChC,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,qBAAqB,CACzB,sBAA8B,EAC9B,OAA+D;QAE/D,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,uCAAuC,EACvC,OAAO,IAAI,EAAE,CACd,CAAC;QACF,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACnD,EAAE,sBAAsB,EAAE,OAAO,EAAE,EACnC,kCAAkC,CACnC,CAAC;YACF,OAAO,MAAyD,CAAC;SAClE;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;gBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;aACvB,CAAC,CAAC;YACH,MAAM,KAAK,CAAC;SACb;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,6BAA6B,CACjC,sBAA8B,EAC9B,OAAkE;QAElE,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,+CAA+C,EAC/C,OAAO,IAAI,EAAE,CACd,CAAC;QACF,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAmC,EACnC,IAA8B,EACf,EAAE;YACjB,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBAClE,OAAO,MAAc,CAAC;aACvB;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;oBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;iBACvB,CAAC,CAAC;gBACH,MAAM,KAAK,CAAC;aACb;oBAAS;gBACR,IAAI,CAAC,GAAG,EAAE,CAAC;aACZ;QACH,CAAC,CAAC;QACF,MAAM,aAAa,GAAG,KAAK,EACzB,IAAmC,EACnC,IAA8B,EAC9B,EAAE;;YACF,IAAI,kBAAkB,GAEN,SAAS,CAAC;YAC1B,MAAM,gBAAgB,GAAG,MAAA,IAAI,CAAC,OAAO,0CAAE,UAAU,CAAC;YAClD,MAAM,QAAQ,GAAmC,CAC/C,WAA6C,EAC7C,YAAqB,EACrB,EAAE;gBACF,kBAAkB,GAAG,WAAW,CAAC;gBACjC,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAG,WAAW,EAAE,YAAY,CAAC,CAAC;YAChD,CAAC,CAAC;YACF,MAAM,WAAW,mCACZ,IAAI,KACP,OAAO,kCACF,IAAI,CAAC,OAAO,KACf,UAAU,EAAE,QAAQ,MAEvB,CAAC;YACF,MAAM,YAAY,GAAG,MAAM,mBAAmB,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;YAClE,OAAO;gBACL,YAAY;gBACZ,WAAW,EAAE;oBACX,UAAU,EAAE,kBAAmB,CAAC,MAAM;oBACtC,IAAI,EAAE,kBAAmB,CAAC,UAAU;oBACpC,OAAO,EAAE,kBAAmB,CAAC,OAAO,CAAC,MAAM,EAAE;iBAC9C;aACF,CAAC;QACJ,CAAC,CAAC;QAEF,MAAM,GAAG,GAAG,IAAI,OAAO,CACrB,aAAa,EACb,EAAE,sBAAsB,EAAE,OAAO,EAAE,EACnC,qCAAqC,CACtC,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE;YAChC,UAAU,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU;YAC/B,YAAY,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,kBAAkB;SAC1C,CAAC,CAAC;QACH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,oCAAoC,CACxC,sBAA8B,EAC9B,OAAkE;QAElE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,6BAA6B,CACrD,sBAAsB,EACtB,OAAO,CACR,CAAC;QACF,OAAO,MAAM,CAAC,aAAa,EAAE,CAAC;IAChC,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,6BAA6B,CACjC,sBAA8B,EAC9B,OAA8B,EAC9B,OAAkE;QAElE,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,+CAA+C,EAC/C,OAAO,IAAI,EAAE,CACd,CAAC;QACF,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAmC,EACnC,IAA8B,EACf,EAAE;YACjB,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBAClE,OAAO,MAAc,CAAC;aACvB;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;oBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;iBACvB,CAAC,CAAC;gBACH,MAAM,KAAK,CAAC;aACb;oBAAS;gBACR,IAAI,CAAC,GAAG,EAAE,CAAC;aACZ;QACH,CAAC,CAAC;QACF,MAAM,aAAa,GAAG,KAAK,EACzB,IAAmC,EACnC,IAA8B,EAC9B,EAAE;;YACF,IAAI,kBAAkB,GAEN,SAAS,CAAC;YAC1B,MAAM,gBAAgB,GAAG,MAAA,IAAI,CAAC,OAAO,0CAAE,UAAU,CAAC;YAClD,MAAM,QAAQ,GAAmC,CAC/C,WAA6C,EAC7C,YAAqB,EACrB,EAAE;gBACF,kBAAkB,GAAG,WAAW,CAAC;gBACjC,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAG,WAAW,EAAE,YAAY,CAAC,CAAC;YAChD,CAAC,CAAC;YACF,MAAM,WAAW,mCACZ,IAAI,KACP,OAAO,kCACF,IAAI,CAAC,OAAO,KACf,UAAU,EAAE,QAAQ,MAEvB,CAAC;YACF,MAAM,YAAY,GAAG,MAAM,mBAAmB,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC;YAClE,OAAO;gBACL,YAAY;gBACZ,WAAW,EAAE;oBACX,UAAU,EAAE,kBAAmB,CAAC,MAAM;oBACtC,IAAI,EAAE,kBAAmB,CAAC,UAAU;oBACpC,
OAAO,EAAE,kBAAmB,CAAC,OAAO,CAAC,MAAM,EAAE;iBAC9C;aACF,CAAC;QACJ,CAAC,CAAC;QAEF,MAAM,GAAG,GAAG,IAAI,OAAO,CACrB,aAAa,EACb,EAAE,sBAAsB,EAAE,OAAO,EAAE,OAAO,EAAE,EAC5C,qCAAqC,CACtC,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE;YAChC,UAAU,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU;YAC/B,YAAY,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,kBAAkB;SAC1C,CAAC,CAAC;QACH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oCAAoC,CACxC,sBAA8B,EAC9B,OAA8B,EAC9B,OAAkE;QAElE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,6BAA6B,CACrD,sBAAsB,EACtB,OAAO,EACP,OAAO,CACR,CAAC;QACF,OAAO,MAAM,CAAC,aAAa,EAAE,CAAC;IAChC,CAAC;IAED;;;;;OAKG;IACK,KAAK,CAAC,sCAAsC,CAClD,QAAgB,EAChB,OAA+E;QAE/E,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CACzB,wDAAwD,EACxD,OAAO,IAAI,EAAE,CACd,CAAC;QACF,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACnD,EAAE,QAAQ,EAAE,OAAO,EAAE,EACrB,kDAAkD,CACnD,CAAC;YACF,OAAO,MAAyE,CAAC;SAClF;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK;gBACtC,OAAO,EAAE,KAAK,CAAC,OAAO;aACvB,CAAC,CAAC;YACH,MAAM,KAAK,CAAC;SACb;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAE3E,MAAM,8CAA8C,GAA6B;IAC/E,IAAI,EAAE,sBAAsB;IAC5B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,8BAA8B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;IACpC,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,6CAA6C,GAA6B;IAC9E,IAAI,EAAE,+CAA+C;IACrD,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,0BAA0B;SAC/C;QACD,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,0BAA0B;SAC/C;QACD,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,0BAA0B;SAC/C;QACD,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,0BAA0B;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,WAAW,EAAE,UAAU,CAAC,kBAAkB;IAC1C,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,sBAAsB,CAAC;IACvE,gBAAgB,EAAE;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;KACnB;IACD,SAAS,EAAE,MAAM;IACjB,UAAU;CACX,CAAC;AACF,MAAM,kCAAkC,GAA6B;IACnE,IAAI,EAAE,+CAA+C;IACrD,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,0BAA0B;SAC/C;QACD,GAAG,EAAE,EAAE;QACP,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,sBAAsB,CAAC;IACvE,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC,WAAW,CAAC;IAC7D,UAAU;CACX,CAAC;AACF,MAAM,qCAAqC,GAA6B;IACtE,IAAI,EAAE,+CAA+C;IACrD,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,sBAAsB,CAAC;IACvE,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC;AACF,MAAM,qCAAqC,GAA6B;IACtE,IAAI,EAAE,sDAAsD;IAC5D,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,GAAG,EAAE,EAAE;QACP,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,WAAW,EAAE,UAAU,CAAC,OAAO;IAC/B,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,sBAAsB,CAAC;IACvE,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,UAAU,CAAC,WAAW,CAAC;IAC7D,SAAS,EAAE,MAAM;IACjB,UAAU;CACX,CAAC;AACF,MAAM,kDAAkD,GAA6B;IACnF,IAAI,EAAE,YAAY;IAClB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,8BAA8B;SACnD;QACD,OAAO,EAAE;
YACP,UAAU,EAAE,OAAO,CAAC,UAAU;SAC/B;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,WAAW,CAAC;IACzC,aAAa,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,UAAU,CAAC,QAAQ,CAAC;IACzD,gBAAgB,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC;IACrC,UAAU;CACX,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport { createSpan } from \"../tracing\";\nimport { PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { SparkConfigurationOperations } from \"../operationsInterfaces\";\nimport * as coreClient from \"@azure/core-client\";\nimport * as coreTracing from \"@azure/core-tracing\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { ArtifactsClient } from \"../artifactsClient\";\nimport { PollerLike, PollOperationState, LroEngine } from \"@azure/core-lro\";\nimport { LroImpl } from \"../lroImpl\";\nimport {\n SparkConfigurationResource,\n SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams,\n SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams,\n SparkConfigurationGetSparkConfigurationsByWorkspaceResponse,\n SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams,\n SparkConfigurationCreateOrUpdateSparkConfigurationResponse,\n SparkConfigurationGetSparkConfigurationOptionalParams,\n SparkConfigurationGetSparkConfigurationResponse,\n SparkConfigurationDeleteSparkConfigurationOptionalParams,\n ArtifactRenameRequest,\n SparkConfigurationRenameSparkConfigurationOptionalParams,\n SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse\n} from \"../models\";\n\n/// <reference lib=\"esnext.asynciterable\" />\n/** Class containing SparkConfigurationOperations operations. 
*/\nexport class SparkConfigurationOperationsImpl\n implements SparkConfigurationOperations {\n private readonly client: ArtifactsClient;\n\n /**\n * Initialize a new instance of the class SparkConfigurationOperations class.\n * @param client Reference to the service client\n */\n constructor(client: ArtifactsClient) {\n this.client = client;\n }\n\n /**\n * Lists sparkconfigurations.\n * @param options The options parameters.\n */\n public listSparkConfigurationsByWorkspace(\n options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams\n ): PagedAsyncIterableIterator<SparkConfigurationResource> {\n const iter = this.getSparkConfigurationsByWorkspacePagingAll(options);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage: () => {\n return this.getSparkConfigurationsByWorkspacePagingPage(options);\n }\n };\n }\n\n private async *getSparkConfigurationsByWorkspacePagingPage(\n options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams\n ): AsyncIterableIterator<SparkConfigurationResource[]> {\n let result = await this._getSparkConfigurationsByWorkspace(options);\n yield result.value || [];\n let continuationToken = result.nextLink;\n while (continuationToken) {\n result = await this._getSparkConfigurationsByWorkspaceNext(\n continuationToken,\n options\n );\n continuationToken = result.nextLink;\n yield result.value || [];\n }\n }\n\n private async *getSparkConfigurationsByWorkspacePagingAll(\n options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams\n ): AsyncIterableIterator<SparkConfigurationResource> {\n for await (const page of this.getSparkConfigurationsByWorkspacePagingPage(\n options\n )) {\n yield* page;\n }\n }\n\n /**\n * Lists sparkconfigurations.\n * @param options The options parameters.\n */\n private async _getSparkConfigurationsByWorkspace(\n options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams\n ): Promise<SparkConfigurationGetSparkConfigurationsByWorkspaceResponse> {\n const { span } = createSpan(\n \"ArtifactsClient-_getSparkConfigurationsByWorkspace\",\n options || {}\n );\n try {\n const result = await this.client.sendOperationRequest(\n { options },\n getSparkConfigurationsByWorkspaceOperationSpec\n );\n return result as SparkConfigurationGetSparkConfigurationsByWorkspaceResponse;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates or updates a sparkconfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param sparkConfiguration SparkConfiguration resource definition.\n * @param options The options parameters.\n */\n async beginCreateOrUpdateSparkConfiguration(\n sparkConfigurationName: string,\n sparkConfiguration: SparkConfigurationResource,\n options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams\n ): Promise<\n PollerLike<\n PollOperationState<\n SparkConfigurationCreateOrUpdateSparkConfigurationResponse\n >,\n SparkConfigurationCreateOrUpdateSparkConfigurationResponse\n >\n > {\n const { span } = createSpan(\n \"ArtifactsClient-beginCreateOrUpdateSparkConfiguration\",\n options || {}\n );\n const directSendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ): Promise<SparkConfigurationCreateOrUpdateSparkConfigurationResponse> => {\n try {\n const result = await this.client.sendOperationRequest(args, spec);\n return 
result as SparkConfigurationCreateOrUpdateSparkConfigurationResponse;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n };\n const sendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ) => {\n let currentRawResponse:\n | coreClient.FullOperationResponse\n | undefined = undefined;\n const providedCallback = args.options?.onResponse;\n const callback: coreClient.RawResponseCallback = (\n rawResponse: coreClient.FullOperationResponse,\n flatResponse: unknown\n ) => {\n currentRawResponse = rawResponse;\n providedCallback?.(rawResponse, flatResponse);\n };\n const updatedArgs = {\n ...args,\n options: {\n ...args.options,\n onResponse: callback\n }\n };\n const flatResponse = await directSendOperation(updatedArgs, spec);\n return {\n flatResponse,\n rawResponse: {\n statusCode: currentRawResponse!.status,\n body: currentRawResponse!.parsedBody,\n headers: currentRawResponse!.headers.toJSON()\n }\n };\n };\n\n const lro = new LroImpl(\n sendOperation,\n { sparkConfigurationName, sparkConfiguration, options },\n createOrUpdateSparkConfigurationOperationSpec\n );\n const poller = new LroEngine(lro, {\n resumeFrom: options?.resumeFrom,\n intervalInMs: options?.updateIntervalInMs\n });\n await poller.poll();\n return poller;\n }\n\n /**\n * Creates or updates a sparkconfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param sparkConfiguration SparkConfiguration resource definition.\n * @param options The options parameters.\n */\n async beginCreateOrUpdateSparkConfigurationAndWait(\n sparkConfigurationName: string,\n sparkConfiguration: SparkConfigurationResource,\n options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams\n ): Promise<SparkConfigurationCreateOrUpdateSparkConfigurationResponse> {\n const poller = await this.beginCreateOrUpdateSparkConfiguration(\n sparkConfigurationName,\n sparkConfiguration,\n options\n );\n return poller.pollUntilDone();\n }\n\n /**\n * Gets a sparkConfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param options The options parameters.\n */\n async getSparkConfiguration(\n sparkConfigurationName: string,\n options?: SparkConfigurationGetSparkConfigurationOptionalParams\n ): Promise<SparkConfigurationGetSparkConfigurationResponse> {\n const { span } = createSpan(\n \"ArtifactsClient-getSparkConfiguration\",\n options || {}\n );\n try {\n const result = await this.client.sendOperationRequest(\n { sparkConfigurationName, options },\n getSparkConfigurationOperationSpec\n );\n return result as SparkConfigurationGetSparkConfigurationResponse;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a sparkConfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param options The options parameters.\n */\n async beginDeleteSparkConfiguration(\n sparkConfigurationName: string,\n options?: SparkConfigurationDeleteSparkConfigurationOptionalParams\n ): Promise<PollerLike<PollOperationState<void>, void>> {\n const { span } = createSpan(\n \"ArtifactsClient-beginDeleteSparkConfiguration\",\n options || {}\n );\n const directSendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ): Promise<void> => {\n try {\n const result = await 
this.client.sendOperationRequest(args, spec);\n return result as void;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n };\n const sendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ) => {\n let currentRawResponse:\n | coreClient.FullOperationResponse\n | undefined = undefined;\n const providedCallback = args.options?.onResponse;\n const callback: coreClient.RawResponseCallback = (\n rawResponse: coreClient.FullOperationResponse,\n flatResponse: unknown\n ) => {\n currentRawResponse = rawResponse;\n providedCallback?.(rawResponse, flatResponse);\n };\n const updatedArgs = {\n ...args,\n options: {\n ...args.options,\n onResponse: callback\n }\n };\n const flatResponse = await directSendOperation(updatedArgs, spec);\n return {\n flatResponse,\n rawResponse: {\n statusCode: currentRawResponse!.status,\n body: currentRawResponse!.parsedBody,\n headers: currentRawResponse!.headers.toJSON()\n }\n };\n };\n\n const lro = new LroImpl(\n sendOperation,\n { sparkConfigurationName, options },\n deleteSparkConfigurationOperationSpec\n );\n const poller = new LroEngine(lro, {\n resumeFrom: options?.resumeFrom,\n intervalInMs: options?.updateIntervalInMs\n });\n await poller.poll();\n return poller;\n }\n\n /**\n * Deletes a sparkConfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param options The options parameters.\n */\n async beginDeleteSparkConfigurationAndWait(\n sparkConfigurationName: string,\n options?: SparkConfigurationDeleteSparkConfigurationOptionalParams\n ): Promise<void> {\n const poller = await this.beginDeleteSparkConfiguration(\n sparkConfigurationName,\n options\n );\n return poller.pollUntilDone();\n }\n\n /**\n * Renames a sparkConfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param request proposed new name.\n * @param options The options parameters.\n */\n async beginRenameSparkConfiguration(\n sparkConfigurationName: string,\n request: ArtifactRenameRequest,\n options?: SparkConfigurationRenameSparkConfigurationOptionalParams\n ): Promise<PollerLike<PollOperationState<void>, void>> {\n const { span } = createSpan(\n \"ArtifactsClient-beginRenameSparkConfiguration\",\n options || {}\n );\n const directSendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ): Promise<void> => {\n try {\n const result = await this.client.sendOperationRequest(args, spec);\n return result as void;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n };\n const sendOperation = async (\n args: coreClient.OperationArguments,\n spec: coreClient.OperationSpec\n ) => {\n let currentRawResponse:\n | coreClient.FullOperationResponse\n | undefined = undefined;\n const providedCallback = args.options?.onResponse;\n const callback: coreClient.RawResponseCallback = (\n rawResponse: coreClient.FullOperationResponse,\n flatResponse: unknown\n ) => {\n currentRawResponse = rawResponse;\n providedCallback?.(rawResponse, flatResponse);\n };\n const updatedArgs = {\n ...args,\n options: {\n ...args.options,\n onResponse: callback\n }\n };\n const flatResponse = await directSendOperation(updatedArgs, spec);\n return {\n flatResponse,\n rawResponse: {\n statusCode: currentRawResponse!.status,\n body: currentRawResponse!.parsedBody,\n headers: 
currentRawResponse!.headers.toJSON()\n }\n };\n };\n\n const lro = new LroImpl(\n sendOperation,\n { sparkConfigurationName, request, options },\n renameSparkConfigurationOperationSpec\n );\n const poller = new LroEngine(lro, {\n resumeFrom: options?.resumeFrom,\n intervalInMs: options?.updateIntervalInMs\n });\n await poller.poll();\n return poller;\n }\n\n /**\n * Renames a sparkConfiguration.\n * @param sparkConfigurationName The spark Configuration name.\n * @param request proposed new name.\n * @param options The options parameters.\n */\n async beginRenameSparkConfigurationAndWait(\n sparkConfigurationName: string,\n request: ArtifactRenameRequest,\n options?: SparkConfigurationRenameSparkConfigurationOptionalParams\n ): Promise<void> {\n const poller = await this.beginRenameSparkConfiguration(\n sparkConfigurationName,\n request,\n options\n );\n return poller.pollUntilDone();\n }\n\n /**\n * GetSparkConfigurationsByWorkspaceNext\n * @param nextLink The nextLink from the previous successful call to the\n * GetSparkConfigurationsByWorkspace method.\n * @param options The options parameters.\n */\n private async _getSparkConfigurationsByWorkspaceNext(\n nextLink: string,\n options?: SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams\n ): Promise<SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse> {\n const { span } = createSpan(\n \"ArtifactsClient-_getSparkConfigurationsByWorkspaceNext\",\n options || {}\n );\n try {\n const result = await this.client.sendOperationRequest(\n { nextLink, options },\n getSparkConfigurationsByWorkspaceNextOperationSpec\n );\n return result as SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse;\n } catch (error) {\n span.setStatus({\n code: coreTracing.SpanStatusCode.UNSET,\n message: error.message\n });\n throw error;\n } finally {\n span.end();\n }\n }\n}\n// Operation Specifications\nconst serializer = coreClient.createSerializer(Mappers, /* isXml */ false);\n\nconst getSparkConfigurationsByWorkspaceOperationSpec: coreClient.OperationSpec = {\n path: \"/sparkconfigurations\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkConfigurationListResponse\n },\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n queryParameters: [Parameters.apiVersion3],\n urlParameters: [Parameters.endpoint],\n headerParameters: [Parameters.accept],\n serializer\n};\nconst createOrUpdateSparkConfigurationOperationSpec: coreClient.OperationSpec = {\n path: \"/sparkconfigurations/{sparkConfigurationName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkConfigurationResource\n },\n 201: {\n bodyMapper: Mappers.SparkConfigurationResource\n },\n 202: {\n bodyMapper: Mappers.SparkConfigurationResource\n },\n 204: {\n bodyMapper: Mappers.SparkConfigurationResource\n },\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n requestBody: Parameters.sparkConfiguration,\n queryParameters: [Parameters.apiVersion3],\n urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName],\n headerParameters: [\n Parameters.accept,\n Parameters.contentType,\n Parameters.ifMatch\n ],\n mediaType: \"json\",\n serializer\n};\nconst getSparkConfigurationOperationSpec: coreClient.OperationSpec = {\n path: \"/sparkconfigurations/{sparkConfigurationName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkConfigurationResource\n },\n 304: {},\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n queryParameters: [Parameters.apiVersion3],\n urlParameters: 
[Parameters.endpoint, Parameters.sparkConfigurationName],\n headerParameters: [Parameters.accept, Parameters.ifNoneMatch],\n serializer\n};\nconst deleteSparkConfigurationOperationSpec: coreClient.OperationSpec = {\n path: \"/sparkconfigurations/{sparkConfigurationName}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {},\n 201: {},\n 202: {},\n 204: {},\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n queryParameters: [Parameters.apiVersion3],\n urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName],\n headerParameters: [Parameters.accept],\n serializer\n};\nconst renameSparkConfigurationOperationSpec: coreClient.OperationSpec = {\n path: \"/sparkconfigurations/{sparkConfigurationName}/rename\",\n httpMethod: \"POST\",\n responses: {\n 200: {},\n 201: {},\n 202: {},\n 204: {},\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n requestBody: Parameters.request,\n queryParameters: [Parameters.apiVersion3],\n urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName],\n headerParameters: [Parameters.accept, Parameters.contentType],\n mediaType: \"json\",\n serializer\n};\nconst getSparkConfigurationsByWorkspaceNextOperationSpec: coreClient.OperationSpec = {\n path: \"{nextLink}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.SparkConfigurationListResponse\n },\n default: {\n bodyMapper: Mappers.CloudError\n }\n },\n queryParameters: [Parameters.apiVersion3],\n urlParameters: [Parameters.endpoint, Parameters.nextLink],\n headerParameters: [Parameters.accept],\n serializer\n};\n"]}
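The sourcesContent embedded in this map shows the generated SparkConfigurationOperationsImpl: list* methods return a PagedAsyncIterableIterator that follows nextLink internally, and begin*/begin*AndWait pairs drive long-running operations through an LroEngine poller. A minimal consumption sketch, assuming the client from the sketch above exposes this group as a sparkConfigurationOperations property (the property name is not visible in this hunk):

    // Iterate every spark configuration in the workspace; paging is handled by the iterator.
    for await (const sparkConfig of client.sparkConfigurationOperations.listSparkConfigurationsByWorkspace()) {
        console.log(sparkConfig.name);
    }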
package/dist-esm/src/operations/sparkJobDefinitionOperations.js
@@ -0,0 +1,612 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib";
+import { createSpan } from "../tracing";
+import * as coreClient from "@azure/core-client";
+import * as coreTracing from "@azure/core-tracing";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { LroEngine } from "@azure/core-lro";
+import { LroImpl } from "../lroImpl";
+/// <reference lib="esnext.asynciterable" />
+/** Class containing SparkJobDefinitionOperations operations. */
+export class SparkJobDefinitionOperationsImpl {
+    /**
+     * Initialize a new instance of the class SparkJobDefinitionOperations class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * Lists spark job definitions.
+     * @param options The options parameters.
+     */
+    listSparkJobDefinitionsByWorkspace(options) {
+        const iter = this.getSparkJobDefinitionsByWorkspacePagingAll(options);
+        return {
+            next() {
+                return iter.next();
+            },
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            byPage: () => {
+                return this.getSparkJobDefinitionsByWorkspacePagingPage(options);
+            }
+        };
+    }
+    getSparkJobDefinitionsByWorkspacePagingPage(options) {
+        return __asyncGenerator(this, arguments, function* getSparkJobDefinitionsByWorkspacePagingPage_1() {
+            let result = yield __await(this._getSparkJobDefinitionsByWorkspace(options));
+            yield yield __await(result.value || []);
+            let continuationToken = result.nextLink;
+            while (continuationToken) {
+                result = yield __await(this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options));
+                continuationToken = result.nextLink;
+                yield yield __await(result.value || []);
+            }
+        });
+    }
+    getSparkJobDefinitionsByWorkspacePagingAll(options) {
+        return __asyncGenerator(this, arguments, function* getSparkJobDefinitionsByWorkspacePagingAll_1() {
+            var e_1, _a;
+            try {
+                for (var _b = __asyncValues(this.getSparkJobDefinitionsByWorkspacePagingPage(options)), _c; _c = yield __await(_b.next()), !_c.done;) {
+                    const page = _c.value;
+                    yield __await(yield* __asyncDelegator(__asyncValues(page)));
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+        });
+    }
+    /**
+     * Lists spark job definitions.
+     * @param options The options parameters.
+     */
+    async _getSparkJobDefinitionsByWorkspace(options) {
+        const { span } = createSpan("ArtifactsClient-_getSparkJobDefinitionsByWorkspace", options || {});
+        try {
+            const result = await this.client.sendOperationRequest({ options }, getSparkJobDefinitionsByWorkspaceOperationSpec);
+            return result;
+        }
+        catch (error) {
+            span.setStatus({
+                code: coreTracing.SpanStatusCode.UNSET,
+                message: error.message
+            });
+            throw error;
+        }
+        finally {
+            span.end();
+        }
+    }
+    /**
+     * Creates or updates a Spark Job Definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param sparkJobDefinition Spark Job Definition resource definition.
+     * @param options The options parameters.
+     */
+    async beginCreateOrUpdateSparkJobDefinition(sparkJobDefinitionName, sparkJobDefinition, options) {
+        const { span } = createSpan("ArtifactsClient-beginCreateOrUpdateSparkJobDefinition", options || {});
+        const directSendOperation = async (args, spec) => {
+            try {
+                const result = await this.client.sendOperationRequest(args, spec);
+                return result;
+            }
+            catch (error) {
+                span.setStatus({
+                    code: coreTracing.SpanStatusCode.UNSET,
+                    message: error.message
+                });
+                throw error;
+            }
+            finally {
+                span.end();
+            }
+        };
+        const sendOperation = async (args, spec) => {
+            var _a;
+            let currentRawResponse = undefined;
+            const providedCallback = (_a = args.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+            const callback = (rawResponse, flatResponse) => {
+                currentRawResponse = rawResponse;
+                providedCallback === null || providedCallback === void 0 ? void 0 : providedCallback(rawResponse, flatResponse);
+            };
+            const updatedArgs = Object.assign(Object.assign({}, args), { options: Object.assign(Object.assign({}, args.options), { onResponse: callback }) });
+            const flatResponse = await directSendOperation(updatedArgs, spec);
+            return {
+                flatResponse,
+                rawResponse: {
+                    statusCode: currentRawResponse.status,
+                    body: currentRawResponse.parsedBody,
+                    headers: currentRawResponse.headers.toJSON()
+                }
+            };
+        };
+        const lro = new LroImpl(sendOperation, { sparkJobDefinitionName, sparkJobDefinition, options }, createOrUpdateSparkJobDefinitionOperationSpec);
+        const poller = new LroEngine(lro, {
+            resumeFrom: options === null || options === void 0 ? void 0 : options.resumeFrom,
+            intervalInMs: options === null || options === void 0 ? void 0 : options.updateIntervalInMs
+        });
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Creates or updates a Spark Job Definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param sparkJobDefinition Spark Job Definition resource definition.
+     * @param options The options parameters.
+     */
+    async beginCreateOrUpdateSparkJobDefinitionAndWait(sparkJobDefinitionName, sparkJobDefinition, options) {
+        const poller = await this.beginCreateOrUpdateSparkJobDefinition(sparkJobDefinitionName, sparkJobDefinition, options);
+        return poller.pollUntilDone();
+    }
+    /**
+     * Gets a Spark Job Definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param options The options parameters.
+     */
+    async getSparkJobDefinition(sparkJobDefinitionName, options) {
+        const { span } = createSpan("ArtifactsClient-getSparkJobDefinition", options || {});
+        try {
+            const result = await this.client.sendOperationRequest({ sparkJobDefinitionName, options }, getSparkJobDefinitionOperationSpec);
+            return result;
+        }
+        catch (error) {
+            span.setStatus({
+                code: coreTracing.SpanStatusCode.UNSET,
+                message: error.message
+            });
+            throw error;
+        }
+        finally {
+            span.end();
+        }
+    }
+    /**
+     * Deletes a Spark Job Definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param options The options parameters.
+     */
+    async beginDeleteSparkJobDefinition(sparkJobDefinitionName, options) {
+        const { span } = createSpan("ArtifactsClient-beginDeleteSparkJobDefinition", options || {});
+        const directSendOperation = async (args, spec) => {
+            try {
+                const result = await this.client.sendOperationRequest(args, spec);
+                return result;
+            }
+            catch (error) {
+                span.setStatus({
+                    code: coreTracing.SpanStatusCode.UNSET,
+                    message: error.message
+                });
+                throw error;
+            }
+            finally {
+                span.end();
+            }
+        };
+        const sendOperation = async (args, spec) => {
+            var _a;
+            let currentRawResponse = undefined;
+            const providedCallback = (_a = args.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+            const callback = (rawResponse, flatResponse) => {
+                currentRawResponse = rawResponse;
+                providedCallback === null || providedCallback === void 0 ? void 0 : providedCallback(rawResponse, flatResponse);
+            };
+            const updatedArgs = Object.assign(Object.assign({}, args), { options: Object.assign(Object.assign({}, args.options), { onResponse: callback }) });
+            const flatResponse = await directSendOperation(updatedArgs, spec);
+            return {
+                flatResponse,
+                rawResponse: {
+                    statusCode: currentRawResponse.status,
+                    body: currentRawResponse.parsedBody,
+                    headers: currentRawResponse.headers.toJSON()
+                }
+            };
+        };
+        const lro = new LroImpl(sendOperation, { sparkJobDefinitionName, options }, deleteSparkJobDefinitionOperationSpec);
+        const poller = new LroEngine(lro, {
+            resumeFrom: options === null || options === void 0 ? void 0 : options.resumeFrom,
+            intervalInMs: options === null || options === void 0 ? void 0 : options.updateIntervalInMs
+        });
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Deletes a Spark Job Definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param options The options parameters.
+     */
+    async beginDeleteSparkJobDefinitionAndWait(sparkJobDefinitionName, options) {
+        const poller = await this.beginDeleteSparkJobDefinition(sparkJobDefinitionName, options);
+        return poller.pollUntilDone();
+    }
+    /**
+     * Executes the spark job definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param options The options parameters.
+     */
+    async beginExecuteSparkJobDefinition(sparkJobDefinitionName, options) {
+        const { span } = createSpan("ArtifactsClient-beginExecuteSparkJobDefinition", options || {});
+        const directSendOperation = async (args, spec) => {
+            try {
+                const result = await this.client.sendOperationRequest(args, spec);
+                return result;
+            }
+            catch (error) {
+                span.setStatus({
+                    code: coreTracing.SpanStatusCode.UNSET,
+                    message: error.message
+                });
+                throw error;
+            }
+            finally {
+                span.end();
+            }
+        };
+        const sendOperation = async (args, spec) => {
+            var _a;
+            let currentRawResponse = undefined;
+            const providedCallback = (_a = args.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+            const callback = (rawResponse, flatResponse) => {
+                currentRawResponse = rawResponse;
+                providedCallback === null || providedCallback === void 0 ? void 0 : providedCallback(rawResponse, flatResponse);
+            };
+            const updatedArgs = Object.assign(Object.assign({}, args), { options: Object.assign(Object.assign({}, args.options), { onResponse: callback }) });
+            const flatResponse = await directSendOperation(updatedArgs, spec);
+            return {
+                flatResponse,
+                rawResponse: {
+                    statusCode: currentRawResponse.status,
+                    body: currentRawResponse.parsedBody,
+                    headers: currentRawResponse.headers.toJSON()
+                }
+            };
+        };
+        const lro = new LroImpl(sendOperation, { sparkJobDefinitionName, options }, executeSparkJobDefinitionOperationSpec);
+        const poller = new LroEngine(lro, {
+            resumeFrom: options === null || options === void 0 ? void 0 : options.resumeFrom,
+            intervalInMs: options === null || options === void 0 ? void 0 : options.updateIntervalInMs,
+            lroResourceLocationConfig: "location"
+        });
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Executes the spark job definition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param options The options parameters.
+     */
+    async beginExecuteSparkJobDefinitionAndWait(sparkJobDefinitionName, options) {
+        const poller = await this.beginExecuteSparkJobDefinition(sparkJobDefinitionName, options);
+        return poller.pollUntilDone();
+    }
+    /**
+     * Renames a sparkJobDefinition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param request proposed new name.
+     * @param options The options parameters.
+     */
+    async beginRenameSparkJobDefinition(sparkJobDefinitionName, request, options) {
+        const { span } = createSpan("ArtifactsClient-beginRenameSparkJobDefinition", options || {});
+        const directSendOperation = async (args, spec) => {
+            try {
+                const result = await this.client.sendOperationRequest(args, spec);
+                return result;
+            }
+            catch (error) {
+                span.setStatus({
+                    code: coreTracing.SpanStatusCode.UNSET,
+                    message: error.message
+                });
+                throw error;
+            }
+            finally {
+                span.end();
+            }
+        };
+        const sendOperation = async (args, spec) => {
+            var _a;
+            let currentRawResponse = undefined;
+            const providedCallback = (_a = args.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+            const callback = (rawResponse, flatResponse) => {
+                currentRawResponse = rawResponse;
+                providedCallback === null || providedCallback === void 0 ? void 0 : providedCallback(rawResponse, flatResponse);
+            };
+            const updatedArgs = Object.assign(Object.assign({}, args), { options: Object.assign(Object.assign({}, args.options), { onResponse: callback }) });
+            const flatResponse = await directSendOperation(updatedArgs, spec);
+            return {
+                flatResponse,
+                rawResponse: {
+                    statusCode: currentRawResponse.status,
+                    body: currentRawResponse.parsedBody,
+                    headers: currentRawResponse.headers.toJSON()
+                }
+            };
+        };
+        const lro = new LroImpl(sendOperation, { sparkJobDefinitionName, request, options }, renameSparkJobDefinitionOperationSpec);
+        const poller = new LroEngine(lro, {
+            resumeFrom: options === null || options === void 0 ? void 0 : options.resumeFrom,
+            intervalInMs: options === null || options === void 0 ? void 0 : options.updateIntervalInMs
+        });
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Renames a sparkJobDefinition.
+     * @param sparkJobDefinitionName The spark job definition name.
+     * @param request proposed new name.
+     * @param options The options parameters.
+     */
+    async beginRenameSparkJobDefinitionAndWait(sparkJobDefinitionName, request, options) {
+        const poller = await this.beginRenameSparkJobDefinition(sparkJobDefinitionName, request, options);
+        return poller.pollUntilDone();
+    }
+    /**
+     * Debug the spark job definition.
+     * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition.
+     * @param options The options parameters.
+     */
+    async beginDebugSparkJobDefinition(sparkJobDefinitionAzureResource, options) {
+        const { span } = createSpan("ArtifactsClient-beginDebugSparkJobDefinition", options || {});
+        const directSendOperation = async (args, spec) => {
+            try {
+                const result = await this.client.sendOperationRequest(args, spec);
+                return result;
+            }
+            catch (error) {
+                span.setStatus({
+                    code: coreTracing.SpanStatusCode.UNSET,
+                    message: error.message
+                });
+                throw error;
+            }
+            finally {
+                span.end();
+            }
+        };
+        const sendOperation = async (args, spec) => {
+            var _a;
+            let currentRawResponse = undefined;
+            const providedCallback = (_a = args.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+            const callback = (rawResponse, flatResponse) => {
+                currentRawResponse = rawResponse;
+                providedCallback === null || providedCallback === void 0 ? void 0 : providedCallback(rawResponse, flatResponse);
+            };
+            const updatedArgs = Object.assign(Object.assign({}, args), { options: Object.assign(Object.assign({}, args.options), { onResponse: callback }) });
+            const flatResponse = await directSendOperation(updatedArgs, spec);
+            return {
+                flatResponse,
+                rawResponse: {
+                    statusCode: currentRawResponse.status,
+                    body: currentRawResponse.parsedBody,
+                    headers: currentRawResponse.headers.toJSON()
+                }
+            };
+        };
+        const lro = new LroImpl(sendOperation, { sparkJobDefinitionAzureResource, options }, debugSparkJobDefinitionOperationSpec);
+        const poller = new LroEngine(lro, {
+            resumeFrom: options === null || options === void 0 ? void 0 : options.resumeFrom,
+            intervalInMs: options === null || options === void 0 ? void 0 : options.updateIntervalInMs,
+            lroResourceLocationConfig: "location"
+        });
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Debug the spark job definition.
+     * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition.
+     * @param options The options parameters.
+     */
+    async beginDebugSparkJobDefinitionAndWait(sparkJobDefinitionAzureResource, options) {
+        const poller = await this.beginDebugSparkJobDefinition(sparkJobDefinitionAzureResource, options);
+        return poller.pollUntilDone();
+    }
+    /**
+     * GetSparkJobDefinitionsByWorkspaceNext
+     * @param nextLink The nextLink from the previous successful call to the
+     * GetSparkJobDefinitionsByWorkspace method.
+     * @param options The options parameters.
+     */
+    async _getSparkJobDefinitionsByWorkspaceNext(nextLink, options) {
+        const { span } = createSpan("ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", options || {});
+        try {
+            const result = await this.client.sendOperationRequest({ nextLink, options }, getSparkJobDefinitionsByWorkspaceNextOperationSpec);
+            return result;
+        }
+        catch (error) {
+            span.setStatus({
+                code: coreTracing.SpanStatusCode.UNSET,
+                message: error.message
+            });
+            throw error;
+        }
+        finally {
+            span.end();
+        }
+    }
+}
+// Operation Specifications
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
+const getSparkJobDefinitionsByWorkspaceOperationSpec = {
+    path: "/sparkJobDefinitions",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkJobDefinitionsListResponse
+        },
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint],
+    headerParameters: [Parameters.accept],
+    serializer
+};
+const createOrUpdateSparkJobDefinitionOperationSpec = {
+    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkJobDefinitionResource
+        },
+        201: {
+            bodyMapper: Mappers.SparkJobDefinitionResource
+        },
+        202: {
+            bodyMapper: Mappers.SparkJobDefinitionResource
+        },
+        204: {
+            bodyMapper: Mappers.SparkJobDefinitionResource
+        },
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    requestBody: Parameters.sparkJobDefinition,
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
+    headerParameters: [
+        Parameters.accept,
+        Parameters.contentType,
+        Parameters.ifMatch
+    ],
+    mediaType: "json",
+    serializer
+};
+const getSparkJobDefinitionOperationSpec = {
+    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkJobDefinitionResource
+        },
+        304: {},
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
+    headerParameters: [Parameters.accept, Parameters.ifNoneMatch],
+    serializer
+};
+const deleteSparkJobDefinitionOperationSpec = {
+    path: "/sparkJobDefinitions/{sparkJobDefinitionName}",
+    httpMethod: "DELETE",
+    responses: {
+        200: {},
+        201: {},
+        202: {},
+        204: {},
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
+    headerParameters: [Parameters.accept],
+    serializer
+};
+const executeSparkJobDefinitionOperationSpec = {
+    path: "/sparkJobDefinitions/{sparkJobDefinitionName}/execute",
+    httpMethod: "POST",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        201: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        202: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        204: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
+    headerParameters: [Parameters.accept],
+    serializer
+};
+const renameSparkJobDefinitionOperationSpec = {
+    path: "/sparkJobDefinitions/{sparkJobDefinitionName}/rename",
+    httpMethod: "POST",
+    responses: {
+        200: {},
+        201: {},
+        202: {},
+        204: {},
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    requestBody: Parameters.request,
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName],
+    headerParameters: [Parameters.accept, Parameters.contentType],
+    mediaType: "json",
+    serializer
+};
+const debugSparkJobDefinitionOperationSpec = {
+    path: "/debugSparkJobDefinition",
+    httpMethod: "POST",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        201: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        202: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        204: {
+            bodyMapper: Mappers.SparkBatchJob
+        },
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    requestBody: Parameters.sparkJobDefinitionAzureResource,
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint],
+    headerParameters: [Parameters.accept, Parameters.contentType],
+    mediaType: "json",
+    serializer
+};
+const getSparkJobDefinitionsByWorkspaceNextOperationSpec = {
+    path: "{nextLink}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: Mappers.SparkJobDefinitionsListResponse
+        },
+        default: {
+            bodyMapper: Mappers.CloudError
+        }
+    },
+    queryParameters: [Parameters.apiVersion4],
+    urlParameters: [Parameters.endpoint, Parameters.nextLink],
+    headerParameters: [Parameters.accept],
+    serializer
+};
+//# sourceMappingURL=sparkJobDefinitionOperations.js.map
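To round out the picture, a hedged sketch of driving the long-running execute operation added above, again assuming the operation group is reachable as a sparkJobDefinitionOperations property on the client and that a definition named "mySparkJob" already exists (both names are illustrative):

    // Submit the Spark batch job behind an existing job definition and wait for the poller to complete.
    const batchJob = await client.sparkJobDefinitionOperations.beginExecuteSparkJobDefinitionAndWait("mySparkJob");
    console.log(batchJob.id, batchJob.state);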