@azure/synapse-artifacts 1.0.0-beta.11 → 1.0.0-beta.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +808 -469
- package/dist/index.js.map +1 -1
- package/dist-esm/src/artifactsClient.js +20 -15
- package/dist-esm/src/artifactsClient.js.map +1 -1
- package/dist-esm/src/index.js +1 -0
- package/dist-esm/src/index.js.map +1 -1
- package/dist-esm/src/models/index.js +10 -0
- package/dist-esm/src/models/index.js.map +1 -1
- package/dist-esm/src/models/mappers.js +406 -308
- package/dist-esm/src/models/mappers.js.map +1 -1
- package/dist-esm/src/models/parameters.js +1 -1
- package/dist-esm/src/models/parameters.js.map +1 -1
- package/dist-esm/src/operations/dataFlowDebugSession.js +19 -8
- package/dist-esm/src/operations/dataFlowDebugSession.js.map +1 -1
- package/dist-esm/src/operations/dataFlowOperations.js +19 -8
- package/dist-esm/src/operations/dataFlowOperations.js.map +1 -1
- package/dist-esm/src/operations/datasetOperations.js +19 -8
- package/dist-esm/src/operations/datasetOperations.js.map +1 -1
- package/dist-esm/src/operations/kqlScripts.js +19 -8
- package/dist-esm/src/operations/kqlScripts.js.map +1 -1
- package/dist-esm/src/operations/library.js +19 -8
- package/dist-esm/src/operations/library.js.map +1 -1
- package/dist-esm/src/operations/linkConnectionOperations.js +102 -40
- package/dist-esm/src/operations/linkConnectionOperations.js.map +1 -1
- package/dist-esm/src/operations/linkedServiceOperations.js +19 -8
- package/dist-esm/src/operations/linkedServiceOperations.js.map +1 -1
- package/dist-esm/src/operations/notebookOperations.js +37 -16
- package/dist-esm/src/operations/notebookOperations.js.map +1 -1
- package/dist-esm/src/operations/pipelineOperations.js +19 -8
- package/dist-esm/src/operations/pipelineOperations.js.map +1 -1
- package/dist-esm/src/operations/sparkConfigurationOperations.js +19 -8
- package/dist-esm/src/operations/sparkConfigurationOperations.js.map +1 -1
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js +19 -8
- package/dist-esm/src/operations/sparkJobDefinitionOperations.js.map +1 -1
- package/dist-esm/src/operations/sqlScriptOperations.js +19 -8
- package/dist-esm/src/operations/sqlScriptOperations.js.map +1 -1
- package/dist-esm/src/operations/triggerOperations.js +19 -8
- package/dist-esm/src/operations/triggerOperations.js.map +1 -1
- package/dist-esm/src/operationsInterfaces/linkConnectionOperations.js.map +1 -1
- package/dist-esm/src/pagingHelper.js +32 -0
- package/dist-esm/src/pagingHelper.js.map +1 -0
- package/dist-esm/src/tracing.js +1 -1
- package/dist-esm/src/tracing.js.map +1 -1
- package/package.json +5 -5
- package/types/synapse-artifacts.d.ts +118 -28
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAyoUH,6DAA6D;AAC7D,MAAM,CAAN,IAAY,SAGX;AAHD,WAAY,SAAS;IACnB,6BAA6B;IAC7B,8DAAiD,CAAA;AACnD,CAAC,EAHW,SAAS,KAAT,SAAS,QAGpB;AAWD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAOX;AAPD,WAAY,kBAAkB;IAC5B,cAAc;IACd,yCAAmB,CAAA;IACnB,gBAAgB;IAChB,6CAAuB,CAAA;IACvB,aAAa;IACb,uCAAiB,CAAA;AACnB,CAAC,EAPW,kBAAkB,KAAlB,kBAAkB,QAO7B;AAaD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,eAAe;IACf,4CAAqB,CAAA;IACrB,cAAc;IACd,0CAAmB,CAAA;IACnB,aAAa;IACb,wCAAiB,CAAA;AACnB,CAAC,EAPW,mBAAmB,KAAnB,mBAAmB,QAO9B;AAaD,iEAAiE;AACjE,MAAM,CAAN,IAAY,aAeX;AAfD,WAAY,aAAa;IACvB,WAAW;IACX,8BAAa,CAAA;IACb,YAAY;IACZ,gCAAe,CAAA;IACf,aAAa;IACb,kCAAiB,CAAA;IACjB,YAAY;IACZ,gCAAe,CAAA;IACf,aAAa;IACb,kCAAiB,CAAA;IACjB,cAAc;IACd,oCAAmB,CAAA;IACnB,eAAe;IACf,sCAAqB,CAAA;AACvB,CAAC,EAfW,aAAa,KAAb,aAAa,QAexB;AAiBD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAKX;AALD,WAAY,mBAAmB;IAC7B,WAAW;IACX,oCAAa,CAAA;IACb,sBAAsB;IACtB,0DAAmC,CAAA;AACrC,CAAC,EALW,mBAAmB,KAAnB,mBAAmB,QAK9B;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,cAAc;IACd,kDAAmB,CAAA;IACnB,iBAAiB;IACjB,wDAAyB,CAAA;AAC3B,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAeX;AAfD,WAAY,kBAAkB;IAC5B,aAAa;IACb,uCAAiB,CAAA;IACjB,aAAa;IACb,uCAAiB,CAAA;IACjB,UAAU;IACV,iCAAW,CAAA;IACX,YAAY;IACZ,qCAAe,CAAA;IACf,WAAW;IACX,mCAAa,CAAA;IACb,YAAY;IACZ,qCAAe,CAAA;IACf,mBAAmB;IACnB,mDAA6B,CAAA;AAC/B,CAAC,EAfW,kBAAkB,KAAlB,kBAAkB,QAe7B;AAiBD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAGX;AAHD,WAAY,oCAAoC;IAC9C,kCAAkC;IAClC,mGAA2D,CAAA;AAC7D,CAAC,EAHW,oCAAoC,KAApC,oCAAoC,QAG/C;AAWD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAOX;AAPD,WAAY,6BAA6B;IACvC,0BAA0B;IAC1B,4EAA2C,CAAA;IAC3C,6BAA6B;IAC7B,kFAAiD,CAAA;IACjD,6BAA6B;IAC7B,kFAAiD,CAAA;AACnD,CAAC,EAPW,6BAA6B,KAA7B,6BAA6B,QAOxC;AAaD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAGX;AAHD,WAAY,6BAA6B;IACvC,2BAA2B;IAC3B,8EAA6C,CAAA;AAC/C,CAAC,EAHW,6BAA6B,KAA7B,6BAA6B,QAGxC;AAWD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAGX;AAHD,WAAY,o
CAAoC;IAC9C,kCAAkC;IAClC,mGAA2D,CAAA;AAC7D,CAAC,EAHW,oCAAoC,KAApC,oCAAoC,QAG/C;AAWD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBASX;AATD,WAAY,mBAAmB;IAC7B,oBAAoB;IACpB,uDAAgC,CAAA;IAChC,kBAAkB;IAClB,mDAA4B,CAAA;IAC5B,aAAa;IACb,wCAAiB,CAAA;IACjB,YAAY;IACZ,sCAAe,CAAA;AACjB,CAAC,EATW,mBAAmB,KAAnB,mBAAmB,QAS9B;AAcD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBASX;AATD,WAAY,wBAAwB;IAClC,gBAAgB;IAChB,mDAAuB,CAAA;IACvB,aAAa;IACb,6CAAiB,CAAA;IACjB,cAAc;IACd,+CAAmB,CAAA;IACnB,gBAAgB;IAChB,mDAAuB,CAAA;AACzB,CAAC,EATW,wBAAwB,KAAxB,wBAAwB,QASnC;AAcD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBASX;AATD,WAAY,iBAAiB;IAC3B,aAAa;IACb,sCAAiB,CAAA;IACjB,WAAW;IACX,kCAAa,CAAA;IACb,cAAc;IACd,wCAAmB,CAAA;IACnB,YAAY;IACZ,oCAAe,CAAA;AACjB,CAAC,EATW,iBAAiB,KAAjB,iBAAiB,QAS5B;AAcD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAyBX;AAzBD,WAAY,0BAA0B;IACpC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,aAAa;IACb,+CAAiB,CAAA;IACjB,eAAe;IACf,mDAAqB,CAAA;IACrB,aAAa;IACb,+CAAiB,CAAA;IACjB,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,uBAAuB;IACvB,mEAAqC,CAAA;IACrC,qBAAqB;IACrB,+DAAiC,CAAA;IACjC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,kBAAkB;IAClB,yDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,yEAA2C,CAAA;IAC3C,iBAAiB;IACjB,uDAAyB,CAAA;IACzB,iBAAiB;IACjB,uDAAyB,CAAA;AAC3B,CAAC,EAzBW,0BAA0B,KAA1B,0BAA0B,QAyBrC;AAsBD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BASX;AATD,WAAY,2BAA2B;IACrC,aAAa;IACb,gDAAiB,CAAA;IACjB,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,SAAS;IACT,wCAAS,CAAA;IACT,YAAY;IACZ,8CAAe,CAAA;AACjB,CAAC,EATW,2BAA2B,KAA3B,2BAA2B,QAStC;AAcD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAmBX;AAnBD,WAAY,yBAAyB;IACnC,eAAe;IACf,kDAAqB,CAAA;IACrB,aAAa;IACb,8CAAiB,CAAA;IACjB,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,aAAa;IACb,8CAAiB,CAAA;IACjB,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,uBAAuB;IACvB,kEAAqC,CAAA;IACrC,qBAAqB;IACrB,8DAAiC,CAAA;IACjC,kBAAkB;IAClB,wDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,wEAA2C,CAAA;AAC7C,CAAC,EAnBW,yBAAyB,KAAzB,yBAAyB,QAmBpC;AAmBD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAKX;AALD,WAAY,kBAAkB;IAC5B,UAAU;IACV,iCAAW,CAAA;IACX,WAAW;IACX,mCAAa,CAAA;AACf,CAAC,EALW,kBAAkB,KAAlB,kBAAkB,QAK7B;AAYD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBAKX;AALD,WAAY,iBAAiB;IAC3B,iBAAiB;IACjB,8CAAyB,CA
AA;IACzB,mBAAmB;IACnB,kDAA6B,CAAA;AAC/B,CAAC,EALW,iBAAiB,KAAjB,iBAAiB,QAK5B;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,aAAa;IACb,iDAAiB,CAAA;IACjB,gBAAgB;IAChB,uDAAuB,CAAA;AACzB,CAAC,EATW,4BAA4B,KAA5B,4BAA4B,QASvC;AAcD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,aAAa;IACb,+CAAiB,CAAA;IACjB,gBAAgB;IAChB,qDAAuB,CAAA;IACvB,YAAY;IACZ,6CAAe,CAAA;AACjB,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,2EAA2E;AAC3E,MAAM,CAAN,IAAY,uBAeX;AAfD,WAAY,uBAAuB;IACjC,kBAAkB;IAClB,sDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,sEAA2C,CAAA;IAC3C,aAAa;IACb,4CAAiB,CAAA;IACjB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,cAAc;IACd,8CAAmB,CAAA;IACnB,YAAY;IACZ,0CAAe,CAAA;AACjB,CAAC,EAfW,uBAAuB,KAAvB,uBAAuB,QAelC;AAiBD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBASX;AATD,WAAY,qBAAqB;IAC/B,aAAa;IACb,0CAAiB,CAAA;IACjB,WAAW;IACX,sCAAa,CAAA;IACb,cAAc;IACd,4CAAmB,CAAA;IACnB,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EATW,qBAAqB,KAArB,qBAAqB,QAShC;AAcD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eAuBX;AAvBD,WAAY,eAAe;IACzB,iBAAiB;IACjB,6CAA0B,CAAA;IAC1B,eAAe;IACf,wCAAqB,CAAA;IACrB,WAAW;IACX,gCAAa,CAAA;IACb,WAAW;IACX,gCAAa,CAAA;IACb,mBAAmB;IACnB,iDAA8B,CAAA;IAC9B,YAAY;IACZ,kCAAe,CAAA;IACf,WAAW;IACX,gCAAa,CAAA;IACb,aAAa;IACb,oCAAiB,CAAA;IACjB,cAAc;IACd,sCAAmB,CAAA;IACnB,cAAc;IACd,sCAAmB,CAAA;IACnB,iBAAiB;IACjB,4CAAyB,CAAA;AAC3B,CAAC,EAvBW,eAAe,KAAf,eAAe,QAuB1B;AAqBD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eASX;AATD,WAAY,eAAe;IACzB,cAAc;IACd,sCAAmB,CAAA;IACnB,yBAAyB;IACzB,4DAAyC,CAAA;IACzC,eAAe;IACf,wCAAqB,CAAA;IACrB,cAAc;IACd,sCAAmB,CAAA;AACrB,CAAC,EATW,eAAe,KAAf,eAAe,QAS1B;AAcD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAGX;AAHD,WAAY,kBAAkB;IAC5B,eAAe;IACf,2CAAqB,CAAA;AACvB,CAAC,EAHW,kBAAkB,KAAlB,kBAAkB,QAG7B;AAWD,0EAA0E;AAC1E,MAAM,CAAN,IAAY,sBAKX;AALD,WAAY,sBAAsB;IAChC,kBAAkB;IAClB,qDAA2B,CAAA;IAC3B,cAAc;IACd,6CAAmB,CAAA;AACrB,CAAC,EALW,sBAAsB,KAAtB,sBAAsB,QAKjC;AAYD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAOX;AAPD,WAAY,wBAAwB;IAClC,cAAc;IACd,+CAAmB,CAAA;IACnB,cAAc;IACd,+CAAmB,CAAA;
IACnB,eAAe;IACf,iDAAqB,CAAA;AACvB,CAAC,EAPW,wBAAwB,KAAxB,wBAAwB,QAOnC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAWX;AAXD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,mBAAmB;IACnB,6DAA6B,CAAA;IAC7B,qBAAqB;IACrB,iEAAiC,CAAA;IACjC,eAAe;IACf,qDAAqB,CAAA;IACrB,cAAc;IACd,mDAAmB,CAAA;AACrB,CAAC,EAXW,4BAA4B,KAA5B,4BAA4B,QAWvC;AAeD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBAOX;AAPD,WAAY,qBAAqB;IAC/B,gBAAgB;IAChB,gDAAuB,CAAA;IACvB,aAAa;IACb,0CAAiB,CAAA;IACjB,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EAPW,qBAAqB,KAArB,qBAAqB,QAOhC;AAaD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAGX;AAHD,WAAY,mBAAmB;IAC7B,iBAAiB;IACjB,gDAAyB,CAAA;AAC3B,CAAC,EAHW,mBAAmB,KAAnB,mBAAmB,QAG9B;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,kCAAkC;IAClC,yFAA2D,CAAA;AAC7D,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,YAAY;IACZ,gDAAe,CAAA;IACf,cAAc;IACd,oDAAmB,CAAA;AACrB,CAAC,EALW,6BAA6B,KAA7B,6BAA6B,QAKxC;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;AACjB,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,cAAc;IACd,sDAAmB,CAAA;AACrB,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAWX;AAXD,WAAY,4BAA4B;IACtC,YAAY;IACZ,+CAAe,CAAA;IACf,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,cAAc;IACd,mDAAmB,CAAA;IACnB,0BAA0B;IAC1B,2EAA2C
,CAAA;IAC3C,6BAA6B;IAC7B,iFAAiD,CAAA;AACnD,CAAC,EAXW,4BAA4B,KAA5B,4BAA4B,QAWvC;AAeD,+FAA+F;AAC/F,MAAM,CAAN,IAAY,2CAKX;AALD,WAAY,2CAA2C;IACrD,0BAA0B;IAC1B,0FAA2C,CAAA;IAC3C,2BAA2B;IAC3B,4FAA6C,CAAA;AAC/C,CAAC,EALW,2CAA2C,KAA3C,2CAA2C,QAKtD;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;IACf,gBAAgB;IAChB,qDAAuB,CAAA;IACvB,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,gBAAgB;IAChB,yDAAuB,CAAA;AACzB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,sFAAsF;AACtF,MAAM,CAAN,IAAY,kCAWX;AAXD,WAAY,kCAAkC;IAC5C,gBAAgB;IAChB,6DAAuB,CAAA;IACvB,YAAY;IACZ,qDAAe,CAAA;IACf,0BAA0B;IAC1B,iFAA2C,CAAA;IAC3C,6BAA6B;IAC7B,uFAAiD,CAAA;IACjD,6BAA6B;IAC7B,uFAAiD,CAAA;AACnD,CAAC,EAXW,kCAAkC,KAAlC,kCAAkC,QAW7C;AAeD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,YAAY;IACZ,kDAAe,CAAA;AACjB,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,YAAY;IACZ,iDAAe,CAAA;AACjB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAWX;AAXD,WAAY,2BAA2B;IACrC,YAAY;IACZ,8CAAe,CAAA;IACf,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,aAAa;IACb,gDAAiB,CAAA;IACjB,cAAc;IACd,kDAAmB,CAAA;IACnB,wBAAwB;IACxB,sEAAuC,CAAA;AACzC,CAAC,EAXW,2BAA2B,KAA3B,2BAA2B,QAWtC;AAeD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAKX;AALD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;IACf,gBAAgB;IAChB,qDAAuB,CAAA;AACzB,CAAC,EALW,0BAA0B,KAA1B,0BAA0B,QAKrC;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,YAAY;IACZ,8CAAe,CAAA;IACf,mBAAmB;IACnB,4DAA6B,CAAA;AAC/B,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,cAAc;IACd,qDAAmB,CAAA;AACrB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAKX;AALD,WAAY,qCAAqC;IAC/C,4BAA4B;IAC5B,wFAA+C,CAAA;IAC/C,yBAAyB;IACzB,kFAAyC,CAAA;AAC3C,CAAC,EALW,qCAAqC,KAArC,qCAAqC,QAKhD;AAYD,g
FAAgF;AAChF,MAAM,CAAN,IAAY,4BAKX;AALD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,YAAY;IACZ,+CAAe,CAAA;AACjB,CAAC,EALW,4BAA4B,KAA5B,4BAA4B,QAKvC;AAYD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,kBAAkB;IAClB,kDAA2B,CAAA;IAC3B,kBAAkB;IAClB,kDAA2B,CAAA;IAC3B,uBAAuB;IACvB,4DAAqC,CAAA;AACvC,CAAC,EAPW,mBAAmB,KAAnB,mBAAmB,QAO9B;AAaD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAOX;AAPD,WAAY,gCAAgC;IAC1C,aAAa;IACb,qDAAiB,CAAA;IACjB,WAAW;IACX,iDAAa,CAAA;IACb,WAAW;IACX,kDAAc,CAAA;AAChB,CAAC,EAPW,gCAAgC,KAAhC,gCAAgC,QAO3C;AAaD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BASX;AATD,WAAY,2BAA2B;IACrC,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,eAAe;IACf,oDAAqB,CAAA;IACrB,0BAA0B;IAC1B,0EAA2C,CAAA;IAC3C,mCAAmC;IACnC,4FAA6D,CAAA;AAC/D,CAAC,EATW,2BAA2B,KAA3B,2BAA2B,QAStC;AAcD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAOX;AAPD,WAAY,6BAA6B;IACvC,gBAAgB;IAChB,wDAAuB,CAAA;IACvB,mBAAmB;IACnB,8DAA6B,CAAA;IAC7B,0BAA0B;IAC1B,4EAA2C,CAAA;AAC7C,CAAC,EAPW,6BAA6B,KAA7B,6BAA6B,QAOxC;AAaD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAOX;AAPD,WAAY,8BAA8B;IACxC,gBAAgB;IAChB,yDAAuB,CAAA;IACvB,0BAA0B;IAC1B,6EAA2C,CAAA;IAC3C,mCAAmC;IACnC,+FAA6D,CAAA;AAC/D,CAAC,EAPW,8BAA8B,KAA9B,8BAA8B,QAOzC;AAaD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,gBAAgB;IAChB,wDAAuB,CAAA;IACvB,WAAW;IACX,8CAAa,CAAA;AACf,CAAC,EALW,6BAA6B,KAA7B,6BAA6B,QAKxC;AAYD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,YAAY;IACZ,oDAAe,CAAA;IACf,aAAa;IACb,sDAAiB,CAAA;AACnB,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,wEAAwE;AACxE,MAAM,CAAN,IAAY,oBAOX;AAPD,WAAY,oBAAoB;IAC9B,kBAAkB;IAClB,mDAA2B,CAAA;IAC3B,mBAAmB;IACnB,qDAA6B,CAAA;IAC7B,wBAAwB;IACxB,+DAAuC,CAAA;AACzC,CAAC,EAPW,oBAAoB,KAApB,oBAAoB,QAO/B;AAaD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAOX;AAPD,WAAY,iCAAiC;IAC3C,aAAa;IACb,sDAAiB,CAAA;IACjB,WAAW;IACX,kDAAa,CAAA;IACb,WAAW;IACX,mDAAc,CAAA;AAChB,CAAC,EAPW,iCAAiC,KAAjC,iCAAiC,QAO5C;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,eAAe;IACf,qDAAqB,CAAA;IACrB,0BAA0B;IAC1B,2EAA2C,CAAA;IAC3C,mCAAmC;IACnC,6FAA6D,CAAA;AAC/D,CAAC,EATW,4BAA4B,KA
A5B,4BAA4B,QASvC;AAcD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAKX;AALD,WAAY,oCAAoC;IAC9C,4BAA4B;IAC5B,uFAA+C,CAAA;IAC/C,yBAAyB;IACzB,iFAAyC,CAAA;AAC3C,CAAC,EALW,oCAAoC,KAApC,oCAAoC,QAK/C;AAYD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,YAAY;IACZ,oDAAe,CAAA;IACf,eAAe;IACf,0DAAqB,CAAA;AACvB,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAOX;AAPD,WAAY,2BAA2B;IACrC,WAAW;IACX,4CAAa,CAAA;IACb,gCAAgC;IAChC,sFAAuD,CAAA;IACvD,0BAA0B;IAC1B,0EAA2C,CAAA;AAC7C,CAAC,EAPW,2BAA2B,KAA3B,2BAA2B,QAOtC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAaX;AAbD,WAAY,4BAA4B;IACtC,WAAW;IACX,6CAAa,CAAA;IACb,qBAAqB;IACrB,iEAAiC,CAAA;IACjC,8BAA8B;IAC9B,mFAAmD,CAAA;IACnD,+BAA+B;IAC/B,qFAAqD,CAAA;IACrD,8BAA8B;IAC9B,mFAAmD,CAAA;IACnD,sBAAsB;IACtB,mEAAmC,CAAA;AACrC,CAAC,EAbW,4BAA4B,KAA5B,4BAA4B,QAavC;AAgBD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAeX;AAfD,WAAY,iCAAiC;IAC3C,aAAa;IACb,sDAAiB,CAAA;IACjB,UAAU;IACV,gDAAW,CAAA;IACX,YAAY;IACZ,oDAAe,CAAA;IACf,cAAc;IACd,wDAAmB,CAAA;IACnB,WAAW;IACX,kDAAa,CAAA;IACb,cAAc;IACd,wDAAmB,CAAA;IACnB,WAAW;IACX,kDAAa,CAAA;AACf,CAAC,EAfW,iCAAiC,KAAjC,iCAAiC,QAe5C;AAiBD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,WAAW;IACX,2CAAa,CAAA;IACb,gCAAgC;IAChC,qFAAuD,CAAA;IACvD,mBAAmB;IACnB,2DAA6B,CAAA;AAC/B,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAOX;AAPD,WAAY,4BAA4B;IACtC,WAAW;IACX,6CAAa,CAAA;IACb,WAAW;IACX,6CAAa,CAAA;IACb,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EAPW,4BAA4B,KAA5B,4BAA4B,QAOvC;AAaD,6FAA6F;AAC7F,MAAM,CAAN,IAAY,yCAqBX;AArBD,WAAY,yCAAyC;IACnD,UAAU;IACV,wDAAW,CAAA;IACX,iBAAiB;IACjB,uEAA0B,CAAA;IAC1B,aAAa;IACb,8DAAiB,CAAA;IACjB,kBAAkB;IAClB,yEAA4B,CAAA;IAC5B,UAAU;IACV,wDAAW,CAAA;IACX,UAAU;IACV,wDAAW,CAAA;IACX,YAAY;IACZ,4DAAe,CAAA;IACf,eAAe;IACf,mEAAsB,CAAA;IACtB,aAAa;IACb,8DAAiB,CAAA;IACjB,kBAAkB;IAClB,yEAA4B,CAAA;AAC9B,CAAC,EArBW,yCAAyC,KAAzC,yCAAyC,QAqBpD;AAoBD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAOX;AAPD,WAAY,2BAA2B;IACrC,WAAW;IACX,4CAAa,CAAA;IACb,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,mBAAmB;IACnB,4DAA6B,CAAA;AAC/B,CAAC,EAPW,2BAA2B,
KAA3B,2BAA2B,QAOtC;AAaD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BASX;AATD,WAAY,0BAA0B;IACpC,aAAa;IACb,+CAAiB,CAAA;IACjB,UAAU;IACV,yCAAW,CAAA;IACX,YAAY;IACZ,6CAAe,CAAA;IACf,WAAW;IACX,2CAAa,CAAA;AACf,CAAC,EATW,0BAA0B,KAA1B,0BAA0B,QASrC;AAcD,6FAA6F;AAC7F,MAAM,CAAN,IAAY,yCAKX;AALD,WAAY,yCAAyC;IACnD,aAAa;IACb,8DAAiB,CAAA;IACjB,aAAa;IACb,8DAAiB,CAAA;AACnB,CAAC,EALW,yCAAyC,KAAzC,yCAAyC,QAKpD;AAYD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,iBAAiB;IACjB,4DAAyB,CAAA;AAC3B,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,0FAA0F;AAC1F,MAAM,CAAN,IAAY,sCAKX;AALD,WAAY,sCAAsC;IAChD,YAAY;IACZ,yDAAe,CAAA;IACf,aAAa;IACb,2DAAiB,CAAA;AACnB,CAAC,EALW,sCAAsC,KAAtC,sCAAsC,QAKjD;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAGX;AAHD,WAAY,8BAA8B;IACxC,aAAa;IACb,mDAAiB,CAAA;AACnB,CAAC,EAHW,8BAA8B,KAA9B,8BAA8B,QAGzC;AAWD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAKX;AALD,WAAY,gCAAgC;IAC1C,aAAa;IACb,qDAAiB,CAAA;IACjB,aAAa;IACb,qDAAiB,CAAA;AACnB,CAAC,EALW,gCAAgC,KAAhC,gCAAgC,QAK3C;AAYD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAOX;AAPD,WAAY,qCAAqC;IAC/C,WAAW;IACX,sDAAa,CAAA;IACb,aAAa;IACb,0DAAiB,CAAA;IACjB,cAAc;IACd,4DAAmB,CAAA;AACrB,CAAC,EAPW,qCAAqC,KAArC,qCAAqC,QAOhD;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,aAAa;IACb,iDAAiB,CAAA;IACjB,WAAW;IACX,6CAAa,CAAA;IACb,oBAAoB;IACpB,+DAA+B,CAAA;IAC/B,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EATW,4BAA4B,KAA5B,4BAA4B,QASvC;AAcD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAGX;AAHD,WAAY,wBAAwB;IAClC,WAAW;IACX,yCAAa,CAAA;AACf,CAAC,EAHW,wBAAwB,KAAxB,wBAAwB,QAGnC;AAWD,0EAA0E;AAC1E,MAAM,CAAN,IAAY,sBASX;AATD,WAAY,sBAAsB;IAChC,UAAU;IACV,qCAAW,CAAA;IACX,WAAW;IACX,uCAAa,CAAA;IACb,UAAU;IACV,qCAAW,CAAA;IACX,aAAa;IACb,2CAAiB,CAAA;AACnB,CAAC,EATW,sBAAsB,KAAtB,sBAAsB,QASjC;AAcD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAeX;AAfD,WAAY,gCAAgC;IAC1C,UAAU;IACV,+CAAW,CAAA;IACX,WAAW;IACX,iDAAa,CAAA;IACb,UAAU;IACV,+CAAW,CAAA;IACX,aAAa;IACb,qDAAiB,CAAA;IACjB,cAAc;IACd,uDAAmB,CAAA;IACnB,WAAW;IACX,iDAAa,CAAA;IACb,YAAY;IACZ,mDAAe,CAAA;AACjB,CAAC,EAfW,gCAAgC,KAAhC,gCAAgC,QAe3C;AAiBD,8EAA8E;AAC9E,MAAM,CAAN,
IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,WAAW;IACX,2CAAa,CAAA;AACf,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAOX;AAPD,WAAY,wBAAwB;IAClC,cAAc;IACd,+CAAmB,CAAA;IACnB,sBAAsB;IACtB,+DAAmC,CAAA;IACnC,uBAAuB;IACvB,iEAAqC,CAAA;AACvC,CAAC,EAPW,wBAAwB,KAAxB,wBAAwB,QAOnC;AAaD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eAKX;AALD,WAAY,eAAe;IACzB,YAAY;IACZ,kCAAe,CAAA;IACf,eAAe;IACf,wCAAqB,CAAA;AACvB,CAAC,EALW,eAAe,KAAf,eAAe,QAK1B;AAYD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAyBX;AAzBD,WAAY,gCAAgC;IAC1C,cAAc;IACd,uDAAmB,CAAA;IACnB,eAAe;IACf,yDAAqB,CAAA;IACrB,qBAAqB;IACrB,qEAAiC,CAAA;IACjC,cAAc;IACd,uDAAmB,CAAA;IACnB,aAAa;IACb,qDAAiB,CAAA;IACjB,WAAW;IACX,iDAAa,CAAA;IACb,YAAY;IACZ,mDAAe,CAAA;IACf,YAAY;IACZ,mDAAe,CAAA;IACf,YAAY;IACZ,mDAAe,CAAA;IACf,aAAa;IACb,qDAAiB,CAAA;IACjB,aAAa;IACb,qDAAiB,CAAA;IACjB,eAAe;IACf,yDAAqB,CAAA;AACvB,CAAC,EAzBW,gCAAgC,KAAhC,gCAAgC,QAyB3C;AAsBD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAOX;AAPD,WAAY,qCAAqC;IAC/C,YAAY;IACZ,wDAAe,CAAA;IACf,aAAa;IACb,0DAAiB,CAAA;IACjB,kBAAkB;IAClB,oEAA2B,CAAA;AAC7B,CAAC,EAPW,qCAAqC,KAArC,qCAAqC,QAOhD;AAaD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,qBAAqB;IACrB,sEAAiC,CAAA;IACjC,oBAAoB;IACpB,oEAA+B,CAAA;AACjC,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAeX;AAfD,WAAY,wBAAwB;IAClC,mBAAmB;IACnB,yDAA6B,CAAA;IAC7B,aAAa;IACb,6CAAiB,CAAA;IACjB,WAAW;IACX,yCAAa,CAAA;IACb,UAAU;IACV,uCAAW,CAAA;IACX,WAAW;IACX,yCAAa,CAAA;IACb,YAAY;IACZ,2CAAe,CAAA;IACf,WAAW;IACX,yCAAa,CAAA;AACf,CAAC,EAfW,wBAAwB,KAAxB,wBAAwB,QAenC;AAiBD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAKX;AALD,WAAY,kBAAkB;IAC5B,kCAAkC;IAClC,mFAA6D,CAAA;IAC7D,kCAAkC;IAClC,mFAA6D,CAAA;AAC/D,CAAC,EALW,kBAAkB,KAAlB,kBAAkB,QAK7B;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAOX;AAPD,WAAY,4BAA4B;IACtC,aAAa;IACb,iDAAiB,CAAA;IACjB,WAAW;IACX,6CAAa,CAAA;IACb,YAAY;IACZ,+CAAe,CAAA;AACjB,CAAC,EAPW,4BAA4B,KAA5B,4BAA4B,QAOvC;AAaD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAqBX
;AArBD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,eAAe;IACf,qDAAqB,CAAA;IACrB,eAAe;IACf,qDAAqB,CAAA;IACrB,uBAAuB;IACvB,qEAAqC,CAAA;IACrC,aAAa;IACb,iDAAiB,CAAA;IACjB,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EArBW,4BAA4B,KAA5B,4BAA4B,QAqBvC;AAoBD,iGAAiG;AACjG,MAAM,CAAN,IAAY,6CASX;AATD,WAAY,6CAA6C;IACvD,YAAY;IACZ,gEAAe,CAAA;IACf,eAAe;IACf,sEAAqB,CAAA;IACrB,cAAc;IACd,oEAAmB,CAAA;IACnB,gBAAgB;IAChB,wEAAuB,CAAA;AACzB,CAAC,EATW,6CAA6C,KAA7C,6CAA6C,QASxD;AAcD,sFAAsF;AACtF,MAAM,CAAN,IAAY,kCAKX;AALD,WAAY,kCAAkC;IAC5C,gBAAgB;IAChB,6DAAuB,CAAA;IACvB,sBAAsB;IACtB,yEAAmC,CAAA;AACrC,CAAC,EALW,kCAAkC,KAAlC,kCAAkC,QAK7C;AAYD,8FAA8F;AAC9F,MAAM,CAAN,IAAY,0CAKX;AALD,WAAY,0CAA0C;IACpD,kCAAkC;IAClC,yGAA2D,CAAA;IAC3D,6BAA6B;IAC7B,+FAAiD,CAAA;AACnD,CAAC,EALW,0CAA0C,KAA1C,0CAA0C,QAKrD;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,eAAe;IACf,uDAAqB,CAAA;IACrB,iBAAiB;IACjB,2DAAyB,CAAA;AAC3B,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAKX;AALD,WAAY,0BAA0B;IACpC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,qBAAqB;IACrB,+DAAiC,CAAA;AACnC,CAAC,EALW,0BAA0B,KAA1B,0BAA0B,QAKrC;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAKX;AALD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;AACrB,CAAC,EALW,4BAA4B,KAA5B,4BAA4B,QAKvC;AAYD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAWX;AAXD,WAAY,yBAAyB;IACnC,WAAW;IACX,0CAAa,CAAA;IACb,cAAc;IACd,gDAAmB,CAAA;IACnB,aAAa;IACb,8CAAiB,CAAA;IACjB,SAAS;IACT,sCAAS,CAAA;IACT,YAAY;IACZ,4CAAe,CAAA;AACjB,CAAC,EAXW,yBAAyB,KAAzB,yBAAyB,QAWpC;AAeD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCASX;AATD,WAAY,gCAAgC;IAC1C,WAAW;IACX,iDAAa,CAAA;IACb,WAAW;IACX,iDAAa,CAAA;IACb,aAAa;IACb,qDAAiB,CAAA;IACjB,UAAU;IACV,+CAAW,CAAA;AACb,CAAC,EATW,gCAAgC,KAAhC,gCAAgC,QAS3C;AAcD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBAqBX;AArBD,WAAY,qBAAqB;IAC/B,WAAW;IACX,sCAAa,CAAA;IACb,UAAU;IACV,oCAAW,CAAA;IACX,YAAY;IACZ,wCAAe,CAAA;IACf,WAAW;IACX,sCAAa,CAAA;IACb,cAAc;IACd,4CAAmB,CAAA;IACnB,iBAAiB;IACjB,kD
AAyB,CAAA;IACzB,aAAa;IACb,0CAAiB,CAAA;IACjB,UAAU;IACV,oCAAW,CAAA;IACX,UAAU;IACV,oCAAW,CAAA;IACX,cAAc;IACd,4CAAmB,CAAA;AACrB,CAAC,EArBW,qBAAqB,KAArB,qBAAqB,QAqBhC;AAoBD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBASX;AATD,WAAY,wBAAwB;IAClC,WAAW;IACX,yCAAa,CAAA;IACb,WAAW;IACX,yCAAa,CAAA;IACb,aAAa;IACb,6CAAiB,CAAA;IACjB,UAAU;IACV,uCAAW,CAAA;AACb,CAAC,EATW,wBAAwB,KAAxB,wBAAwB,QASnC;AAcD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,aAAa;IACb,gDAAiB,CAAA;IACjB,wBAAwB;IACxB,sEAAuC,CAAA;AACzC,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAOX;AAPD,WAAY,+BAA+B;IACzC,gBAAgB;IAChB,0DAAuB,CAAA;IACvB,UAAU;IACV,8CAAW,CAAA;IACX,0BAA0B;IAC1B,8EAA2C,CAAA;AAC7C,CAAC,EAPW,+BAA+B,KAA/B,+BAA+B,QAO1C;AAaD,+FAA+F;AAC/F,MAAM,CAAN,IAAY,2CAKX;AALD,WAAY,2CAA2C;IACrD,0BAA0B;IAC1B,0FAA2C,CAAA;IAC3C,2BAA2B;IAC3B,4FAA6C,CAAA;AAC/C,CAAC,EALW,2CAA2C,KAA3C,2CAA2C,QAKtD;AAYD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBAOX;AAPD,WAAY,iBAAiB;IAC3B,eAAe;IACf,0CAAqB,CAAA;IACrB,iBAAiB;IACjB,8CAAyB,CAAA;IACzB,gBAAgB;IAChB,4CAAuB,CAAA;AACzB,CAAC,EAPW,iBAAiB,KAAjB,iBAAiB,QAO5B;AAaD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAKX;AALD,WAAY,yBAAyB;IACnC,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,qBAAqB;IACrB,8DAAiC,CAAA;AACnC,CAAC,EALW,yBAAyB,KAAzB,yBAAyB,QAKpC;AAYD,0FAA0F;AAC1F,MAAM,CAAN,IAAY,sCAOX;AAPD,WAAY,sCAAsC;IAChD,WAAW;IACX,uDAAa,CAAA;IACb,gCAAgC;IAChC,iGAAuD,CAAA;IACvD,mBAAmB;IACnB,uEAA6B,CAAA;AAC/B,CAAC,EAPW,sCAAsC,KAAtC,sCAAsC,QAOjD;AAaD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBAOX;AAPD,WAAY,qBAAqB;IAC/B,wBAAwB;IACxB,gEAAuC,CAAA;IACvC,uBAAuB;IACvB,8DAAqC,CAAA;IACrC,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EAPW,qBAAqB,KAArB,qBAAqB,QAOhC;AAaD,2EAA2E;AAC3E,MAAM,CAAN,IAAY,uBAOX;AAPD,WAAY,uBAAuB;IACjC,WAAW;IACX,wCAAa,CAAA;IACb,gCAAgC;IAChC,kFAAuD,CAAA;IACvD,mBAAmB;IACnB,wDAA6B,CAAA;AAC/B,CAAC,EAPW,uBAAuB,KAAvB,uBAAuB,QAOlC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be 
lost if the code is regenerated.\n */\n\nimport * as coreClient from \"@azure/core-client\";\n\nexport type SecretBaseUnion =\n | SecretBase\n | SecureString\n | AzureKeyVaultSecretReference;\nexport type DataFlowUnion = DataFlow | MappingDataFlow | Flowlet;\nexport type IntegrationRuntimeUnion =\n | IntegrationRuntime\n | ManagedIntegrationRuntime\n | SelfHostedIntegrationRuntime;\nexport type DatasetUnion =\n | Dataset\n | AmazonS3Dataset\n | AvroDataset\n | ExcelDataset\n | ParquetDataset\n | DelimitedTextDataset\n | JsonDataset\n | XmlDataset\n | OrcDataset\n | BinaryDataset\n | AzureBlobDataset\n | AzureTableDataset\n | AzureSqlTableDataset\n | AzureSqlMITableDataset\n | AzureSqlDWTableDataset\n | CassandraTableDataset\n | CustomDataset\n | CosmosDbSqlApiCollectionDataset\n | DocumentDbCollectionDataset\n | DynamicsEntityDataset\n | DynamicsCrmEntityDataset\n | CommonDataServiceForAppsEntityDataset\n | AzureDataLakeStoreDataset\n | AzureBlobFSDataset\n | Office365Dataset\n | FileShareDataset\n | MongoDbCollectionDataset\n | MongoDbAtlasCollectionDataset\n | MongoDbV2CollectionDataset\n | CosmosDbMongoDbApiCollectionDataset\n | ODataResourceDataset\n | OracleTableDataset\n | AmazonRdsForOracleTableDataset\n | TeradataTableDataset\n | AzureMySqlTableDataset\n | AmazonRedshiftTableDataset\n | Db2TableDataset\n | RelationalTableDataset\n | InformixTableDataset\n | OdbcTableDataset\n | MySqlTableDataset\n | PostgreSqlTableDataset\n | MicrosoftAccessTableDataset\n | SalesforceObjectDataset\n | SalesforceServiceCloudObjectDataset\n | SybaseTableDataset\n | SapBwCubeDataset\n | SapCloudForCustomerResourceDataset\n | SapEccResourceDataset\n | SapHanaTableDataset\n | SapOpenHubTableDataset\n | SqlServerTableDataset\n | AmazonRdsForSqlServerTableDataset\n | RestResourceDataset\n | SapTableResourceDataset\n | SapOdpResourceDataset\n | WebTableDataset\n | AzureSearchIndexDataset\n | HttpDataset\n | AmazonMWSObjectDataset\n | AzurePostgreSqlTableDataset\n | 
ConcurObjectDataset\n | CouchbaseTableDataset\n | DrillTableDataset\n | EloquaObjectDataset\n | GoogleBigQueryObjectDataset\n | GreenplumTableDataset\n | HBaseObjectDataset\n | HiveObjectDataset\n | HubspotObjectDataset\n | ImpalaObjectDataset\n | JiraObjectDataset\n | MagentoObjectDataset\n | MariaDBTableDataset\n | AzureMariaDBTableDataset\n | MarketoObjectDataset\n | PaypalObjectDataset\n | PhoenixObjectDataset\n | PrestoObjectDataset\n | QuickBooksObjectDataset\n | ServiceNowObjectDataset\n | ShopifyObjectDataset\n | SparkObjectDataset\n | SquareObjectDataset\n | XeroObjectDataset\n | ZohoObjectDataset\n | NetezzaTableDataset\n | VerticaTableDataset\n | SalesforceMarketingCloudObjectDataset\n | ResponsysObjectDataset\n | DynamicsAXResourceDataset\n | OracleServiceCloudObjectDataset\n | AzureDataExplorerTableDataset\n | GoogleAdWordsObjectDataset\n | SnowflakeDataset\n | SharePointOnlineListResourceDataset\n | AzureDatabricksDeltaLakeDataset;\nexport type LinkedServiceUnion =\n | LinkedService\n | AzureStorageLinkedService\n | AzureBlobStorageLinkedService\n | AzureTableStorageLinkedService\n | AzureSqlDWLinkedService\n | SqlServerLinkedService\n | AmazonRdsForSqlServerLinkedService\n | AzureSqlDatabaseLinkedService\n | AzureSqlMILinkedService\n | AzureBatchLinkedService\n | AzureKeyVaultLinkedService\n | CosmosDbLinkedService\n | DynamicsLinkedService\n | DynamicsCrmLinkedService\n | CommonDataServiceForAppsLinkedService\n | HDInsightLinkedService\n | FileServerLinkedService\n | AzureFileStorageLinkedService\n | GoogleCloudStorageLinkedService\n | OracleLinkedService\n | AmazonRdsForOracleLinkedService\n | AzureMySqlLinkedService\n | MySqlLinkedService\n | PostgreSqlLinkedService\n | SybaseLinkedService\n | Db2LinkedService\n | TeradataLinkedService\n | AzureMLLinkedService\n | AzureMLServiceLinkedService\n | OdbcLinkedService\n | InformixLinkedService\n | MicrosoftAccessLinkedService\n | HdfsLinkedService\n | ODataLinkedService\n | WebLinkedService\n | 
CassandraLinkedService\n | MongoDbLinkedService\n | MongoDbAtlasLinkedService\n | MongoDbV2LinkedService\n | CosmosDbMongoDbApiLinkedService\n | AzureDataLakeStoreLinkedService\n | AzureBlobFSLinkedService\n | Office365LinkedService\n | SalesforceLinkedService\n | SalesforceServiceCloudLinkedService\n | SapCloudForCustomerLinkedService\n | SapEccLinkedService\n | SapOpenHubLinkedService\n | SapOdpLinkedService\n | RestServiceLinkedService\n | TeamDeskLinkedService\n | QuickbaseLinkedService\n | SmartsheetLinkedService\n | ZendeskLinkedService\n | DataworldLinkedService\n | AppFiguresLinkedService\n | AsanaLinkedService\n | TwilioLinkedService\n | GoogleSheetsLinkedService\n | AmazonS3LinkedService\n | AmazonRedshiftLinkedService\n | CustomDataSourceLinkedService\n | AzureSearchLinkedService\n | HttpLinkedService\n | FtpServerLinkedService\n | SftpServerLinkedService\n | SapBWLinkedService\n | SapHanaLinkedService\n | AmazonMWSLinkedService\n | AzurePostgreSqlLinkedService\n | ConcurLinkedService\n | CouchbaseLinkedService\n | DrillLinkedService\n | EloquaLinkedService\n | GoogleBigQueryLinkedService\n | GreenplumLinkedService\n | HBaseLinkedService\n | HiveLinkedService\n | HubspotLinkedService\n | ImpalaLinkedService\n | JiraLinkedService\n | MagentoLinkedService\n | MariaDBLinkedService\n | AzureMariaDBLinkedService\n | MarketoLinkedService\n | PaypalLinkedService\n | PhoenixLinkedService\n | PrestoLinkedService\n | QuickBooksLinkedService\n | ServiceNowLinkedService\n | ShopifyLinkedService\n | SparkLinkedService\n | SquareLinkedService\n | XeroLinkedService\n | ZohoLinkedService\n | VerticaLinkedService\n | NetezzaLinkedService\n | SalesforceMarketingCloudLinkedService\n | HDInsightOnDemandLinkedService\n | AzureDataLakeAnalyticsLinkedService\n | AzureDatabricksLinkedService\n | AzureDatabricksDeltaLakeLinkedService\n | ResponsysLinkedService\n | DynamicsAXLinkedService\n | OracleServiceCloudLinkedService\n | GoogleAdWordsLinkedService\n | 
SapTableLinkedService\n | AzureDataExplorerLinkedService\n | AzureFunctionLinkedService\n | SnowflakeLinkedService\n | SharePointOnlineListLinkedService\n | AzureSynapseArtifactsLinkedService;\nexport type ActivityUnion =\n | Activity\n | ControlActivityUnion\n | ExecutionActivityUnion\n | SqlPoolStoredProcedureActivity;\nexport type TriggerUnion =\n | Trigger\n | RerunTumblingWindowTrigger\n | MultiplePipelineTriggerUnion\n | TumblingWindowTrigger\n | ChainingTrigger;\nexport type DatasetLocationUnion =\n | DatasetLocation\n | AzureBlobStorageLocation\n | AzureBlobFSLocation\n | AzureDataLakeStoreLocation\n | AmazonS3Location\n | FileServerLocation\n | AzureFileStorageLocation\n | GoogleCloudStorageLocation\n | FtpServerLocation\n | SftpLocation\n | HttpServerLocation\n | HdfsLocation;\nexport type DatasetStorageFormatUnion =\n | DatasetStorageFormat\n | TextFormat\n | JsonFormat\n | AvroFormat\n | OrcFormat\n | ParquetFormat;\nexport type WebLinkedServiceTypePropertiesUnion =\n | WebLinkedServiceTypeProperties\n | WebAnonymousAuthentication\n | WebBasicAuthentication\n | WebClientCertificateAuthentication;\nexport type StoreReadSettingsUnion =\n | StoreReadSettings\n | AzureBlobStorageReadSettings\n | AzureBlobFSReadSettings\n | AzureDataLakeStoreReadSettings\n | AmazonS3ReadSettings\n | FileServerReadSettings\n | AzureFileStorageReadSettings\n | GoogleCloudStorageReadSettings\n | FtpReadSettings\n | SftpReadSettings\n | HttpReadSettings\n | HdfsReadSettings;\nexport type StoreWriteSettingsUnion =\n | StoreWriteSettings\n | SftpWriteSettings\n | AzureBlobStorageWriteSettings\n | AzureBlobFSWriteSettings\n | AzureDataLakeStoreWriteSettings\n | FileServerWriteSettings\n | AzureFileStorageWriteSettings;\nexport type FormatReadSettingsUnion =\n | FormatReadSettings\n | DelimitedTextReadSettings\n | JsonReadSettings\n | XmlReadSettings\n | BinaryReadSettings;\nexport type CompressionReadSettingsUnion =\n | CompressionReadSettings\n | ZipDeflateReadSettings\n | 
TarReadSettings\n | TarGZipReadSettings;\nexport type FormatWriteSettingsUnion =\n | FormatWriteSettings\n | AvroWriteSettings\n | OrcWriteSettings\n | ParquetWriteSettings\n | DelimitedTextWriteSettings\n | JsonWriteSettings;\nexport type CopySourceUnion =\n | CopySource\n | AvroSource\n | ExcelSource\n | ParquetSource\n | DelimitedTextSource\n | JsonSource\n | XmlSource\n | OrcSource\n | BinarySource\n | TabularSourceUnion\n | BlobSource\n | DocumentDbCollectionSource\n | CosmosDbSqlApiSource\n | DynamicsSource\n | DynamicsCrmSource\n | CommonDataServiceForAppsSource\n | RelationalSource\n | MicrosoftAccessSource\n | ODataSource\n | SalesforceServiceCloudSource\n | RestSource\n | FileSystemSource\n | HdfsSource\n | AzureDataExplorerSource\n | OracleSource\n | AmazonRdsForOracleSource\n | WebSource\n | MongoDbSource\n | MongoDbAtlasSource\n | MongoDbV2Source\n | CosmosDbMongoDbApiSource\n | Office365Source\n | AzureDataLakeStoreSource\n | AzureBlobFSSource\n | HttpSource\n | SnowflakeSource\n | AzureDatabricksDeltaLakeSource\n | SharePointOnlineListSource;\nexport type CopySinkUnion =\n | CopySink\n | DelimitedTextSink\n | JsonSink\n | OrcSink\n | RestSink\n | AzurePostgreSqlSink\n | AzureMySqlSink\n | AzureDatabricksDeltaLakeSink\n | SapCloudForCustomerSink\n | AzureQueueSink\n | AzureTableSink\n | AvroSink\n | ParquetSink\n | BinarySink\n | BlobSink\n | FileSystemSink\n | DocumentDbCollectionSink\n | CosmosDbSqlApiSink\n | SqlSink\n | SqlServerSink\n | AzureSqlSink\n | SqlMISink\n | SqlDWSink\n | SnowflakeSink\n | OracleSink\n | AzureDataLakeStoreSink\n | AzureBlobFSSink\n | AzureSearchIndexSink\n | OdbcSink\n | InformixSink\n | MicrosoftAccessSink\n | DynamicsSink\n | DynamicsCrmSink\n | CommonDataServiceForAppsSink\n | AzureDataExplorerSink\n | SalesforceSink\n | SalesforceServiceCloudSink\n | CosmosDbMongoDbApiSink;\nexport type ExportSettingsUnion =\n | ExportSettings\n | SnowflakeExportCopyCommand\n | AzureDatabricksDeltaLakeExportCommand;\nexport type 
ImportSettingsUnion =\n | ImportSettings\n | AzureDatabricksDeltaLakeImportCommand\n | SnowflakeImportCopyCommand;\nexport type CopyTranslatorUnion = CopyTranslator | TabularTranslator;\nexport type DependencyReferenceUnion =\n | DependencyReference\n | TriggerDependencyReferenceUnion\n | SelfDependencyTumblingWindowTriggerReference;\nexport type LinkedIntegrationRuntimeTypeUnion =\n | LinkedIntegrationRuntimeType\n | LinkedIntegrationRuntimeKeyAuthorization\n | LinkedIntegrationRuntimeRbacAuthorization;\nexport type ControlActivityUnion =\n | ControlActivity\n | ExecutePipelineActivity\n | IfConditionActivity\n | SwitchActivity\n | ForEachActivity\n | WaitActivity\n | FailActivity\n | UntilActivity\n | ValidationActivity\n | FilterActivity\n | SetVariableActivity\n | AppendVariableActivity\n | WebHookActivity;\nexport type ExecutionActivityUnion =\n | ExecutionActivity\n | CopyActivity\n | HDInsightHiveActivity\n | HDInsightPigActivity\n | HDInsightMapReduceActivity\n | HDInsightStreamingActivity\n | HDInsightSparkActivity\n | ExecuteSsisPackageActivity\n | CustomActivity\n | SqlServerStoredProcedureActivity\n | DeleteActivity\n | AzureDataExplorerCommandActivity\n | LookupActivity\n | WebActivity\n | GetMetadataActivity\n | AzureMLBatchExecutionActivity\n | AzureMLUpdateResourceActivity\n | AzureMLExecutePipelineActivity\n | DataLakeAnalyticsUsqlActivity\n | DatabricksNotebookActivity\n | DatabricksSparkJarActivity\n | DatabricksSparkPythonActivity\n | AzureFunctionActivity\n | ExecuteDataFlowActivity\n | ScriptActivity\n | SynapseNotebookActivity\n | SynapseSparkJobDefinitionActivity;\nexport type MultiplePipelineTriggerUnion =\n | MultiplePipelineTrigger\n | ScheduleTrigger\n | BlobTrigger\n | BlobEventsTrigger\n | CustomEventsTrigger;\nexport type TabularSourceUnion =\n | TabularSource\n | AzureTableSource\n | InformixSource\n | Db2Source\n | OdbcSource\n | MySqlSource\n | PostgreSqlSource\n | SybaseSource\n | SapBwSource\n | SalesforceSource\n | 
SapCloudForCustomerSource\n | SapEccSource\n | SapHanaSource\n | SapOpenHubSource\n | SapOdpSource\n | SapTableSource\n | SqlSource\n | SqlServerSource\n | AmazonRdsForSqlServerSource\n | AzureSqlSource\n | SqlMISource\n | SqlDWSource\n | AzureMySqlSource\n | TeradataSource\n | CassandraSource\n | AmazonMWSSource\n | AzurePostgreSqlSource\n | ConcurSource\n | CouchbaseSource\n | DrillSource\n | EloquaSource\n | GoogleBigQuerySource\n | GreenplumSource\n | HBaseSource\n | HiveSource\n | HubspotSource\n | ImpalaSource\n | JiraSource\n | MagentoSource\n | MariaDBSource\n | AzureMariaDBSource\n | MarketoSource\n | PaypalSource\n | PhoenixSource\n | PrestoSource\n | QuickBooksSource\n | ServiceNowSource\n | ShopifySource\n | SparkSource\n | SquareSource\n | XeroSource\n | ZohoSource\n | NetezzaSource\n | VerticaSource\n | SalesforceMarketingCloudSource\n | ResponsysSource\n | DynamicsAXSource\n | OracleServiceCloudSource\n | GoogleAdWordsSource\n | AmazonRedshiftSource;\nexport type TriggerDependencyReferenceUnion =\n | TriggerDependencyReference\n | TumblingWindowTriggerDependencyReference;\n\nexport interface LinkConnectionListResponse {\n /** List link connection value */\n value: LinkConnectionResource[];\n /** List link connections next link */\n nextLink?: string;\n}\n\nexport interface LinkConnectionResource {\n /** Link connection id */\n id?: string;\n /** Link connection name */\n name?: string;\n /** Link connection type */\n type?: string;\n /** Properties of link connection */\n properties: LinkConnection;\n /** Link connection description */\n description?: string;\n}\n\nexport interface LinkConnection {\n /** Properties of link connection's source database */\n sourceDatabase?: LinkConnectionSourceDatabase;\n /** Properties of link connection's target database */\n targetDatabase?: LinkConnectionTargetDatabase;\n /** Properties of link connection's landing zone */\n landingZone?: LinkConnectionLandingZone;\n /** Properties of link connection's compute 
*/\n compute?: LinkConnectionCompute;\n}\n\nexport interface LinkConnectionSourceDatabase {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Source database type properties */\n typeProperties?: LinkConnectionSourceDatabaseTypeProperties;\n}\n\n/** Linked service reference type. */\nexport interface LinkedServiceReference {\n /** Linked service reference type. */\n type: Type;\n /** Reference LinkedService name. */\n referenceName: string;\n /** Arguments for LinkedService. */\n parameters?: { [propertyName: string]: any };\n}\n\nexport interface LinkConnectionSourceDatabaseTypeProperties {\n /** Link connection source database server's resource id */\n resourceId?: string;\n /** Link connection source database server's principal id */\n principalId?: string;\n}\n\nexport interface LinkConnectionTargetDatabase {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Target database type properties */\n typeProperties?: LinkConnectionTargetDatabaseTypeProperties;\n}\n\nexport interface LinkConnectionTargetDatabaseTypeProperties {\n /** Enable cross table transaction consistency on target database */\n crossTableTransaction?: boolean;\n /** Drop and recreate same existing target table on link connection target database */\n dropExistingTargetTableOnStart?: boolean;\n}\n\nexport interface LinkConnectionLandingZone {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Landing zone's file system name */\n fileSystem?: string;\n /** Landing zone's folder path name */\n folderPath?: string;\n /** Landing zone's sas token */\n sasToken?: SecureString;\n}\n\n/** The base definition of a secret type. 
*/\nexport interface SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SecureString\" | \"AzureKeyVaultSecret\";\n}\n\nexport interface LinkConnectionCompute {\n /** Compute core count used by the link connection */\n coreCount?: number;\n /** Link connection's compute type */\n computeType?: string;\n /** Link connection's data process interval in minutes */\n dataProcessIntervalMinutes?: number;\n}\n\n/** The object that defines the structure of an Azure Synapse error response. */\nexport interface CloudError {\n /** Error code. */\n code: string;\n /** Error message. */\n message: string;\n /** Property name/path in request associated with error. */\n target?: string;\n /** Array with additional error details. */\n details?: CloudError[];\n}\n\nexport interface EditTablesRequest {\n /** Edit link tables request */\n linkTables?: LinkTableRequest[];\n}\n\nexport interface LinkTableRequest {\n /** Link table id */\n id?: string;\n /** Source table properties for link table request */\n source?: LinkTableRequestSource;\n /** Target table properties for link table request */\n target?: LinkTableRequestTarget;\n /** Link table operation type */\n operationType?: string;\n}\n\nexport interface LinkTableRequestSource {\n /** Source table table name */\n tableName?: string;\n /** Source table schema name */\n schemaName?: string;\n}\n\nexport interface LinkTableRequestTarget {\n /** Target table table name */\n tableName?: string;\n /** Target table schema name */\n schemaName?: string;\n /** Target table distribution options for link table request */\n distributionOptions?: LinkTableRequestTargetDistributionOptions;\n /** Target table structure options for link table request */\n structureOptions?: LinkTableRequestTargetStructureOptions;\n}\n\nexport interface LinkTableRequestTargetDistributionOptions {\n /** Target table distribution type */\n type?: string;\n /** Target table distribution column */\n 
distributionColumn?: string;\n}\n\nexport interface LinkTableRequestTargetStructureOptions {\n /** Target table structure type */\n type?: string;\n}\n\nexport interface LinkConnectionDetailedStatus {\n /** Link connection id */\n id?: string;\n /** Link connection name */\n name?: string;\n /** Is link connection applying changes */\n isApplyingChanges?: boolean;\n /** Is link connection partially failed */\n isPartiallyFailed?: boolean;\n /** Link connection start time */\n startTime?: any;\n /** Link connection stop time */\n stopTime?: any;\n /** Link connection status */\n status?: string;\n /** Link connection's corresponding continuous run id */\n continuousRunId?: string;\n /** Link connection error */\n error?: any;\n /** Link connection refresh status */\n refreshStatus?: LinkConnectionRefreshStatus;\n /** Link connection landing zone credential expire time */\n landingZoneCredentialExpireTime?: Date;\n}\n\nexport interface LinkConnectionRefreshStatus {\n /**\n * Link connection refresh status\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly refreshStatus?: string;\n /**\n * Link connection refresh error message\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly errorMessage?: string;\n}\n\nexport interface LinkTableListResponse {\n /** List link table value */\n value?: LinkTableResource[];\n}\n\nexport interface LinkTableResource {\n /** Link table id */\n id?: string;\n /** Link table name */\n name?: string;\n /** Source table properties for link table request */\n source?: LinkTableRequestSource;\n /** Target table properties for link table request */\n target?: LinkTableRequestTarget;\n}\n\nexport interface QueryTableStatusRequest {\n /** Max segment count to query table status */\n maxSegmentCount?: number;\n /** Continuation token to query table status */\n continuationToken?: any;\n}\n\nexport interface LinkConnectionQueryTableStatus {\n /** Link tables' status */\n value?: LinkTableStatus[];\n /** Continuation token to query table status */\n continuationToken?: any;\n}\n\nexport interface LinkTableStatus {\n /** ID provided by the client */\n id?: string;\n /** Link table status */\n status?: string;\n /** Link table error message */\n errorMessage?: string;\n /** Link table start time */\n startTime?: any;\n /** Link table stop time */\n stopTime?: any;\n /** Link table ID */\n linkTableId?: string;\n /** Link table error code */\n errorCode?: string;\n /** Link table last processed data time */\n lastProcessedData?: Date;\n /** Link table last transaction commit time */\n lastTransactionCommitTime?: Date;\n}\n\nexport interface UpdateLandingZoneCredential {\n /** Landing zone's sas token */\n sasToken?: SecureString;\n}\n\nexport interface KqlScriptsResourceCollectionResponse {\n value?: KqlScriptResource[];\n nextLink?: string;\n}\n\nexport interface KqlScriptResource {\n id?: string;\n name?: string;\n type?: string;\n /** Properties of sql script. 
*/\n properties?: KqlScript;\n}\n\nexport interface KqlScript {\n content?: KqlScriptContent;\n}\n\nexport interface KqlScriptContent {\n query?: string;\n metadata?: KqlScriptContentMetadata;\n currentConnection?: KqlScriptContentCurrentConnection;\n}\n\nexport interface KqlScriptContentMetadata {\n language?: string;\n}\n\nexport interface KqlScriptContentCurrentConnection {\n name?: string;\n poolName?: string;\n databaseName?: string;\n type?: string;\n}\n\n/** Contains details when the response code indicates an error. */\nexport interface ErrorContract {\n /** The error details. */\n error?: ErrorResponse;\n}\n\n/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.) */\nexport interface ErrorResponse {\n /**\n * The error code.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly code?: string;\n /**\n * The error message.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly message?: string;\n /**\n * The error target.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly target?: string;\n /**\n * The error details.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly details?: ErrorResponse[];\n /**\n * The error additional info.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly additionalInfo?: ErrorAdditionalInfo[];\n}\n\n/** The resource management error additional info. */\nexport interface ErrorAdditionalInfo {\n /**\n * The additional info type.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * The additional info.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly info?: any;\n}\n\n/** Request body structure for rename artifact. */\nexport interface ArtifactRenameRequest {\n /** New name of the artifact. */\n newName?: string;\n}\n\nexport interface MetastoreRegisterObject {\n /** The input folder containing CDM files. */\n inputFolder: string;\n}\n\nexport interface MetastoreRegistrationResponse {\n /** Enumerates possible request statuses. */\n status?: RequestStatus;\n}\n\nexport interface MetastoreRequestSuccessResponse {\n /** Enumerates possible Status of the resource. */\n status?: ResourceStatus;\n}\n\nexport interface MetastoreUpdateObject {\n /** The input folder containing CDM files. */\n inputFolder: string;\n}\n\nexport interface MetastoreUpdationResponse {\n /** Enumerates possible request statuses. */\n status?: RequestStatus;\n}\n\n/** A list of sparkconfiguration resources. */\nexport interface SparkConfigurationListResponse {\n /** List of sparkconfigurations. */\n value: SparkConfigurationResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** SparkConfiguration Artifact information */\nexport interface SparkConfiguration {\n /** Description about the SparkConfiguration. */\n description?: string;\n /** SparkConfiguration configs. */\n configs: { [propertyName: string]: string };\n /** Annotations for SparkConfiguration. */\n annotations?: string[];\n /** additional Notes. */\n notes?: string;\n /** The identity that created the resource. */\n createdBy?: string;\n /** The timestamp of resource creation. */\n created?: Date;\n /** SparkConfiguration configMergeRule. */\n configMergeRule?: { [propertyName: string]: string };\n}\n\n/** Common fields that are returned in the response for all Azure Resource Manager resources */\nexport interface Resource {\n /**\n * Fully qualified resource ID for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /**\n * The name of the resource\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\"\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n}\n\n/** Collection of Big Data pool information */\nexport interface BigDataPoolResourceInfoListResult {\n /** Link to the next page of results */\n nextLink?: string;\n /** List of Big Data pools */\n value?: BigDataPoolResourceInfo[];\n}\n\n/** Auto-scaling properties of a Big Data pool powered by Apache Spark */\nexport interface AutoScaleProperties {\n /** The minimum number of nodes the Big Data pool can support. */\n minNodeCount?: number;\n /** Whether automatic scaling is enabled for the Big Data pool. */\n enabled?: boolean;\n /** The maximum number of nodes the Big Data pool can support. */\n maxNodeCount?: number;\n}\n\n/** Auto-pausing properties of a Big Data pool powered by Apache Spark */\nexport interface AutoPauseProperties {\n /** Number of minutes of idle time before the Big Data pool is automatically paused. */\n delayInMinutes?: number;\n /** Whether auto-pausing is enabled for the Big Data pool. */\n enabled?: boolean;\n}\n\n/** Dynamic Executor Allocation Properties */\nexport interface DynamicExecutorAllocation {\n /** Indicates whether Dynamic Executor Allocation is enabled or not. 
*/\n enabled?: boolean;\n}\n\n/** Library requirements for a Big Data pool powered by Apache Spark */\nexport interface LibraryRequirements {\n /**\n * The last update time of the library requirements file.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly time?: Date;\n /** The library requirements. */\n content?: string;\n /** The filename of the library requirements file. */\n filename?: string;\n}\n\n/** Library/package information of a Big Data pool powered by Apache Spark */\nexport interface LibraryInfo {\n /** Name of the library. */\n name?: string;\n /** Storage blob path of library. */\n path?: string;\n /** Storage blob container name. */\n containerName?: string;\n /** The last update time of the library. */\n uploadedTimestamp?: Date;\n /** Type of the library. */\n type?: string;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningStatus?: string;\n /**\n * Creator Id of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly creatorId?: string;\n}\n\n/** Azure Synapse nested object which contains a flow with data movements and transformations. */\nexport interface DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MappingDataFlow\" | \"Flowlet\";\n /** The description of the data flow. */\n description?: string;\n /** List of tags that can be used for describing the data flow. */\n annotations?: any[];\n /** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */\n folder?: DataFlowFolder;\n}\n\n/** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */\nexport interface DataFlowFolder {\n /** The name of the folder that this data flow is in. 
*/\n name?: string;\n}\n\n/** A list of data flow resources. */\nexport interface DataFlowListResponse {\n /** List of data flows. */\n value: DataFlowResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Request body structure for creating data flow debug session. */\nexport interface CreateDataFlowDebugSessionRequest {\n /** Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */\n computeType?: string;\n /** Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */\n coreCount?: number;\n /** Time to live setting of the cluster in minutes. */\n timeToLive?: number;\n /** Set to use integration runtime setting for data flow debug session. */\n integrationRuntime?: IntegrationRuntimeDebugResource;\n}\n\n/** Azure Synapse nested object which serves as a compute resource for activities. */\nexport interface IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Managed\" | \"SelfHosted\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Integration runtime description. */\n description?: string;\n}\n\n/** Azure Synapse nested debug resource. */\nexport interface SubResourceDebugResource {\n /** The resource name. */\n name?: string;\n}\n\n/** Response body structure for creating data flow debug session. */\nexport interface CreateDataFlowDebugSessionResponse {\n /** The ID of data flow debug session. */\n sessionId?: string;\n}\n\n/** A list of active debug sessions. */\nexport interface QueryDataFlowDebugSessionsResponse {\n /** Array with all active debug sessions. */\n value?: DataFlowDebugSessionInfo[];\n /** The link to the next page of results, if any remaining results exist. 
*/\n nextLink?: string;\n}\n\n/** Data flow debug session info. */\nexport interface DataFlowDebugSessionInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The name of the data flow. */\n dataFlowName?: string;\n /** Compute type of the cluster. */\n computeType?: string;\n /** Core count of the cluster. */\n coreCount?: number;\n /** Node count of the cluster. (deprecated property) */\n nodeCount?: number;\n /** Attached integration runtime name of data flow debug session. */\n integrationRuntimeName?: string;\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** Start time of data flow debug session. */\n startTime?: string;\n /** Compute type of the cluster. */\n timeToLiveInMinutes?: number;\n /** Last activity time of data flow debug session. */\n lastActivityTime?: string;\n}\n\n/** Request body structure for starting data flow debug session. */\nexport interface DataFlowDebugPackage {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** Data flow instance. */\n dataFlow?: DataFlowDebugResource;\n /** List of Data flows */\n dataFlows?: DataFlowDebugResource[];\n /** List of datasets. */\n datasets?: DatasetDebugResource[];\n /** List of linked services. */\n linkedServices?: LinkedServiceDebugResource[];\n /** Staging info for debug session. */\n staging?: DataFlowStagingInfo;\n /** Data flow debug settings. */\n debugSettings?: DataFlowDebugPackageDebugSettings;\n}\n\n/** The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. 
*/\nexport interface Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AmazonS3Object\"\n | \"Avro\"\n | \"Excel\"\n | \"Parquet\"\n | \"DelimitedText\"\n | \"Json\"\n | \"Xml\"\n | \"Orc\"\n | \"Binary\"\n | \"AzureBlob\"\n | \"AzureTable\"\n | \"AzureSqlTable\"\n | \"AzureSqlMITable\"\n | \"AzureSqlDWTable\"\n | \"CassandraTable\"\n | \"CustomDataset\"\n | \"CosmosDbSqlApiCollection\"\n | \"DocumentDbCollection\"\n | \"DynamicsEntity\"\n | \"DynamicsCrmEntity\"\n | \"CommonDataServiceForAppsEntity\"\n | \"AzureDataLakeStoreFile\"\n | \"AzureBlobFSFile\"\n | \"Office365Table\"\n | \"FileShare\"\n | \"MongoDbCollection\"\n | \"MongoDbAtlasCollection\"\n | \"MongoDbV2Collection\"\n | \"CosmosDbMongoDbApiCollection\"\n | \"ODataResource\"\n | \"OracleTable\"\n | \"AmazonRdsForOracleTable\"\n | \"TeradataTable\"\n | \"AzureMySqlTable\"\n | \"AmazonRedshiftTable\"\n | \"Db2Table\"\n | \"RelationalTable\"\n | \"InformixTable\"\n | \"OdbcTable\"\n | \"MySqlTable\"\n | \"PostgreSqlTable\"\n | \"MicrosoftAccessTable\"\n | \"SalesforceObject\"\n | \"SalesforceServiceCloudObject\"\n | \"SybaseTable\"\n | \"SapBwCube\"\n | \"SapCloudForCustomerResource\"\n | \"SapEccResource\"\n | \"SapHanaTable\"\n | \"SapOpenHubTable\"\n | \"SqlServerTable\"\n | \"AmazonRdsForSqlServerTable\"\n | \"RestResource\"\n | \"SapTableResource\"\n | \"SapOdpResource\"\n | \"WebTable\"\n | \"AzureSearchIndex\"\n | \"HttpFile\"\n | \"AmazonMWSObject\"\n | \"AzurePostgreSqlTable\"\n | \"ConcurObject\"\n | \"CouchbaseTable\"\n | \"DrillTable\"\n | \"EloquaObject\"\n | \"GoogleBigQueryObject\"\n | \"GreenplumTable\"\n | \"HBaseObject\"\n | \"HiveObject\"\n | \"HubspotObject\"\n | \"ImpalaObject\"\n | \"JiraObject\"\n | \"MagentoObject\"\n | \"MariaDBTable\"\n | \"AzureMariaDBTable\"\n | \"MarketoObject\"\n | \"PaypalObject\"\n | \"PhoenixObject\"\n | \"PrestoObject\"\n | \"QuickBooksObject\"\n | \"ServiceNowObject\"\n | \"ShopifyObject\"\n 
| \"SparkObject\"\n | \"SquareObject\"\n | \"XeroObject\"\n | \"ZohoObject\"\n | \"NetezzaTable\"\n | \"VerticaTable\"\n | \"SalesforceMarketingCloudObject\"\n | \"ResponsysObject\"\n | \"DynamicsAXResource\"\n | \"OracleServiceCloudObject\"\n | \"AzureDataExplorerTable\"\n | \"GoogleAdWordsObject\"\n | \"SnowflakeTable\"\n | \"SharePointOnlineListResource\"\n | \"AzureDatabricksDeltaLakeDataset\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Dataset description. */\n description?: string;\n /** Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. */\n structure?: any;\n /** Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. */\n schema?: any;\n /** Linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** Parameters for dataset. */\n parameters?: { [propertyName: string]: ParameterSpecification };\n /** List of tags that can be used for describing the Dataset. */\n annotations?: any[];\n /** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */\n folder?: DatasetFolder;\n}\n\n/** Definition of a single parameter for an entity. */\nexport interface ParameterSpecification {\n /** Parameter type. */\n type: ParameterType;\n /** Default value of parameter. */\n defaultValue?: any;\n}\n\n/** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */\nexport interface DatasetFolder {\n /** The name of the folder that this Dataset is in. */\n name?: string;\n}\n\n/** The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. 
*/\nexport interface LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AzureStorage\"\n | \"AzureBlobStorage\"\n | \"AzureTableStorage\"\n | \"AzureSqlDW\"\n | \"SqlServer\"\n | \"AmazonRdsForSqlServer\"\n | \"AzureSqlDatabase\"\n | \"AzureSqlMI\"\n | \"AzureBatch\"\n | \"AzureKeyVault\"\n | \"CosmosDb\"\n | \"Dynamics\"\n | \"DynamicsCrm\"\n | \"CommonDataServiceForApps\"\n | \"HDInsight\"\n | \"FileServer\"\n | \"AzureFileStorage\"\n | \"GoogleCloudStorage\"\n | \"Oracle\"\n | \"AmazonRdsForOracle\"\n | \"AzureMySql\"\n | \"MySql\"\n | \"PostgreSql\"\n | \"Sybase\"\n | \"Db2\"\n | \"Teradata\"\n | \"AzureML\"\n | \"AzureMLService\"\n | \"Odbc\"\n | \"Informix\"\n | \"MicrosoftAccess\"\n | \"Hdfs\"\n | \"OData\"\n | \"Web\"\n | \"Cassandra\"\n | \"MongoDb\"\n | \"MongoDbAtlas\"\n | \"MongoDbV2\"\n | \"CosmosDbMongoDbApi\"\n | \"AzureDataLakeStore\"\n | \"AzureBlobFS\"\n | \"Office365\"\n | \"Salesforce\"\n | \"SalesforceServiceCloud\"\n | \"SapCloudForCustomer\"\n | \"SapEcc\"\n | \"SapOpenHub\"\n | \"SapOdp\"\n | \"RestService\"\n | \"TeamDesk\"\n | \"Quickbase\"\n | \"Smartsheet\"\n | \"Zendesk\"\n | \"Dataworld\"\n | \"AppFigures\"\n | \"Asana\"\n | \"Twilio\"\n | \"GoogleSheets\"\n | \"AmazonS3\"\n | \"AmazonRedshift\"\n | \"CustomDataSource\"\n | \"AzureSearch\"\n | \"HttpServer\"\n | \"FtpServer\"\n | \"Sftp\"\n | \"SapBW\"\n | \"SapHana\"\n | \"AmazonMWS\"\n | \"AzurePostgreSql\"\n | \"Concur\"\n | \"Couchbase\"\n | \"Drill\"\n | \"Eloqua\"\n | \"GoogleBigQuery\"\n | \"Greenplum\"\n | \"HBase\"\n | \"Hive\"\n | \"Hubspot\"\n | \"Impala\"\n | \"Jira\"\n | \"Magento\"\n | \"MariaDB\"\n | \"AzureMariaDB\"\n | \"Marketo\"\n | \"Paypal\"\n | \"Phoenix\"\n | \"Presto\"\n | \"QuickBooks\"\n | \"ServiceNow\"\n | \"Shopify\"\n | \"Spark\"\n | \"Square\"\n | \"Xero\"\n | \"Zoho\"\n | \"Vertica\"\n | \"Netezza\"\n | \"SalesforceMarketingCloud\"\n | \"HDInsightOnDemand\"\n | 
\"AzureDataLakeAnalytics\"\n | \"AzureDatabricks\"\n | \"AzureDatabricksDeltaLake\"\n | \"Responsys\"\n | \"DynamicsAX\"\n | \"OracleServiceCloud\"\n | \"GoogleAdWords\"\n | \"SapTable\"\n | \"AzureDataExplorer\"\n | \"AzureFunction\"\n | \"Snowflake\"\n | \"SharePointOnlineList\"\n | \"AzureSynapseArtifacts\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The integration runtime reference. */\n connectVia?: IntegrationRuntimeReference;\n /** Linked service description. */\n description?: string;\n /** Parameters for linked service. */\n parameters?: { [propertyName: string]: ParameterSpecification };\n /** List of tags that can be used for describing the linked service. */\n annotations?: any[];\n}\n\n/** Integration runtime reference type. */\nexport interface IntegrationRuntimeReference {\n /** Type of integration runtime. */\n type: IntegrationRuntimeReferenceType;\n /** Reference integration runtime name. */\n referenceName: string;\n /** Arguments for integration runtime. */\n parameters?: { [propertyName: string]: any };\n}\n\n/** Staging info for execute data flow activity. */\nexport interface DataFlowStagingInfo {\n /** Staging linked service reference. */\n linkedService?: LinkedServiceReference;\n /** Folder path for staging blob. */\n folderPath?: string;\n}\n\n/** Data flow debug settings. */\nexport interface DataFlowDebugPackageDebugSettings {\n /** Source setting for data flow debug. */\n sourceSettings?: DataFlowSourceSetting[];\n /** Data flow parameters. */\n parameters?: { [propertyName: string]: any };\n /** Parameters for dataset. */\n datasetParameters?: any;\n}\n\n/** Definition of data flow source setting for debug. */\nexport interface DataFlowSourceSetting {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The data flow source name. 
*/\n sourceName?: string;\n /** Defines the row limit of data flow source in debug. */\n rowLimit?: number;\n}\n\n/** Response body structure for starting data flow debug session. */\nexport interface AddDataFlowToDebugSessionResponse {\n /** The ID of data flow debug job version. */\n jobVersion?: string;\n}\n\n/** Request body structure for deleting data flow debug session. */\nexport interface DeleteDataFlowDebugSessionRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The data flow which contains the debug session. */\n dataFlowName?: string;\n}\n\n/** Request body structure for data flow expression preview. */\nexport interface DataFlowDebugCommandRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The command type. */\n command?: DataFlowDebugCommandType;\n /** The command payload object. */\n commandPayload?: DataFlowDebugCommandPayload;\n}\n\n/** Structure of command payload. */\nexport interface DataFlowDebugCommandPayload {\n /** The stream name which is used for preview. */\n streamName: string;\n /** Row limits for preview response. */\n rowLimits?: number;\n /** Array of column names. */\n columns?: string[];\n /** The expression which is used for preview. */\n expression?: string;\n}\n\n/** Response body structure of data flow result for data preview, statistics or expression preview. */\nexport interface DataFlowDebugCommandResponse {\n /** The run status of data preview, statistics or expression preview. */\n status?: string;\n /** The result data of data preview, statistics or expression preview. */\n data?: string;\n}\n\n/** A list of dataset resources. */\nexport interface DatasetListResponse {\n /** List of datasets. */\n value: DatasetResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\nexport interface GitHubAccessTokenRequest {\n /** The GitHub Client Id. */\n gitHubClientId: string;\n /** The GitHub Access code. 
*/\n gitHubAccessCode: string;\n /** The GitHub access token base URL. */\n gitHubAccessTokenBaseUrl: string;\n}\n\nexport interface GitHubAccessTokenResponse {\n gitHubAccessToken?: string;\n}\n\n/** A list of integration runtime resources. */\nexport interface IntegrationRuntimeListResponse {\n /** List of integration runtimes. */\n value: IntegrationRuntimeResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** A list of Library resources. */\nexport interface LibraryListResponse {\n /** List of Library. */\n value: LibraryResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Library/package properties */\nexport interface LibraryResourceProperties {\n /**\n * Name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * Location of library/package in storage account.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly path?: string;\n /**\n * Container name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly containerName?: string;\n /**\n * The last update time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly uploadedTimestamp?: string;\n /**\n * Type of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningStatus?: string;\n /**\n * Creator Id of the library/package.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly creatorId?: string;\n}\n\n/** Library resource info */\nexport interface LibraryResourceInfo {\n /**\n * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /**\n * record Id of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly recordId?: number;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly state?: string;\n /**\n * The creation time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly created?: string;\n /**\n * The last updated time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly changed?: string;\n /**\n * The type of the resource. E.g. LibraryArtifact\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * Operation Id of the operation performed on library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly operationId?: string;\n /**\n * artifact Id of the library/package.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly artifactId?: string;\n}\n\n/** Operation status for the operation */\nexport interface OperationResult {\n /**\n * Operation status\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: string;\n /** Error code. */\n code?: string;\n /** Error message. */\n message?: string;\n /** Property name/path in request associated with error. */\n target?: string;\n /** Array with additional error details. */\n details?: CloudError[];\n}\n\n/** A list of linked service resources. */\nexport interface LinkedServiceListResponse {\n /** List of linked services. */\n value: LinkedServiceResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** A list of Notebook resources. */\nexport interface NotebookListResponse {\n /** List of Notebooks. */\n value: NotebookResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Notebook resource type. */\nexport interface NotebookResource {\n /**\n * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /** The name of the resource */\n name: string;\n /**\n * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Resource Etag.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly etag?: string;\n /** Properties of Notebook. */\n properties: Notebook;\n}\n\n/** Notebook. 
*/\nexport interface Notebook {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The description of the notebook. */\n description?: string;\n /** Big data pool reference. */\n bigDataPool?: BigDataPoolReference;\n /** The spark configuration of the spark job. */\n targetSparkConfiguration?: SparkConfigurationReference;\n /** Session properties. */\n sessionProperties?: NotebookSessionProperties;\n /** Notebook root-level metadata. */\n metadata: NotebookMetadata;\n /** Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. */\n nbformat: number;\n /** Notebook format (minor number). Incremented for backward compatible changes to the notebook format. */\n nbformatMinor: number;\n /** Array of cells of the current notebook. */\n cells: NotebookCell[];\n /** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */\n folder?: NotebookFolder;\n}\n\n/** Big data pool reference. */\nexport interface BigDataPoolReference {\n /** Big data pool reference type. */\n type: BigDataPoolReferenceType;\n /** Reference big data pool name. */\n referenceName: string;\n}\n\n/** Spark configuration reference. */\nexport interface SparkConfigurationReference {\n /** Spark configuration reference type. */\n type: SparkConfigurationReferenceType;\n /** Reference spark configuration name. */\n referenceName: string;\n}\n\n/** Session properties. */\nexport interface NotebookSessionProperties {\n /** Amount of memory to use for the driver process. */\n driverMemory: string;\n /** Number of cores to use for the driver. */\n driverCores: number;\n /** Amount of memory to use per executor process. */\n executorMemory: string;\n /** Number of cores to use for each executor. */\n executorCores: number;\n /** Number of executors to launch for this session. 
*/\n numExecutors: number;\n}\n\n/** Notebook root-level metadata. */\nexport interface NotebookMetadata {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Kernel information. */\n kernelspec?: NotebookKernelSpec;\n /** Language info. */\n languageInfo?: NotebookLanguageInfo;\n}\n\n/** Kernel information. */\nexport interface NotebookKernelSpec {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Name of the kernel specification. */\n name: string;\n /** Name to display in UI. */\n displayName: string;\n}\n\n/** Language info. */\nexport interface NotebookLanguageInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The programming language which this kernel runs. */\n name: string;\n /** The codemirror mode to use for code in this language. */\n codemirrorMode?: string;\n}\n\n/** Notebook cell. */\nexport interface NotebookCell {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** String identifying the type of cell. */\n cellType: string;\n /** Cell-level metadata. */\n metadata: any;\n /** Contents of the cell, represented as an array of lines. */\n source: string[];\n /** Attachments associated with the cell. */\n attachments?: any;\n /** Cell-level output items. */\n outputs?: NotebookCellOutputItem[];\n}\n\n/** An item of the notebook cell execution output. */\nexport interface NotebookCellOutputItem {\n /** For output_type=stream, determines the name of stream (stdout / stderr). */\n name?: string;\n /** Execution sequence number. */\n executionCount?: number;\n /** Execution, display, or stream outputs. */\n outputType: CellOutputType;\n /** For output_type=stream, the stream's text output, represented as a string or an array of strings. 
*/\n text?: any;\n /** Output data. Use MIME type as key, and content as value. */\n data?: any;\n /** Metadata for the output item. */\n metadata?: any;\n}\n\n/** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */\nexport interface NotebookFolder {\n /** The name of the folder that this notebook is in. */\n name?: string;\n}\n\n/** A list of pipeline resources. */\nexport interface PipelineListResponse {\n /** List of pipelines. */\n value: PipelineResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** A pipeline activity. */\nexport interface Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"Container\"\n | \"Execution\"\n | \"Copy\"\n | \"HDInsightHive\"\n | \"HDInsightPig\"\n | \"HDInsightMapReduce\"\n | \"HDInsightStreaming\"\n | \"HDInsightSpark\"\n | \"ExecuteSSISPackage\"\n | \"Custom\"\n | \"SqlServerStoredProcedure\"\n | \"ExecutePipeline\"\n | \"Delete\"\n | \"AzureDataExplorerCommand\"\n | \"Lookup\"\n | \"WebActivity\"\n | \"GetMetadata\"\n | \"IfCondition\"\n | \"Switch\"\n | \"ForEach\"\n | \"AzureMLBatchExecution\"\n | \"AzureMLUpdateResource\"\n | \"AzureMLExecutePipeline\"\n | \"DataLakeAnalyticsU-SQL\"\n | \"Wait\"\n | \"Fail\"\n | \"Until\"\n | \"Validation\"\n | \"Filter\"\n | \"DatabricksNotebook\"\n | \"DatabricksSparkJar\"\n | \"DatabricksSparkPython\"\n | \"SetVariable\"\n | \"AppendVariable\"\n | \"AzureFunctionActivity\"\n | \"WebHook\"\n | \"ExecuteDataFlow\"\n | \"Script\"\n | \"SynapseNotebook\"\n | \"SparkJob\"\n | \"SqlPoolStoredProcedure\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Activity name. */\n name: string;\n /** Activity description. */\n description?: string;\n /** Activity depends on condition. 
*/\n dependsOn?: ActivityDependency[];\n /** Activity user properties. */\n userProperties?: UserProperty[];\n}\n\n/** Activity dependency information. */\nexport interface ActivityDependency {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Activity name. */\n activity: string;\n /** Match-Condition for the dependency. */\n dependencyConditions: DependencyCondition[];\n}\n\n/** User property. */\nexport interface UserProperty {\n /** User property name. */\n name: string;\n /** User property value. Type: string (or Expression with resultType string). */\n value: any;\n}\n\n/** Definition of a single variable for a Pipeline. */\nexport interface VariableSpecification {\n /** Variable type. */\n type: VariableType;\n /** Default value of variable. */\n defaultValue?: any;\n}\n\n/** The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */\nexport interface PipelineFolder {\n /** The name of the folder that this Pipeline is in. */\n name?: string;\n}\n\n/** Response body with a run identifier. */\nexport interface CreateRunResponse {\n /** Identifier of a run. */\n runId: string;\n}\n\n/** Query parameters for listing runs. */\nexport interface RunFilterParameters {\n /** The continuation token for getting the next page of results. Null for first page. */\n continuationToken?: string;\n /** The time at or after which the run event was updated in 'ISO 8601' format. */\n lastUpdatedAfter: Date;\n /** The time at or before which the run event was updated in 'ISO 8601' format. */\n lastUpdatedBefore: Date;\n /** List of filters. */\n filters?: RunQueryFilter[];\n /** List of OrderBy option. */\n orderBy?: RunQueryOrderBy[];\n}\n\n/** Query filter option for listing runs. */\nexport interface RunQueryFilter {\n /** Parameter name to be used for filter. 
The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. */\n operand: RunQueryFilterOperand;\n /** Operator to be used for filter. */\n operator: RunQueryFilterOperator;\n /** List of filter values. */\n values: string[];\n}\n\n/** An object to provide order by options for listing runs. */\nexport interface RunQueryOrderBy {\n /** Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. */\n orderBy: RunQueryOrderByField;\n /** Sorting order of the parameter. */\n order: RunQueryOrder;\n}\n\n/** A list pipeline runs. */\nexport interface PipelineRunsQueryResponse {\n /** List of pipeline runs. */\n value: PipelineRun[];\n /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */\n continuationToken?: string;\n}\n\n/** Information about a pipeline run. */\nexport interface PipelineRun {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /**\n * Identifier of a run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly runId?: string;\n /**\n * Identifier that correlates all the recovery runs of a pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly runGroupId?: string;\n /**\n * Indicates if the recovered pipeline run is the latest in its group.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly isLatest?: boolean;\n /**\n * The pipeline name.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly pipelineName?: string;\n /**\n * The full or partial list of parameter name, value pair used in the pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly parameters?: { [propertyName: string]: string };\n /**\n * Entity that started the pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly invokedBy?: PipelineRunInvokedBy;\n /**\n * The last updated timestamp for the pipeline run event in ISO8601 format.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly lastUpdated?: Date;\n /**\n * The start time of a pipeline run in ISO8601 format.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly runStart?: Date;\n /**\n * The end time of a pipeline run in ISO8601 format.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly runEnd?: Date;\n /**\n * The duration of a pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly durationInMs?: number;\n /**\n * The status of a pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: string;\n /**\n * The message from a pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly message?: string;\n}\n\n/** Provides entity name and id that started the pipeline run. */\nexport interface PipelineRunInvokedBy {\n /**\n * Name of the entity that started the pipeline run.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * The ID of the entity that started the run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /**\n * The type of the entity that started the run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly invokedByType?: string;\n}\n\n/** A list activity runs. */\nexport interface ActivityRunsQueryResponse {\n /** List of activity runs. */\n value: ActivityRun[];\n /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */\n continuationToken?: string;\n}\n\n/** Information about an activity run in a pipeline. */\nexport interface ActivityRun {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /**\n * The name of the pipeline.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly pipelineName?: string;\n /**\n * The id of the pipeline run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly pipelineRunId?: string;\n /**\n * The name of the activity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly activityName?: string;\n /**\n * The type of the activity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly activityType?: string;\n /**\n * The id of the activity run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly activityRunId?: string;\n /**\n * The name of the compute linked service.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly linkedServiceName?: string;\n /**\n * The status of the activity run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: string;\n /**\n * The start time of the activity run in 'ISO 8601' format.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly activityRunStart?: Date;\n /**\n * The end time of the activity run in 'ISO 8601' format.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly activityRunEnd?: Date;\n /**\n * The duration of the activity run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly durationInMs?: number;\n /**\n * The input for the activity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly input?: any;\n /**\n * The output for the activity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly output?: any;\n /**\n * The error if any from the activity run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly error?: any;\n}\n\n/** A list of spark job definitions resources. */\nexport interface SparkJobDefinitionsListResponse {\n /** List of spark job definitions. */\n value: SparkJobDefinitionResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Spark job definition. */\nexport interface SparkJobDefinition {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The description of the Spark job definition. */\n description?: string;\n /** Big data pool reference. */\n targetBigDataPool: BigDataPoolReference;\n /** The spark configuration of the spark job. 
*/\n targetSparkConfiguration?: SparkConfigurationReference;\n /** The required Spark version of the application. */\n requiredSparkVersion?: string;\n /** The language of the Spark application. */\n language?: string;\n /** The properties of the Spark job. */\n jobProperties: SparkJobProperties;\n /** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. */\n folder?: SparkJobDefinitionFolder;\n}\n\n/** The properties of the Spark job. */\nexport interface SparkJobProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The name of the job. */\n name?: string;\n /** File containing the application to execute. */\n file: string;\n /** Main class for Java/Scala application. */\n className?: string;\n /** Spark configuration properties. */\n conf?: any;\n /** Command line arguments for the application. */\n args?: string[];\n /** Jars to be used in this job. */\n jars?: string[];\n /** files to be used in this job. */\n files?: string[];\n /** Archives to be used in this job. */\n archives?: string[];\n /** Amount of memory to use for the driver process. */\n driverMemory: string;\n /** Number of cores to use for the driver. */\n driverCores: number;\n /** Amount of memory to use per executor process. */\n executorMemory: string;\n /** Number of cores to use for each executor. */\n executorCores: number;\n /** Number of executors to launch for this job. */\n numExecutors: number;\n}\n\n/** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. */\nexport interface SparkJobDefinitionFolder {\n /** The name of the folder that this Spark job definition is in. */\n name?: string;\n}\n\nexport interface SparkBatchJob {\n livyInfo?: SparkBatchJobState;\n /** The batch name. */\n name?: string;\n /** The workspace name. 
*/\n workspaceName?: string;\n /** The Spark pool name. */\n sparkPoolName?: string;\n /** The submitter name. */\n submitterName?: string;\n /** The submitter identifier. */\n submitterId?: string;\n /** The artifact identifier. */\n artifactId?: string;\n /** The job type. */\n jobType?: SparkJobType;\n /** The Spark batch job result. */\n result?: SparkBatchJobResultType;\n /** The scheduler information. */\n scheduler?: SparkScheduler;\n /** The plugin information. */\n plugin?: SparkServicePlugin;\n /** The error information. */\n errors?: SparkServiceError[];\n /** The tags. */\n tags?: { [propertyName: string]: string };\n /** The session Id. */\n id: number;\n /** The application id of this session */\n appId?: string;\n /** The detailed application info. */\n appInfo?: { [propertyName: string]: string };\n /** The batch state */\n state?: LivyStates;\n /** The log lines. */\n logLines?: string[];\n}\n\nexport interface SparkBatchJobState {\n /** the time that at which \"not_started\" livy state was first seen. */\n notStartedAt?: Date;\n /** the time that at which \"starting\" livy state was first seen. */\n startingAt?: Date;\n /** the time that at which \"running\" livy state was first seen. */\n runningAt?: Date;\n /** time that at which \"dead\" livy state was first seen. */\n deadAt?: Date;\n /** the time that at which \"success\" livy state was first seen. */\n successAt?: Date;\n /** the time that at which \"killed\" livy state was first seen. */\n terminatedAt?: Date;\n /** the time that at which \"recovering\" livy state was first seen. */\n recoveringAt?: Date;\n /** the Spark job state. 
*/\n currentState?: string;\n jobCreationRequest?: SparkRequest;\n}\n\nexport interface SparkRequest {\n name?: string;\n file?: string;\n className?: string;\n arguments?: string[];\n jars?: string[];\n pythonFiles?: string[];\n files?: string[];\n archives?: string[];\n /** Dictionary of <string> */\n configuration?: { [propertyName: string]: string };\n driverMemory?: string;\n driverCores?: number;\n executorMemory?: string;\n executorCores?: number;\n executorCount?: number;\n}\n\nexport interface SparkScheduler {\n submittedAt?: Date;\n scheduledAt?: Date;\n endedAt?: Date;\n cancellationRequestedAt?: Date;\n currentState?: SchedulerCurrentState;\n}\n\nexport interface SparkServicePlugin {\n preparationStartedAt?: Date;\n resourceAcquisitionStartedAt?: Date;\n submissionStartedAt?: Date;\n monitoringStartedAt?: Date;\n cleanupStartedAt?: Date;\n currentState?: PluginCurrentState;\n}\n\nexport interface SparkServiceError {\n message?: string;\n errorCode?: string;\n source?: SparkErrorSource;\n}\n\n/** List of SQL pools */\nexport interface SqlPoolInfoListResult {\n /** Link to the next page of results */\n nextLink?: string;\n /** List of SQL pools */\n value?: SqlPool[];\n}\n\n/** SQL pool SKU */\nexport interface Sku {\n /** The service tier */\n tier?: string;\n /** The SKU name */\n name?: string;\n /** If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. */\n capacity?: number;\n}\n\n/** A list of sql scripts resources. */\nexport interface SqlScriptsListResponse {\n /** List of sql scripts. */\n value: SqlScriptResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Sql Script resource type. */\nexport interface SqlScriptResource {\n /**\n * Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /** The name of the resource */\n name: string;\n /**\n * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Resource Etag.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly etag?: string;\n /** Properties of sql script. */\n properties: SqlScript;\n}\n\n/** SQL script. */\nexport interface SqlScript {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The description of the SQL script. */\n description?: string;\n /** The type of the SQL script. */\n type?: SqlScriptType;\n /** The content of the SQL script. */\n content: SqlScriptContent;\n /** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */\n folder?: SqlScriptFolder;\n}\n\n/** The content of the SQL script. */\nexport interface SqlScriptContent {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** SQL query to execute. */\n query: string;\n /** The connection used to execute the SQL script. */\n currentConnection?: SqlConnection;\n /** Limit of results, '-1' for no limit. */\n resultLimit?: number;\n /** The metadata of the SQL script. */\n metadata?: SqlScriptMetadata;\n}\n\n/** The connection used to execute the SQL script. */\nexport interface SqlConnection {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The type of the connection. 
*/\n type?: SqlConnectionType;\n /** The identifier of the connection. */\n name?: string;\n /** The associated SQL pool name (supported by SQL pool v3) */\n poolName?: string;\n /** The associated database name (supported by SQL pool v3) */\n databaseName?: string;\n}\n\n/** The metadata of the SQL script. */\nexport interface SqlScriptMetadata {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The language of the SQL script. */\n language?: string;\n}\n\n/** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */\nexport interface SqlScriptFolder {\n /** The name of the folder that this SQL script is in. */\n name?: string;\n}\n\n/** A list of trigger resources. */\nexport interface TriggerListResponse {\n /** List of triggers. */\n value: TriggerResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Azure Synapse nested object which contains information about creating pipeline run */\nexport interface Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"RerunTumblingWindowTrigger\"\n | \"MultiplePipelineTrigger\"\n | \"ScheduleTrigger\"\n | \"BlobTrigger\"\n | \"BlobEventsTrigger\"\n | \"CustomEventsTrigger\"\n | \"TumblingWindowTrigger\"\n | \"ChainingTrigger\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Trigger description. */\n description?: string;\n /**\n * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly runtimeState?: TriggerRuntimeState;\n /** List of tags that can be used for describing the trigger. 
*/\n annotations?: any[];\n}\n\n/** Defines the response of a trigger subscription operation. */\nexport interface TriggerSubscriptionOperationStatus {\n /**\n * Trigger name.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggerName?: string;\n /**\n * Event Subscription Status.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: EventSubscriptionStatus;\n}\n\n/** A list of trigger runs. */\nexport interface TriggerRunsQueryResponse {\n /** List of trigger runs. */\n value: TriggerRun[];\n /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */\n continuationToken?: string;\n}\n\n/** Trigger runs. */\nexport interface TriggerRun {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /**\n * Trigger run id.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggerRunId?: string;\n /**\n * Trigger name.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggerName?: string;\n /**\n * Trigger type.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggerType?: string;\n /**\n * Trigger run start time.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggerRunTimestamp?: Date;\n /**\n * Trigger run status.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: TriggerRunStatus;\n /**\n * Trigger error message.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly message?: string;\n /**\n * List of property name and value related to trigger run. 
Name, value pair depends on type of trigger.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly properties?: { [propertyName: string]: string };\n /**\n * List of pipeline name and run Id triggered by the trigger run.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly triggeredPipelines?: { [propertyName: string]: string };\n}\n\n/** Details of the data lake storage account associated with the workspace */\nexport interface DataLakeStorageAccountDetails {\n /** Account URL */\n accountUrl?: string;\n /** Filesystem name */\n filesystem?: string;\n}\n\n/** Virtual Network Profile */\nexport interface VirtualNetworkProfile {\n /** Subnet ID used for computes in workspace */\n computeSubnetId?: string;\n}\n\n/** Private endpoint details */\nexport interface PrivateEndpoint {\n /**\n * Resource id of the private endpoint.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n}\n\n/** Connection state details of the private endpoint */\nexport interface PrivateLinkServiceConnectionState {\n /** The private link service connection status. */\n status?: string;\n /** The private link service connection description. */\n description?: string;\n /**\n * The actions required for private link service connection.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly actionsRequired?: string;\n}\n\n/** Details of the encryption associated with the workspace */\nexport interface EncryptionDetails {\n /**\n * Double Encryption enabled\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly doubleEncryptionEnabled?: boolean;\n /** Customer Managed Key Details */\n cmk?: CustomerManagedKeyDetails;\n}\n\n/** Details of the customer managed key associated with the workspace */\nexport interface CustomerManagedKeyDetails {\n /**\n * The customer managed key status on the workspace\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: string;\n /** The key object of the workspace */\n key?: WorkspaceKeyDetails;\n}\n\n/** Details of the customer managed key associated with the workspace */\nexport interface WorkspaceKeyDetails {\n /** Workspace Key sub-resource name */\n name?: string;\n /** Workspace Key sub-resource key vault url */\n keyVaultUrl?: string;\n}\n\n/** Managed Virtual Network Settings */\nexport interface ManagedVirtualNetworkSettings {\n /** Prevent Data Exfiltration */\n preventDataExfiltration?: boolean;\n /** Linked Access Check On Target Resource */\n linkedAccessCheckOnTargetResource?: boolean;\n /** Allowed Aad Tenant Ids For Linking */\n allowedAadTenantIdsForLinking?: string[];\n}\n\n/** Git integration settings */\nexport interface WorkspaceRepositoryConfiguration {\n /** Type of workspace repositoryID configuration. Example WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration */\n type?: string;\n /** GitHub Enterprise host name. For example: https://github.mydomain.com */\n hostName?: string;\n /** Account name */\n accountName?: string;\n /** VSTS project name */\n projectName?: string;\n /** Repository name */\n repositoryName?: string;\n /** Collaboration branch */\n collaborationBranch?: string;\n /** Root folder to use in the repository */\n rootFolder?: string;\n /** The last commit ID */\n lastCommitId?: string;\n /** The VSTS tenant ID */\n tenantId?: string;\n /** GitHub bring your own app client id */\n clientId?: string;\n /** GitHub bring your own app client secret information. 
*/\n clientSecret?: GitHubClientSecret;\n}\n\n/** Client secret information for factory's bring your own app repository configuration */\nexport interface GitHubClientSecret {\n /** Bring your own app client secret AKV URL */\n byoaSecretAkvUrl?: string;\n /** Bring your own app client secret name in AKV */\n byoaSecretName?: string;\n}\n\n/** Purview Configuration */\nexport interface PurviewConfiguration {\n /** Purview Resource ID */\n purviewResourceId?: string;\n}\n\n/** The workspace managed identity */\nexport interface ManagedIdentity {\n /**\n * The principal ID of the workspace managed identity\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly principalId?: string;\n /**\n * The tenant ID of the workspace managed identity\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly tenantId?: string;\n /** The type of managed identity for the workspace */\n type?: ResourceIdentityType;\n}\n\n/** Azure Synapse expression definition. */\nexport interface Expression {\n /** Expression type. */\n type: ExpressionType;\n /** Expression value. */\n value: string;\n}\n\n/** Defines the response of a provision trigger dependency operation. */\nexport interface TriggerDependencyProvisioningStatus {\n /** Trigger name. */\n triggerName: string;\n /** Provisioning status. */\n provisioningStatus: string;\n}\n\n/** Pipeline reference type. */\nexport interface PipelineReference {\n /** Pipeline reference type. */\n type: PipelineReferenceType;\n /** Reference pipeline name. */\n referenceName: string;\n /** Reference name. */\n name?: string;\n}\n\n/** Pipeline that needs to be triggered with the given parameters. */\nexport interface TriggerPipelineReference {\n /** Pipeline reference. */\n pipelineReference?: PipelineReference;\n /** Pipeline parameters. */\n parameters?: { [propertyName: string]: any };\n}\n\n/** Parameters for updating a workspace resource. 
*/\nexport interface WorkspaceUpdateParameters {\n /** The resource tags. */\n tags?: { [propertyName: string]: string };\n /** Managed service identity of the workspace. */\n identity?: WorkspaceIdentity;\n}\n\n/** Identity properties of the workspace resource. */\nexport interface WorkspaceIdentity {\n /** The identity type. Currently the only supported type is 'SystemAssigned'. */\n type: \"SystemAssigned\";\n /**\n * The principal id of the identity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly principalId?: string;\n /**\n * The client tenant id of the identity.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly tenantId?: string;\n}\n\n/** Dataset reference type. */\nexport interface DatasetReference {\n /** Dataset reference type. */\n type: DatasetReferenceType;\n /** Reference dataset name. */\n referenceName: string;\n /** Arguments for dataset. */\n parameters?: { [propertyName: string]: any };\n}\n\n/** Data flow reference type. */\nexport interface DataFlowReference {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Data flow reference type. */\n type: DataFlowReferenceType;\n /** Reference data flow name. */\n referenceName: string;\n /** Reference data flow parameters from dataset. */\n datasetParameters?: any;\n /** Data flow parameters */\n parameters?: { [propertyName: string]: any };\n}\n\n/** Rerun tumbling window trigger Parameters. */\nexport interface RerunTumblingWindowTriggerActionParameters {\n /** The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */\n startTime: Date;\n /** The end time for the time period for which restatement is initiated. Only UTC time is currently supported. 
*/\n endTime: Date;\n /** The max number of parallel time windows (ready for execution) for which a rerun is triggered. */\n maxConcurrency: number;\n}\n\n/** The request payload of get SSIS object metadata. */\nexport interface GetSsisObjectMetadataRequest {\n /** Metadata path. */\n metadataPath?: string;\n}\n\n/** The status of the operation. */\nexport interface SsisObjectMetadataStatusResponse {\n /** The status of the operation. */\n status?: string;\n /** The operation name. */\n name?: string;\n /** The operation properties. */\n properties?: string;\n /** The operation error message. */\n error?: string;\n}\n\n/** The exposure control request. */\nexport interface ExposureControlRequest {\n /** The feature name. */\n featureName?: string;\n /** The feature type. */\n featureType?: string;\n}\n\n/** The exposure control response. */\nexport interface ExposureControlResponse {\n /**\n * The feature name.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly featureName?: string;\n /**\n * The feature value.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly value?: string;\n}\n\n/** Synapse notebook reference type. */\nexport interface SynapseNotebookReference {\n /** Synapse notebook reference type. */\n type: NotebookReferenceType;\n /** Reference notebook name. Type: string (or Expression with resultType string). */\n referenceName: any;\n}\n\n/** Synapse spark job reference type. */\nexport interface SynapseSparkJobReference {\n /** Synapse spark job reference type. */\n type: SparkJobReferenceType;\n /** Reference spark job name. */\n referenceName: string;\n}\n\n/** SQL pool reference type. */\nexport interface SqlPoolReference {\n /** SQL pool reference type. */\n type: SqlPoolReferenceType;\n /** Reference SQL pool name. */\n referenceName: string;\n}\n\n/** Big data pool reference type. 
*/\nexport interface BigDataPoolParametrizationReference {\n /** Big data pool reference type. */\n type: BigDataPoolReferenceType;\n /** Reference big data pool name. Type: string (or Expression with resultType string). */\n referenceName: any;\n}\n\n/** Request body structure for starting data flow debug session. */\nexport interface StartDataFlowDebugSessionRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** Data flow instance. */\n dataFlow?: DataFlowResource;\n /** List of Data flows */\n dataFlows?: DataFlowResource[];\n /** List of datasets. */\n datasets?: DatasetResource[];\n /** List of linked services. */\n linkedServices?: LinkedServiceResource[];\n /** Staging info for debug session. */\n staging?: any;\n /** Data flow debug settings. */\n debugSettings?: any;\n /** The type of new Databricks cluster. */\n incrementalDebug?: boolean;\n}\n\n/** Response body structure for starting data flow debug session. */\nexport interface StartDataFlowDebugSessionResponse {\n /** The ID of data flow debug job version. */\n jobVersion?: string;\n}\n\n/** Request body structure for data flow preview data. */\nexport interface DataFlowDebugPreviewDataRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The data flow which contains the debug session. */\n dataFlowName?: string;\n /** The output stream name. */\n streamName?: string;\n /** The row limit for preview request. */\n rowLimits?: number;\n}\n\n/** Request body structure for data flow statistics. */\nexport interface DataFlowDebugStatisticsRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The data flow which contains the debug session. */\n dataFlowName?: string;\n /** The output stream name. */\n streamName?: string;\n /** List of column names. */\n columns?: string[];\n}\n\n/** Request body structure for data flow expression preview. 
*/\nexport interface EvaluateDataFlowExpressionRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The data flow which contains the debug session. */\n dataFlowName?: string;\n /** The output stream name. */\n streamName?: string;\n /** The row limit for preview request. */\n rowLimits?: number;\n /** The expression for preview. */\n expression?: string;\n}\n\n/** Response body structure of data flow query for data preview, statistics or expression preview. */\nexport interface DataFlowDebugQueryResponse {\n /** The run ID of data flow debug session. */\n runId?: string;\n}\n\n/** Response body structure of data flow result for data preview, statistics or expression preview. */\nexport interface DataFlowDebugResultResponse {\n /** The run status of data preview, statistics or expression preview. */\n status?: string;\n /** The result data of data preview, statistics or expression preview. */\n data?: string;\n}\n\n/** A list of rerun triggers. */\nexport interface RerunTriggerListResponse {\n /** List of rerun triggers. */\n value: RerunTriggerResource[];\n /**\n * The continuation token for getting the next page of results, if any remaining results exist, null otherwise.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly nextLink?: string;\n}\n\n/** A data flow transformation. */\nexport interface Transformation {\n /** Transformation name. */\n name: string;\n /** Transformation description. */\n description?: string;\n /** Dataset reference. */\n dataset?: DatasetReference;\n /** Linked service reference. */\n linkedService?: LinkedServiceReference;\n /** Flowlet Reference */\n flowlet?: DataFlowReference;\n}\n\n/** Dataset location. 
*/\nexport interface DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AzureBlobStorageLocation\"\n | \"AzureBlobFSLocation\"\n | \"AzureDataLakeStoreLocation\"\n | \"AmazonS3Location\"\n | \"FileServerLocation\"\n | \"AzureFileStorageLocation\"\n | \"GoogleCloudStorageLocation\"\n | \"FtpServerLocation\"\n | \"SftpLocation\"\n | \"HttpServerLocation\"\n | \"HdfsLocation\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Specify the folder path of dataset. Type: string (or Expression with resultType string) */\n folderPath?: any;\n /** Specify the file name of dataset. Type: string (or Expression with resultType string). */\n fileName?: any;\n}\n\n/** Columns that define the structure of the dataset. */\nexport interface DatasetDataElement {\n /** Name of the column. Type: string (or Expression with resultType string). */\n name?: any;\n /** Type of the column. Type: string (or Expression with resultType string). */\n type?: any;\n}\n\n/** Columns that define the physical type schema of the dataset. */\nexport interface DatasetSchemaDataElement {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Name of the schema column. Type: string (or Expression with resultType string). */\n name?: any;\n /** Type of the schema column. Type: string (or Expression with resultType string). */\n type?: any;\n}\n\n/** The format definition of a storage. */\nexport interface DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TextFormat\"\n | \"JsonFormat\"\n | \"AvroFormat\"\n | \"OrcFormat\"\n | \"ParquetFormat\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Serializer. 
Type: string (or Expression with resultType string). */\n serializer?: any;\n /** Deserializer. Type: string (or Expression with resultType string). */\n deserializer?: any;\n}\n\n/** The compression method used on a dataset. */\nexport interface DatasetCompression {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Type of dataset compression. Type: string (or Expression with resultType string). */\n type: any;\n /** The dataset compression level. Type: string (or Expression with resultType string). */\n level?: any;\n}\n\n/** Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. */\nexport interface WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"Anonymous\" | \"Basic\" | \"ClientCertificate\";\n /** The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). */\n url: any;\n}\n\n/** Custom script action to run on HDI ondemand cluster once it's up. */\nexport interface ScriptAction {\n /** The user provided name of the script action. */\n name: string;\n /** The URI for the script action. */\n uri: string;\n /** The node types on which the script action should be executed. */\n roles: any;\n /** The parameters for the script action. */\n parameters?: string;\n}\n\n/** Execution policy for an activity. */\nexport interface ActivityPolicy {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: any;\n /** Maximum ordinary retry attempts. Default is 0. 
Type: integer (or Expression with resultType integer), minimum: 0. */\n retry?: any;\n /** Interval between each retry attempt (in seconds). The default is 30 sec. */\n retryIntervalInSeconds?: number;\n /** When set to true, Input from activity is considered as secure and will not be logged to monitoring. */\n secureInput?: boolean;\n /** When set to true, Output from activity is considered as secure and will not be logged to monitoring. */\n secureOutput?: boolean;\n}\n\n/** Connector read setting. */\nexport interface StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AzureBlobStorageReadSettings\"\n | \"AzureBlobFSReadSettings\"\n | \"AzureDataLakeStoreReadSettings\"\n | \"AmazonS3ReadSettings\"\n | \"FileServerReadSettings\"\n | \"AzureFileStorageReadSettings\"\n | \"GoogleCloudStorageReadSettings\"\n | \"FtpReadSettings\"\n | \"SftpReadSettings\"\n | \"HttpReadSettings\"\n | \"HdfsReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** Connector write settings. */\nexport interface StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"SftpWriteSettings\"\n | \"AzureBlobStorageWriteSettings\"\n | \"AzureBlobFSWriteSettings\"\n | \"AzureDataLakeStoreWriteSettings\"\n | \"FileServerWriteSettings\"\n | \"AzureFileStorageWriteSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n /** The type of copy behavior for copy sink. 
*/\n copyBehavior?: any;\n}\n\n/** Distcp settings. */\nexport interface DistcpSettings {\n /** Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). */\n resourceManagerEndpoint: any;\n /** Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). */\n tempScriptPath: any;\n /** Specifies the Distcp options. Type: string (or Expression with resultType string). */\n distcpOptions?: any;\n}\n\n/** Format read settings. */\nexport interface FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"DelimitedTextReadSettings\"\n | \"JsonReadSettings\"\n | \"XmlReadSettings\"\n | \"BinaryReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Compression read settings. */\nexport interface CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZipDeflateReadSettings\" | \"TarReadSettings\" | \"TarGZipReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Format write settings. */\nexport interface FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AvroWriteSettings\"\n | \"OrcWriteSettings\"\n | \"ParquetWriteSettings\"\n | \"DelimitedTextWriteSettings\"\n | \"JsonWriteSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** A copy activity source. 
*/\nexport interface CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AvroSource\"\n | \"ExcelSource\"\n | \"ParquetSource\"\n | \"DelimitedTextSource\"\n | \"JsonSource\"\n | \"XmlSource\"\n | \"OrcSource\"\n | \"BinarySource\"\n | \"TabularSource\"\n | \"AzureTableSource\"\n | \"BlobSource\"\n | \"DocumentDbCollectionSource\"\n | \"CosmosDbSqlApiSource\"\n | \"DynamicsSource\"\n | \"DynamicsCrmSource\"\n | \"CommonDataServiceForAppsSource\"\n | \"RelationalSource\"\n | \"InformixSource\"\n | \"MicrosoftAccessSource\"\n | \"Db2Source\"\n | \"OdbcSource\"\n | \"MySqlSource\"\n | \"PostgreSqlSource\"\n | \"SybaseSource\"\n | \"SapBwSource\"\n | \"ODataSource\"\n | \"SalesforceSource\"\n | \"SalesforceServiceCloudSource\"\n | \"SapCloudForCustomerSource\"\n | \"SapEccSource\"\n | \"SapHanaSource\"\n | \"SapOpenHubSource\"\n | \"SapOdpSource\"\n | \"SapTableSource\"\n | \"RestSource\"\n | \"SqlSource\"\n | \"SqlServerSource\"\n | \"AmazonRdsForSqlServerSource\"\n | \"AzureSqlSource\"\n | \"SqlMISource\"\n | \"SqlDWSource\"\n | \"FileSystemSource\"\n | \"HdfsSource\"\n | \"AzureMySqlSource\"\n | \"AzureDataExplorerSource\"\n | \"OracleSource\"\n | \"AmazonRdsForOracleSource\"\n | \"TeradataSource\"\n | \"WebSource\"\n | \"CassandraSource\"\n | \"MongoDbSource\"\n | \"MongoDbAtlasSource\"\n | \"MongoDbV2Source\"\n | \"CosmosDbMongoDbApiSource\"\n | \"Office365Source\"\n | \"AzureDataLakeStoreSource\"\n | \"AzureBlobFSSource\"\n | \"HttpSource\"\n | \"AmazonMWSSource\"\n | \"AzurePostgreSqlSource\"\n | \"ConcurSource\"\n | \"CouchbaseSource\"\n | \"DrillSource\"\n | \"EloquaSource\"\n | \"GoogleBigQuerySource\"\n | \"GreenplumSource\"\n | \"HBaseSource\"\n | \"HiveSource\"\n | \"HubspotSource\"\n | \"ImpalaSource\"\n | \"JiraSource\"\n | \"MagentoSource\"\n | \"MariaDBSource\"\n | \"AzureMariaDBSource\"\n | \"MarketoSource\"\n | \"PaypalSource\"\n | \"PhoenixSource\"\n | \"PrestoSource\"\n | 
\"QuickBooksSource\"\n | \"ServiceNowSource\"\n | \"ShopifySource\"\n | \"SparkSource\"\n | \"SquareSource\"\n | \"XeroSource\"\n | \"ZohoSource\"\n | \"NetezzaSource\"\n | \"VerticaSource\"\n | \"SalesforceMarketingCloudSource\"\n | \"ResponsysSource\"\n | \"DynamicsAXSource\"\n | \"OracleServiceCloudSource\"\n | \"GoogleAdWordsSource\"\n | \"AmazonRedshiftSource\"\n | \"SnowflakeSource\"\n | \"AzureDatabricksDeltaLakeSource\"\n | \"SharePointOnlineListSource\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Source retry count. Type: integer (or Expression with resultType integer). */\n sourceRetryCount?: any;\n /** Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n sourceRetryWait?: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** A copy activity sink. 
*/\nexport interface CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"DelimitedTextSink\"\n | \"JsonSink\"\n | \"OrcSink\"\n | \"RestSink\"\n | \"AzurePostgreSqlSink\"\n | \"AzureMySqlSink\"\n | \"AzureDatabricksDeltaLakeSink\"\n | \"SapCloudForCustomerSink\"\n | \"AzureQueueSink\"\n | \"AzureTableSink\"\n | \"AvroSink\"\n | \"ParquetSink\"\n | \"BinarySink\"\n | \"BlobSink\"\n | \"FileSystemSink\"\n | \"DocumentDbCollectionSink\"\n | \"CosmosDbSqlApiSink\"\n | \"SqlSink\"\n | \"SqlServerSink\"\n | \"AzureSqlSink\"\n | \"SqlMISink\"\n | \"SqlDWSink\"\n | \"SnowflakeSink\"\n | \"OracleSink\"\n | \"AzureDataLakeStoreSink\"\n | \"AzureBlobFSSink\"\n | \"AzureSearchIndexSink\"\n | \"OdbcSink\"\n | \"InformixSink\"\n | \"MicrosoftAccessSink\"\n | \"DynamicsSink\"\n | \"DynamicsCrmSink\"\n | \"CommonDataServiceForAppsSink\"\n | \"AzureDataExplorerSink\"\n | \"SalesforceSink\"\n | \"SalesforceServiceCloudSink\"\n | \"CosmosDbMongoDbApiSink\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. */\n writeBatchSize?: any;\n /** Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n writeBatchTimeout?: any;\n /** Sink retry count. Type: integer (or Expression with resultType integer). */\n sinkRetryCount?: any;\n /** Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n sinkRetryWait?: any;\n /** The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** Staging settings. */\nexport interface StagingSettings {\n /** Describes unknown properties. 
The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Staging linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing the interim data. Type: string (or Expression with resultType string). */\n path?: any;\n /** Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableCompression?: any;\n}\n\n/** Redirect incompatible row settings */\nexport interface RedirectIncompatibleRowSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). */\n linkedServiceName: any;\n /** The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** (Deprecated. Please use LogSettings) Log storage settings. */\nexport interface LogStorageSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Log storage linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */\n path?: any;\n /** Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */\n logLevel?: any;\n /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */\n enableReliableLogging?: any;\n}\n\n/** Log settings. */\nexport interface LogSettings {\n /** Specifies whether to enable copy activity log. 
Type: boolean (or Expression with resultType boolean). */\n enableCopyActivityLog?: any;\n /** Specifies settings for copy activity log. */\n copyActivityLogSettings?: CopyActivityLogSettings;\n /** Log location settings customer needs to provide when enabling log. */\n logLocationSettings: LogLocationSettings;\n}\n\n/** Settings for copy activity log. */\nexport interface CopyActivityLogSettings {\n /** Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */\n logLevel?: any;\n /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */\n enableReliableLogging?: any;\n}\n\n/** Log location settings. */\nexport interface LogLocationSettings {\n /** Log storage linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** Skip error file. */\nexport interface SkipErrorFile {\n /** Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). */\n fileMissing?: any;\n /** Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). */\n dataInconsistency?: any;\n}\n\n/** The settings that will be leveraged for SAP HANA source partitioning. */\nexport interface SapHanaPartitionSettings {\n /** The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n}\n\n/** The settings that will be leveraged for SAP table source partitioning. */\nexport interface SapTablePartitionSettings {\n /** The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n /** The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). */\n maxPartitionsNumber?: any;\n}\n\n/** SQL stored procedure parameter. */\nexport interface StoredProcedureParameter {\n /** Stored procedure parameter value. Type: string (or Expression with resultType string). */\n value?: any;\n /** Stored procedure parameter type. */\n type?: StoredProcedureParameterType;\n}\n\n/** The settings that will be leveraged for Sql source partitioning. */\nexport interface SqlPartitionSettings {\n /** The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). 
*/\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for Oracle source partitioning. */\nexport interface OraclePartitionSettings {\n /** Names of the physical partitions of Oracle table. */\n partitionNames?: any;\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for AmazonRdsForOracle source partitioning. */\nexport interface AmazonRdsForOraclePartitionSettings {\n /** Names of the physical partitions of AmazonRdsForOracle table. */\n partitionNames?: any;\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for teradata source partitioning. */\nexport interface TeradataPartitionSettings {\n /** The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). 
*/\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** Cursor methods for Mongodb query */\nexport interface MongoDbCursorMethodsProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). */\n project?: any;\n /** Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). */\n sort?: any;\n /** Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). */\n skip?: any;\n /** Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). */\n limit?: any;\n}\n\n/** The settings that will be leveraged for Netezza source partitioning. */\nexport interface NetezzaPartitionSettings {\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. */\nexport interface RedshiftUnloadSettings {\n /** The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. */\n s3LinkedServiceName: LinkedServiceReference;\n /** The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). */\n bucketName: any;\n}\n\n/** Export command settings. */\nexport interface ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeExportCopyCommand\" | \"AzureDatabricksDeltaLakeExportCommand\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Import command settings. */\nexport interface ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeImportCommand\" | \"SnowflakeImportCopyCommand\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Notebook parameter. */\nexport interface NotebookParameter {\n /** Notebook parameter value. Type: string (or Expression with resultType string). */\n value?: any;\n /** Notebook parameter type. */\n type?: NotebookParameterType;\n}\n\n/** PolyBase settings. 
*/\nexport interface PolybaseSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Reject type. */\n rejectType?: PolybaseSettingsRejectType;\n /** Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. */\n rejectValue?: any;\n /** Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. */\n rejectSampleValue?: any;\n /** Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). */\n useTypeDefault?: any;\n}\n\n/** DW Copy Command settings. */\nexport interface DWCopyCommandSettings {\n /** Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). */\n defaultValues?: DWCopyCommandDefaultValue[];\n /** Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalOptions\": { \"MAXERRORS\": \"1000\", \"DATEFORMAT\": \"'ymd'\" } */\n additionalOptions?: { [propertyName: string]: string };\n}\n\n/** Default value. */\nexport interface DWCopyCommandDefaultValue {\n /** Column name. Type: object (or Expression with resultType string). */\n columnName?: any;\n /** The default value of the column. Type: object (or Expression with resultType string). */\n defaultValue?: any;\n}\n\n/** Specify the column name and value of additional columns. */\nexport interface AdditionalColumns {\n /** Additional column name. 
Type: string (or Expression with resultType string). */\n name?: any;\n /** Additional column value. Type: string (or Expression with resultType string). */\n value?: any;\n}\n\n/** A copy activity translator. */\nexport interface CopyTranslator {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TabularTranslator\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Type conversion settings */\nexport interface TypeConversionSettings {\n /** Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). */\n allowDataTruncation?: any;\n /** Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). */\n treatBooleanAsNumber?: any;\n /** The format for DateTime values. Type: string (or Expression with resultType string). */\n dateTimeFormat?: any;\n /** The format for DateTimeOffset values. Type: string (or Expression with resultType string). */\n dateTimeOffsetFormat?: any;\n /** The format for TimeSpan values. Type: string (or Expression with resultType string). */\n timeSpanFormat?: any;\n /** The culture used to convert data from/to string. Type: string (or Expression with resultType string). */\n culture?: any;\n}\n\n/** SSIS package location. */\nexport interface SsisPackageLocation {\n /** The SSIS package path. Type: string (or Expression with resultType string). */\n packagePath?: any;\n /** The type of SSIS package location. */\n type?: SsisPackageLocationType;\n /** Password of the package. */\n packagePassword?: SecretBaseUnion;\n /** The package access credential. */\n accessCredential?: SsisAccessCredential;\n /** The configuration file of the package execution. Type: string (or Expression with resultType string). */\n configurationPath?: any;\n /** The configuration file access credential. 
*/\n configurationAccessCredential?: SsisAccessCredential;\n /** The package name. */\n packageName?: string;\n /** The embedded package content. Type: string (or Expression with resultType string). */\n packageContent?: any;\n /** The embedded package last modified date. */\n packageLastModifiedDate?: string;\n /** The embedded child package list. */\n childPackages?: SsisChildPackage[];\n}\n\n/** SSIS access credential. */\nexport interface SsisAccessCredential {\n /** Domain for windows authentication. */\n domain: any;\n /** UserName for windows authentication. */\n userName: any;\n /** Password for windows authentication. */\n password: SecretBaseUnion;\n}\n\n/** SSIS embedded child package. */\nexport interface SsisChildPackage {\n /** Path for embedded child package. Type: string (or Expression with resultType string). */\n packagePath: any;\n /** Name for embedded child package. */\n packageName?: string;\n /** Content for embedded child package. Type: string (or Expression with resultType string). */\n packageContent: any;\n /** Last modified date for embedded child package. */\n packageLastModifiedDate?: string;\n}\n\n/** SSIS package execution credential. */\nexport interface SsisExecutionCredential {\n /** Domain for windows authentication. */\n domain: any;\n /** UserName for windows authentication. */\n userName: any;\n /** Password for windows authentication. */\n password: SecureString;\n}\n\n/** SSIS execution parameter. */\nexport interface SsisExecutionParameter {\n /** SSIS package execution parameter value. Type: string (or Expression with resultType string). */\n value: any;\n}\n\n/** SSIS property override. */\nexport interface SsisPropertyOverride {\n /** SSIS package property override value. Type: string (or Expression with resultType string). */\n value: any;\n /** Whether SSIS package property override value is sensitive data. 
Value will be encrypted in SSISDB if it is true */\n isSensitive?: boolean;\n}\n\n/** SSIS package execution log location */\nexport interface SsisLogLocation {\n /** The SSIS package execution log path. Type: string (or Expression with resultType string). */\n logPath: any;\n /** The type of SSIS log location. */\n type: SsisLogLocationType;\n /** The package execution log access credential. */\n accessCredential?: SsisAccessCredential;\n /** Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n logRefreshInterval?: any;\n}\n\n/** Reference objects for custom activity */\nexport interface CustomActivityReferenceObject {\n /** Linked service references. */\n linkedServices?: LinkedServiceReference[];\n /** Dataset references. */\n datasets?: DatasetReference[];\n}\n\n/** Web activity authentication properties. */\nexport interface WebActivityAuthentication {\n /** Web activity authentication (Basic/ClientCertificate/MSI) */\n type: string;\n /** Base64-encoded contents of a PFX file. */\n pfx?: SecretBaseUnion;\n /** Web activity authentication user name for basic authentication. */\n username?: string;\n /** Password for the PFX file or basic authentication. */\n password?: SecretBaseUnion;\n /** Resource for which Azure Auth token will be requested when using MSI Authentication. */\n resource?: string;\n}\n\n/** Switch cases which have a value and corresponding activities. */\nexport interface SwitchCase {\n /** Expected value that satisfies the expression result of the 'on' property. */\n value?: string;\n /** List of activities to execute for satisfied case condition. */\n activities?: ActivityUnion[];\n}\n\n/** Azure ML WebService Input/Output file */\nexport interface AzureMLWebServiceFile {\n /** The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. 
Type: string (or Expression with resultType string). */\n filePath: any;\n /** Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. */\n linkedServiceName: LinkedServiceReference;\n}\n\n/** Compute properties for data flow activity. */\nexport interface ExecuteDataFlowActivityTypePropertiesCompute {\n /** Compute type of the cluster which will execute data flow job. */\n computeType?: DataFlowComputeType;\n /** Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. */\n coreCount?: number;\n}\n\n/** Script block of scripts. */\nexport interface ScriptActivityScriptBlock {\n /** The query text. Type: string (or Expression with resultType string). */\n text: any;\n /** The type of the query. Type: string. */\n type: ScriptType;\n /** Array of script parameters. Type: array. */\n parameters?: ScriptActivityParameter[];\n}\n\n/** Parameters of a script block. */\nexport interface ScriptActivityParameter {\n /** The name of the parameter. Type: string (or Expression with resultType string). */\n name?: any;\n /** The type of the parameter. */\n type?: ScriptActivityParameterType;\n /** The value of the parameter. */\n value?: any;\n /** The direction of the parameter. */\n direction?: ScriptActivityParameterDirection;\n /** The size of the output direction parameter. */\n size?: number;\n}\n\n/** Log settings of script activity. */\nexport interface ScriptActivityTypePropertiesLogSettings {\n /** The destination of logs. Type: string. */\n logDestination: ScriptActivityLogDestination;\n /** Log location settings customer needs to provide when enabling log. */\n logLocationSettings?: LogLocationSettings;\n}\n\n/** The workflow trigger recurrence. */\nexport interface ScheduleTriggerRecurrence {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The frequency. 
*/\n frequency?: RecurrenceFrequency;\n /** The interval. */\n interval?: number;\n /** The start time. */\n startTime?: Date;\n /** The end time. */\n endTime?: Date;\n /** The time zone. */\n timeZone?: string;\n /** The recurrence schedule. */\n schedule?: RecurrenceSchedule;\n}\n\n/** The recurrence schedule. */\nexport interface RecurrenceSchedule {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The minutes. */\n minutes?: number[];\n /** The hours. */\n hours?: number[];\n /** The days of the week. */\n weekDays?: DayOfWeek[];\n /** The month days. */\n monthDays?: number[];\n /** The monthly occurrences. */\n monthlyOccurrences?: RecurrenceScheduleOccurrence[];\n}\n\n/** The recurrence schedule occurrence. */\nexport interface RecurrenceScheduleOccurrence {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The day of the week. */\n day?: DayOfWeek;\n /** The occurrence. */\n occurrence?: number;\n}\n\n/** Execution policy for an activity. */\nexport interface RetryPolicy {\n /** Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */\n count?: any;\n /** Interval between retries in seconds. Default is 30. */\n intervalInSeconds?: number;\n}\n\n/** Referenced dependency. */\nexport interface DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TriggerDependencyReference\"\n | \"TumblingWindowTriggerDependencyReference\"\n | \"SelfDependencyTumblingWindowTriggerReference\";\n}\n\n/** Trigger reference type. */\nexport interface TriggerReference {\n /** Trigger reference type. */\n type: TriggerReferenceType;\n /** Reference trigger name. */\n referenceName: string;\n}\n\n/** The compute resource properties for managed integration runtime. 
*/\nexport interface IntegrationRuntimeComputeProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities */\n location?: string;\n /** The node size requirement to managed integration runtime. */\n nodeSize?: string;\n /** The required number of nodes for managed integration runtime. */\n numberOfNodes?: number;\n /** Maximum parallel executions count per node for managed integration runtime. */\n maxParallelExecutionsPerNode?: number;\n /** Data flow properties for managed integration runtime. */\n dataFlowProperties?: IntegrationRuntimeDataFlowProperties;\n /** VNet properties for managed integration runtime. */\n vNetProperties?: IntegrationRuntimeVNetProperties;\n}\n\n/** Data flow properties for managed integration runtime. */\nexport interface IntegrationRuntimeDataFlowProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Compute type of the cluster which will execute data flow job. */\n computeType?: DataFlowComputeType;\n /** Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. */\n coreCount?: number;\n /** Time to live (in minutes) setting of the cluster which will execute data flow job. */\n timeToLive?: number;\n /** Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. */\n cleanup?: boolean;\n}\n\n/** VNet properties for managed integration runtime. */\nexport interface IntegrationRuntimeVNetProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. 
*/\n [property: string]: any;\n /** The ID of the VNet that this integration runtime will join. */\n vNetId?: string;\n /** The name of the subnet this integration runtime will join. */\n subnet?: string;\n /** Resource IDs of the public IP addresses that this integration runtime will use. */\n publicIPs?: string[];\n}\n\n/** SSIS properties for managed integration runtime. */\nexport interface IntegrationRuntimeSsisProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Catalog information for managed dedicated integration runtime. */\n catalogInfo?: IntegrationRuntimeSsisCatalogInfo;\n /** License type for bringing your own license scenario. */\n licenseType?: IntegrationRuntimeLicenseType;\n /** Custom setup script properties for a managed dedicated integration runtime. */\n customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties;\n /** Data proxy properties for a managed dedicated integration runtime. */\n dataProxyProperties?: IntegrationRuntimeDataProxyProperties;\n /** The edition for the SSIS Integration Runtime */\n edition?: IntegrationRuntimeEdition;\n /** Custom setup without script properties for a SSIS integration runtime. */\n expressCustomSetupProperties?: CustomSetupBase[];\n}\n\n/** Catalog information for managed dedicated integration runtime. */\nexport interface IntegrationRuntimeSsisCatalogInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The catalog database server URL. */\n catalogServerEndpoint?: string;\n /** The administrator user name of catalog database. */\n catalogAdminUserName?: string;\n /** The password of the administrator user account of the catalog database. */\n catalogAdminPassword?: SecureString;\n /** The pricing tier for the catalog database. 
The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ */\n catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier;\n}\n\n/** Custom setup script properties for a managed dedicated integration runtime. */\nexport interface IntegrationRuntimeCustomSetupScriptProperties {\n /** The URI of the Azure blob container that contains the custom setup script. */\n blobContainerUri?: string;\n /** The SAS token of the Azure blob container. */\n sasToken?: SecureString;\n}\n\n/** Data proxy properties for a managed dedicated integration runtime. */\nexport interface IntegrationRuntimeDataProxyProperties {\n /** The self-hosted integration runtime reference. */\n connectVia?: EntityReference;\n /** The staging linked service reference. */\n stagingLinkedService?: EntityReference;\n /** The path to contain the staged data in the Blob storage. */\n path?: string;\n}\n\n/** The entity reference. */\nexport interface EntityReference {\n /** The type of this referenced entity. */\n type?: IntegrationRuntimeEntityReferenceType;\n /** The name of this referenced entity. */\n referenceName?: string;\n}\n\n/** The base definition of the custom setup. */\nexport interface CustomSetupBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomSetupBase\";\n}\n\n/** Managed Virtual Network reference type. */\nexport interface ManagedVirtualNetworkReference {\n /** Managed Virtual Network reference type. */\n type: \"ManagedVirtualNetworkReference\";\n /** Reference ManagedVirtualNetwork name. */\n referenceName: string;\n}\n\n/** The base definition of a linked integration runtime. */\nexport interface LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"Key\" | \"RBAC\";\n}\n\n/** Azure Synapse secure string definition. 
The string value will be masked with asterisks '*' during Get or List API calls. */\nexport interface SecureString extends SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SecureString\";\n /** Value of secure string. */\n value: string;\n}\n\n/** Azure Key Vault secret reference. */\nexport interface AzureKeyVaultSecretReference extends SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureKeyVaultSecret\";\n /** The Azure Key Vault linked service reference. */\n store: LinkedServiceReference;\n /** The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). */\n secretName: any;\n /** The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). */\n secretVersion?: any;\n}\n\n/** The resource model definition for an Azure Resource Manager resource with an etag. */\nexport interface AzureEntityResource extends Resource {\n /**\n * Resource Etag.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly etag?: string;\n}\n\n/** The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location' */\nexport interface TrackedResource extends Resource {\n /** Resource tags. */\n tags?: { [propertyName: string]: string };\n /** The geo-location where the resource lives */\n location: string;\n}\n\n/** The resource model definition for an Azure Resource Manager proxy resource. It will not have tags and a location */\nexport interface ProxyResource extends Resource {}\n\n/** Mapping data flow. */\nexport interface MappingDataFlow extends DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MappingDataFlow\";\n /** List of sources in data flow. 
*/\n sources?: DataFlowSource[];\n /** List of sinks in data flow. */\n sinks?: DataFlowSink[];\n /** List of transformations in data flow. */\n transformations?: Transformation[];\n /** DataFlow script. */\n script?: string;\n /** Data flow script lines. */\n scriptLines?: string[];\n}\n\n/** Data flow flowlet */\nexport interface Flowlet extends DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Flowlet\";\n /** List of sources in Flowlet. */\n sources?: DataFlowSource[];\n /** List of sinks in Flowlet. */\n sinks?: DataFlowSink[];\n /** List of transformations in Flowlet. */\n transformations?: Transformation[];\n /** Flowlet script. */\n script?: string;\n /** Flowlet script lines. */\n scriptLines?: string[];\n}\n\n/** Managed integration runtime, including managed elastic and managed dedicated integration runtimes. */\nexport interface ManagedIntegrationRuntime extends IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Managed\";\n /**\n * Integration runtime state, only valid for managed dedicated integration runtime.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly state?: IntegrationRuntimeState;\n /** Managed Virtual Network reference. */\n managedVirtualNetwork?: ManagedVirtualNetworkReference;\n /** The compute resource for managed integration runtime. */\n computeProperties?: IntegrationRuntimeComputeProperties;\n /** SSIS properties for managed integration runtime. */\n ssisProperties?: IntegrationRuntimeSsisProperties;\n}\n\n/** Self-hosted integration runtime. 
*/\nexport interface SelfHostedIntegrationRuntime extends IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SelfHosted\";\n /** Linked integration runtime type from data factory */\n linkedInfo?: LinkedIntegrationRuntimeTypeUnion;\n}\n\n/** Integration runtime debug resource. */\nexport interface IntegrationRuntimeDebugResource\n extends SubResourceDebugResource {\n /** Integration runtime properties. */\n properties: IntegrationRuntimeUnion;\n}\n\n/** Data flow debug resource. */\nexport interface DataFlowDebugResource extends SubResourceDebugResource {\n /** Data flow properties. */\n properties: DataFlowUnion;\n}\n\n/** Dataset debug resource. */\nexport interface DatasetDebugResource extends SubResourceDebugResource {\n /** Dataset properties. */\n properties: DatasetUnion;\n}\n\n/** Linked service debug resource. */\nexport interface LinkedServiceDebugResource extends SubResourceDebugResource {\n /** Properties of linked service. */\n properties: LinkedServiceUnion;\n}\n\n/** A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */\nexport interface AmazonS3Dataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3Object\";\n /** The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */\n bucketName: any;\n /** The key of the Amazon S3 object. Type: string (or Expression with resultType string). */\n key?: any;\n /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** The version for the S3 object. Type: string (or Expression with resultType string). */\n version?: any;\n /** The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of S3 object's modified datetime. 
Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of files. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the Amazon S3 object. */\n compression?: DatasetCompression;\n}\n\n/** Avro dataset. */\nexport interface AvroDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Avro\";\n /** The location of the avro storage. */\n location?: DatasetLocationUnion;\n /** A string from AvroCompressionCodecEnum or an expression */\n avroCompressionCodec?: any;\n avroCompressionLevel?: number;\n}\n\n/** Excel dataset. */\nexport interface ExcelDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Excel\";\n /** The location of the excel storage. */\n location?: DatasetLocationUnion;\n /** The sheet name of excel file. Type: string (or Expression with resultType string). */\n sheetName?: any;\n /** The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer) */\n sheetIndex?: any;\n /** The partial data of one sheet. Type: string (or Expression with resultType string). */\n range?: any;\n /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */\n firstRowAsHeader?: any;\n /** The data compression method used for the json dataset. */\n compression?: DatasetCompression;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n}\n\n/** Parquet dataset. */\nexport interface ParquetDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Parquet\";\n /** The location of the parquet storage. 
*/\n location?: DatasetLocationUnion;\n /** A string from ParquetCompressionCodecEnum or an expression */\n compressionCodec?: any;\n}\n\n/** Delimited text dataset. */\nexport interface DelimitedTextDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedText\";\n /** The location of the delimited text storage. */\n location?: DatasetLocationUnion;\n /** The column delimiter. Type: string (or Expression with resultType string). */\n columnDelimiter?: any;\n /** The row delimiter. Type: string (or Expression with resultType string). */\n rowDelimiter?: any;\n /** The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The data compressionCodec. Type: string (or Expression with resultType string). */\n compressionCodec?: any;\n /** The data compression method used for DelimitedText. */\n compressionLevel?: any;\n /** The quote character. Type: string (or Expression with resultType string). */\n quoteChar?: any;\n /** The escape character. Type: string (or Expression with resultType string). */\n escapeChar?: any;\n /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */\n firstRowAsHeader?: any;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n}\n\n/** Json dataset. 
*/\nexport interface JsonDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Json\";\n /** The location of the json data storage. */\n location?: DatasetLocationUnion;\n /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The data compression method used for the json dataset. */\n compression?: DatasetCompression;\n}\n\n/** Xml dataset. */\nexport interface XmlDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Xml\";\n /** The location of the json data storage. */\n location?: DatasetLocationUnion;\n /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n /** The data compression method used for the json dataset. */\n compression?: DatasetCompression;\n}\n\n/** ORC dataset. */\nexport interface OrcDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Orc\";\n /** The location of the ORC data storage. */\n location?: DatasetLocationUnion;\n /** The data orcCompressionCodec. Type: string (or Expression with resultType string). */\n orcCompressionCodec?: any;\n}\n\n/** Binary dataset. 
*/\nexport interface BinaryDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Binary\";\n /** The location of the Binary storage. */\n location?: DatasetLocationUnion;\n /** The data compression method used for the binary dataset. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Blob storage. */\nexport interface AzureBlobDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlob\";\n /** The path of the Azure Blob storage. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The root of blob path. Type: string (or Expression with resultType string). */\n tableRootLocation?: any;\n /** The name of the Azure Blob. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of the Azure Blob storage. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the blob storage. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Table storage dataset. */\nexport interface AzureTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureTable\";\n /** The table name of the Azure Table storage. Type: string (or Expression with resultType string). */\n tableName: any;\n}\n\n/** The Azure SQL Server database dataset. */\nexport interface AzureSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlTable\";\n /** This property will be retired. 
Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure SQL Managed Instance dataset. */\nexport interface AzureSqlMITableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlMITable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure SQL Data Warehouse dataset. */\nexport interface AzureSqlDWTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlDWTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Cassandra database dataset. */\nexport interface CassandraTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CassandraTable\";\n /** The table name of the Cassandra database. Type: string (or Expression with resultType string). */\n tableName?: any;\n /** The keyspace of the Cassandra database. 
Type: string (or Expression with resultType string). */\n keyspace?: any;\n}\n\n/** The custom dataset. */\nexport interface CustomDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomDataset\";\n /** Custom dataset properties. */\n typeProperties?: any;\n}\n\n/** Microsoft Azure CosmosDB (SQL API) Collection dataset. */\nexport interface CosmosDbSqlApiCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbSqlApiCollection\";\n /** CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** Microsoft Azure Document Database Collection dataset. */\nexport interface DocumentDbCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DocumentDbCollection\";\n /** Document Database collection name. Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** The Dynamics entity dataset. */\nexport interface DynamicsEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** The Dynamics CRM entity dataset. */\nexport interface DynamicsCrmEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsCrmEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** The Common Data Service for Apps entity dataset. 
*/\nexport interface CommonDataServiceForAppsEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CommonDataServiceForAppsEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** Azure Data Lake Store dataset. */\nexport interface AzureDataLakeStoreDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreFile\";\n /** Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The format of the Data Lake Store. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the item(s) in the Azure Data Lake Store. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Data Lake Storage Gen2 storage. */\nexport interface AzureBlobFSDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSFile\";\n /** The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The format of the Azure Data Lake Storage Gen2 storage. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the blob storage. */\n compression?: DatasetCompression;\n}\n\n/** The Office365 account. */\nexport interface Office365Dataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Office365Table\";\n /** Name of the dataset to extract from Office 365. 
Type: string (or Expression with resultType string). */\n tableName: any;\n /** A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). */\n predicate?: any;\n}\n\n/** An on-premises file system dataset. */\nexport interface FileShareDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileShare\";\n /** The path of the on-premises file system. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the on-premises file system. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of the files. */\n format?: DatasetStorageFormatUnion;\n /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */\n fileFilter?: any;\n /** The data compression method used for the file system. */\n compression?: DatasetCompression;\n}\n\n/** The MongoDB database dataset. */\nexport interface MongoDbCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbCollection\";\n /** The table name of the MongoDB database. Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** The MongoDB Atlas database dataset. */\nexport interface MongoDbAtlasCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbAtlasCollection\";\n /** The collection name of the MongoDB Atlas database. 
Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The MongoDB database dataset. */\nexport interface MongoDbV2CollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbV2Collection\";\n /** The collection name of the MongoDB database. Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The CosmosDB (MongoDB API) database dataset. */\nexport interface CosmosDbMongoDbApiCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbMongoDbApiCollection\";\n /** The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The Open Data Protocol (OData) resource dataset. */\nexport interface ODataResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ODataResource\";\n /** The OData resource path. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** The on-premises Oracle database dataset. */\nexport interface OracleTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The AmazonRdsForOracle database dataset. 
*/\nexport interface AmazonRdsForOracleTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForOracleTable\";\n /** The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Teradata database dataset. */\nexport interface TeradataTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TeradataTable\";\n /** The database name of Teradata. Type: string (or Expression with resultType string). */\n database?: any;\n /** The table name of Teradata. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure MySQL database dataset. */\nexport interface AzureMySqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMySqlTable\";\n /** The Azure MySQL database table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n /** The name of Azure MySQL database table. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Amazon Redshift table dataset. */\nexport interface AmazonRedshiftTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRedshiftTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The Amazon Redshift table name. Type: string (or Expression with resultType string). */\n table?: any;\n /** The Amazon Redshift schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** The Db2 table dataset. 
*/\nexport interface Db2TableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Db2Table\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The Db2 schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The Db2 table name. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The relational table dataset. */\nexport interface RelationalTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RelationalTable\";\n /** The relational table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The Informix table dataset. */\nexport interface InformixTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"InformixTable\";\n /** The Informix table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The ODBC table dataset. */\nexport interface OdbcTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OdbcTable\";\n /** The ODBC table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The MySQL table dataset. */\nexport interface MySqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MySqlTable\";\n /** The MySQL table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The PostgreSQL table dataset. 
*/\nexport interface PostgreSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PostgreSqlTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The PostgreSQL table name. Type: string (or Expression with resultType string). */\n table?: any;\n /** The PostgreSQL schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** The Microsoft Access table dataset. */\nexport interface MicrosoftAccessTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MicrosoftAccessTable\";\n /** The Microsoft Access table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The Salesforce object dataset. */\nexport interface SalesforceObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceObject\";\n /** The Salesforce object API name. Type: string (or Expression with resultType string). */\n objectApiName?: any;\n}\n\n/** The Salesforce Service Cloud object dataset. */\nexport interface SalesforceServiceCloudObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceServiceCloudObject\";\n /** The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). */\n objectApiName?: any;\n}\n\n/** The Sybase table dataset. */\nexport interface SybaseTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SybaseTable\";\n /** The Sybase table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The SAP BW cube dataset. 
*/\nexport interface SapBwCubeDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapBwCube\";\n}\n\n/** The path of the SAP Cloud for Customer OData entity. */\nexport interface SapCloudForCustomerResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapCloudForCustomerResource\";\n /** The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). */\n path: any;\n}\n\n/** The path of the SAP ECC OData entity. */\nexport interface SapEccResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapEccResource\";\n /** The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). */\n path: any;\n}\n\n/** SAP HANA Table properties. */\nexport interface SapHanaTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapHanaTable\";\n /** The schema name of SAP HANA. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of SAP HANA. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** Sap Business Warehouse Open Hub Destination Table properties. */\nexport interface SapOpenHubTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOpenHubTable\";\n /** The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). */\n openHubDestinationName: any;\n /** Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */\n excludeLastRequest?: any;\n /** The ID of request for delta loading. 
Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */\n baseRequestId?: any;\n}\n\n/** The on-premises SQL Server dataset. */\nexport interface SqlServerTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Amazon RDS for SQL Server dataset. */\nexport interface AmazonRdsForSqlServerTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForSqlServerTable\";\n /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** A Rest service dataset. */\nexport interface RestResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RestResource\";\n /** The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). */\n relativeUrl?: any;\n /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). 
*/\n requestBody?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */\n paginationRules?: any;\n}\n\n/** SAP Table Resource properties. */\nexport interface SapTableResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapTableResource\";\n /** The name of the SAP Table. Type: string (or Expression with resultType string). */\n tableName: any;\n}\n\n/** SAP ODP Resource properties. */\nexport interface SapOdpResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOdpResource\";\n /** The context of the SAP ODP Object. Type: string (or Expression with resultType string). */\n context: any;\n /** The name of the SAP ODP Object. Type: string (or Expression with resultType string). */\n objectName: any;\n}\n\n/** The dataset points to a HTML table in the web page. */\nexport interface WebTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebTable\";\n /** The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. */\n index: any;\n /** The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** The Azure Search Index. */\nexport interface AzureSearchIndexDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSearchIndex\";\n /** The name of the Azure Search Index. Type: string (or Expression with resultType string). */\n indexName: any;\n}\n\n/** A file in an HTTP web server. 
*/\nexport interface HttpDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpFile\";\n /** The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). */\n relativeUrl?: any;\n /** The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The body for the HTTP request. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /**\n * The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1\n * ...\n * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string).\n */\n additionalHeaders?: any;\n /** The format of files. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used on files. */\n compression?: DatasetCompression;\n}\n\n/** Amazon Marketplace Web Service dataset. */\nexport interface AmazonMWSObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonMWSObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Azure PostgreSQL dataset. */\nexport interface AzurePostgreSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzurePostgreSqlTable\";\n /** The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). */\n tableName?: any;\n /** The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Concur Service dataset. 
*/\nexport interface ConcurObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ConcurObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Couchbase server dataset. */\nexport interface CouchbaseTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CouchbaseTable\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Drill server dataset. */\nexport interface DrillTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DrillTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Drill. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Drill. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Eloqua server dataset. */\nexport interface EloquaObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"EloquaObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Google BigQuery service dataset. */\nexport interface GoogleBigQueryObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleBigQueryObject\";\n /** This property will be retired. Please consider using database + table properties instead. */\n tableName?: any;\n /** The table name of the Google BigQuery. Type: string (or Expression with resultType string). */\n table?: any;\n /** The database name of the Google BigQuery. 
Type: string (or Expression with resultType string). */\n dataset?: any;\n}\n\n/** Greenplum Database dataset. */\nexport interface GreenplumTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GreenplumTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of Greenplum. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of Greenplum. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** HBase server dataset. */\nexport interface HBaseObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HBaseObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Hive Server dataset. */\nexport interface HiveObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HiveObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Hive. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Hive. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Hubspot Service dataset. */\nexport interface HubspotObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HubspotObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Impala server dataset. 
 */
export interface ImpalaObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ImpalaObject";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Impala. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Impala. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** Jira Service dataset. */
export interface JiraObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "JiraObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Magento server dataset. */
export interface MagentoObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MagentoObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** MariaDB server dataset. */
export interface MariaDBTableDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MariaDBTable";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Azure Database for MariaDB dataset. */
export interface AzureMariaDBTableDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureMariaDBTable";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Marketo server dataset. */
export interface MarketoObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MarketoObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Paypal Service dataset. */
export interface PaypalObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PaypalObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Phoenix server dataset. */
export interface PhoenixObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PhoenixObject";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Phoenix. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Phoenix. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** Presto server dataset. */
export interface PrestoObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PrestoObject";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Presto. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Presto. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** QuickBooks server dataset. */
export interface QuickBooksObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "QuickBooksObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** ServiceNow server dataset. */
export interface ServiceNowObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ServiceNowObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Shopify Service dataset. */
export interface ShopifyObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ShopifyObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Spark Server dataset. */
export interface SparkObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SparkObject";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Spark. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Spark. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** Square Service dataset. */
export interface SquareObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SquareObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Xero Service dataset. */
export interface XeroObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "XeroObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Zoho server dataset. */
export interface ZohoObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ZohoObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Netezza dataset. */
export interface NetezzaTableDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "NetezzaTable";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Netezza. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Netezza. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** Vertica dataset. */
export interface VerticaTableDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "VerticaTable";
  /** This property will be retired. Please consider using schema + table properties instead. */
  tableName?: any;
  /** The table name of the Vertica. Type: string (or Expression with resultType string). */
  table?: any;
  /** The schema name of the Vertica. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
}

/** Salesforce Marketing Cloud dataset. */
export interface SalesforceMarketingCloudObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceMarketingCloudObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** Responsys dataset. */
export interface ResponsysObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ResponsysObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** The path of the Dynamics AX OData entity. */
export interface DynamicsAXResourceDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsAXResource";
  /** The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). */
  path: any;
}

/** Oracle Service Cloud dataset. */
export interface OracleServiceCloudObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OracleServiceCloudObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** The Azure Data Explorer (Kusto) dataset. */
export interface AzureDataExplorerTableDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataExplorerTable";
  /** The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). */
  table?: any;
}

/** Google AdWords service dataset. */
export interface GoogleAdWordsObjectDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleAdWordsObject";
  /** The table name. Type: string (or Expression with resultType string). */
  tableName?: any;
}

/** The snowflake dataset. */
export interface SnowflakeDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SnowflakeTable";
  /** The schema name of the Snowflake database. Type: string (or Expression with resultType string). */
  schemaTypePropertiesSchema?: any;
  /** The table name of the Snowflake database. Type: string (or Expression with resultType string). */
  table?: any;
}

/** The sharepoint online list resource dataset. */
export interface SharePointOnlineListResourceDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SharePointOnlineListResource";
  /** The name of the SharePoint Online list. Type: string (or Expression with resultType string). */
  listName?: any;
}

/** Azure Databricks Delta Lake dataset. */
export interface AzureDatabricksDeltaLakeDataset extends Dataset {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDatabricksDeltaLakeDataset";
  /** The name of delta table. Type: string (or Expression with resultType string). */
  table?: any;
  /** The database name of delta table. Type: string (or Expression with resultType string). */
  database?: any;
}

/** The storage account linked service. */
export interface AzureStorageLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureStorage";
  /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of accountKey in connection string. */
  accountKey?: AzureKeyVaultSecretReference;
  /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  sasUri?: any;
  /** The Azure key vault secret reference of sasToken in sas uri. */
  sasToken?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: string;
}

/** The azure blob storage linked service. */
export interface AzureBlobStorageLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureBlobStorage";
  /** The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of accountKey in connection string. */
  accountKey?: AzureKeyVaultSecretReference;
  /** SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  sasUri?: any;
  /** The Azure key vault secret reference of sasToken in sas uri. */
  sasToken?: AzureKeyVaultSecretReference;
  /** Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. */
  serviceEndpoint?: string;
  /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). */
  accountKind?: string;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: string;
}

/** The azure table storage linked service. */
export interface AzureTableStorageLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureTableStorage";
  /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of accountKey in connection string. */
  accountKey?: AzureKeyVaultSecretReference;
  /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  sasUri?: any;
  /** The Azure key vault secret reference of sasToken in sas uri. */
  sasToken?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: string;
}

/** Azure SQL Data Warehouse linked service. */
export interface AzureSqlDWLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSqlDW";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** SQL Server linked service. */
export interface SqlServerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlServer";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The on-premises Windows authentication password. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Amazon RDS for SQL Server linked service. */
export interface AmazonRdsForSqlServerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRdsForSqlServer";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The on-premises Windows authentication password. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Microsoft Azure SQL Database linked service. */
export interface AzureSqlDatabaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSqlDatabase";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The key of the service principal used to authenticate against Azure SQL Database. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure SQL Managed Instance linked service. */
export interface AzureSqlMILinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSqlMI";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The key of the service principal used to authenticate against Azure SQL Managed Instance. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure Batch linked service. */
export interface AzureBatchLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureBatch";
  /** The Azure Batch account name. Type: string (or Expression with resultType string). */
  accountName: any;
  /** The Azure Batch account access key. */
  accessKey?: SecretBaseUnion;
  /** The Azure Batch URI. Type: string (or Expression with resultType string). */
  batchUri: any;
  /** The Azure Batch pool name. Type: string (or Expression with resultType string). */
  poolName: any;
  /** The Azure Storage linked service reference. */
  linkedServiceName: LinkedServiceReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure Key Vault linked service. */
export interface AzureKeyVaultLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureKeyVault";
  /** The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). */
  baseUrl: any;
}

/** Microsoft Azure Cosmos Database (CosmosDB) linked service. */
export interface CosmosDbLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CosmosDb";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) */
  accountEndpoint?: any;
  /** The name of the database. Type: string (or Expression with resultType string) */
  database?: any;
  /** The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. */
  accountKey?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Dynamics linked service. */
export interface DynamicsLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Dynamics";
  /** The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). */
  deploymentType: any;
  /** The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */
  hostName?: any;
  /** The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */
  serviceUri?: any;
  /** The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). */
  organizationName?: any;
  /** The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */
  authenticationType: any;
  /** User name to access the Dynamics instance. Type: string (or Expression with resultType string). */
  username?: any;
  /** Password to access the Dynamics instance. */
  password?: SecretBaseUnion;
  /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */
  servicePrincipalCredentialType?: any;
  /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */
  servicePrincipalCredential?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Dynamics CRM linked service. */
export interface DynamicsCrmLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsCrm";
  /** The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). */
  deploymentType: any;
  /** The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */
  hostName?: any;
  /** The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */
  serviceUri?: any;
  /** The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). */
  organizationName?: any;
  /** The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */
  authenticationType: any;
  /** User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). */
  username?: any;
  /** Password to access the Dynamics CRM instance. */
  password?: SecretBaseUnion;
  /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** A string from ServicePrincipalCredentialEnum or an expression */
  servicePrincipalCredentialType?: any;
  /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */
  servicePrincipalCredential?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Common Data Service for Apps linked service. */
export interface CommonDataServiceForAppsLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CommonDataServiceForApps";
  /** The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). */
  deploymentType: any;
  /** The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */
  hostName?: any;
  /** The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */
  serviceUri?: any;
  /** The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). */
  organizationName?: any;
  /** The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */
  authenticationType: any;
  /** User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). */
  username?: any;
  /** Password to access the Common Data Service for Apps instance. */
  password?: SecretBaseUnion;
  /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** A string from ServicePrincipalCredentialEnum or an expression */
  servicePrincipalCredentialType?: any;
  /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */
  servicePrincipalCredential?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** HDInsight linked service. */
export interface HDInsightLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HDInsight";
  /** HDInsight cluster URI. Type: string (or Expression with resultType string). */
  clusterUri: any;
  /** HDInsight cluster user name. Type: string (or Expression with resultType string). */
  userName?: any;
  /** HDInsight cluster password. */
  password?: SecretBaseUnion;
  /** The Azure Storage linked service reference. */
  linkedServiceName?: LinkedServiceReference;
  /** A reference to the Azure SQL linked service that points to the HCatalog database. */
  hcatalogLinkedServiceName?: LinkedServiceReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. */
  isEspEnabled?: any;
  /** Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). */
  fileSystem?: any;
}

/** File system linked service. */
export interface FileServerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "FileServer";
  /** Host name of the server. Type: string (or Expression with resultType string). */
  host: any;
  /** User ID to logon the server. Type: string (or Expression with resultType string). */
  userId?: any;
  /** Password to logon the server. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure File Storage linked service. */
export interface AzureFileStorageLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureFileStorage";
  /** Host name of the server. Type: string (or Expression with resultType string). */
  host: any;
  /** User ID to logon the server. Type: string (or Expression with resultType string). */
  userId?: any;
  /** Password to logon the server. */
  password?: SecretBaseUnion;
  /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of accountKey in connection string. */
  accountKey?: AzureKeyVaultSecretReference;
  /** SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */
  sasUri?: any;
  /** The Azure key vault secret reference of sasToken in sas uri. */
  sasToken?: AzureKeyVaultSecretReference;
  /** The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). */
  fileShare?: any;
  /** The azure file share snapshot version. Type: string (or Expression with resultType string). */
  snapshot?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Google Cloud Storage. */
export interface GoogleCloudStorageLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleCloudStorage";
  /** The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */
  accessKeyId?: any;
  /** The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */
  secretAccessKey?: SecretBaseUnion;
  /** This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */
  serviceUrl?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Oracle database. */
export interface OracleLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Oracle";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** AmazonRdsForOracle database. */
export interface AmazonRdsForOracleLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRdsForOracle";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The password in the connection string. Type: SecureString or AzureKeyVaultSecretReference. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure MySQL database linked service. */
export interface AzureMySqlLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureMySql";
  /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for MySQL data source. */
export interface MySqlLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MySql";
  /** The connection string. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for PostgreSQL data source. */
export interface PostgreSqlLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PostgreSql";
  /** The connection string. */
  connectionString: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Sybase data source. */
export interface SybaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Sybase";
  /** Server name for connection. Type: string (or Expression with resultType string). */
  server: any;
  /** Database name for connection. Type: string (or Expression with resultType string). */
  database: any;
  /** Schema name for connection. Type: string (or Expression with resultType string). */
  schema?: any;
  /** AuthenticationType to be used for connection. */
  authenticationType?: SybaseAuthenticationType;
  /** Username for authentication. Type: string (or Expression with resultType string). */
  username?: any;
  /** Password for authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for DB2 data source.
*/\nexport interface Db2LinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Db2\";\n /** The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n server: any;\n /** Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n database: any;\n /** AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */\n authenticationType?: Db2AuthenticationType;\n /** Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n packageCollection?: any;\n /** Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n certificateCommonName?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Teradata data source. 
*/\nexport interface TeradataLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Teradata\";\n /** Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** Server name for connection. Type: string (or Expression with resultType string). */\n server?: any;\n /** AuthenticationType to be used for connection. */\n authenticationType?: TeradataAuthenticationType;\n /** Username for authentication. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure ML Studio Web Service linked service. */\nexport interface AzureMLLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureML\";\n /** The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */\n mlEndpoint: any;\n /** The API key for accessing the Azure ML model endpoint. */\n apiKey: SecretBaseUnion;\n /** The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */\n updateResourceEndpoint?: any;\n /** The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. 
*/\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure ML Service linked service. */\nexport interface AzureMLServiceLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLService\";\n /** Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). */\n subscriptionId: any;\n /** Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). */\n resourceGroupName: any;\n /** Azure ML Service workspace name. Type: string (or Expression with resultType string). */\n mlWorkspaceName: any;\n /** The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Open Database Connectivity (ODBC) linked service. 
*/\nexport interface OdbcLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Odbc\";\n /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The access credential portion of the connection string specified in driver-specific property-value format. */\n credential?: SecretBaseUnion;\n /** User name for Basic authentication. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Informix linked service. */\nexport interface InformixLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Informix\";\n /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The access credential portion of the connection string specified in driver-specific property-value format. */\n credential?: SecretBaseUnion;\n /** User name for Basic authentication. Type: string (or Expression with resultType string). 
*/\n userName?: any;\n /** Password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Microsoft Access linked service. */\nexport interface MicrosoftAccessLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MicrosoftAccess\";\n /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The access credential portion of the connection string specified in driver-specific property-value format. */\n credential?: SecretBaseUnion;\n /** User name for Basic authentication. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Hadoop Distributed File System (HDFS) linked service. */\nexport interface HdfsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Hdfs\";\n /** The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). */\n url: any;\n /** Type of authentication used to connect to the HDFS. 
Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** User name for Windows authentication. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password for Windows authentication. */\n password?: SecretBaseUnion;\n}\n\n/** Open Data Protocol (OData) linked service. */\nexport interface ODataLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OData\";\n /** The URL of the OData service endpoint. Type: string (or Expression with resultType string). */\n url: any;\n /** Type of authentication used to connect to the OData service. */\n authenticationType?: ODataAuthenticationType;\n /** User name of the OData service. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password of the OData service. */\n password?: SecretBaseUnion;\n /** Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). 
*/\n aadResourceId?: any;\n /** Specify the credential type (key or cert) is used for service principal. */\n aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType;\n /** Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */\n servicePrincipalKey?: SecretBaseUnion;\n /** Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */\n servicePrincipalEmbeddedCert?: SecretBaseUnion;\n /** Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). */\n servicePrincipalEmbeddedCertPassword?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Web linked service. */\nexport interface WebLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Web\";\n /** Web linked service properties. */\n typeProperties: WebLinkedServiceTypePropertiesUnion;\n}\n\n/** Linked service for Cassandra data source. */\nexport interface CassandraLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Cassandra\";\n /** Host name for connection. Type: string (or Expression with resultType string). */\n host: any;\n /** AuthenticationType to be used for connection. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The port for the connection. Type: integer (or Expression with resultType integer). */\n port?: any;\n /** Username for authentication. 
Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for MongoDb data source. */\nexport interface MongoDbLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDb\";\n /** The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). */\n server: any;\n /** The authentication type to be used to connect to the MongoDB database. */\n authenticationType?: MongoDbAuthenticationType;\n /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */\n databaseName: any;\n /** Username for authentication. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** Database to verify the username and password. Type: string (or Expression with resultType string). */\n authSource?: any;\n /** The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). */\n enableSsl?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for MongoDB Atlas data source. */\nexport interface MongoDbAtlasLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbAtlas\";\n /** The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */\n database: any;\n}\n\n/** Linked service for MongoDB data source. */\nexport interface MongoDbV2LinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbV2\";\n /** The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */\n database: any;\n}\n\n/** Linked service for CosmosDB (MongoDB API) data source. */\nexport interface CosmosDbMongoDbApiLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbMongoDbApi\";\n /** The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). */\n database: any;\n}\n\n/** Azure Data Lake Store linked service. 
*/\nexport interface AzureDataLakeStoreLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStore\";\n /** Data Lake Store service URI. Type: string (or Expression with resultType string). */\n dataLakeStoreUri: any;\n /** The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The Key of the application used to authenticate against the Azure Data Lake Store account. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** Data Lake Store account name. Type: string (or Expression with resultType string). */\n accountName?: any;\n /** Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */\n subscriptionId?: any;\n /** Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */\n resourceGroupName?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Data Lake Storage Gen2 linked service. 
*/\nexport interface AzureBlobFSLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFS\";\n /** Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */\n url: any;\n /** Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */\n accountKey?: any;\n /** The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */\n servicePrincipalCredentialType?: any;\n /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */\n servicePrincipalCredential?: SecretBaseUnion;\n /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Office365 linked service. */\nexport interface Office365LinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Office365\";\n /** Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). */\n office365TenantId: any;\n /** Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). */\n servicePrincipalTenantId: any;\n /** Specify the application's client ID. Type: string (or Expression with resultType string). */\n servicePrincipalId: any;\n /** Specify the application's key. */\n servicePrincipalKey: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Salesforce. */\nexport interface SalesforceLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Salesforce\";\n /** The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */\n environmentUrl?: any;\n /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */\n username?: any;\n /** The password for Basic authentication of the Salesforce instance. */\n password?: SecretBaseUnion;\n /** The security token is optional to remotely access Salesforce instance. 
*/\n securityToken?: SecretBaseUnion;\n /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */\n apiVersion?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Salesforce Service Cloud. */\nexport interface SalesforceServiceCloudLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceServiceCloud\";\n /** The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */\n environmentUrl?: any;\n /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */\n username?: any;\n /** The password for Basic authentication of the Salesforce instance. */\n password?: SecretBaseUnion;\n /** The security token is optional to remotely access Salesforce instance. */\n securityToken?: SecretBaseUnion;\n /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */\n apiVersion?: any;\n /** Extended properties appended to the connection string. Type: string (or Expression with resultType string). */\n extendedProperties?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for SAP Cloud for Customer. 
*/\nexport interface SapCloudForCustomerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapCloudForCustomer\";\n /** The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */\n url: any;\n /** The username for Basic authentication. Type: string (or Expression with resultType string). */\n username?: any;\n /** The password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for SAP ERP Central Component(SAP ECC). */\nexport interface SapEccLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapEcc\";\n /** The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). */\n url: string;\n /** The username for Basic authentication. Type: string (or Expression with resultType string). */\n username?: string;\n /** The password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */\n encryptedCredential?: string;\n}\n\n/** SAP Business Warehouse Open Hub Destination Linked Service. 
*/
export interface SapOpenHubLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOpenHub";
  /** Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). */
  server: any;
  /** System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */
  systemNumber: any;
  /** Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */
  clientId: any;
  /** Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */
  language?: any;
  /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */
  systemId?: any;
  /** Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP BW server where the open hub destination is located. */
  password?: SecretBaseUnion;
  /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */
  messageServer?: any;
  /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */
  messageServerService?: any;
  /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */
  logonGroup?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** SAP ODP Linked Service. */
export interface SapOdpLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOdp";
  /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */
  server?: any;
  /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */
  systemNumber?: any;
  /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */
  clientId?: any;
  /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */
  language?: any;
  /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */
  systemId?: any;
  /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP server where the table is located. */
  password?: SecretBaseUnion;
  /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */
  messageServer?: any;
  /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */
  messageServerService?: any;
  /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */
  sncMode?: any;
  /** Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  sncMyName?: any;
  /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  sncPartnerName?: any;
  /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  sncLibraryPath?: any;
  /** SNC Quality of Protection. Allowed values include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */
  sncQop?: any;
  /** SNC X509 certificate file path. Type: string (or Expression with resultType string). */
  x509CertificatePath?: any;
  /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */
  logonGroup?: any;
  /** The subscriber name. Type: string (or Expression with resultType string). */
  subscriberName?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Rest Service linked service. */
export interface RestServiceLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "RestService";
  /** The base URL of the REST service. */
  url: any;
  /** Whether to validate server side SSL certificate when connecting to the endpoint. The default value is true. Type: boolean (or Expression with resultType boolean). */
  enableServerCertificateValidation?: any;
  /** Type of authentication used to connect to the REST service. */
  authenticationType: RestServiceAuthenticationType;
  /** The user name used in Basic authentication type. */
  userName?: any;
  /** The password used in Basic authentication type. */
  password?: SecretBaseUnion;
  /** The application's client ID used in AadServicePrincipal authentication type.
*/
  servicePrincipalId?: any;
  /** The application's key used in AadServicePrincipal authentication type. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The resource you are requesting authorization to use. */
  aadResourceId?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** The client ID associated with your application. Type: string (or Expression with resultType string). */
  clientId?: any;
  /** The client secret associated with your application. */
  clientSecret?: SecretBaseUnion;
  /** The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). */
  tokenEndpoint?: any;
  /** The target service or resource to which the access will be requested. Type: string (or Expression with resultType string). */
  resource?: any;
  /** The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). */
  scope?: any;
}

/** Linked service for TeamDesk. */
export interface TeamDeskLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "TeamDesk";
  /** The authentication type to use. */
  authenticationType: TeamDeskAuthenticationType;
  /** The url to connect TeamDesk source. Type: string (or Expression with resultType string). */
  url: any;
  /** The username of the TeamDesk source. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The password of the TeamDesk source. */
  password?: SecretBaseUnion;
  /** The api token for the TeamDesk source. */
  apiToken?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Quickbase. */
export interface QuickbaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Quickbase";
  /** The url to connect Quickbase source. Type: string (or Expression with resultType string). */
  url: any;
  /** The user token for the Quickbase source. */
  userToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Smartsheet. */
export interface SmartsheetLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Smartsheet";
  /** The api token for the Smartsheet source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Zendesk. */
export interface ZendeskLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Zendesk";
  /** The authentication type to use. */
  authenticationType: ZendeskAuthenticationType;
  /** The url to connect Zendesk source. Type: string (or Expression with resultType string). */
  url: any;
  /** The username of the Zendesk source. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The password of the Zendesk source. */
  password?: SecretBaseUnion;
  /** The api token for the Zendesk source. */
  apiToken?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Dataworld. */
export interface DataworldLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Dataworld";
  /** The api token for the Dataworld source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for AppFigures. */
export interface AppFiguresLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AppFigures";
  /** The username of the AppFigures source. */
  userName: any;
  /** The password of the AppFigures source. */
  password: SecretBaseUnion;
  /** The client key for the AppFigures source. */
  clientKey: SecretBaseUnion;
}

/** Linked service for Asana. */
export interface AsanaLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Asana";
  /** The api token for the Asana source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication.
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Twilio. */
export interface TwilioLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Twilio";
  /** The Account SID of Twilio service. */
  userName: any;
  /** The auth token of Twilio service. */
  password: SecretBaseUnion;
}

/** Linked service for GoogleSheets. */
export interface GoogleSheetsLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleSheets";
  /** The api token for the GoogleSheets source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Amazon S3. */
export interface AmazonS3LinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonS3";
  /** The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */
  authenticationType?: any;
  /** The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */
  accessKeyId?: any;
  /** The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */
  secretAccessKey?: SecretBaseUnion;
  /** This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */
  serviceUrl?: any;
  /** The session token for the S3 temporary security credential. */
  sessionToken?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Amazon Redshift. */
export interface AmazonRedshiftLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRedshift";
  /** The name of the Amazon Redshift server. Type: string (or Expression with resultType string). */
  server: any;
  /** The username of the Amazon Redshift source. Type: string (or Expression with resultType string). */
  username?: any;
  /** The password of the Amazon Redshift source. */
  password?: SecretBaseUnion;
  /** The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). */
  database: any;
  /** The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). */
  port?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Custom linked service. */
export interface CustomDataSourceLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CustomDataSource";
  /** Custom linked service properties. */
  typeProperties: any;
}

/** Linked service for Windows Azure Search Service.
*/
export interface AzureSearchLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSearch";
  /** URL for Azure Search service. Type: string (or Expression with resultType string). */
  url: any;
  /** Admin Key for Azure Search service */
  key?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for an HTTP source. */
export interface HttpLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HttpServer";
  /** The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). */
  url: any;
  /** The authentication type to be used to connect to the HTTP server. */
  authenticationType?: HttpAuthenticationType;
  /** User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. */
  password?: SecretBaseUnion;
  /** Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */
  embeddedCertData?: any;
  /** Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */
  certThumbprint?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). */
  enableServerCertificateValidation?: any;
}

/** An FTP server Linked Service. */
export interface FtpServerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "FtpServer";
  /** Host name of the FTP server. Type: string (or Expression with resultType string). */
  host: any;
  /** The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** The authentication type to be used to connect to the FTP server. */
  authenticationType?: FtpAuthenticationType;
  /** Username to logon the FTP server. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to logon the FTP server. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */
  enableSsl?: any;
  /** If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */
  enableServerCertificateValidation?: any;
}

/** A linked service for an SSH File Transfer Protocol (SFTP) server. */
export interface SftpServerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Sftp";
  /** The SFTP server host name. Type: string (or Expression with resultType string). */
  host: any;
  /** The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** The authentication type to be used to connect to the SFTP server. */
  authenticationType?: SftpAuthenticationType;
  /** The username used to log on to the SFTP server. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to logon the SFTP server for Basic authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). */
  privateKeyPath?: any;
  /** Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. */
  privateKeyContent?: SecretBaseUnion;
  /** The password to decrypt the SSH private key if the SSH private key is encrypted. */
  passPhrase?: SecretBaseUnion;
  /** If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). */
  skipHostKeyValidation?: any;
  /** The host key finger-print of the SFTP server.
When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). */
  hostKeyFingerprint?: any;
}

/** SAP Business Warehouse Linked Service. */
export interface SapBWLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapBW";
  /** Host name of the SAP BW instance. Type: string (or Expression with resultType string). */
  server: any;
  /** System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */
  systemNumber: any;
  /** Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */
  clientId: any;
  /** Username to access the SAP BW server. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP BW server. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** SAP HANA Linked Service. */
export interface SapHanaLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapHana";
  /** SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** Host name of the SAP HANA server. Type: string (or Expression with resultType string). */
  server: any;
  /** The authentication type to be used to connect to the SAP HANA server. */
  authenticationType?: SapHanaAuthenticationType;
  /** Username to access the SAP HANA server. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP HANA server. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Amazon Marketplace Web Service linked service. */
export interface AmazonMWSLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonMWS";
  /** The endpoint of the Amazon MWS server (i.e. mws.amazonservices.com) */
  endpoint: any;
  /** The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */
  marketplaceID: any;
  /** The Amazon seller ID. */
  sellerID: any;
  /** The Amazon MWS authentication token. */
  mwsAuthToken?: SecretBaseUnion;
  /** The access key id used to access data. */
  accessKeyId: any;
  /** The secret key used to access data. */
  secretKey?: SecretBaseUnion;
  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
  useEncryptedEndpoints?: any;
  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
  useHostVerification?: any;
  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
  usePeerVerification?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure PostgreSQL linked service. */
export interface AzurePostgreSqlLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzurePostgreSql";
  /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of password in connection string. */
  password?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Concur Service linked service. */
export interface ConcurLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Concur";
  /** Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */
  connectionProperties?: any;
  /** Application client_id supplied by Concur App Management. */
  clientId: any;
  /** The user name that you use to access Concur Service. */
  username: any;
  /** The password corresponding to the user name that you provided in the username field. */
  password?: SecretBaseUnion;
  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
  useEncryptedEndpoints?: any;
  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
  useHostVerification?: any;
  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
  usePeerVerification?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
*/
  encryptedCredential?: any;
}

/** Couchbase server linked service. */
export interface CouchbaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Couchbase";
  /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of credString in connection string. */
  credString?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Drill server linked service. */
export interface DrillLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Drill";
  /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of password in connection string. */
  pwd?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Eloqua server linked service. */
export interface EloquaLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Eloqua";
  /** The endpoint of the Eloqua server. (i.e. eloqua.example.com) */
  endpoint: any;
  /** The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) */
  username: any;
  /** The password corresponding to the user name. */
  password?: SecretBaseUnion;
  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
  useEncryptedEndpoints?: any;
  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
  useHostVerification?: any;
  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
  usePeerVerification?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Google BigQuery service linked service. */
export interface GoogleBigQueryLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleBigQuery";
  /** The default BigQuery project to query against. */
  project: any;
  /** A comma-separated list of public BigQuery projects to access. */
  additionalProjects?: any;
  /** Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. */
  requestGoogleDriveScope?: any;
  /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */
  authenticationType: GoogleBigQueryAuthenticationType;
  /** The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. */
  refreshToken?: SecretBaseUnion;
  /** The client id of the Google application used to acquire the refresh token. Type: string (or Expression with resultType string). */
  clientId?: any;
  /** The client secret of the Google application used to acquire the refresh token. */
  clientSecret?: SecretBaseUnion;
  /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */
  email?: any;
  /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */
  keyFilePath?: any;
  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
  trustedCertPath?: any;
  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */
  useSystemTrustStore?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Greenplum Database linked service. */
export interface GreenplumLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Greenplum";
  /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString?: any;
  /** The Azure key vault secret reference of password in connection string. */
  pwd?: AzureKeyVaultSecretReference;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** HBase server linked service. */
export interface HBaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HBase";
  /** The IP address or host name of the HBase server. (i.e. 192.168.222.160) */
  host: any;
  /** The TCP port that the HBase instance uses to listen for client connections. The default value is 9090.
*/
  port?: any;
  /** The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) */
  httpPath?: any;
  /** The authentication mechanism to use to connect to the HBase server. */
  authenticationType: HBaseAuthenticationType;
  /** The user name used to connect to the HBase instance. */
  username?: any;
  /** The password corresponding to the user name. */
  password?: SecretBaseUnion;
  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
  enableSsl?: any;
  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
  trustedCertPath?: any;
  /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */
  allowHostNameCNMismatch?: any;
  /** Specifies whether to allow self-signed certificates from the server. The default value is false. */
  allowSelfSignedServerCert?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Hive Server linked service. */
export interface HiveLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Hive";
  /** IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enabled). */
  host: any;
  /** The TCP port that the Hive server uses to listen for client connections. */
  port?: any;
  /** The type of Hive server. */
  serverType?: HiveServerType;
  /** The transport protocol to use in the Thrift layer. */
  thriftTransportProtocol?: HiveThriftTransportProtocol;
  /** The authentication method used to access the Hive server. */
  authenticationType: HiveAuthenticationType;
  /** true to indicate using the ZooKeeper service, false not. */
  serviceDiscoveryMode?: any;
  /** The namespace on ZooKeeper under which Hive Server 2 nodes are added. */
  zooKeeperNameSpace?: any;
  /** Specifies whether the driver uses native HiveQL queries, or converts them into an equivalent form in HiveQL. */
  useNativeQuery?: any;
  /** The user name that you use to access Hive Server. */
  username?: any;
  /** The password corresponding to the user name that you provided in the Username field. */
  password?: SecretBaseUnion;
  /** The partial URL corresponding to the Hive server. */
  httpPath?: any;
  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
  enableSsl?: any;
  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
  trustedCertPath?: any;
  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */
  useSystemTrustStore?: any;
  /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */
  allowHostNameCNMismatch?: any;
  /** Specifies whether to allow self-signed certificates from the server. The default value is false. */
  allowSelfSignedServerCert?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Hubspot Service linked service.
*/\nexport interface HubspotLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Hubspot\";\n /** The client ID associated with your Hubspot application. */\n clientId: any;\n /** The client secret associated with your Hubspot application. */\n clientSecret?: SecretBaseUnion;\n /** The access token obtained when initially authenticating your OAuth integration. */\n accessToken?: SecretBaseUnion;\n /** The refresh token obtained when initially authenticating your OAuth integration. */\n refreshToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Impala server linked service. */\nexport interface ImpalaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Impala\";\n /** The IP address or host name of the Impala server. (i.e. 192.168.222.160) */\n host: any;\n /** The TCP port that the Impala server uses to listen for client connections. The default value is 21050. */\n port?: any;\n /** The authentication type to use. */\n authenticationType: ImpalaAuthenticationType;\n /** The user name used to access the Impala server. The default value is anonymous when using SASLUsername. 
*/\n username?: any;\n /** The password corresponding to the user name when using UsernameAndPassword. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Jira Service linked service. */\nexport interface JiraLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Jira\";\n /** The IP address or host name of the Jira service. (e.g. jira.example.com) */\n host: any;\n /** The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. */\n port?: any;\n /** The user name that you use to access Jira Service. */\n username: any;\n /** The password corresponding to the user name that you provided in the username field. 
*/\n password?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Magento server linked service. */\nexport interface MagentoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Magento\";\n /** The URL of the Magento instance. (i.e. 192.168.222.110/magento3) */\n host: any;\n /** The access token from Magento. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** MariaDB server linked service. 
*/\nexport interface MariaDBLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MariaDB\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Database for MariaDB linked service. */\nexport interface AzureMariaDBLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMariaDB\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Marketo server linked service. */\nexport interface MarketoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Marketo\";\n /** The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) */\n endpoint: any;\n /** The client Id of your Marketo service. */\n clientId: any;\n /** The client secret of your Marketo service. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Paypal Service linked service. */\nexport interface PaypalLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Paypal\";\n /** The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) */\n host: any;\n /** The client ID associated with your PayPal application. */\n clientId: any;\n /** The client secret associated with your PayPal application. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Phoenix server linked service. 
*/\nexport interface PhoenixLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Phoenix\";\n /** The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) */\n host: any;\n /** The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. */\n port?: any;\n /** The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. */\n httpPath?: any;\n /** The authentication mechanism used to connect to the Phoenix server. */\n authenticationType: PhoenixAuthenticationType;\n /** The user name used to connect to the Phoenix server. */\n username?: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/\n encryptedCredential?: any;\n}\n\n/** Presto server linked service. */\nexport interface PrestoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Presto\";\n /** The IP address or host name of the Presto server. (i.e. 192.168.222.160) */\n host: any;\n /** The version of the Presto server. (i.e. 0.148-t) */\n serverVersion: any;\n /** The catalog context for all request against the server. */\n catalog: any;\n /** The TCP port that the Presto server uses to listen for client connections. The default value is 8080. */\n port?: any;\n /** The authentication mechanism used to connect to the Presto server. */\n authenticationType: PrestoAuthenticationType;\n /** The user name used to connect to the Presto server. */\n username?: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. 
*/\n timeZoneID?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** QuickBooks server linked service. */\nexport interface QuickBooksLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"QuickBooks\";\n /** Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */\n endpoint: any;\n /** The company ID of the QuickBooks company to authorize. */\n companyId: any;\n /** The consumer key for OAuth 1.0 authentication. */\n consumerKey: any;\n /** The consumer secret for OAuth 1.0 authentication. */\n consumerSecret: SecretBaseUnion;\n /** The access token for OAuth 1.0 authentication. */\n accessToken: SecretBaseUnion;\n /** The access token secret for OAuth 1.0 authentication. */\n accessTokenSecret: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** ServiceNow server linked service. */\nexport interface ServiceNowLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ServiceNow\";\n /** The endpoint of the ServiceNow server. (i.e. <instance>.service-now.com) */\n endpoint: any;\n /** The authentication type to use. 
*/\n authenticationType: ServiceNowAuthenticationType;\n /** The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. */\n username?: any;\n /** The password corresponding to the user name for Basic and OAuth2 authentication. */\n password?: SecretBaseUnion;\n /** The client id for OAuth2 authentication. */\n clientId?: any;\n /** The client secret for OAuth2 authentication. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Shopify Service linked service. */\nexport interface ShopifyLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Shopify\";\n /** The endpoint of the Shopify server. (i.e. mystore.myshopify.com) */\n host: any;\n /** The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Spark Server linked service. */\nexport interface SparkLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Spark\";\n /** IP address or host name of the Spark server */\n host: any;\n /** The TCP port that the Spark server uses to listen for client connections. */\n port: any;\n /** The type of Spark server. */\n serverType?: SparkServerType;\n /** The transport protocol to use in the Thrift layer. */\n thriftTransportProtocol?: SparkThriftTransportProtocol;\n /** The authentication method used to access the Spark server. */\n authenticationType: SparkAuthenticationType;\n /** The user name that you use to access Spark Server. */\n username?: any;\n /** The password corresponding to the user name that you provided in the Username field */\n password?: SecretBaseUnion;\n /** The partial URL corresponding to the Spark server. */\n httpPath?: any;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
*/\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Square Service linked service. */\nexport interface SquareLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Square\";\n /** Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The URL of the Square instance. (i.e. mystore.mysquare.com) */\n host: any;\n /** The client ID associated with your Square application. */\n clientId: any;\n /** The client secret associated with your Square application. */\n clientSecret?: SecretBaseUnion;\n /** The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) */\n redirectUri: any;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Xero Service linked service. */\nexport interface XeroLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Xero\";\n /** Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the Xero server. (i.e. api.xero.com) */\n host: any;\n /** The consumer key associated with the Xero application. */\n consumerKey?: SecretBaseUnion;\n /**\n * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings(\n * ).\n */\n privateKey?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Zoho server linked service. */\nexport interface ZohoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Zoho\";\n /** Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the Zoho server. (i.e. 
crm.zoho.com/crm/private) */\n endpoint: any;\n /** The access token for Zoho authentication. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Vertica linked service. */\nexport interface VerticaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Vertica\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Netezza linked service. */\nexport interface NetezzaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Netezza\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. 
*/\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Salesforce Marketing Cloud linked service. */\nexport interface SalesforceMarketingCloudLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceMarketingCloud\";\n /** Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */\n clientId: any;\n /** The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** HDInsight ondemand linked service. 
*/\nexport interface HDInsightOnDemandLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightOnDemand\";\n /** Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). */\n clusterSize: any;\n /** The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). */\n timeToLive: any;\n /** Version of the HDInsight cluster. Type: string (or Expression with resultType string). */\n version: any;\n /** Azure Storage linked service to be used by the on-demand cluster for storing and processing data. */\n linkedServiceName: LinkedServiceReference;\n /** The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). */\n hostSubscriptionId: any;\n /** The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key for the service principal id. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant: any;\n /** The resource group where the cluster belongs. Type: string (or Expression with resultType string). */\n clusterResourceGroup: any;\n /** The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). */\n clusterNamePrefix?: any;\n /** The username to access the cluster. Type: string (or Expression with resultType string). */\n clusterUserName?: any;\n /** The password to access the cluster. 
*/\n clusterPassword?: SecretBaseUnion;\n /** The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). */\n clusterSshUserName?: any;\n /** The password to SSH remotely connect cluster’s node (for Linux). */\n clusterSshPassword?: SecretBaseUnion;\n /** Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. */\n additionalLinkedServiceNames?: LinkedServiceReference[];\n /** The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. */\n hcatalogLinkedServiceName?: LinkedServiceReference;\n /** The cluster type. Type: string (or Expression with resultType string). */\n clusterType?: any;\n /** The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). */\n sparkVersion?: any;\n /** Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. */\n coreConfiguration?: any;\n /** Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. */\n hBaseConfiguration?: any;\n /** Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. */\n hdfsConfiguration?: any;\n /** Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. */\n hiveConfiguration?: any;\n /** Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. */\n mapReduceConfiguration?: any;\n /** Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. */\n oozieConfiguration?: any;\n /** Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. */\n stormConfiguration?: any;\n /** Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. 
*/\n yarnConfiguration?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** Specifies the size of the head node for the HDInsight cluster. */\n headNodeSize?: any;\n /** Specifies the size of the data node for the HDInsight cluster. */\n dataNodeSize?: any;\n /** Specifies the size of the Zoo Keeper node for the HDInsight cluster. */\n zookeeperNodeSize?: any;\n /** Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */\n scriptActions?: ScriptAction[];\n /** The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). */\n virtualNetworkId?: any;\n /** The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). */\n subnetName?: any;\n}\n\n/** Azure Data Lake Analytics linked service. */\nexport interface AzureDataLakeAnalyticsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeAnalytics\";\n /** The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). */\n accountName: any;\n /** The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The Key of the application used to authenticate against the Azure Data Lake Analytics account. 
*/\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant: any;\n /** Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */\n subscriptionId?: any;\n /** Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */\n resourceGroupName?: any;\n /** Azure Data Lake Analytics URI Type: string (or Expression with resultType string). */\n dataLakeAnalyticsUri?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Databricks linked service. */\nexport interface AzureDatabricksLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricks\";\n /** <REGION>.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */\n domain: any;\n /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */\n accessToken?: SecretBaseUnion;\n /** Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */\n authentication?: any;\n /** Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */\n workspaceResourceId?: any;\n /** The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). 
*/\n  existingClusterId?: any;\n  /** The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). */\n  instancePoolId?: any;\n  /** If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). */\n  newClusterVersion?: any;\n  /** If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). */\n  newClusterNumOfWorker?: any;\n  /** The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). */\n  newClusterNodeType?: any;\n  /** A set of optional, user-specified Spark configuration key-value pairs. */\n  newClusterSparkConf?: { [propertyName: string]: any };\n  /** A set of optional, user-specified Spark environment variables key-value pairs. */\n  newClusterSparkEnvVars?: { [propertyName: string]: any };\n  /** Additional tags for cluster resources. This property is ignored in instance pool configurations. */\n  newClusterCustomTags?: { [propertyName: string]: any };\n  /** Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */\n  newClusterLogDestination?: any;\n  /** The driver node type for the new job cluster. 
This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */\n newClusterDriverNodeType?: any;\n /** User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */\n newClusterInitScripts?: any;\n /** Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */\n newClusterEnableElasticDisk?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). */\n policyId?: any;\n}\n\n/** Azure Databricks Delta Lake linked service. */\nexport interface AzureDatabricksDeltaLakeLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLake\";\n /** <REGION>.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */\n domain: any;\n /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. */\n accessToken: SecretBaseUnion;\n /** The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */\n clusterId?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/\n encryptedCredential?: any;\n}\n\n/** Responsys linked service. */\nexport interface ResponsysLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Responsys\";\n /** The endpoint of the Responsys server. */\n endpoint: any;\n /** The client ID associated with the Responsys application. Type: string (or Expression with resultType string). */\n clientId: any;\n /** The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Dynamics AX linked service. */\nexport interface DynamicsAXLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsAX\";\n /** The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. */\n url: any;\n /** Specify the application's client ID. Type: string (or Expression with resultType string). */\n servicePrincipalId: any;\n /** Specify the application's key. 
Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). */\n servicePrincipalKey: SecretBaseUnion;\n /** Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). */\n tenant: any;\n /** Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). */\n aadResourceId: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Oracle Service Cloud linked service. */\nexport interface OracleServiceCloudLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleServiceCloud\";\n /** The URL of the Oracle Service Cloud instance. */\n host: any;\n /** The user name that you use to access Oracle Service Cloud server. */\n username: any;\n /** The password corresponding to the user name that you provided in the username key. */\n password: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Google AdWords service linked service. */\nexport interface GoogleAdWordsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleAdWords\";\n /** Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The Client customer ID of the AdWords account that you want to fetch report data for. */\n clientCustomerID?: any;\n /** The developer token associated with the manager account that you use to grant access to the AdWords API. */\n developerToken?: SecretBaseUnion;\n /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */\n authenticationType?: GoogleAdWordsAuthenticationType;\n /** The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. */\n refreshToken?: SecretBaseUnion;\n /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */\n clientId?: any;\n /** The client secret of the google application used to acquire the refresh token. */\n clientSecret?: SecretBaseUnion;\n /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */\n email?: any;\n /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */\n keyFilePath?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. 
This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SAP Table Linked Service. */\nexport interface SapTableLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapTable\";\n /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */\n server?: any;\n /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */\n systemNumber?: any;\n /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */\n clientId?: any;\n /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */\n language?: any;\n /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */\n systemId?: any;\n /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to access the SAP server where the table is located. */\n password?: SecretBaseUnion;\n /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). 
*/\n messageServer?: any;\n /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */\n messageServerService?: any;\n /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */\n sncMode?: any;\n /** Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncMyName?: any;\n /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncPartnerName?: any;\n /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncLibraryPath?: any;\n /** SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */\n sncQop?: any;\n /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */\n logonGroup?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Data Explorer (Kusto) linked service. */\nexport interface AzureDataExplorerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorer\";\n /** The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType string) */\n endpoint: any;\n /** The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). 
*/\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Kusto. */\n servicePrincipalKey?: SecretBaseUnion;\n /** Database name for connection. Type: string (or Expression with resultType string). */\n database: any;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n}\n\n/** Azure Function linked service. */\nexport interface AzureFunctionLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFunction\";\n /** The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. */\n functionAppUrl: any;\n /** Function or Host key for Azure Function App. */\n functionKey?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Snowflake linked service. */\nexport interface SnowflakeLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Snowflake\";\n /** The connection string of snowflake. Type: string, SecureString. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SharePoint Online List linked service. 
*/\nexport interface SharePointOnlineListLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SharePointOnlineList\";\n /** The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */\n siteUrl: any;\n /** The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). */\n tenantId: any;\n /** The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). */\n servicePrincipalId: any;\n /** The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */\n servicePrincipalKey: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Synapse Analytics (Artifacts) linked service. */\nexport interface AzureSynapseArtifactsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSynapseArtifacts\";\n /** https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace URL. Type: string (or Expression with resultType string). */\n endpoint: any;\n /** Required to specify MSI, if using system assigned managed identity as authentication method. Type: string (or Expression with resultType string). */\n authentication?: any;\n}\n\n/** Base class for all control activities like IfCondition, ForEach , Until. 
*/\nexport interface ControlActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"Container\"\n | \"ExecutePipeline\"\n | \"IfCondition\"\n | \"Switch\"\n | \"ForEach\"\n | \"Wait\"\n | \"Fail\"\n | \"Until\"\n | \"Validation\"\n | \"Filter\"\n | \"SetVariable\"\n | \"AppendVariable\"\n | \"WebHook\";\n}\n\n/** Base class for all execution activities. */\nexport interface ExecutionActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"Execution\"\n | \"Copy\"\n | \"HDInsightHive\"\n | \"HDInsightPig\"\n | \"HDInsightMapReduce\"\n | \"HDInsightStreaming\"\n | \"HDInsightSpark\"\n | \"ExecuteSSISPackage\"\n | \"Custom\"\n | \"SqlServerStoredProcedure\"\n | \"Delete\"\n | \"AzureDataExplorerCommand\"\n | \"Lookup\"\n | \"WebActivity\"\n | \"GetMetadata\"\n | \"AzureMLBatchExecution\"\n | \"AzureMLUpdateResource\"\n | \"AzureMLExecutePipeline\"\n | \"DataLakeAnalyticsU-SQL\"\n | \"DatabricksNotebook\"\n | \"DatabricksSparkJar\"\n | \"DatabricksSparkPython\"\n | \"AzureFunctionActivity\"\n | \"ExecuteDataFlow\"\n | \"Script\"\n | \"SynapseNotebook\"\n | \"SparkJob\";\n /** Linked service reference. */\n linkedServiceName?: LinkedServiceReference;\n /** Activity policy. */\n policy?: ActivityPolicy;\n}\n\n/** Execute SQL pool stored procedure activity. */\nexport interface SqlPoolStoredProcedureActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlPoolStoredProcedure\";\n /** SQL pool stored procedure reference. */\n sqlPool: SqlPoolReference;\n /** Stored procedure name. Type: string (or Expression with resultType string). */\n storedProcedureName: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". 
*/\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n}\n\n/** Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. */\nexport interface RerunTumblingWindowTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RerunTumblingWindowTrigger\";\n /** The parent trigger reference. */\n parentTrigger: any;\n /** The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */\n requestedStartTime: Date;\n /** The end time for the time period for which restatement is initiated. Only UTC time is currently supported. */\n requestedEndTime: Date;\n /** The max number of parallel time windows (ready for execution) for which a rerun is triggered. */\n rerunConcurrency: number;\n}\n\n/** Base class for all triggers that support one to many model for trigger to pipeline. */\nexport interface MultiplePipelineTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"MultiplePipelineTrigger\"\n | \"ScheduleTrigger\"\n | \"BlobTrigger\"\n | \"BlobEventsTrigger\"\n | \"CustomEventsTrigger\";\n /** Pipelines that need to be started. */\n pipelines?: TriggerPipelineReference[];\n}\n\n/** Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). */\nexport interface TumblingWindowTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TumblingWindowTrigger\";\n /** Pipeline for which runs are created when an event is fired for trigger window that is ready. */\n pipeline: TriggerPipelineReference;\n /** The frequency of the time windows. 
*/\n frequency: TumblingWindowFrequency;\n /** The interval of the time windows. The minimum interval allowed is 15 Minutes. */\n interval: number;\n /** The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. */\n startTime: Date;\n /** The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. */\n endTime?: Date;\n /** Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n delay?: any;\n /** The max number of parallel time windows (ready for execution) for which a new run is triggered. */\n maxConcurrency: number;\n /** Retry policy that will be applied for failed pipeline runs. */\n retryPolicy?: RetryPolicy;\n /** Triggers that this trigger depends on. Only tumbling window triggers are supported. */\n dependsOn?: DependencyReferenceUnion[];\n}\n\n/** Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. */\nexport interface ChainingTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ChainingTrigger\";\n /** Pipeline for which runs are created when all upstream pipelines complete successfully. */\n pipeline: TriggerPipelineReference;\n /** Upstream Pipelines. */\n dependsOn: PipelineReference[];\n /** Run Dimension property that needs to be emitted by upstream pipelines. 
*/\n runDimension: string;\n}\n\n/** Transformation for data flow source. */\nexport interface DataFlowSource extends Transformation {\n /** Schema linked service reference. */\n schemaLinkedService?: LinkedServiceReference;\n}\n\n/** Transformation for data flow sink. */\nexport interface DataFlowSink extends Transformation {\n /** Schema linked service reference. */\n schemaLinkedService?: LinkedServiceReference;\n /** Rejected data linked service reference. */\n rejectedDataLinkedService?: LinkedServiceReference;\n}\n\n/** The location of azure blob dataset. */\nexport interface AzureBlobStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageLocation\";\n /** Specify the container of azure blob. Type: string (or Expression with resultType string). */\n container?: any;\n}\n\n/** The location of azure blobFS dataset. */\nexport interface AzureBlobFSLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSLocation\";\n /** Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). */\n fileSystem?: any;\n}\n\n/** The location of azure data lake store dataset. */\nexport interface AzureDataLakeStoreLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreLocation\";\n}\n\n/** The location of amazon S3 dataset. */\nexport interface AmazonS3Location extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3Location\";\n /** Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) */\n bucketName?: any;\n /** Specify the version of amazon S3. Type: string (or Expression with resultType string). 
*/\n version?: any;\n}\n\n/** The location of file server dataset. */\nexport interface FileServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerLocation\";\n}\n\n/** The location of file server dataset. */\nexport interface AzureFileStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageLocation\";\n}\n\n/** The location of Google Cloud Storage dataset. */\nexport interface GoogleCloudStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleCloudStorageLocation\";\n /** Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) */\n bucketName?: any;\n /** Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). */\n version?: any;\n}\n\n/** The location of ftp server dataset. */\nexport interface FtpServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FtpServerLocation\";\n}\n\n/** The location of SFTP dataset. */\nexport interface SftpLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpLocation\";\n}\n\n/** The location of http server. */\nexport interface HttpServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpServerLocation\";\n /** Specify the relativeUrl of http server. Type: string (or Expression with resultType string) */\n relativeUrl?: any;\n}\n\n/** The location of HDFS. 
*/\nexport interface HdfsLocation extends DatasetLocation {\n  /** Polymorphic discriminator, which specifies the different types this object can be */\n  type: \"HdfsLocation\";\n}\n\n/** The data stored in text format. */\nexport interface TextFormat extends DatasetStorageFormat {\n  /** Polymorphic discriminator, which specifies the different types this object can be */\n  type: \"TextFormat\";\n  /** The column delimiter. Type: string (or Expression with resultType string). */\n  columnDelimiter?: any;\n  /** The row delimiter. Type: string (or Expression with resultType string). */\n  rowDelimiter?: any;\n  /** The escape character. Type: string (or Expression with resultType string). */\n  escapeChar?: any;\n  /** The quote character. Type: string (or Expression with resultType string). */\n  quoteChar?: any;\n  /** The null value string. Type: string (or Expression with resultType string). */\n  nullValue?: any;\n  /** The code page name of the preferred encoding. If missing, the default value is 'utf-8', unless BOM denotes another Unicode encoding. Refer to the 'Name' column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n  encodingName?: any;\n  /** Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). */\n  treatEmptyAsNull?: any;\n  /** The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). */\n  skipLineCount?: any;\n  /** When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */\n  firstRowAsHeader?: any;\n}\n\n/** The data stored in JSON format. 
*/\nexport interface JsonFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonFormat\";\n /** File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. */\n filePattern?: any;\n /** The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). */\n nestingSeparator?: any;\n /** The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The JSONPath of the JSON array element to be flattened. Example: \"$.ArrayPath\". Type: string (or Expression with resultType string). */\n jsonNodeReference?: any;\n /** The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with \"$\"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {\"Column1\": \"$.Column1Path\", \"Column2\": \"Column2PathInArray\"}. Type: object (or Expression with resultType object). */\n jsonPathDefinition?: any;\n}\n\n/** The data stored in Avro format. */\nexport interface AvroFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroFormat\";\n}\n\n/** The data stored in Optimized Row Columnar (ORC) format. 
*/\nexport interface OrcFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcFormat\";\n}\n\n/** The data stored in Parquet format. */\nexport interface ParquetFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetFormat\";\n}\n\n/** A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. */\nexport interface WebAnonymousAuthentication\n extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"Anonymous\";\n}\n\n/** A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. */\nexport interface WebBasicAuthentication extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"Basic\";\n /** User name for Basic authentication. Type: string (or Expression with resultType string). */\n username: any;\n /** The password for Basic authentication. */\n password: SecretBaseUnion;\n}\n\n/** A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. */\nexport interface WebClientCertificateAuthentication\n extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"ClientCertificate\";\n /** Base64-encoded contents of a PFX file. */\n pfx: SecretBaseUnion;\n /** Password for the PFX file. */\n password: SecretBaseUnion;\n}\n\n/** Azure blob read settings. 
*/\nexport interface AzureBlobStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure blob wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Azure blobFS read settings. */\nexport interface AzureBlobFSReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Azure data lake store read settings. */\nexport interface AzureDataLakeStoreReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** ADLS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** ADLS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. 
Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */\n listAfter?: any;\n /** Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */\n listBefore?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Amazon S3 read settings. */\nexport interface AmazonS3ReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3ReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the S3 object name. 
Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** File server read settings. */\nexport interface FileServerReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** FileServer wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** FileServer wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */\n fileFilter?: any;\n}\n\n/** Azure File Storage read settings. */\nexport interface AzureFileStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
*/\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Google Cloud Storage read settings. */\nexport interface GoogleCloudStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleCloudStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. 
Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Ftp read settings. */\nexport interface FtpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FtpReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Ftp wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Ftp wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Specify whether to use binary transfer mode for FTP stores. */\n useBinaryTransfer?: boolean;\n /** If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */\n disableChunking?: any;\n}\n\n/** Sftp read settings. 
*/\nexport interface SftpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Sftp wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Sftp wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */\n disableChunking?: any;\n}\n\n/** Http read settings. */\nexport interface HttpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpReadSettings\";\n /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
*/\n requestMethod?: any;\n /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** Specifies the timeout for a HTTP client to get HTTP response from HTTP server. */\n requestTimeout?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n}\n\n/** HDFS read settings. */\nexport interface HdfsReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HdfsReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** HDFS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** HDFS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/\n modifiedDatetimeEnd?: any;\n /** Specifies Distcp-related settings. */\n distcpSettings?: DistcpSettings;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n}\n\n/** Sftp write settings. */\nexport interface SftpWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpWriteSettings\";\n /** Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). */\n operationTimeout?: any;\n /** Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). */\n useTempFileRename?: any;\n}\n\n/** Azure blob write settings. */\nexport interface AzureBlobStorageWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageWriteSettings\";\n /** Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */\n blockSizeInMB?: any;\n}\n\n/** Azure blobFS write settings. */\nexport interface AzureBlobFSWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSWriteSettings\";\n /** Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */\n blockSizeInMB?: any;\n}\n\n/** Azure data lake store write settings. 
*/\nexport interface AzureDataLakeStoreWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreWriteSettings\";\n /** Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of \"2018-12-01T05:00:00Z\". Default value is NULL. Type: integer (or Expression with resultType integer). */\n expiryDateTime?: any;\n}\n\n/** File server write settings. */\nexport interface FileServerWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerWriteSettings\";\n}\n\n/** Azure File Storage write settings. */\nexport interface AzureFileStorageWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageWriteSettings\";\n}\n\n/** Delimited text read settings. */\nexport interface DelimitedTextReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextReadSettings\";\n /** Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). */\n skipLineCount?: any;\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** Json read settings. */\nexport interface JsonReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** Xml read settings. 
*/\nexport interface XmlReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XmlReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n /** Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). */\n validationMode?: any;\n /** Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */\n detectDataType?: any;\n /** Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */\n namespaces?: any;\n /** Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: \"{\"http://www.example.com/xml\":\"prefix\"}\" Type: object (or Expression with resultType object). */\n namespacePrefixes?: any;\n}\n\n/** Binary read settings. */\nexport interface BinaryReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BinaryReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** The ZipDeflate compression read settings. */\nexport interface ZipDeflateReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZipDeflateReadSettings\";\n /** Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveZipFileNameAsFolder?: any;\n}\n\n/** The Tar compression read settings. 
*/\nexport interface TarReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TarReadSettings\";\n /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveCompressionFileNameAsFolder?: any;\n}\n\n/** The TarGZip compression read settings. */\nexport interface TarGZipReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TarGZipReadSettings\";\n /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveCompressionFileNameAsFolder?: any;\n}\n\n/** Avro write settings. */\nexport interface AvroWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroWriteSettings\";\n /** Top level record name in write result, which is required in AVRO spec. */\n recordName?: string;\n /** Record namespace in the write result. */\n recordNamespace?: string;\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Orc write settings. */\nexport interface OrcWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcWriteSettings\";\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). 
*/\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Parquet write settings. */\nexport interface ParquetWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetWriteSettings\";\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Delimited text write settings. */\nexport interface DelimitedTextWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextWriteSettings\";\n /** Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). */\n quoteAllText?: any;\n /** The file extension used to create the files. Type: string (or Expression with resultType string). */\n fileExtension: any;\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Json write settings. 
*/\nexport interface JsonWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonWriteSettings\";\n /** File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. */\n filePattern?: any;\n}\n\n/** A copy activity Avro source. */\nexport interface AvroSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroSource\";\n /** Avro store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity excel source. */\nexport interface ExcelSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExcelSource\";\n /** Excel store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Parquet source. */\nexport interface ParquetSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetSource\";\n /** Parquet store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity DelimitedText source. 
*/\nexport interface DelimitedTextSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextSource\";\n /** DelimitedText store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** DelimitedText format settings. */\n formatSettings?: DelimitedTextReadSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Json source. */\nexport interface JsonSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonSource\";\n /** Json store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Json format settings. */\n formatSettings?: JsonReadSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Xml source. */\nexport interface XmlSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XmlSource\";\n /** Xml store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Xml format settings. */\n formatSettings?: XmlReadSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity ORC source. */\nexport interface OrcSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcSource\";\n /** ORC store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Binary source. */\nexport interface BinarySource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BinarySource\";\n /** Binary store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Binary format settings. */\n formatSettings?: BinaryReadSettings;\n}\n\n/** Copy activity sources of tabular type. */\nexport interface TabularSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TabularSource\"\n | \"AzureTableSource\"\n | \"InformixSource\"\n | \"Db2Source\"\n | \"OdbcSource\"\n | \"MySqlSource\"\n | \"PostgreSqlSource\"\n | \"SybaseSource\"\n | \"SapBwSource\"\n | \"SalesforceSource\"\n | \"SapCloudForCustomerSource\"\n | \"SapEccSource\"\n | \"SapHanaSource\"\n | \"SapOpenHubSource\"\n | \"SapOdpSource\"\n | \"SapTableSource\"\n | \"SqlSource\"\n | \"SqlServerSource\"\n | \"AmazonRdsForSqlServerSource\"\n | \"AzureSqlSource\"\n | \"SqlMISource\"\n | \"SqlDWSource\"\n | \"AzureMySqlSource\"\n | \"TeradataSource\"\n | \"CassandraSource\"\n | \"AmazonMWSSource\"\n | \"AzurePostgreSqlSource\"\n | \"ConcurSource\"\n | \"CouchbaseSource\"\n | \"DrillSource\"\n | \"EloquaSource\"\n | \"GoogleBigQuerySource\"\n | \"GreenplumSource\"\n | \"HBaseSource\"\n | \"HiveSource\"\n | \"HubspotSource\"\n | \"ImpalaSource\"\n | \"JiraSource\"\n | \"MagentoSource\"\n | \"MariaDBSource\"\n | \"AzureMariaDBSource\"\n | \"MarketoSource\"\n | \"PaypalSource\"\n | \"PhoenixSource\"\n | \"PrestoSource\"\n | \"QuickBooksSource\"\n | \"ServiceNowSource\"\n | \"ShopifySource\"\n | \"SparkSource\"\n | \"SquareSource\"\n | \"XeroSource\"\n | \"ZohoSource\"\n | \"NetezzaSource\"\n | \"VerticaSource\"\n | \"SalesforceMarketingCloudSource\"\n | \"ResponsysSource\"\n | \"DynamicsAXSource\"\n | 
\"OracleServiceCloudSource\"\n | \"GoogleAdWordsSource\"\n | \"AmazonRedshiftSource\";\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Azure Blob source. */\nexport interface BlobSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BlobSource\";\n /** Treat empty as null. Type: boolean (or Expression with resultType boolean). */\n treatEmptyAsNull?: any;\n /** Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). */\n skipHeaderLineCount?: any;\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n}\n\n/** A copy activity Document Database Collection source. */\nexport interface DocumentDbCollectionSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DocumentDbCollectionSource\";\n /** Documents query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Nested properties separator. Type: string (or Expression with resultType string). */\n nestingSeparator?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Azure CosmosDB (SQL API) Collection source. 
*/\nexport interface CosmosDbSqlApiSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbSqlApiSource\";\n /** SQL API query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Page size of the result. Type: integer (or Expression with resultType integer). */\n pageSize?: any;\n /** Preferred regions. Type: array of strings (or Expression with resultType array of strings). */\n preferredRegions?: any;\n /** Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). */\n detectDatetime?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Dynamics source. */\nexport interface DynamicsSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsSource\";\n /** FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Dynamics CRM source. */\nexport interface DynamicsCrmSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsCrmSource\";\n /** FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
*/\n additionalColumns?: any;\n}\n\n/** A copy activity Common Data Service for Apps source. */\nexport interface CommonDataServiceForAppsSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CommonDataServiceForAppsSource\";\n /** FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for various relational databases. */\nexport interface RelationalSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RelationalSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for Microsoft Access. */\nexport interface MicrosoftAccessSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MicrosoftAccessSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for OData source. */\nexport interface ODataSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ODataSource\";\n /** OData query. 
For example, \"$top=1\". Type: string (or Expression with resultType string). */\n query?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Salesforce Service Cloud source. */\nexport interface SalesforceServiceCloudSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceServiceCloudSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n /** The read behavior for the operation. Default is Query. */\n readBehavior?: SalesforceSourceReadBehavior;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Rest service source. */\nexport interface RestSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RestSource\";\n /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** The pagination rules to compose next page requests. 
Type: string (or Expression with resultType string). */\n paginationRules?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n /** The time to await before sending next page request. */\n requestInterval?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity file system source. */\nexport interface FileSystemSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileSystemSource\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity HDFS source. */\nexport interface HdfsSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HdfsSource\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Specifies Distcp-related settings. */\n distcpSettings?: DistcpSettings;\n}\n\n/** A copy activity Azure Data Explorer (Kusto) source. */\nexport interface AzureDataExplorerSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorerSource\";\n /** Database query. 
Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). */\n query: any;\n /** The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. */\n noTruncation?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Oracle source. */\nexport interface OracleSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleSource\";\n /** Oracle reader query. Type: string (or Expression with resultType string). */\n oracleReaderQuery?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** The partition mechanism that will be used for Oracle read in parallel. */\n partitionOption?: OraclePartitionOption;\n /** The settings that will be leveraged for Oracle source partitioning. */\n partitionSettings?: OraclePartitionSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity AmazonRdsForOracle source. */\nexport interface AmazonRdsForOracleSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForOracleSource\";\n /** AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). */\n oracleReaderQuery?: any;\n /** Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). */\n partitionOption?: any;\n /** The settings that will be leveraged for AmazonRdsForOracle source partitioning. */\n partitionSettings?: AmazonRdsForOraclePartitionSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for web page table. */\nexport interface WebSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebSource\";\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for a MongoDB database. */\nexport interface MongoDbSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbSource\";\n /** Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */\n query?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for a MongoDB Atlas database. */\nexport interface MongoDbAtlasSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbAtlasSource\";\n /** Specifies selection filter using query operators. 
To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */\n filter?: any;\n /** Cursor methods for Mongodb query */\n cursorMethods?: MongoDbCursorMethodsProperties;\n /** Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */\n batchSize?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for a MongoDB database. */\nexport interface MongoDbV2Source extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbV2Source\";\n /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */\n filter?: any;\n /** Cursor methods for Mongodb query */\n cursorMethods?: MongoDbCursorMethodsProperties;\n /** Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */\n batchSize?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for a CosmosDB (MongoDB API) database. */\nexport interface CosmosDbMongoDbApiSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbMongoDbApiSource\";\n /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */\n filter?: any;\n /** Cursor methods for Mongodb query. */\n cursorMethods?: MongoDbCursorMethodsProperties;\n /** Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */\n batchSize?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity source for an Office 365 service. */\nexport interface Office365Source extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Office365Source\";\n /** The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). */\n allowedGroups?: any;\n /** The user scope uri. Type: string (or Expression with resultType string). 
*/\n userScopeFilterUri?: any;\n /** The Column to apply the <paramref name=\"StartTime\"/> and <paramref name=\"EndTime\"/>. Type: string (or Expression with resultType string). */\n dateFilterColumn?: any;\n /** Start time of the requested range for this dataset. Type: string (or Expression with resultType string). */\n startTime?: any;\n /** End time of the requested range for this dataset. Type: string (or Expression with resultType string). */\n endTime?: any;\n /** The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { \"name\": \"Id\" }, { \"name\": \"CreatedDateTime\" } ] */\n outputColumns?: any;\n}\n\n/** A copy activity Azure Data Lake source. */\nexport interface AzureDataLakeStoreSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreSource\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n}\n\n/** A copy activity Azure BlobFS source. */\nexport interface AzureBlobFSSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSSource\";\n /** Treat empty as null. Type: boolean (or Expression with resultType boolean). */\n treatEmptyAsNull?: any;\n /** Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). */\n skipHeaderLineCount?: any;\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n}\n\n/** A copy activity source for an HTTP file. 
*/\nexport interface HttpSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpSource\";\n /** Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity snowflake source. */\nexport interface SnowflakeSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeSource\";\n /** Snowflake Sql query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Snowflake export settings. */\n exportSettings?: SnowflakeExportCopyCommand;\n}\n\n/** A copy activity Azure Databricks Delta Lake source. */\nexport interface AzureDatabricksDeltaLakeSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeSource\";\n /** Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Azure Databricks Delta Lake export settings. */\n exportSettings?: AzureDatabricksDeltaLakeExportCommand;\n}\n\n/** A copy activity source for sharePoint online list source. */\nexport interface SharePointOnlineListSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SharePointOnlineListSource\";\n /** The OData query to filter the data in SharePoint Online list. For example, \"$top=1\". Type: string (or Expression with resultType string). */\n query?: any;\n /** The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). 
Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity DelimitedText sink. */\nexport interface DelimitedTextSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextSink\";\n /** DelimitedText store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n /** DelimitedText format settings. */\n formatSettings?: DelimitedTextWriteSettings;\n}\n\n/** A copy activity Json sink. */\nexport interface JsonSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonSink\";\n /** Json store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n /** Json format settings. */\n formatSettings?: JsonWriteSettings;\n}\n\n/** A copy activity ORC sink. */\nexport interface OrcSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcSink\";\n /** ORC store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n /** ORC format settings. */\n formatSettings?: OrcWriteSettings;\n}\n\n/** A copy activity Rest service Sink. */\nexport interface RestSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RestSink\";\n /** The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. 
Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n /** The time to await before sending next request, in milliseconds */\n requestInterval?: any;\n /** Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. */\n httpCompressionType?: any;\n}\n\n/** A copy activity Azure PostgreSQL sink. */\nexport interface AzurePostgreSqlSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzurePostgreSqlSink\";\n /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Azure MySql sink. */\nexport interface AzureMySqlSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMySqlSink\";\n /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Azure Databricks Delta Lake sink. */\nexport interface AzureDatabricksDeltaLakeSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeSink\";\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** Azure Databricks Delta Lake import settings. */\n importSettings?: AzureDatabricksDeltaLakeImportCommand;\n}\n\n/** A copy activity SAP Cloud for Customer sink. */\nexport interface SapCloudForCustomerSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapCloudForCustomerSink\";\n /** The write behavior for the operation. Default is 'Insert'. 
*/\n writeBehavior?: SapCloudForCustomerSinkWriteBehavior;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity Azure Queue sink. */\nexport interface AzureQueueSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureQueueSink\";\n}\n\n/** A copy activity Azure Table sink. */\nexport interface AzureTableSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureTableSink\";\n /** Azure Table default partition key value. Type: string (or Expression with resultType string). */\n azureTableDefaultPartitionKeyValue?: any;\n /** Azure Table partition key name. Type: string (or Expression with resultType string). */\n azureTablePartitionKeyName?: any;\n /** Azure Table row key name. Type: string (or Expression with resultType string). */\n azureTableRowKeyName?: any;\n /** Azure Table insert type. Type: string (or Expression with resultType string). */\n azureTableInsertType?: any;\n}\n\n/** A copy activity Avro sink. */\nexport interface AvroSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroSink\";\n /** Avro store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n /** Avro format settings. */\n formatSettings?: AvroWriteSettings;\n}\n\n/** A copy activity Parquet sink. */\nexport interface ParquetSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetSink\";\n /** Parquet store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n /** Parquet format settings. 
*/\n formatSettings?: ParquetWriteSettings;\n}\n\n/** A copy activity Binary sink. */\nexport interface BinarySink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BinarySink\";\n /** Binary store settings. */\n storeSettings?: StoreWriteSettingsUnion;\n}\n\n/** A copy activity Azure Blob sink. */\nexport interface BlobSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BlobSink\";\n /** Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). */\n blobWriterOverwriteFiles?: any;\n /** Blob writer date time format. Type: string (or Expression with resultType string). */\n blobWriterDateTimeFormat?: any;\n /** Blob writer add header. Type: boolean (or Expression with resultType boolean). */\n blobWriterAddHeader?: any;\n /** The type of copy behavior for copy sink. */\n copyBehavior?: any;\n}\n\n/** A copy activity file system sink. */\nexport interface FileSystemSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileSystemSink\";\n /** The type of copy behavior for copy sink. */\n copyBehavior?: any;\n}\n\n/** A copy activity Document Database Collection sink. */\nexport interface DocumentDbCollectionSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DocumentDbCollectionSink\";\n /** Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). */\n nestingSeparator?: any;\n /** Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. */\n writeBehavior?: any;\n}\n\n/** A copy activity Azure CosmosDB (SQL API) Collection sink. 
*/\nexport interface CosmosDbSqlApiSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbSqlApiSink\";\n /** Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. */\n writeBehavior?: any;\n}\n\n/** A copy activity SQL sink. */\nexport interface SqlSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlSink\";\n /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */\n sqlWriterStoredProcedureName?: any;\n /** SQL writer table type. Type: string (or Expression with resultType string). */\n sqlWriterTableType?: any;\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** SQL stored procedure parameters. */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */\n storedProcedureTableTypeParameterName?: any;\n /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */\n tableOption?: any;\n}\n\n/** A copy activity SQL server sink. */\nexport interface SqlServerSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerSink\";\n /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */\n sqlWriterStoredProcedureName?: any;\n /** SQL writer table type. Type: string (or Expression with resultType string). */\n sqlWriterTableType?: any;\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** SQL stored procedure parameters. 
*/\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */\n storedProcedureTableTypeParameterName?: any;\n /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */\n tableOption?: any;\n}\n\n/** A copy activity Azure SQL sink. */\nexport interface AzureSqlSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlSink\";\n /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */\n sqlWriterStoredProcedureName?: any;\n /** SQL writer table type. Type: string (or Expression with resultType string). */\n sqlWriterTableType?: any;\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** SQL stored procedure parameters. */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */\n storedProcedureTableTypeParameterName?: any;\n /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */\n tableOption?: any;\n}\n\n/** A copy activity Azure SQL Managed Instance sink. */\nexport interface SqlMISink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlMISink\";\n /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */\n sqlWriterStoredProcedureName?: any;\n /** SQL writer table type. Type: string (or Expression with resultType string). */\n sqlWriterTableType?: any;\n /** SQL pre-copy script. 
Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** SQL stored procedure parameters. */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */\n storedProcedureTableTypeParameterName?: any;\n /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */\n tableOption?: any;\n}\n\n/** A copy activity SQL Data Warehouse sink. */\nexport interface SqlDWSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlDWSink\";\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). */\n allowPolyBase?: any;\n /** Specifies PolyBase-related settings when allowPolyBase is true. */\n polyBaseSettings?: PolybaseSettings;\n /** Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). */\n allowCopyCommand?: any;\n /** Specifies Copy Command related settings when allowCopyCommand is true. */\n copyCommandSettings?: DWCopyCommandSettings;\n /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */\n tableOption?: any;\n}\n\n/** A copy activity snowflake sink. */\nexport interface SnowflakeSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeSink\";\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n /** Snowflake import settings. 
*/\n importSettings?: SnowflakeImportCopyCommand;\n}\n\n/** A copy activity Oracle sink. */\nexport interface OracleSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleSink\";\n /** SQL pre-copy script. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Azure Data Lake Store sink. */\nexport interface AzureDataLakeStoreSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreSink\";\n /** The type of copy behavior for copy sink. */\n copyBehavior?: any;\n /** Single File Parallel. */\n enableAdlsSingleFileParallel?: any;\n}\n\n/** A copy activity Azure Data Lake Storage Gen2 sink. */\nexport interface AzureBlobFSSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSSink\";\n /** The type of copy behavior for copy sink. */\n copyBehavior?: any;\n}\n\n/** A copy activity Azure Search Index sink. */\nexport interface AzureSearchIndexSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSearchIndexSink\";\n /** Specify the write behavior when upserting documents into Azure Search Index. */\n writeBehavior?: AzureSearchIndexWriteBehaviorType;\n}\n\n/** A copy activity ODBC sink. */\nexport interface OdbcSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OdbcSink\";\n /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Informix sink. 
*/\nexport interface InformixSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"InformixSink\";\n /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Microsoft Access sink. */\nexport interface MicrosoftAccessSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MicrosoftAccessSink\";\n /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */\n preCopyScript?: any;\n}\n\n/** A copy activity Dynamics sink. */\nexport interface DynamicsSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsSink\";\n /** The write behavior for the operation. */\n writeBehavior: DynamicsSinkWriteBehavior;\n /** The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */\n ignoreNullValues?: any;\n /** The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */\n alternateKeyName?: any;\n}\n\n/** A copy activity Dynamics CRM sink. */\nexport interface DynamicsCrmSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsCrmSink\";\n /** The write behavior for the operation. */\n writeBehavior: DynamicsSinkWriteBehavior;\n /** The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */\n ignoreNullValues?: any;\n /** The logical name of the alternate key which will be used when upserting records. 
Type: string (or Expression with resultType string). */\n alternateKeyName?: any;\n}\n\n/** A copy activity Common Data Service for Apps sink. */\nexport interface CommonDataServiceForAppsSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CommonDataServiceForAppsSink\";\n /** The write behavior for the operation. */\n writeBehavior: DynamicsSinkWriteBehavior;\n /** The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */\n ignoreNullValues?: any;\n /** The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */\n alternateKeyName?: any;\n}\n\n/** A copy activity Azure Data Explorer sink. */\nexport interface AzureDataExplorerSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorerSink\";\n /** A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. */\n ingestionMappingName?: any;\n /** An explicit column mapping description provided in a json format. Type: string. */\n ingestionMappingAsJson?: any;\n /** If set to true, any aggregation will be skipped. Default is false. Type: boolean. */\n flushImmediately?: any;\n}\n\n/** A copy activity Salesforce sink. */\nexport interface SalesforceSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceSink\";\n /** The write behavior for the operation. Default is Insert. */\n writeBehavior?: SalesforceSinkWriteBehavior;\n /** The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
*/\n externalIdFieldName?: any;\n /** The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */\n ignoreNullValues?: any;\n}\n\n/** A copy activity Salesforce Service Cloud sink. */\nexport interface SalesforceServiceCloudSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceServiceCloudSink\";\n /** The write behavior for the operation. Default is Insert. */\n writeBehavior?: SalesforceSinkWriteBehavior;\n /** The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */\n externalIdFieldName?: any;\n /** The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */\n ignoreNullValues?: any;\n}\n\n/** A copy activity sink for a CosmosDB (MongoDB API) database. 
*/\nexport interface CosmosDbMongoDbApiSink extends CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbMongoDbApiSink\";\n /** Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is \"insert\". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). */\n writeBehavior?: any;\n}\n\n/** Snowflake export command settings. */\nexport interface SnowflakeExportCopyCommand extends ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeExportCopyCommand\";\n /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalCopyOptions\": { \"DATE_FORMAT\": \"MM/DD/YYYY\", \"TIME_FORMAT\": \"'HH24:MI:SS.FF'\" } */\n additionalCopyOptions?: { [propertyName: string]: any };\n /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalFormatOptions\": { \"OVERWRITE\": \"TRUE\", \"MAX_FILE_SIZE\": \"'FALSE'\" } */\n additionalFormatOptions?: { [propertyName: string]: any };\n}\n\n/** Azure Databricks Delta Lake export command settings. */\nexport interface AzureDatabricksDeltaLakeExportCommand extends ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeExportCommand\";\n /** Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n dateFormat?: any;\n /** Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). 
*/\n timestampFormat?: any;\n}\n\n/** Azure Databricks Delta Lake import command settings. */\nexport interface AzureDatabricksDeltaLakeImportCommand extends ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeImportCommand\";\n /** Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n dateFormat?: any;\n /** Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n timestampFormat?: any;\n}\n\n/** Snowflake import command settings. */\nexport interface SnowflakeImportCopyCommand extends ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeImportCopyCommand\";\n /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalCopyOptions\": { \"DATE_FORMAT\": \"MM/DD/YYYY\", \"TIME_FORMAT\": \"'HH24:MI:SS.FF'\" } */\n additionalCopyOptions?: { [propertyName: string]: any };\n /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalFormatOptions\": { \"FORCE\": \"TRUE\", \"LOAD_UNCERTAIN_FILES\": \"'FALSE'\" } */\n additionalFormatOptions?: { [propertyName: string]: any };\n}\n\n/** A copy activity tabular translator. */\nexport interface TabularTranslator extends CopyTranslator {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TabularTranslator\";\n /** Column mappings. Example: \"UserId: MyUserId, Group: MyGroup, Name: MyName\" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. 
*/\n columnMappings?: any;\n /** The schema mapping to map between tabular data and hierarchical data. Example: {\"Column1\": \"$.Column1\", \"Column2\": \"$.Column2.Property1\", \"Column3\": \"$.Column2.Property2\"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. */\n schemaMapping?: any;\n /** The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). */\n collectionReference?: any;\n /** Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). */\n mapComplexValuesToString?: any;\n /** Column mappings with logical types. Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},{\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object (or Expression with resultType object). */\n mappings?: any;\n /** Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). */\n typeConversion?: any;\n /** Type conversion settings */\n typeConversionSettings?: TypeConversionSettings;\n}\n\n/** Trigger referenced dependency. */\nexport interface TriggerDependencyReference extends DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TriggerDependencyReference\"\n | \"TumblingWindowTriggerDependencyReference\";\n /** Referenced trigger. 
*/\n referenceTrigger: TriggerReference;\n}\n\n/** Self referenced tumbling window trigger dependency. */\nexport interface SelfDependencyTumblingWindowTriggerReference\n extends DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SelfDependencyTumblingWindowTriggerReference\";\n /** Timespan applied to the start time of a tumbling window when evaluating dependency. */\n offset: string;\n /** The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. */\n size?: string;\n}\n\n/** The key authorization type integration runtime. */\nexport interface LinkedIntegrationRuntimeKeyAuthorization\n extends LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"Key\";\n /** The key used for authorization. */\n key: SecureString;\n}\n\n/** The role based access control (RBAC) authorization type integration runtime. */\nexport interface LinkedIntegrationRuntimeRbacAuthorization\n extends LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"RBAC\";\n /** The resource identifier of the integration runtime to be shared. */\n resourceId: string;\n}\n\n/** Azure Synapse nested resource, which belongs to a workspace. */\nexport interface SubResource extends AzureEntityResource {}\n\n/** A Big Data pool */\nexport interface BigDataPoolResourceInfo extends TrackedResource {\n /** The state of the Big Data pool. */\n provisioningState?: string;\n /** Auto-scaling properties */\n autoScale?: AutoScaleProperties;\n /** The time when the Big Data pool was created. */\n creationDate?: Date;\n /** Auto-pausing properties */\n autoPause?: AutoPauseProperties;\n /** Whether compute isolation is required or not. 
*/\n isComputeIsolationEnabled?: boolean;\n /** Whether session level packages enabled. */\n sessionLevelPackagesEnabled?: boolean;\n /** The cache size */\n cacheSize?: number;\n /** Dynamic Executor Allocation */\n dynamicExecutorAllocation?: DynamicExecutorAllocation;\n /** The Spark events folder */\n sparkEventsFolder?: string;\n /** The number of nodes in the Big Data pool. */\n nodeCount?: number;\n /** Library version requirements */\n libraryRequirements?: LibraryRequirements;\n /** List of custom libraries/packages associated with the spark pool. */\n customLibraries?: LibraryInfo[];\n /** Spark configuration file to specify additional properties */\n sparkConfigProperties?: LibraryRequirements;\n /** The Apache Spark version. */\n sparkVersion?: string;\n /** The default folder where Spark logs will be written. */\n defaultSparkLogFolder?: string;\n /** The level of compute power that each node in the Big Data pool has. */\n nodeSize?: NodeSize;\n /** The kind of nodes that the Big Data pool provides. */\n nodeSizeFamily?: NodeSizeFamily;\n /**\n * The time when the Big Data pool was updated successfully.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly lastSucceededTimestamp?: Date;\n}\n\n/** A SQL Analytics pool */\nexport interface SqlPool extends TrackedResource {\n /** SQL pool SKU */\n sku?: Sku;\n /** Maximum size in bytes */\n maxSizeBytes?: number;\n /** Collation mode */\n collation?: string;\n /** Source database to create from */\n sourceDatabaseId?: string;\n /** Backup database to restore from */\n recoverableDatabaseId?: string;\n /** Resource state */\n provisioningState?: string;\n /** Resource status */\n status?: string;\n /** Snapshot time to restore */\n restorePointInTime?: string;\n /**\n * Specifies the mode of sql pool creation.\n *\n * Default: regular sql pool creation.\n *\n * PointInTimeRestore: Creates a sql pool by restoring a point in time backup of an existing sql pool. sourceDatabaseId must be specified as the resource ID of the existing sql pool, and restorePointInTime must be specified.\n *\n * Recovery: Creates a sql pool by a geo-replicated backup. sourceDatabaseId must be specified as the recoverableDatabaseId to restore.\n *\n * Restore: Creates a sql pool by restoring a backup of a deleted sql pool. SourceDatabaseId should be the sql pool's original resource ID. SourceDatabaseId and sourceDatabaseDeletionDate must be specified.\n */\n createMode?: CreateMode;\n /** Date the SQL pool was created */\n creationDate?: Date;\n}\n\n/** A workspace */\nexport interface Workspace extends TrackedResource {\n /** Identity of the workspace */\n identity?: ManagedIdentity;\n /** Workspace default data lake storage account details */\n defaultDataLakeStorage?: DataLakeStorageAccountDetails;\n /** SQL administrator login password */\n sqlAdministratorLoginPassword?: string;\n /** Workspace managed resource group. The resource group name uniquely identifies the resource group within the user subscriptionId. 
The resource group name must be no longer than 90 characters, and must contain only alphanumeric characters (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and '.'.
It can only be populated by the server.\n */\n readonly adlaResourceId?: string;\n}\n\n/** A private endpoint connection */\nexport interface PrivateEndpointConnection extends ProxyResource {\n /** The private endpoint which the connection belongs to. */\n privateEndpoint?: PrivateEndpoint;\n /** Connection state of the private endpoint connection. */\n privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState;\n /**\n * Provisioning state of the private endpoint connection.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningState?: string;\n}\n\n/** Execute pipeline activity. */\nexport interface ExecutePipelineActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecutePipeline\";\n /** Pipeline reference. */\n pipeline: PipelineReference;\n /** Pipeline parameters. */\n parameters?: { [propertyName: string]: any };\n /** Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. */\n waitOnCompletion?: boolean;\n}\n\n/** This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. */\nexport interface IfConditionActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"IfCondition\";\n /** An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. */\n expression: Expression;\n /** List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. */\n ifTrueActivities?: ActivityUnion[];\n /** List of activities to execute if expression is evaluated to false. 
This is an optional property and if not provided, the activity will exit without any action. */\n ifFalseActivities?: ActivityUnion[];\n}\n\n/** This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. */\nexport interface SwitchActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Switch\";\n /** An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. */\n on: Expression;\n /** List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. */\n cases?: SwitchCase[];\n /** List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. */\n defaultActivities?: ActivityUnion[];\n}\n\n/** This activity is used for iterating over a collection and execute given activities. */\nexport interface ForEachActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ForEach\";\n /** Should the loop be executed in sequence or in parallel (max 50) */\n isSequential?: boolean;\n /** Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). */\n batchCount?: number;\n /** Collection to iterate. */\n items: Expression;\n /** List of activities to execute . */\n activities: ActivityUnion[];\n}\n\n/** This activity suspends pipeline execution for the specified interval. */\nexport interface WaitActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Wait\";\n /** Duration in seconds. 
*/\n waitTimeInSeconds: any;\n}\n\n/** This activity will fail within its own scope and output a custom error message and error code. The error message and code can provided either as a string literal or as an expression that can be evaluated to a string at runtime. The activity scope can be the whole pipeline or a control activity (e.g. foreach, switch, until), if the fail activity is contained in it. */\nexport interface FailActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Fail\";\n /** The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). */\n message: any;\n /** The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). */\n errorCode: any;\n}\n\n/** This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. */\nexport interface UntilActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Until\";\n /** An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true */\n expression: Expression;\n /** Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: any;\n /** List of activities to execute. 
*/\n activities: ActivityUnion[];\n}\n\n/** This activity verifies that an external resource exists. */\nexport interface ValidationActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Validation\";\n /** Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: any;\n /** A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). */\n sleep?: any;\n /** Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). */\n minimumSize?: any;\n /** Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). */\n childItems?: any;\n /** Validation activity dataset reference. */\n dataset: DatasetReference;\n}\n\n/** Filter and return results from input array based on the conditions. */\nexport interface FilterActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Filter\";\n /** Input array on which filter should be applied. */\n items: Expression;\n /** Condition to be used for filtering the input. */\n condition: Expression;\n}\n\n/** Set value for a Variable. */\nexport interface SetVariableActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SetVariable\";\n /** Name of the variable whose value needs to be set. 
*/\n variableName?: string;\n /** Value to be set. Could be a static value or Expression */\n value?: any;\n}\n\n/** Append value for a Variable of type Array. */\nexport interface AppendVariableActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AppendVariable\";\n /** Name of the variable whose value needs to be appended to. */\n variableName?: string;\n /** Value to be appended. Could be a static value or Expression */\n value?: any;\n}\n\n/** WebHook activity. */\nexport interface WebHookActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebHook\";\n /** Rest API method for target endpoint. */\n method: WebHookActivityMethod;\n /** WebHook activity target endpoint and path. Type: string (or Expression with resultType string). */\n url: any;\n /** The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: string;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). */\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n /** Authentication method used for calling the endpoint. */\n authentication?: WebActivityAuthentication;\n /** When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. 
Type: boolean (or Expression with resultType boolean). */\n reportStatusOnCallBack?: any;\n}\n\n/** Copy activity. */\nexport interface CopyActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Copy\";\n /** List of inputs for the activity. */\n inputs?: DatasetReference[];\n /** List of outputs for the activity. */\n outputs?: DatasetReference[];\n /** Copy activity source. */\n source: CopySourceUnion;\n /** Copy activity sink. */\n sink: CopySinkUnion;\n /** Copy activity translator. If not specified, tabular translator is used. */\n translator?: any;\n /** Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableStaging?: any;\n /** Specifies interim staging settings when EnableStaging is true. */\n stagingSettings?: StagingSettings;\n /** Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. */\n parallelCopies?: any;\n /** Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. */\n dataIntegrationUnits?: any;\n /** Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableSkipIncompatibleRow?: any;\n /** Redirect incompatible row settings when EnableSkipIncompatibleRow is true. */\n redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings;\n /** (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. */\n logStorageSettings?: LogStorageSettings;\n /** Log settings customer needs provide when enabling log. */\n logSettings?: LogSettings;\n /** Preserve Rules. */\n preserveRules?: any[];\n /** Preserve rules. 
*/\n preserve?: any[];\n /** Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). */\n validateDataConsistency?: any;\n /** Specify the fault tolerance for data consistency. */\n skipErrorFile?: SkipErrorFile;\n}\n\n/** HDInsight Hive activity type. */\nexport interface HDInsightHiveActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightHive\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Script path. Type: string (or Expression with resultType string). */\n scriptPath?: any;\n /** Script linked service reference. */\n scriptLinkedService?: LinkedServiceReference;\n /** Allows user to specify defines for Hive job request. */\n defines?: { [propertyName: string]: any };\n /** User specified arguments under hivevar namespace. */\n variables?: any[];\n /** Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) */\n queryTimeout?: number;\n}\n\n/** HDInsight Pig activity type. */\nexport interface HDInsightPigActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightPig\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). */\n arguments?: any;\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Script path. Type: string (or Expression with resultType string). */\n scriptPath?: any;\n /** Script linked service reference. 
*/\n scriptLinkedService?: LinkedServiceReference;\n /** Allows user to specify defines for Pig job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight MapReduce activity type. */\nexport interface HDInsightMapReduceActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightMapReduce\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Class name. Type: string (or Expression with resultType string). */\n className: any;\n /** Jar path. Type: string (or Expression with resultType string). */\n jarFilePath: any;\n /** Jar linked service reference. */\n jarLinkedService?: LinkedServiceReference;\n /** Jar libs. */\n jarLibs?: any[];\n /** Allows user to specify defines for the MapReduce job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight streaming activity type. */\nexport interface HDInsightStreamingActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightStreaming\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Mapper executable name. Type: string (or Expression with resultType string). */\n mapper: any;\n /** Reducer executable name. Type: string (or Expression with resultType string). */\n reducer: any;\n /** Input blob path. Type: string (or Expression with resultType string). */\n input: any;\n /** Output blob path. Type: string (or Expression with resultType string). */\n output: any;\n /** Paths to streaming job files. 
Can be directories. */\n filePaths: any[];\n /** Linked service reference where the files are located. */\n fileLinkedService?: LinkedServiceReference;\n /** Combiner executable name. Type: string (or Expression with resultType string). */\n combiner?: any;\n /** Command line environment values. */\n commandEnvironment?: any[];\n /** Allows user to specify defines for streaming job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight Spark activity. */\nexport interface HDInsightSparkActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightSpark\";\n /** The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). */\n rootPath: any;\n /** The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). */\n entryFilePath: any;\n /** The user-specified arguments to HDInsightSparkActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** The storage linked service for uploading the entry file and dependencies, and for receiving logs. */\n sparkJobLinkedService?: LinkedServiceReference;\n /** The application's Java/Spark main class. */\n className?: string;\n /** The user to impersonate that will execute the job. Type: string (or Expression with resultType string). */\n proxyUser?: any;\n /** Spark configuration property. */\n sparkConfig?: { [propertyName: string]: any };\n}\n\n/** Execute SSIS package activity. */\nexport interface ExecuteSsisPackageActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecuteSSISPackage\";\n /** SSIS package location. */\n packageLocation: SsisPackageLocation;\n /** Specifies the runtime to execute SSIS package. The value should be \"x86\" or \"x64\". 
Type: string (or Expression with resultType string). */\n runtime?: any;\n /** The logging level of SSIS package execution. Type: string (or Expression with resultType string). */\n loggingLevel?: any;\n /** The environment path to execute the SSIS package. Type: string (or Expression with resultType string). */\n environmentPath?: any;\n /** The package execution credential. */\n executionCredential?: SsisExecutionCredential;\n /** The integration runtime reference. */\n connectVia: IntegrationRuntimeReference;\n /** The project level parameters to execute the SSIS package. */\n projectParameters?: { [propertyName: string]: SsisExecutionParameter };\n /** The package level parameters to execute the SSIS package. */\n packageParameters?: { [propertyName: string]: SsisExecutionParameter };\n /** The project level connection managers to execute the SSIS package. */\n projectConnectionManagers?: {\n [propertyName: string]: { [propertyName: string]: SsisExecutionParameter };\n };\n /** The package level connection managers to execute the SSIS package. */\n packageConnectionManagers?: {\n [propertyName: string]: { [propertyName: string]: SsisExecutionParameter };\n };\n /** The property overrides to execute the SSIS package. */\n propertyOverrides?: { [propertyName: string]: SsisPropertyOverride };\n /** SSIS package execution log location. */\n logLocation?: SsisLogLocation;\n}\n\n/** Custom activity type. */\nexport interface CustomActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Custom\";\n /** Command for custom activity Type: string (or Expression with resultType string). */\n command: any;\n /** Resource linked service reference. */\n resourceLinkedService?: LinkedServiceReference;\n /** Folder path for resource files Type: string (or Expression with resultType string). 
*/\n folderPath?: any;\n /** Reference objects */\n referenceObjects?: CustomActivityReferenceObject;\n /** User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. */\n extendedProperties?: { [propertyName: string]: any };\n /** The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). */\n retentionTimeInDays?: any;\n /** Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). */\n autoUserSpecification?: any;\n}\n\n/** SQL stored procedure activity type. */\nexport interface SqlServerStoredProcedureActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerStoredProcedure\";\n /** Stored procedure name. Type: string (or Expression with resultType string). */\n storedProcedureName: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: any;\n}\n\n/** Delete activity. */\nexport interface DeleteActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Delete\";\n /** If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** The max concurrent connections to connect data source at the same time. */\n maxConcurrentConnections?: number;\n /** Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableLogging?: any;\n /** Log storage settings customer need to provide when enableLogging is true. 
*/\n logStorageSettings?: LogStorageSettings;\n /** Delete activity dataset reference. */\n dataset: DatasetReference;\n /** Delete activity store settings. */\n storeSettings?: StoreReadSettingsUnion;\n}\n\n/** Azure Data Explorer command activity. */\nexport interface AzureDataExplorerCommandActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorerCommand\";\n /** A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). */\n command: any;\n /** Control command timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) */\n commandTimeout?: any;\n}\n\n/** Lookup activity. */\nexport interface LookupActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Lookup\";\n /** Dataset-specific source properties, same as copy activity source. */\n source: CopySourceUnion;\n /** Lookup activity dataset reference. */\n dataset: DatasetReference;\n /** Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). */\n firstRowOnly?: any;\n}\n\n/** Web activity. */\nexport interface WebActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebActivity\";\n /** Rest API method for target endpoint. */\n method: WebActivityMethod;\n /** Web activity target endpoint and path. Type: string (or Expression with resultType string). */\n url: any;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). 
*/\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n /** Authentication method used for calling the endpoint. */\n authentication?: WebActivityAuthentication;\n /** List of datasets passed to web endpoint. */\n datasets?: DatasetReference[];\n /** List of linked services passed to web endpoint. */\n linkedServices?: LinkedServiceReference[];\n /** The integration runtime reference. */\n connectVia?: IntegrationRuntimeReference;\n}\n\n/** Activity to get metadata of dataset */\nexport interface GetMetadataActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GetMetadata\";\n /** GetMetadata activity dataset reference. */\n dataset: DatasetReference;\n /** Fields of metadata to get from dataset. */\n fieldList?: any[];\n /** GetMetadata activity store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** GetMetadata activity format settings. */\n formatSettings?: FormatReadSettingsUnion;\n}\n\n/** Azure ML Batch Execution activity. */\nexport interface AzureMLBatchExecutionActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLBatchExecution\";\n /** Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. */\n globalParameters?: { [propertyName: string]: any };\n /** Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. 
*/\n webServiceOutputs?: { [propertyName: string]: AzureMLWebServiceFile };\n /** Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. */\n webServiceInputs?: { [propertyName: string]: AzureMLWebServiceFile };\n}\n\n/** Azure ML Update Resource management activity. */\nexport interface AzureMLUpdateResourceActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLUpdateResource\";\n /** Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). */\n trainedModelName: any;\n /** Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. */\n trainedModelLinkedServiceName: LinkedServiceReference;\n /** The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). */\n trainedModelFilePath: any;\n}\n\n/** Azure ML Execute Pipeline activity. */\nexport interface AzureMLExecutePipelineActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLExecutePipeline\";\n /** ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). */\n mlPipelineId: any;\n /** Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). */\n experimentName?: any;\n /** Key,Value pairs to be passed to the published Azure ML pipeline endpoint. 
Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). */\n mlPipelineParameters?: any;\n /** The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). */\n mlParentRunId?: any;\n /** Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). */\n continueOnStepFailure?: any;\n}\n\n/** Data Lake Analytics U-SQL activity. */\nexport interface DataLakeAnalyticsUsqlActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DataLakeAnalyticsU-SQL\";\n /** Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). */\n scriptPath: any;\n /** Script linked service reference. */\n scriptLinkedService: LinkedServiceReference;\n /** The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. */\n degreeOfParallelism?: any;\n /** Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. */\n priority?: any;\n /** Parameters for U-SQL job request. */\n parameters?: { [propertyName: string]: any };\n /** Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). */\n runtimeVersion?: any;\n /** Compilation mode of U-SQL. 
Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). */\n compilationMode?: any;\n}\n\n/** DatabricksNotebook activity. */\nexport interface DatabricksNotebookActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksNotebook\";\n /** The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). */\n notebookPath: any;\n /** Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. */\n baseParameters?: { [propertyName: string]: any };\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** DatabricksSparkJar activity. */\nexport interface DatabricksSparkJarActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksSparkJar\";\n /** The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). */\n mainClassName: any;\n /** Parameters that will be passed to the main method. */\n parameters?: any[];\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** DatabricksSparkPython activity. */\nexport interface DatabricksSparkPythonActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksSparkPython\";\n /** The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). 
*/\n pythonFile: any;\n /** Command line parameters that will be passed to the Python file. */\n parameters?: any[];\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** Azure Function activity. */\nexport interface AzureFunctionActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFunctionActivity\";\n /** Rest API method for target endpoint. */\n method: AzureFunctionActivityMethod;\n /** Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) */\n functionName: any;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). */\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n}\n\n/** Execute data flow activity. */\nexport interface ExecuteDataFlowActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecuteDataFlow\";\n /** Data flow reference. */\n dataflow: DataFlowReference;\n /** Staging info for execute data flow activity. */\n staging?: DataFlowStagingInfo;\n /** The integration runtime reference. */\n integrationRuntime?: IntegrationRuntimeReference;\n /** Compute properties for data flow activity. */\n compute?: ExecuteDataFlowActivityTypePropertiesCompute;\n /** Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. 
Type: string (or Expression with resultType string) */\n traceLevel?: any;\n /** Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) */\n continueOnError?: any;\n /** Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) */\n runConcurrently?: any;\n /** Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) */\n sourceStagingConcurrency?: any;\n}\n\n/** Script activity type. */\nexport interface ScriptActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Script\";\n /** Array of script blocks. Type: array. */\n scripts?: ScriptActivityScriptBlock[];\n /** Log settings of script activity. */\n logSettings?: ScriptActivityTypePropertiesLogSettings;\n}\n\n/** Execute Synapse notebook activity. */\nexport interface SynapseNotebookActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SynapseNotebook\";\n /** Synapse notebook reference. */\n notebook: SynapseNotebookReference;\n /** The name of the big data pool which will be used to execute the notebook. */\n sparkPool?: BigDataPoolParametrizationReference;\n /** Notebook parameters. */\n parameters?: { [propertyName: string]: NotebookParameter };\n /** Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string). */\n executorSize?: any;\n /** Spark configuration properties, which will override the 'conf' of the notebook you provide. 
 */
  conf?: any;
  /** Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string). */
  driverSize?: any;
  /** Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. */
  numExecutors?: number;
}

/** Execute spark job activity. */
export interface SynapseSparkJobDefinitionActivity extends ExecutionActivity {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SparkJob";
  /** Synapse spark job reference. */
  sparkJob: SynapseSparkJobReference;
  /** User specified arguments to SynapseSparkJobDefinitionActivity. */
  arguments?: any[];
  /** The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string). */
  file?: any;
  /** The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). */
  className?: any;
  /** Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide. */
  files?: any[];
  /** The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide. */
  targetBigDataPool?: BigDataPoolParametrizationReference;
  /** Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */
  executorSize?: any;
  /** Spark configuration properties, which will override the 'conf' of the spark job definition you provide. */
  conf?: any;
  /** Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */
  driverSize?: any;
  /** Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. */
  numExecutors?: number;
}

/** Trigger that creates pipeline runs periodically, on schedule. */
export interface ScheduleTrigger extends MultiplePipelineTrigger {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ScheduleTrigger";
  /** Recurrence schedule configuration. */
  recurrence: ScheduleTriggerRecurrence;
}

/** Trigger that runs every time the selected Blob container changes. */
export interface BlobTrigger extends MultiplePipelineTrigger {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "BlobTrigger";
  /** The path of the container/folder that will trigger the pipeline. */
  folderPath: string;
  /** The max number of parallel files to handle when it is triggered. */
  maxConcurrency: number;
  /** The Azure Storage linked service reference. */
  linkedService: LinkedServiceReference;
}

/** Trigger that runs every time a Blob event occurs. */
export interface BlobEventsTrigger extends MultiplePipelineTrigger {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "BlobEventsTrigger";
  /** The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */
  blobPathBeginsWith?: string;
  /** The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */
  blobPathEndsWith?: string;
  /** If set to true, blobs with zero bytes will be ignored. */
  ignoreEmptyBlobs?: boolean;
  /** The type of events that cause this trigger to fire. */
  events: BlobEventType[];
  /** The ARM resource ID of the Storage Account. */
  scope: string;
}

/** Trigger that runs every time a custom event is received. */
export interface CustomEventsTrigger extends MultiplePipelineTrigger {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CustomEventsTrigger";
  /** The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */
  subjectBeginsWith?: string;
  /** The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */
  subjectEndsWith?: string;
  /** The list of event types that cause this trigger to fire. */
  events: any[];
  /** The ARM resource ID of the Azure Event Grid Topic. */
  scope: string;
}

/** A copy activity Azure Table source. */
export interface AzureTableSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureTableSource";
  /** Azure Table source query. Type: string (or Expression with resultType string). */
  azureTableSourceQuery?: any;
  /** Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). 
 */
  azureTableSourceIgnoreTableNotFound?: any;
}

/** A copy activity source for Informix. */
export interface InformixSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "InformixSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for Db2 databases. */
export interface Db2Source extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Db2Source";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for ODBC databases. */
export interface OdbcSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OdbcSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for MySQL databases. */
export interface MySqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MySqlSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for PostgreSQL databases. */
export interface PostgreSqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PostgreSqlSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for Sybase databases. */
export interface SybaseSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SybaseSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for SapBW server via MDX. */
export interface SapBwSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapBwSource";
  /** MDX query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Salesforce source. */
export interface SalesforceSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
  /** The read behavior for the operation. Default is Query. */
  readBehavior?: SalesforceSourceReadBehavior;
}

/** A copy activity source for SAP Cloud for Customer source. */
export interface SapCloudForCustomerSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapCloudForCustomerSource";
  /** SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). */
  query?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
}

/** A copy activity source for SAP ECC source. */
export interface SapEccSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapEccSource";
  /** SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). */
  query?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
}

/** A copy activity source for SAP HANA source. */
export interface SapHanaSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapHanaSource";
  /** SAP HANA Sql query. Type: string (or Expression with resultType string). */
  query?: any;
  /** The packet size of data read from SAP HANA. Type: integer (or Expression with resultType integer). */
  packetSize?: any;
  /** The partition mechanism that will be used for SAP HANA read in parallel. */
  partitionOption?: SapHanaPartitionOption;
  /** The settings that will be leveraged for SAP HANA source partitioning. */
  partitionSettings?: SapHanaPartitionSettings;
}

/** A copy activity source for SAP Business Warehouse Open Hub Destination source. */
export interface SapOpenHubSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOpenHubSource";
  /** Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */
  excludeLastRequest?: any;
  /** The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer). */
  baseRequestId?: any;
  /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */
  customRfcReadTableFunctionModule?: any;
  /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). 
 */
  sapDataColumnDelimiter?: any;
}

/** A copy activity source for SAP ODP source. */
export interface SapOdpSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOdpSource";
  /** The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). */
  extractionMode?: any;
  /** The subscriber process to manage the delta process. Type: string (or Expression with resultType string). */
  subscriberProcess?: any;
  /** Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). */
  selection?: any;
  /** Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). */
  projection?: any;
}

/** A copy activity source for SAP Table source. */
export interface SapTableSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapTableSource";
  /** The number of rows to be retrieved. Type: integer (or Expression with resultType integer). */
  rowCount?: any;
  /** The number of rows that will be skipped. Type: integer (or Expression with resultType integer). */
  rowSkips?: any;
  /** The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). */
  rfcTableFields?: any;
  /** The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). */
  rfcTableOptions?: any;
  /** Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). */
  batchSize?: any;
  /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */
  customRfcReadTableFunctionModule?: any;
  /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */
  sapDataColumnDelimiter?: any;
  /** The partition mechanism that will be used for SAP table read in parallel. */
  partitionOption?: SapTablePartitionOption;
  /** The settings that will be leveraged for SAP table source partitioning. */
  partitionSettings?: SapTablePartitionSettings;
}

/** A copy activity SQL source. */
export interface SqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlSource";
  /** SQL reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */
  isolationLevel?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity SQL server source. */
export interface SqlServerSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlServerSource";
  /** SQL reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** Which additional types to produce. */
  produceAdditionalTypes?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity Amazon RDS for SQL Server source. */
export interface AmazonRdsForSqlServerSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRdsForSqlServerSource";
  /** SQL reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
 */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** Which additional types to produce. */
  produceAdditionalTypes?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity Azure SQL source. */
export interface AzureSqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSqlSource";
  /** SQL reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** Which additional types to produce. */
  produceAdditionalTypes?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity Azure SQL Managed Instance source. */
export interface SqlMISource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlMISource";
  /** SQL reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for an Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** Which additional types to produce. */
  produceAdditionalTypes?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity SQL Data Warehouse source. */
export interface SqlDWSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlDWSource";
  /** SQL Data Warehouse reader query. Type: string (or Expression with resultType string). */
  sqlReaderQuery?: any;
  /** Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
  sqlReaderStoredProcedureName?: any;
  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. */
  storedProcedureParameters?: any;
  /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
  partitionOption?: any;
  /** The settings that will be leveraged for Sql source partitioning. */
  partitionSettings?: SqlPartitionSettings;
}

/** A copy activity Azure MySQL source. */
export interface AzureMySqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureMySqlSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Teradata source. */
export interface TeradataSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "TeradataSource";
  /** Teradata query. Type: string (or Expression with resultType string). */
  query?: any;
  /** The partition mechanism that will be used for teradata read in parallel. */
  partitionOption?: TeradataPartitionOption;
  /** The settings that will be leveraged for teradata source partitioning. */
  partitionSettings?: TeradataPartitionSettings;
}

/** A copy activity source for a Cassandra database. */
export interface CassandraSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CassandraSource";
  /** Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). */
  query?: any;
  /** The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. */
  consistencyLevel?: CassandraSourceReadConsistencyLevels;
}

/** A copy activity Amazon Marketplace Web Service source. 
 */
export interface AmazonMWSSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonMWSSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Azure PostgreSQL source. */
export interface AzurePostgreSqlSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzurePostgreSqlSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Concur Service source. */
export interface ConcurSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ConcurSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Couchbase server source. */
export interface CouchbaseSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CouchbaseSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Drill server source. */
export interface DrillSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DrillSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Eloqua server source. */
export interface EloquaSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "EloquaSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Google BigQuery service source. */
export interface GoogleBigQuerySource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleBigQuerySource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Greenplum Database source. */
export interface GreenplumSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GreenplumSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity HBase server source. */
export interface HBaseSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HBaseSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Hive Server source. */
export interface HiveSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HiveSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Hubspot Service source. */
export interface HubspotSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HubspotSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Impala server source. */
export interface ImpalaSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ImpalaSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Jira Service source. */
export interface JiraSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "JiraSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Magento server source. */
export interface MagentoSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MagentoSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity MariaDB server source. */
export interface MariaDBSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MariaDBSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Azure MariaDB source. */
export interface AzureMariaDBSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureMariaDBSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Marketo server source. */
export interface MarketoSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MarketoSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). 
 */
  query?: any;
}

/** A copy activity Paypal Service source. */
export interface PaypalSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PaypalSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Phoenix server source. */
export interface PhoenixSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PhoenixSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Presto server source. */
export interface PrestoSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "PrestoSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity QuickBooks server source. */
export interface QuickBooksSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "QuickBooksSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity ServiceNow server source. */
export interface ServiceNowSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ServiceNowSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Shopify Service source. */
export interface ShopifySource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ShopifySource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Spark Server source. */
export interface SparkSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SparkSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Square Service source. */
export interface SquareSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SquareSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Xero Service source. */
export interface XeroSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "XeroSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Zoho server source. */
export interface ZohoSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ZohoSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Netezza source. */
export interface NetezzaSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "NetezzaSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
  /** The partition mechanism that will be used for Netezza read in parallel. */
  partitionOption?: NetezzaPartitionOption;
  /** The settings that will be leveraged for Netezza source partitioning. */
  partitionSettings?: NetezzaPartitionSettings;
}

/** A copy activity Vertica source. */
export interface VerticaSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "VerticaSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Salesforce Marketing Cloud source. */
export interface SalesforceMarketingCloudSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceMarketingCloudSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Responsys source. */
export interface ResponsysSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ResponsysSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Dynamics AX source. */
export interface DynamicsAXSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsAXSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
 */
  httpRequestTimeout?: any;
}

/** A copy activity Oracle Service Cloud source. */
export interface OracleServiceCloudSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OracleServiceCloudSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity Google AdWords service source. */
export interface GoogleAdWordsSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleAdWordsSource";
  /** A query to retrieve data from source. Type: string (or Expression with resultType string). */
  query?: any;
}

/** A copy activity source for Amazon Redshift Source. */
export interface AmazonRedshiftSource extends TabularSource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRedshiftSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
  /** The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. */
  redshiftUnloadSettings?: RedshiftUnloadSettings;
}

/** Referenced tumbling window trigger dependency. */
export interface TumblingWindowTriggerDependencyReference
  extends TriggerDependencyReference {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "TumblingWindowTriggerDependencyReference";
  /** Timespan applied to the start time of a tumbling window when evaluating dependency. */
  offset?: string;
  /** The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. */
  size?: string;
}

/** Spark Configuration resource type. */
export interface SparkConfigurationResource extends SubResource {
  /** Properties of Spark Configuration. */
  properties: SparkConfiguration;
}

/** Data flow resource type. */
export interface DataFlowResource extends SubResource {
  /** Data flow properties. */
  properties: DataFlowUnion;
}

/** Dataset resource type. */
export interface DatasetResource extends SubResource {
  /** Dataset properties. */
  properties: DatasetUnion;
}

/** Integration runtime resource type. */
export interface IntegrationRuntimeResource extends SubResource {
  /** Integration runtime properties. */
  properties: IntegrationRuntimeUnion;
}

/** Library response details */
export interface LibraryResource extends SubResource {
  /** Library/package properties. */
  properties: LibraryResourceProperties;
}

/** Linked service resource type. */
export interface LinkedServiceResource extends SubResource {
  /** Properties of linked service. */
  properties: LinkedServiceUnion;
}

/** Pipeline resource type. */
export interface PipelineResource extends SubResource {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The description of the pipeline. */
  description?: string;
  /** List of activities in pipeline. */
  activities?: ActivityUnion[];
  /** List of parameters for pipeline. */
  parameters?: { [propertyName: string]: ParameterSpecification };
  /** List of variables for pipeline. */
  variables?: { [propertyName: string]: VariableSpecification };
  /** The max number of concurrent runs for the pipeline. */
  concurrency?: number;
  /** List of tags that can be used for describing the Pipeline. */
  annotations?: any[];
  /** Dimensions emitted by Pipeline. */
  runDimensions?: { [propertyName: string]: any };
  /** The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */
  folder?: PipelineFolder;
}

/** Spark job definition resource type. */
export interface SparkJobDefinitionResource extends SubResource {
  /** Properties of spark job definition. */
  properties: SparkJobDefinition;
}

/** Trigger resource type. */
export interface TriggerResource extends SubResource {
  /** Properties of the trigger. */
  properties: TriggerUnion;
}

/** RerunTrigger resource type. */
export interface RerunTriggerResource extends SubResource {
  /** Properties of the rerun trigger. */
  properties: RerunTumblingWindowTrigger;
}

/** Defines headers for DataFlowDebugSession_createDataFlowDebugSession operation. */
export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders {
  /** URI to poll for asynchronous operation status. */
  location?: string;
}

/** Defines headers for DataFlowDebugSession_executeCommand operation. */
export interface DataFlowDebugSessionExecuteCommandHeaders {
  /** URI to poll for asynchronous operation status. */
  location?: string;
}

/** Known values of {@link Type} that the service accepts. */
export enum KnownType {
  /** LinkedServiceReference */
  LinkedServiceReference = "LinkedServiceReference"
}

/**
 * Defines values for Type. \
 * {@link KnownType} can be used interchangeably with Type,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **LinkedServiceReference**
 */
export type Type = string;

/** Known values of {@link RequestStatus} that the service accepts. */
export enum KnownRequestStatus {
  /** Running */
  Running = "Running",
  /** Completed */
  Completed = "Completed",
  /** Failed */
  Failed = "Failed"
}

/**
 * Defines values for RequestStatus. 
\\\n * {@link KnownRequestStatus} can be used interchangeably with RequestStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Running** \\\n * **Completed** \\\n * **Failed**\n */\nexport type RequestStatus = string;\n\n/** Known values of {@link ResourceStatus} that the service accepts. */\nexport enum KnownResourceStatus {\n /** Creating */\n Creating = \"Creating\",\n /** Created */\n Created = \"Created\",\n /** Failed */\n Failed = \"Failed\"\n}\n\n/**\n * Defines values for ResourceStatus. \\\n * {@link KnownResourceStatus} can be used interchangeably with ResourceStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Creating** \\\n * **Created** \\\n * **Failed**\n */\nexport type ResourceStatus = string;\n\n/** Known values of {@link NodeSize} that the service accepts. */\nexport enum KnownNodeSize {\n /** None */\n None = \"None\",\n /** Small */\n Small = \"Small\",\n /** Medium */\n Medium = \"Medium\",\n /** Large */\n Large = \"Large\",\n /** XLarge */\n XLarge = \"XLarge\",\n /** XXLarge */\n XXLarge = \"XXLarge\",\n /** XXXLarge */\n XXXLarge = \"XXXLarge\"\n}\n\n/**\n * Defines values for NodeSize. \\\n * {@link KnownNodeSize} can be used interchangeably with NodeSize,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Small** \\\n * **Medium** \\\n * **Large** \\\n * **XLarge** \\\n * **XXLarge** \\\n * **XXXLarge**\n */\nexport type NodeSize = string;\n\n/** Known values of {@link NodeSizeFamily} that the service accepts. */\nexport enum KnownNodeSizeFamily {\n /** None */\n None = \"None\",\n /** MemoryOptimized */\n MemoryOptimized = \"MemoryOptimized\"\n}\n\n/**\n * Defines values for NodeSizeFamily. 
\\\n * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **MemoryOptimized**\n */\nexport type NodeSizeFamily = string;\n\n/** Known values of {@link IntegrationRuntimeType} that the service accepts. */\nexport enum KnownIntegrationRuntimeType {\n /** Managed */\n Managed = \"Managed\",\n /** SelfHosted */\n SelfHosted = \"SelfHosted\"\n}\n\n/**\n * Defines values for IntegrationRuntimeType. \\\n * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Managed** \\\n * **SelfHosted**\n */\nexport type IntegrationRuntimeType = string;\n\n/** Known values of {@link ParameterType} that the service accepts. */\nexport enum KnownParameterType {\n /** Object */\n Object = \"Object\",\n /** String */\n String = \"String\",\n /** Int */\n Int = \"Int\",\n /** Float */\n Float = \"Float\",\n /** Bool */\n Bool = \"Bool\",\n /** Array */\n Array = \"Array\",\n /** SecureString */\n SecureString = \"SecureString\"\n}\n\n/**\n * Defines values for ParameterType. \\\n * {@link KnownParameterType} can be used interchangeably with ParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Object** \\\n * **String** \\\n * **Int** \\\n * **Float** \\\n * **Bool** \\\n * **Array** \\\n * **SecureString**\n */\nexport type ParameterType = string;\n\n/** Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. */\nexport enum KnownIntegrationRuntimeReferenceType {\n /** IntegrationRuntimeReference */\n IntegrationRuntimeReference = \"IntegrationRuntimeReference\"\n}\n\n/**\n * Defines values for IntegrationRuntimeReferenceType. 
\\\n * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **IntegrationRuntimeReference**\n */\nexport type IntegrationRuntimeReferenceType = string;\n\n/** Known values of {@link DataFlowDebugCommandType} that the service accepts. */\nexport enum KnownDataFlowDebugCommandType {\n /** ExecutePreviewQuery */\n ExecutePreviewQuery = \"executePreviewQuery\",\n /** ExecuteStatisticsQuery */\n ExecuteStatisticsQuery = \"executeStatisticsQuery\",\n /** ExecuteExpressionQuery */\n ExecuteExpressionQuery = \"executeExpressionQuery\"\n}\n\n/**\n * Defines values for DataFlowDebugCommandType. \\\n * {@link KnownDataFlowDebugCommandType} can be used interchangeably with DataFlowDebugCommandType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **executePreviewQuery** \\\n * **executeStatisticsQuery** \\\n * **executeExpressionQuery**\n */\nexport type DataFlowDebugCommandType = string;\n\n/** Known values of {@link BigDataPoolReferenceType} that the service accepts. */\nexport enum KnownBigDataPoolReferenceType {\n /** BigDataPoolReference */\n BigDataPoolReference = \"BigDataPoolReference\"\n}\n\n/**\n * Defines values for BigDataPoolReferenceType. \\\n * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **BigDataPoolReference**\n */\nexport type BigDataPoolReferenceType = string;\n\n/** Known values of {@link SparkConfigurationReferenceType} that the service accepts. 
*/\nexport enum KnownSparkConfigurationReferenceType {\n /** SparkConfigurationReference */\n SparkConfigurationReference = \"SparkConfigurationReference\"\n}\n\n/**\n * Defines values for SparkConfigurationReferenceType. \\\n * {@link KnownSparkConfigurationReferenceType} can be used interchangeably with SparkConfigurationReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkConfigurationReference**\n */\nexport type SparkConfigurationReferenceType = string;\n\n/** Known values of {@link CellOutputType} that the service accepts. */\nexport enum KnownCellOutputType {\n /** ExecuteResult */\n ExecuteResult = \"execute_result\",\n /** DisplayData */\n DisplayData = \"display_data\",\n /** Stream */\n Stream = \"stream\",\n /** Error */\n Error = \"error\"\n}\n\n/**\n * Defines values for CellOutputType. \\\n * {@link KnownCellOutputType} can be used interchangeably with CellOutputType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **execute_result** \\\n * **display_data** \\\n * **stream** \\\n * **error**\n */\nexport type CellOutputType = string;\n\n/** Known values of {@link DependencyCondition} that the service accepts. */\nexport enum KnownDependencyCondition {\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Skipped */\n Skipped = \"Skipped\",\n /** Completed */\n Completed = \"Completed\"\n}\n\n/**\n * Defines values for DependencyCondition. \\\n * {@link KnownDependencyCondition} can be used interchangeably with DependencyCondition,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Succeeded** \\\n * **Failed** \\\n * **Skipped** \\\n * **Completed**\n */\nexport type DependencyCondition = string;\n\n/** Known values of {@link VariableType} that the service accepts. 
*/\nexport enum KnownVariableType {\n /** String */\n String = \"String\",\n /** Bool */\n Bool = \"Bool\",\n /** Boolean */\n Boolean = \"Boolean\",\n /** Array */\n Array = \"Array\"\n}\n\n/**\n * Defines values for VariableType. \\\n * {@link KnownVariableType} can be used interchangeably with VariableType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **String** \\\n * **Bool** \\\n * **Boolean** \\\n * **Array**\n */\nexport type VariableType = string;\n\n/** Known values of {@link RunQueryFilterOperand} that the service accepts. */\nexport enum KnownRunQueryFilterOperand {\n /** PipelineName */\n PipelineName = \"PipelineName\",\n /** Status */\n Status = \"Status\",\n /** RunStart */\n RunStart = \"RunStart\",\n /** RunEnd */\n RunEnd = \"RunEnd\",\n /** ActivityName */\n ActivityName = \"ActivityName\",\n /** ActivityRunStart */\n ActivityRunStart = \"ActivityRunStart\",\n /** ActivityRunEnd */\n ActivityRunEnd = \"ActivityRunEnd\",\n /** ActivityType */\n ActivityType = \"ActivityType\",\n /** TriggerName */\n TriggerName = \"TriggerName\",\n /** TriggerRunTimestamp */\n TriggerRunTimestamp = \"TriggerRunTimestamp\",\n /** RunGroupId */\n RunGroupId = \"RunGroupId\",\n /** LatestOnly */\n LatestOnly = \"LatestOnly\"\n}\n\n/**\n * Defines values for RunQueryFilterOperand. \\\n * {@link KnownRunQueryFilterOperand} can be used interchangeably with RunQueryFilterOperand,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **PipelineName** \\\n * **Status** \\\n * **RunStart** \\\n * **RunEnd** \\\n * **ActivityName** \\\n * **ActivityRunStart** \\\n * **ActivityRunEnd** \\\n * **ActivityType** \\\n * **TriggerName** \\\n * **TriggerRunTimestamp** \\\n * **RunGroupId** \\\n * **LatestOnly**\n */\nexport type RunQueryFilterOperand = string;\n\n/** Known values of {@link RunQueryFilterOperator} that the service accepts. 
*/\nexport enum KnownRunQueryFilterOperator {\n /** Equals */\n Equals = \"Equals\",\n /** NotEquals */\n NotEquals = \"NotEquals\",\n /** In */\n In = \"In\",\n /** NotIn */\n NotIn = \"NotIn\"\n}\n\n/**\n * Defines values for RunQueryFilterOperator. \\\n * {@link KnownRunQueryFilterOperator} can be used interchangeably with RunQueryFilterOperator,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Equals** \\\n * **NotEquals** \\\n * **In** \\\n * **NotIn**\n */\nexport type RunQueryFilterOperator = string;\n\n/** Known values of {@link RunQueryOrderByField} that the service accepts. */\nexport enum KnownRunQueryOrderByField {\n /** RunStart */\n RunStart = \"RunStart\",\n /** RunEnd */\n RunEnd = \"RunEnd\",\n /** PipelineName */\n PipelineName = \"PipelineName\",\n /** Status */\n Status = \"Status\",\n /** ActivityName */\n ActivityName = \"ActivityName\",\n /** ActivityRunStart */\n ActivityRunStart = \"ActivityRunStart\",\n /** ActivityRunEnd */\n ActivityRunEnd = \"ActivityRunEnd\",\n /** TriggerName */\n TriggerName = \"TriggerName\",\n /** TriggerRunTimestamp */\n TriggerRunTimestamp = \"TriggerRunTimestamp\"\n}\n\n/**\n * Defines values for RunQueryOrderByField. \\\n * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **RunStart** \\\n * **RunEnd** \\\n * **PipelineName** \\\n * **Status** \\\n * **ActivityName** \\\n * **ActivityRunStart** \\\n * **ActivityRunEnd** \\\n * **TriggerName** \\\n * **TriggerRunTimestamp**\n */\nexport type RunQueryOrderByField = string;\n\n/** Known values of {@link RunQueryOrder} that the service accepts. */\nexport enum KnownRunQueryOrder {\n /** ASC */\n ASC = \"ASC\",\n /** Desc */\n Desc = \"DESC\"\n}\n\n/**\n * Defines values for RunQueryOrder. 
\\\n * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ASC** \\\n * **DESC**\n */\nexport type RunQueryOrder = string;\n\n/** Known values of {@link SparkJobType} that the service accepts. */\nexport enum KnownSparkJobType {\n /** SparkBatch */\n SparkBatch = \"SparkBatch\",\n /** SparkSession */\n SparkSession = \"SparkSession\"\n}\n\n/**\n * Defines values for SparkJobType. \\\n * {@link KnownSparkJobType} can be used interchangeably with SparkJobType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkBatch** \\\n * **SparkSession**\n */\nexport type SparkJobType = string;\n\n/** Known values of {@link SparkBatchJobResultType} that the service accepts. */\nexport enum KnownSparkBatchJobResultType {\n /** Uncertain */\n Uncertain = \"Uncertain\",\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Cancelled */\n Cancelled = \"Cancelled\"\n}\n\n/**\n * Defines values for SparkBatchJobResultType. \\\n * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Uncertain** \\\n * **Succeeded** \\\n * **Failed** \\\n * **Cancelled**\n */\nexport type SparkBatchJobResultType = string;\n\n/** Known values of {@link SchedulerCurrentState} that the service accepts. */\nexport enum KnownSchedulerCurrentState {\n /** Queued */\n Queued = \"Queued\",\n /** Scheduled */\n Scheduled = \"Scheduled\",\n /** Ended */\n Ended = \"Ended\"\n}\n\n/**\n * Defines values for SchedulerCurrentState. 
\\\n * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Queued** \\\n * **Scheduled** \\\n * **Ended**\n */\nexport type SchedulerCurrentState = string;\n\n/** Known values of {@link PluginCurrentState} that the service accepts. */\nexport enum KnownPluginCurrentState {\n /** Preparation */\n Preparation = \"Preparation\",\n /** ResourceAcquisition */\n ResourceAcquisition = \"ResourceAcquisition\",\n /** Queued */\n Queued = \"Queued\",\n /** Submission */\n Submission = \"Submission\",\n /** Monitoring */\n Monitoring = \"Monitoring\",\n /** Cleanup */\n Cleanup = \"Cleanup\",\n /** Ended */\n Ended = \"Ended\"\n}\n\n/**\n * Defines values for PluginCurrentState. \\\n * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Preparation** \\\n * **ResourceAcquisition** \\\n * **Queued** \\\n * **Submission** \\\n * **Monitoring** \\\n * **Cleanup** \\\n * **Ended**\n */\nexport type PluginCurrentState = string;\n\n/** Known values of {@link SparkErrorSource} that the service accepts. */\nexport enum KnownSparkErrorSource {\n /** System */\n System = \"System\",\n /** User */\n User = \"User\",\n /** Unknown */\n Unknown = \"Unknown\",\n /** Dependency */\n Dependency = \"Dependency\"\n}\n\n/**\n * Defines values for SparkErrorSource. \\\n * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **System** \\\n * **User** \\\n * **Unknown** \\\n * **Dependency**\n */\nexport type SparkErrorSource = string;\n\n/** Known values of {@link LivyStates} that the service accepts. 
*/\nexport enum KnownLivyStates {\n /** NotStarted */\n NotStarted = \"not_started\",\n /** Starting */\n Starting = \"starting\",\n /** Idle */\n Idle = \"idle\",\n /** Busy */\n Busy = \"busy\",\n /** ShuttingDown */\n ShuttingDown = \"shutting_down\",\n /** Error */\n Error = \"error\",\n /** Dead */\n Dead = \"dead\",\n /** Killed */\n Killed = \"killed\",\n /** Success */\n Success = \"success\",\n /** Running */\n Running = \"running\",\n /** Recovering */\n Recovering = \"recovering\"\n}\n\n/**\n * Defines values for LivyStates. \\\n * {@link KnownLivyStates} can be used interchangeably with LivyStates,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **not_started** \\\n * **starting** \\\n * **idle** \\\n * **busy** \\\n * **shutting_down** \\\n * **error** \\\n * **dead** \\\n * **killed** \\\n * **success** \\\n * **running** \\\n * **recovering**\n */\nexport type LivyStates = string;\n\n/** Known values of {@link CreateMode} that the service accepts. */\nexport enum KnownCreateMode {\n /** Default */\n Default = \"Default\",\n /** PointInTimeRestore */\n PointInTimeRestore = \"PointInTimeRestore\",\n /** Recovery */\n Recovery = \"Recovery\",\n /** Restore */\n Restore = \"Restore\"\n}\n\n/**\n * Defines values for CreateMode. \\\n * {@link KnownCreateMode} can be used interchangeably with CreateMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Default** \\\n * **PointInTimeRestore** \\\n * **Recovery** \\\n * **Restore**\n */\nexport type CreateMode = string;\n\n/** Known values of {@link SqlScriptType} that the service accepts. */\nexport enum KnownSqlScriptType {\n /** SqlQuery */\n SqlQuery = \"SqlQuery\"\n}\n\n/**\n * Defines values for SqlScriptType. 
\\\n * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlQuery**\n */\nexport type SqlScriptType = string;\n\n/** Known values of {@link SqlConnectionType} that the service accepts. */\nexport enum KnownSqlConnectionType {\n /** SqlOnDemand */\n SqlOnDemand = \"SqlOnDemand\",\n /** SqlPool */\n SqlPool = \"SqlPool\"\n}\n\n/**\n * Defines values for SqlConnectionType. \\\n * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlOnDemand** \\\n * **SqlPool**\n */\nexport type SqlConnectionType = string;\n\n/** Known values of {@link TriggerRuntimeState} that the service accepts. */\nexport enum KnownTriggerRuntimeState {\n /** Started */\n Started = \"Started\",\n /** Stopped */\n Stopped = \"Stopped\",\n /** Disabled */\n Disabled = \"Disabled\"\n}\n\n/**\n * Defines values for TriggerRuntimeState. \\\n * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Started** \\\n * **Stopped** \\\n * **Disabled**\n */\nexport type TriggerRuntimeState = string;\n\n/** Known values of {@link EventSubscriptionStatus} that the service accepts. */\nexport enum KnownEventSubscriptionStatus {\n /** Enabled */\n Enabled = \"Enabled\",\n /** Provisioning */\n Provisioning = \"Provisioning\",\n /** Deprovisioning */\n Deprovisioning = \"Deprovisioning\",\n /** Disabled */\n Disabled = \"Disabled\",\n /** Unknown */\n Unknown = \"Unknown\"\n}\n\n/**\n * Defines values for EventSubscriptionStatus. 
\\\n * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Enabled** \\\n * **Provisioning** \\\n * **Deprovisioning** \\\n * **Disabled** \\\n * **Unknown**\n */\nexport type EventSubscriptionStatus = string;\n\n/** Known values of {@link TriggerRunStatus} that the service accepts. */\nexport enum KnownTriggerRunStatus {\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Inprogress */\n Inprogress = \"Inprogress\"\n}\n\n/**\n * Defines values for TriggerRunStatus. \\\n * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Succeeded** \\\n * **Failed** \\\n * **Inprogress**\n */\nexport type TriggerRunStatus = string;\n\n/** Known values of {@link ExpressionType} that the service accepts. */\nexport enum KnownExpressionType {\n /** Expression */\n Expression = \"Expression\"\n}\n\n/**\n * Defines values for ExpressionType. \\\n * {@link KnownExpressionType} can be used interchangeably with ExpressionType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Expression**\n */\nexport type ExpressionType = string;\n\n/** Known values of {@link PipelineReferenceType} that the service accepts. */\nexport enum KnownPipelineReferenceType {\n /** PipelineReference */\n PipelineReference = \"PipelineReference\"\n}\n\n/**\n * Defines values for PipelineReferenceType. 
\\\n * {@link KnownPipelineReferenceType} can be used interchangeably with PipelineReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **PipelineReference**\n */\nexport type PipelineReferenceType = string;\n\n/** Known values of {@link DatasetReferenceType} that the service accepts. */\nexport enum KnownDatasetReferenceType {\n /** DatasetReference */\n DatasetReference = \"DatasetReference\"\n}\n\n/**\n * Defines values for DatasetReferenceType. \\\n * {@link KnownDatasetReferenceType} can be used interchangeably with DatasetReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **DatasetReference**\n */\nexport type DatasetReferenceType = string;\n\n/** Known values of {@link DataFlowReferenceType} that the service accepts. */\nexport enum KnownDataFlowReferenceType {\n /** DataFlowReference */\n DataFlowReference = \"DataFlowReference\"\n}\n\n/**\n * Defines values for DataFlowReferenceType. \\\n * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **DataFlowReference**\n */\nexport type DataFlowReferenceType = string;\n\n/** Known values of {@link NotebookReferenceType} that the service accepts. */\nexport enum KnownNotebookReferenceType {\n /** NotebookReference */\n NotebookReference = \"NotebookReference\"\n}\n\n/**\n * Defines values for NotebookReferenceType. \\\n * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **NotebookReference**\n */\nexport type NotebookReferenceType = string;\n\n/** Known values of {@link SparkJobReferenceType} that the service accepts. 
*/\nexport enum KnownSparkJobReferenceType {\n /** SparkJobDefinitionReference */\n SparkJobDefinitionReference = \"SparkJobDefinitionReference\"\n}\n\n/**\n * Defines values for SparkJobReferenceType. \\\n * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkJobDefinitionReference**\n */\nexport type SparkJobReferenceType = string;\n\n/** Known values of {@link SqlPoolReferenceType} that the service accepts. */\nexport enum KnownSqlPoolReferenceType {\n /** SqlPoolReference */\n SqlPoolReference = \"SqlPoolReference\"\n}\n\n/**\n * Defines values for SqlPoolReferenceType. \\\n * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlPoolReference**\n */\nexport type SqlPoolReferenceType = string;\n\n/** Known values of {@link SybaseAuthenticationType} that the service accepts. */\nexport enum KnownSybaseAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for SybaseAuthenticationType. \\\n * {@link KnownSybaseAuthenticationType} can be used interchangeably with SybaseAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type SybaseAuthenticationType = string;\n\n/** Known values of {@link Db2AuthenticationType} that the service accepts. */\nexport enum KnownDb2AuthenticationType {\n /** Basic */\n Basic = \"Basic\"\n}\n\n/**\n * Defines values for Db2AuthenticationType. 
\\\n * {@link KnownDb2AuthenticationType} can be used interchangeably with Db2AuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic**\n */\nexport type Db2AuthenticationType = string;\n\n/** Known values of {@link TeradataAuthenticationType} that the service accepts. */\nexport enum KnownTeradataAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for TeradataAuthenticationType. \\\n * {@link KnownTeradataAuthenticationType} can be used interchangeably with TeradataAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type TeradataAuthenticationType = string;\n\n/** Known values of {@link ODataAuthenticationType} that the service accepts. */\nexport enum KnownODataAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Windows */\n Windows = \"Windows\",\n /** AadServicePrincipal */\n AadServicePrincipal = \"AadServicePrincipal\",\n /** ManagedServiceIdentity */\n ManagedServiceIdentity = \"ManagedServiceIdentity\"\n}\n\n/**\n * Defines values for ODataAuthenticationType. \\\n * {@link KnownODataAuthenticationType} can be used interchangeably with ODataAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **Windows** \\\n * **AadServicePrincipal** \\\n * **ManagedServiceIdentity**\n */\nexport type ODataAuthenticationType = string;\n\n/** Known values of {@link ODataAadServicePrincipalCredentialType} that the service accepts. 
*/\nexport enum KnownODataAadServicePrincipalCredentialType {\n /** ServicePrincipalKey */\n ServicePrincipalKey = \"ServicePrincipalKey\",\n /** ServicePrincipalCert */\n ServicePrincipalCert = \"ServicePrincipalCert\"\n}\n\n/**\n * Defines values for ODataAadServicePrincipalCredentialType. \\\n * {@link KnownODataAadServicePrincipalCredentialType} can be used interchangeably with ODataAadServicePrincipalCredentialType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServicePrincipalKey** \\\n * **ServicePrincipalCert**\n */\nexport type ODataAadServicePrincipalCredentialType = string;\n\n/** Known values of {@link WebAuthenticationType} that the service accepts. */\nexport enum KnownWebAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** ClientCertificate */\n ClientCertificate = \"ClientCertificate\"\n}\n\n/**\n * Defines values for WebAuthenticationType. \\\n * {@link KnownWebAuthenticationType} can be used interchangeably with WebAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **ClientCertificate**\n */\nexport type WebAuthenticationType = string;\n\n/** Known values of {@link MongoDbAuthenticationType} that the service accepts. */\nexport enum KnownMongoDbAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\"\n}\n\n/**\n * Defines values for MongoDbAuthenticationType. \\\n * {@link KnownMongoDbAuthenticationType} can be used interchangeably with MongoDbAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous**\n */\nexport type MongoDbAuthenticationType = string;\n\n/** Known values of {@link RestServiceAuthenticationType} that the service accepts. 
*/\nexport enum KnownRestServiceAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Basic */\n Basic = \"Basic\",\n /** AadServicePrincipal */\n AadServicePrincipal = \"AadServicePrincipal\",\n /** ManagedServiceIdentity */\n ManagedServiceIdentity = \"ManagedServiceIdentity\",\n /** OAuth2ClientCredential */\n OAuth2ClientCredential = \"OAuth2ClientCredential\"\n}\n\n/**\n * Defines values for RestServiceAuthenticationType. \\\n * {@link KnownRestServiceAuthenticationType} can be used interchangeably with RestServiceAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Basic** \\\n * **AadServicePrincipal** \\\n * **ManagedServiceIdentity** \\\n * **OAuth2ClientCredential**\n */\nexport type RestServiceAuthenticationType = string;\n\n/** Known values of {@link TeamDeskAuthenticationType} that the service accepts. */\nexport enum KnownTeamDeskAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Token */\n Token = \"Token\"\n}\n\n/**\n * Defines values for TeamDeskAuthenticationType. \\\n * {@link KnownTeamDeskAuthenticationType} can be used interchangeably with TeamDeskAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Token**\n */\nexport type TeamDeskAuthenticationType = string;\n\n/** Known values of {@link ZendeskAuthenticationType} that the service accepts. */\nexport enum KnownZendeskAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Token */\n Token = \"Token\"\n}\n\n/**\n * Defines values for ZendeskAuthenticationType. 
\\\n * {@link KnownZendeskAuthenticationType} can be used interchangeably with ZendeskAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Token**\n */\nexport type ZendeskAuthenticationType = string;\n\n/** Known values of {@link HttpAuthenticationType} that the service accepts. */\nexport enum KnownHttpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Digest */\n Digest = \"Digest\",\n /** Windows */\n Windows = \"Windows\",\n /** ClientCertificate */\n ClientCertificate = \"ClientCertificate\"\n}\n\n/**\n * Defines values for HttpAuthenticationType. \\\n * {@link KnownHttpAuthenticationType} can be used interchangeably with HttpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **Digest** \\\n * **Windows** \\\n * **ClientCertificate**\n */\nexport type HttpAuthenticationType = string;\n\n/** Known values of {@link FtpAuthenticationType} that the service accepts. */\nexport enum KnownFtpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\"\n}\n\n/**\n * Defines values for FtpAuthenticationType. \\\n * {@link KnownFtpAuthenticationType} can be used interchangeably with FtpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous**\n */\nexport type FtpAuthenticationType = string;\n\n/** Known values of {@link SftpAuthenticationType} that the service accepts. */\nexport enum KnownSftpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** SshPublicKey */\n SshPublicKey = \"SshPublicKey\"\n}\n\n/**\n * Defines values for SftpAuthenticationType. 
\\\n * {@link KnownSftpAuthenticationType} can be used interchangeably with SftpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **SshPublicKey**\n */\nexport type SftpAuthenticationType = string;\n\n/** Known values of {@link SapHanaAuthenticationType} that the service accepts. */\nexport enum KnownSapHanaAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for SapHanaAuthenticationType. \\\n * {@link KnownSapHanaAuthenticationType} can be used interchangeably with SapHanaAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type SapHanaAuthenticationType = string;\n\n/** Known values of {@link GoogleBigQueryAuthenticationType} that the service accepts. */\nexport enum KnownGoogleBigQueryAuthenticationType {\n /** ServiceAuthentication */\n ServiceAuthentication = \"ServiceAuthentication\",\n /** UserAuthentication */\n UserAuthentication = \"UserAuthentication\"\n}\n\n/**\n * Defines values for GoogleBigQueryAuthenticationType. \\\n * {@link KnownGoogleBigQueryAuthenticationType} can be used interchangeably with GoogleBigQueryAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServiceAuthentication** \\\n * **UserAuthentication**\n */\nexport type GoogleBigQueryAuthenticationType = string;\n\n/** Known values of {@link HBaseAuthenticationType} that the service accepts. */\nexport enum KnownHBaseAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Basic */\n Basic = \"Basic\"\n}\n\n/**\n * Defines values for HBaseAuthenticationType. 
\\\n * {@link KnownHBaseAuthenticationType} can be used interchangeably with HBaseAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Basic**\n */\nexport type HBaseAuthenticationType = string;\n\n/** Known values of {@link HiveServerType} that the service accepts. */\nexport enum KnownHiveServerType {\n /** HiveServer1 */\n HiveServer1 = \"HiveServer1\",\n /** HiveServer2 */\n HiveServer2 = \"HiveServer2\",\n /** HiveThriftServer */\n HiveThriftServer = \"HiveThriftServer\"\n}\n\n/**\n * Defines values for HiveServerType. \\\n * {@link KnownHiveServerType} can be used interchangeably with HiveServerType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **HiveServer1** \\\n * **HiveServer2** \\\n * **HiveThriftServer**\n */\nexport type HiveServerType = string;\n\n/** Known values of {@link HiveThriftTransportProtocol} that the service accepts. */\nexport enum KnownHiveThriftTransportProtocol {\n /** Binary */\n Binary = \"Binary\",\n /** Sasl */\n Sasl = \"SASL\",\n /** Http */\n Http = \"HTTP \"\n}\n\n/**\n * Defines values for HiveThriftTransportProtocol. \\\n * {@link KnownHiveThriftTransportProtocol} can be used interchangeably with HiveThriftTransportProtocol,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Binary** \\\n * **SASL** \\\n * **HTTP **\n */\nexport type HiveThriftTransportProtocol = string;\n\n/** Known values of {@link HiveAuthenticationType} that the service accepts. 
*/\nexport enum KnownHiveAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Username */\n Username = \"Username\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for HiveAuthenticationType. \\\n * {@link KnownHiveAuthenticationType} can be used interchangeably with HiveAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Username** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type HiveAuthenticationType = string;\n\n/** Known values of {@link ImpalaAuthenticationType} that the service accepts. */\nexport enum KnownImpalaAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** SaslUsername */\n SaslUsername = \"SASLUsername\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\"\n}\n\n/**\n * Defines values for ImpalaAuthenticationType. \\\n * {@link KnownImpalaAuthenticationType} can be used interchangeably with ImpalaAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **SASLUsername** \\\n * **UsernameAndPassword**\n */\nexport type ImpalaAuthenticationType = string;\n\n/** Known values of {@link PhoenixAuthenticationType} that the service accepts. */\nexport enum KnownPhoenixAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for PhoenixAuthenticationType. 
\\\n * {@link KnownPhoenixAuthenticationType} can be used interchangeably with PhoenixAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type PhoenixAuthenticationType = string;\n\n/** Known values of {@link PrestoAuthenticationType} that the service accepts. */\nexport enum KnownPrestoAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Ldap */\n Ldap = \"LDAP\"\n}\n\n/**\n * Defines values for PrestoAuthenticationType. \\\n * {@link KnownPrestoAuthenticationType} can be used interchangeably with PrestoAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **LDAP**\n */\nexport type PrestoAuthenticationType = string;\n\n/** Known values of {@link ServiceNowAuthenticationType} that the service accepts. */\nexport enum KnownServiceNowAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** OAuth2 */\n OAuth2 = \"OAuth2\"\n}\n\n/**\n * Defines values for ServiceNowAuthenticationType. \\\n * {@link KnownServiceNowAuthenticationType} can be used interchangeably with ServiceNowAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **OAuth2**\n */\nexport type ServiceNowAuthenticationType = string;\n\n/** Known values of {@link SparkServerType} that the service accepts. */\nexport enum KnownSparkServerType {\n /** SharkServer */\n SharkServer = \"SharkServer\",\n /** SharkServer2 */\n SharkServer2 = \"SharkServer2\",\n /** SparkThriftServer */\n SparkThriftServer = \"SparkThriftServer\"\n}\n\n/**\n * Defines values for SparkServerType. 
\\\n * {@link KnownSparkServerType} can be used interchangeably with SparkServerType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SharkServer** \\\n * **SharkServer2** \\\n * **SparkThriftServer**\n */\nexport type SparkServerType = string;\n\n/** Known values of {@link SparkThriftTransportProtocol} that the service accepts. */\nexport enum KnownSparkThriftTransportProtocol {\n /** Binary */\n Binary = \"Binary\",\n /** Sasl */\n Sasl = \"SASL\",\n /** Http */\n Http = \"HTTP \"\n}\n\n/**\n * Defines values for SparkThriftTransportProtocol. \\\n * {@link KnownSparkThriftTransportProtocol} can be used interchangeably with SparkThriftTransportProtocol,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Binary** \\\n * **SASL** \\\n * **HTTP **\n */\nexport type SparkThriftTransportProtocol = string;\n\n/** Known values of {@link SparkAuthenticationType} that the service accepts. */\nexport enum KnownSparkAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Username */\n Username = \"Username\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for SparkAuthenticationType. \\\n * {@link KnownSparkAuthenticationType} can be used interchangeably with SparkAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Username** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type SparkAuthenticationType = string;\n\n/** Known values of {@link GoogleAdWordsAuthenticationType} that the service accepts. 
*/\nexport enum KnownGoogleAdWordsAuthenticationType {\n /** ServiceAuthentication */\n ServiceAuthentication = \"ServiceAuthentication\",\n /** UserAuthentication */\n UserAuthentication = \"UserAuthentication\"\n}\n\n/**\n * Defines values for GoogleAdWordsAuthenticationType. \\\n * {@link KnownGoogleAdWordsAuthenticationType} can be used interchangeably with GoogleAdWordsAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServiceAuthentication** \\\n * **UserAuthentication**\n */\nexport type GoogleAdWordsAuthenticationType = string;\n\n/** Known values of {@link SalesforceSourceReadBehavior} that the service accepts. */\nexport enum KnownSalesforceSourceReadBehavior {\n /** Query */\n Query = \"Query\",\n /** QueryAll */\n QueryAll = \"QueryAll\"\n}\n\n/**\n * Defines values for SalesforceSourceReadBehavior. \\\n * {@link KnownSalesforceSourceReadBehavior} can be used interchangeably with SalesforceSourceReadBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Query** \\\n * **QueryAll**\n */\nexport type SalesforceSourceReadBehavior = string;\n\n/** Known values of {@link SapHanaPartitionOption} that the service accepts. */\nexport enum KnownSapHanaPartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** SapHanaDynamicRange */\n SapHanaDynamicRange = \"SapHanaDynamicRange\"\n}\n\n/**\n * Defines values for SapHanaPartitionOption. 
\\\n * {@link KnownSapHanaPartitionOption} can be used interchangeably with SapHanaPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **SapHanaDynamicRange**\n */\nexport type SapHanaPartitionOption = string;\n\n/** Known values of {@link SapTablePartitionOption} that the service accepts. */\nexport enum KnownSapTablePartitionOption {\n /** None */\n None = \"None\",\n /** PartitionOnInt */\n PartitionOnInt = \"PartitionOnInt\",\n /** PartitionOnCalendarYear */\n PartitionOnCalendarYear = \"PartitionOnCalendarYear\",\n /** PartitionOnCalendarMonth */\n PartitionOnCalendarMonth = \"PartitionOnCalendarMonth\",\n /** PartitionOnCalendarDate */\n PartitionOnCalendarDate = \"PartitionOnCalendarDate\",\n /** PartitionOnTime */\n PartitionOnTime = \"PartitionOnTime\"\n}\n\n/**\n * Defines values for SapTablePartitionOption. \\\n * {@link KnownSapTablePartitionOption} can be used interchangeably with SapTablePartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PartitionOnInt** \\\n * **PartitionOnCalendarYear** \\\n * **PartitionOnCalendarMonth** \\\n * **PartitionOnCalendarDate** \\\n * **PartitionOnTime**\n */\nexport type SapTablePartitionOption = string;\n\n/** Known values of {@link StoredProcedureParameterType} that the service accepts. */\nexport enum KnownStoredProcedureParameterType {\n /** String */\n String = \"String\",\n /** Int */\n Int = \"Int\",\n /** Int64 */\n Int64 = \"Int64\",\n /** Decimal */\n Decimal = \"Decimal\",\n /** Guid */\n Guid = \"Guid\",\n /** Boolean */\n Boolean = \"Boolean\",\n /** Date */\n Date = \"Date\"\n}\n\n/**\n * Defines values for StoredProcedureParameterType. 
\\\n * {@link KnownStoredProcedureParameterType} can be used interchangeably with StoredProcedureParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **String** \\\n * **Int** \\\n * **Int64** \\\n * **Decimal** \\\n * **Guid** \\\n * **Boolean** \\\n * **Date**\n */\nexport type StoredProcedureParameterType = string;\n\n/** Known values of {@link OraclePartitionOption} that the service accepts. */\nexport enum KnownOraclePartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for OraclePartitionOption. \\\n * {@link KnownOraclePartitionOption} can be used interchangeably with OraclePartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **DynamicRange**\n */\nexport type OraclePartitionOption = string;\n\n/** Known values of {@link TeradataPartitionOption} that the service accepts. */\nexport enum KnownTeradataPartitionOption {\n /** None */\n None = \"None\",\n /** Hash */\n Hash = \"Hash\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for TeradataPartitionOption. \\\n * {@link KnownTeradataPartitionOption} can be used interchangeably with TeradataPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Hash** \\\n * **DynamicRange**\n */\nexport type TeradataPartitionOption = string;\n\n/** Known values of {@link CassandraSourceReadConsistencyLevels} that the service accepts. 
*/\nexport enum KnownCassandraSourceReadConsistencyLevels {\n /** ALL */\n ALL = \"ALL\",\n /** EachQuorum */\n EachQuorum = \"EACH_QUORUM\",\n /** Quorum */\n Quorum = \"QUORUM\",\n /** LocalQuorum */\n LocalQuorum = \"LOCAL_QUORUM\",\n /** ONE */\n ONE = \"ONE\",\n /** TWO */\n TWO = \"TWO\",\n /** Three */\n Three = \"THREE\",\n /** LocalONE */\n LocalONE = \"LOCAL_ONE\",\n /** Serial */\n Serial = \"SERIAL\",\n /** LocalSerial */\n LocalSerial = \"LOCAL_SERIAL\"\n}\n\n/**\n * Defines values for CassandraSourceReadConsistencyLevels. \\\n * {@link KnownCassandraSourceReadConsistencyLevels} can be used interchangeably with CassandraSourceReadConsistencyLevels,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ALL** \\\n * **EACH_QUORUM** \\\n * **QUORUM** \\\n * **LOCAL_QUORUM** \\\n * **ONE** \\\n * **TWO** \\\n * **THREE** \\\n * **LOCAL_ONE** \\\n * **SERIAL** \\\n * **LOCAL_SERIAL**\n */\nexport type CassandraSourceReadConsistencyLevels = string;\n\n/** Known values of {@link NetezzaPartitionOption} that the service accepts. */\nexport enum KnownNetezzaPartitionOption {\n /** None */\n None = \"None\",\n /** DataSlice */\n DataSlice = \"DataSlice\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for NetezzaPartitionOption. \\\n * {@link KnownNetezzaPartitionOption} can be used interchangeably with NetezzaPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **DataSlice** \\\n * **DynamicRange**\n */\nexport type NetezzaPartitionOption = string;\n\n/** Known values of {@link NotebookParameterType} that the service accepts. */\nexport enum KnownNotebookParameterType {\n /** String */\n String = \"string\",\n /** Int */\n Int = \"int\",\n /** Float */\n Float = \"float\",\n /** Bool */\n Bool = \"bool\"\n}\n\n/**\n * Defines values for NotebookParameterType. 
\\\n * {@link KnownNotebookParameterType} can be used interchangeably with NotebookParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **string** \\\n * **int** \\\n * **float** \\\n * **bool**\n */\nexport type NotebookParameterType = string;\n\n/** Known values of {@link SapCloudForCustomerSinkWriteBehavior} that the service accepts. */\nexport enum KnownSapCloudForCustomerSinkWriteBehavior {\n /** Insert */\n Insert = \"Insert\",\n /** Update */\n Update = \"Update\"\n}\n\n/**\n * Defines values for SapCloudForCustomerSinkWriteBehavior. \\\n * {@link KnownSapCloudForCustomerSinkWriteBehavior} can be used interchangeably with SapCloudForCustomerSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Insert** \\\n * **Update**\n */\nexport type SapCloudForCustomerSinkWriteBehavior = string;\n\n/** Known values of {@link PolybaseSettingsRejectType} that the service accepts. */\nexport enum KnownPolybaseSettingsRejectType {\n /** Value */\n Value = \"value\",\n /** Percentage */\n Percentage = \"percentage\"\n}\n\n/**\n * Defines values for PolybaseSettingsRejectType. \\\n * {@link KnownPolybaseSettingsRejectType} can be used interchangeably with PolybaseSettingsRejectType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **value** \\\n * **percentage**\n */\nexport type PolybaseSettingsRejectType = string;\n\n/** Known values of {@link AzureSearchIndexWriteBehaviorType} that the service accepts. */\nexport enum KnownAzureSearchIndexWriteBehaviorType {\n /** Merge */\n Merge = \"Merge\",\n /** Upload */\n Upload = \"Upload\"\n}\n\n/**\n * Defines values for AzureSearchIndexWriteBehaviorType. 
\\\n * {@link KnownAzureSearchIndexWriteBehaviorType} can be used interchangeably with AzureSearchIndexWriteBehaviorType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Merge** \\\n * **Upload**\n */\nexport type AzureSearchIndexWriteBehaviorType = string;\n\n/** Known values of {@link DynamicsSinkWriteBehavior} that the service accepts. */\nexport enum KnownDynamicsSinkWriteBehavior {\n /** Upsert */\n Upsert = \"Upsert\"\n}\n\n/**\n * Defines values for DynamicsSinkWriteBehavior. \\\n * {@link KnownDynamicsSinkWriteBehavior} can be used interchangeably with DynamicsSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Upsert**\n */\nexport type DynamicsSinkWriteBehavior = string;\n\n/** Known values of {@link SalesforceSinkWriteBehavior} that the service accepts. */\nexport enum KnownSalesforceSinkWriteBehavior {\n /** Insert */\n Insert = \"Insert\",\n /** Upsert */\n Upsert = \"Upsert\"\n}\n\n/**\n * Defines values for SalesforceSinkWriteBehavior. \\\n * {@link KnownSalesforceSinkWriteBehavior} can be used interchangeably with SalesforceSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Insert** \\\n * **Upsert**\n */\nexport type SalesforceSinkWriteBehavior = string;\n\n/** Known values of {@link HDInsightActivityDebugInfoOption} that the service accepts. */\nexport enum KnownHDInsightActivityDebugInfoOption {\n /** None */\n None = \"None\",\n /** Always */\n Always = \"Always\",\n /** Failure */\n Failure = \"Failure\"\n}\n\n/**\n * Defines values for HDInsightActivityDebugInfoOption. 
\\\n * {@link KnownHDInsightActivityDebugInfoOption} can be used interchangeably with HDInsightActivityDebugInfoOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Always** \\\n * **Failure**\n */\nexport type HDInsightActivityDebugInfoOption = string;\n\n/** Known values of {@link SsisPackageLocationType} that the service accepts. */\nexport enum KnownSsisPackageLocationType {\n /** Ssisdb */\n Ssisdb = \"SSISDB\",\n /** File */\n File = \"File\",\n /** InlinePackage */\n InlinePackage = \"InlinePackage\",\n /** PackageStore */\n PackageStore = \"PackageStore\"\n}\n\n/**\n * Defines values for SsisPackageLocationType. \\\n * {@link KnownSsisPackageLocationType} can be used interchangeably with SsisPackageLocationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SSISDB** \\\n * **File** \\\n * **InlinePackage** \\\n * **PackageStore**\n */\nexport type SsisPackageLocationType = string;\n\n/** Known values of {@link SsisLogLocationType} that the service accepts. */\nexport enum KnownSsisLogLocationType {\n /** File */\n File = \"File\"\n}\n\n/**\n * Defines values for SsisLogLocationType. \\\n * {@link KnownSsisLogLocationType} can be used interchangeably with SsisLogLocationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **File**\n */\nexport type SsisLogLocationType = string;\n\n/** Known values of {@link WebActivityMethod} that the service accepts. */\nexport enum KnownWebActivityMethod {\n /** GET */\n GET = \"GET\",\n /** Post */\n Post = \"POST\",\n /** PUT */\n PUT = \"PUT\",\n /** Delete */\n Delete = \"DELETE\"\n}\n\n/**\n * Defines values for WebActivityMethod. 
\\\n * {@link KnownWebActivityMethod} can be used interchangeably with WebActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **GET** \\\n * **POST** \\\n * **PUT** \\\n * **DELETE**\n */\nexport type WebActivityMethod = string;\n\n/** Known values of {@link AzureFunctionActivityMethod} that the service accepts. */\nexport enum KnownAzureFunctionActivityMethod {\n /** GET */\n GET = \"GET\",\n /** Post */\n Post = \"POST\",\n /** PUT */\n PUT = \"PUT\",\n /** Delete */\n Delete = \"DELETE\",\n /** Options */\n Options = \"OPTIONS\",\n /** Head */\n Head = \"HEAD\",\n /** Trace */\n Trace = \"TRACE\"\n}\n\n/**\n * Defines values for AzureFunctionActivityMethod. \\\n * {@link KnownAzureFunctionActivityMethod} can be used interchangeably with AzureFunctionActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **GET** \\\n * **POST** \\\n * **PUT** \\\n * **DELETE** \\\n * **OPTIONS** \\\n * **HEAD** \\\n * **TRACE**\n */\nexport type AzureFunctionActivityMethod = string;\n\n/** Known values of {@link WebHookActivityMethod} that the service accepts. */\nexport enum KnownWebHookActivityMethod {\n /** Post */\n Post = \"POST\"\n}\n\n/**\n * Defines values for WebHookActivityMethod. \\\n * {@link KnownWebHookActivityMethod} can be used interchangeably with WebHookActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **POST**\n */\nexport type WebHookActivityMethod = string;\n\n/** Known values of {@link DataFlowComputeType} that the service accepts. */\nexport enum KnownDataFlowComputeType {\n /** General */\n General = \"General\",\n /** MemoryOptimized */\n MemoryOptimized = \"MemoryOptimized\",\n /** ComputeOptimized */\n ComputeOptimized = \"ComputeOptimized\"\n}\n\n/**\n * Defines values for DataFlowComputeType. 
\\\n * {@link KnownDataFlowComputeType} can be used interchangeably with DataFlowComputeType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **General** \\\n * **MemoryOptimized** \\\n * **ComputeOptimized**\n */\nexport type DataFlowComputeType = string;\n\n/** Known values of {@link ScriptType} that the service accepts. */\nexport enum KnownScriptType {\n /** Query */\n Query = \"Query\",\n /** NonQuery */\n NonQuery = \"NonQuery\"\n}\n\n/**\n * Defines values for ScriptType. \\\n * {@link KnownScriptType} can be used interchangeably with ScriptType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Query** \\\n * **NonQuery**\n */\nexport type ScriptType = string;\n\n/** Known values of {@link ScriptActivityParameterType} that the service accepts. */\nexport enum KnownScriptActivityParameterType {\n /** Boolean */\n Boolean = \"Boolean\",\n /** DateTime */\n DateTime = \"DateTime\",\n /** DateTimeOffset */\n DateTimeOffset = \"DateTimeOffset\",\n /** Decimal */\n Decimal = \"Decimal\",\n /** Double */\n Double = \"Double\",\n /** Guid */\n Guid = \"Guid\",\n /** Int16 */\n Int16 = \"Int16\",\n /** Int32 */\n Int32 = \"Int32\",\n /** Int64 */\n Int64 = \"Int64\",\n /** Single */\n Single = \"Single\",\n /** String */\n String = \"String\",\n /** Timespan */\n Timespan = \"Timespan\"\n}\n\n/**\n * Defines values for ScriptActivityParameterType. 
\\\n * {@link KnownScriptActivityParameterType} can be used interchangeably with ScriptActivityParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Boolean** \\\n * **DateTime** \\\n * **DateTimeOffset** \\\n * **Decimal** \\\n * **Double** \\\n * **Guid** \\\n * **Int16** \\\n * **Int32** \\\n * **Int64** \\\n * **Single** \\\n * **String** \\\n * **Timespan**\n */\nexport type ScriptActivityParameterType = string;\n\n/** Known values of {@link ScriptActivityParameterDirection} that the service accepts. */\nexport enum KnownScriptActivityParameterDirection {\n /** Input */\n Input = \"Input\",\n /** Output */\n Output = \"Output\",\n /** InputOutput */\n InputOutput = \"InputOutput\"\n}\n\n/**\n * Defines values for ScriptActivityParameterDirection. \\\n * {@link KnownScriptActivityParameterDirection} can be used interchangeably with ScriptActivityParameterDirection,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Input** \\\n * **Output** \\\n * **InputOutput**\n */\nexport type ScriptActivityParameterDirection = string;\n\n/** Known values of {@link ScriptActivityLogDestination} that the service accepts. */\nexport enum KnownScriptActivityLogDestination {\n /** ActivityOutput */\n ActivityOutput = \"ActivityOutput\",\n /** ExternalStore */\n ExternalStore = \"ExternalStore\"\n}\n\n/**\n * Defines values for ScriptActivityLogDestination. \\\n * {@link KnownScriptActivityLogDestination} can be used interchangeably with ScriptActivityLogDestination,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ActivityOutput** \\\n * **ExternalStore**\n */\nexport type ScriptActivityLogDestination = string;\n\n/** Known values of {@link RecurrenceFrequency} that the service accepts. 
*/
export enum KnownRecurrenceFrequency {
  /** NotSpecified */
  NotSpecified = "NotSpecified",
  /** Minute */
  Minute = "Minute",
  /** Hour */
  Hour = "Hour",
  /** Day */
  Day = "Day",
  /** Week */
  Week = "Week",
  /** Month */
  Month = "Month",
  /** Year */
  Year = "Year"
}

/**
 * Defines values for RecurrenceFrequency. \
 * {@link KnownRecurrenceFrequency} can be used interchangeably with RecurrenceFrequency,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **NotSpecified** \
 * **Minute** \
 * **Hour** \
 * **Day** \
 * **Week** \
 * **Month** \
 * **Year**
 */
export type RecurrenceFrequency = string;

/** Known values of {@link BlobEventType} that the service accepts. */
export enum KnownBlobEventType {
  /** MicrosoftStorageBlobCreated */
  MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated",
  /** MicrosoftStorageBlobDeleted */
  MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted"
}

/**
 * Defines values for BlobEventType. \
 * {@link KnownBlobEventType} can be used interchangeably with BlobEventType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Microsoft.Storage.BlobCreated** \
 * **Microsoft.Storage.BlobDeleted**
 */
export type BlobEventType = string;

/** Known values of {@link TumblingWindowFrequency} that the service accepts. */
export enum KnownTumblingWindowFrequency {
  /** Minute */
  Minute = "Minute",
  /** Hour */
  Hour = "Hour",
  /** Month */
  Month = "Month"
}

/**
 * Defines values for TumblingWindowFrequency. \
 * {@link KnownTumblingWindowFrequency} can be used interchangeably with TumblingWindowFrequency,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Minute** \
 * **Hour** \
 * **Month**
 */
export type TumblingWindowFrequency = string;

/** Known values of {@link TriggerReferenceType} that the service accepts. */
export enum KnownTriggerReferenceType {
  /** TriggerReference */
  TriggerReference = "TriggerReference"
}

/**
 * Defines values for TriggerReferenceType. \
 * {@link KnownTriggerReferenceType} can be used interchangeably with TriggerReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **TriggerReference**
 */
export type TriggerReferenceType = string;

/** Known values of {@link IntegrationRuntimeState} that the service accepts. */
export enum KnownIntegrationRuntimeState {
  /** Initial */
  Initial = "Initial",
  /** Stopped */
  Stopped = "Stopped",
  /** Started */
  Started = "Started",
  /** Starting */
  Starting = "Starting",
  /** Stopping */
  Stopping = "Stopping",
  /** NeedRegistration */
  NeedRegistration = "NeedRegistration",
  /** Online */
  Online = "Online",
  /** Limited */
  Limited = "Limited",
  /** Offline */
  Offline = "Offline",
  /** AccessDenied */
  AccessDenied = "AccessDenied"
}

/**
 * Defines values for IntegrationRuntimeState. \
 * {@link KnownIntegrationRuntimeState} can be used interchangeably with IntegrationRuntimeState,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Initial** \
 * **Stopped** \
 * **Started** \
 * **Starting** \
 * **Stopping** \
 * **NeedRegistration** \
 * **Online** \
 * **Limited** \
 * **Offline** \
 * **AccessDenied**
 */
export type IntegrationRuntimeState = string;

/** Known values of {@link IntegrationRuntimeSsisCatalogPricingTier} that the service accepts. */
export enum KnownIntegrationRuntimeSsisCatalogPricingTier {
  /** Basic */
  Basic = "Basic",
  /** Standard */
  Standard = "Standard",
  /** Premium */
  Premium = "Premium",
  /** PremiumRS */
  PremiumRS = "PremiumRS"
}

/**
 * Defines values for IntegrationRuntimeSsisCatalogPricingTier. \
 * {@link KnownIntegrationRuntimeSsisCatalogPricingTier} can be used interchangeably with IntegrationRuntimeSsisCatalogPricingTier,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Basic** \
 * **Standard** \
 * **Premium** \
 * **PremiumRS**
 */
export type IntegrationRuntimeSsisCatalogPricingTier = string;

/** Known values of {@link IntegrationRuntimeLicenseType} that the service accepts. */
export enum KnownIntegrationRuntimeLicenseType {
  /** BasePrice */
  BasePrice = "BasePrice",
  /** LicenseIncluded */
  LicenseIncluded = "LicenseIncluded"
}

/**
 * Defines values for IntegrationRuntimeLicenseType. \
 * {@link KnownIntegrationRuntimeLicenseType} can be used interchangeably with IntegrationRuntimeLicenseType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **BasePrice** \
 * **LicenseIncluded**
 */
export type IntegrationRuntimeLicenseType = string;

/** Known values of {@link IntegrationRuntimeEntityReferenceType} that the service accepts. */
export enum KnownIntegrationRuntimeEntityReferenceType {
  /** IntegrationRuntimeReference */
  IntegrationRuntimeReference = "IntegrationRuntimeReference",
  /** LinkedServiceReference */
  LinkedServiceReference = "LinkedServiceReference"
}

/**
 * Defines values for IntegrationRuntimeEntityReferenceType. \
 * {@link KnownIntegrationRuntimeEntityReferenceType} can be used interchangeably with IntegrationRuntimeEntityReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **IntegrationRuntimeReference** \
 * **LinkedServiceReference**
 */
export type IntegrationRuntimeEntityReferenceType = string;

/** Known values of {@link IntegrationRuntimeEdition} that the service accepts. */
export enum KnownIntegrationRuntimeEdition {
  /** Standard */
  Standard = "Standard",
  /** Enterprise */
  Enterprise = "Enterprise"
}

/**
 * Defines values for IntegrationRuntimeEdition. \
 * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Standard** \
 * **Enterprise**
 */
export type IntegrationRuntimeEdition = string;

/** Known values of {@link JsonFormatFilePattern} that the service accepts. 
*/
export enum KnownJsonFormatFilePattern {
  /** SetOfObjects */
  SetOfObjects = "setOfObjects",
  /** ArrayOfObjects */
  ArrayOfObjects = "arrayOfObjects"
}

/**
 * Defines values for JsonFormatFilePattern. \
 * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **setOfObjects** \
 * **arrayOfObjects**
 */
export type JsonFormatFilePattern = string;

/** Known values of {@link DatasetCompressionLevel} that the service accepts. */
export enum KnownDatasetCompressionLevel {
  /** Optimal */
  Optimal = "Optimal",
  /** Fastest */
  Fastest = "Fastest"
}

/**
 * Defines values for DatasetCompressionLevel. \
 * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Optimal** \
 * **Fastest**
 */
export type DatasetCompressionLevel = string;

/** Known values of {@link AvroCompressionCodec} that the service accepts. */
export enum KnownAvroCompressionCodec {
  /** None */
  None = "none",
  /** Deflate */
  Deflate = "deflate",
  /** Snappy */
  Snappy = "snappy",
  /** Xz */
  Xz = "xz",
  /** Bzip2 */
  Bzip2 = "bzip2"
}

/**
 * Defines values for AvroCompressionCodec. \
 * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none** \
 * **deflate** \
 * **snappy** \
 * **xz** \
 * **bzip2**
 */
export type AvroCompressionCodec = string;

/** Known values of {@link ParquetCompressionCodecEnum} that the service accepts. */
export enum KnownParquetCompressionCodecEnum {
  /** None */
  None = "none",
  /** Gzip */
  Gzip = "gzip",
  /** Snappy */
  Snappy = "snappy",
  /** Lzo */
  Lzo = "lzo"
}

/**
 * Defines values for ParquetCompressionCodecEnum. \
 * {@link KnownParquetCompressionCodecEnum} can be used interchangeably with ParquetCompressionCodecEnum,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none** \
 * **gzip** \
 * **snappy** \
 * **lzo**
 */
export type ParquetCompressionCodecEnum = string;

/** Known values of {@link CompressionCodec} that the service accepts. */
export enum KnownCompressionCodec {
  /** None */
  None = "none",
  /** Lzo */
  Lzo = "lzo",
  /** Bzip2 */
  Bzip2 = "bzip2",
  /** Gzip */
  Gzip = "gzip",
  /** Deflate */
  Deflate = "deflate",
  /** ZipDeflate */
  ZipDeflate = "zipDeflate",
  /** Snappy */
  Snappy = "snappy",
  /** Lz4 */
  Lz4 = "lz4",
  /** Tar */
  Tar = "tar",
  /** TarGZip */
  TarGZip = "tarGZip"
}

/**
 * Defines values for CompressionCodec. \
 * {@link KnownCompressionCodec} can be used interchangeably with CompressionCodec,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none** \
 * **lzo** \
 * **bzip2** \
 * **gzip** \
 * **deflate** \
 * **zipDeflate** \
 * **snappy** \
 * **lz4** \
 * **tar** \
 * **tarGZip**
 */
export type CompressionCodec = string;

/** Known values of {@link OrcCompressionCodec} that the service accepts. */
export enum KnownOrcCompressionCodec {
  /** None */
  None = "none",
  /** Zlib */
  Zlib = "zlib",
  /** Snappy */
  Snappy = "snappy",
  /** Lzo */
  Lzo = "lzo"
}

/**
 * Defines values for OrcCompressionCodec. \
 * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none** \
 * **zlib** \
 * **snappy** \
 * **lzo**
 */
export type OrcCompressionCodec = string;

/** Known values of {@link DynamicsDeploymentType} that the service accepts. */
export enum KnownDynamicsDeploymentType {
  /** Online */
  Online = "Online",
  /** OnPremisesWithIfd */
  OnPremisesWithIfd = "OnPremisesWithIfd"
}

/**
 * Defines values for DynamicsDeploymentType. \
 * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Online** \
 * **OnPremisesWithIfd**
 */
export type DynamicsDeploymentType = string;

/** Known values of {@link DynamicsAuthenticationType} that the service accepts. */
export enum KnownDynamicsAuthenticationType {
  /** Office365 */
  Office365 = "Office365",
  /** Ifd */
  Ifd = "Ifd",
  /** AADServicePrincipal */
  AADServicePrincipal = "AADServicePrincipal"
}

/**
 * Defines values for DynamicsAuthenticationType. \
 * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Office365** \
 * **Ifd** \
 * **AADServicePrincipal**
 */
export type DynamicsAuthenticationType = string;

/** Known values of {@link DynamicsServicePrincipalCredentialType} that the service accepts. */
export enum KnownDynamicsServicePrincipalCredentialType {
  /** ServicePrincipalKey */
  ServicePrincipalKey = "ServicePrincipalKey",
  /** ServicePrincipalCert */
  ServicePrincipalCert = "ServicePrincipalCert"
}

/**
 * Defines values for DynamicsServicePrincipalCredentialType. \
 * {@link KnownDynamicsServicePrincipalCredentialType} can be used interchangeably with DynamicsServicePrincipalCredentialType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **ServicePrincipalKey** \
 * **ServicePrincipalCert**
 */
export type DynamicsServicePrincipalCredentialType = string;

/** Known values of {@link HdiNodeTypes} that the service accepts. */
export enum KnownHdiNodeTypes {
  /** Headnode */
  Headnode = "Headnode",
  /** Workernode */
  Workernode = "Workernode",
  /** Zookeeper */
  Zookeeper = "Zookeeper"
}

/**
 * Defines values for HdiNodeTypes. \
 * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Headnode** \
 * **Workernode** \
 * **Zookeeper**
 */
export type HdiNodeTypes = string;

/** Known values of {@link JsonWriteFilePattern} that the service accepts. */
export enum KnownJsonWriteFilePattern {
  /** SetOfObjects */
  SetOfObjects = "setOfObjects",
  /** ArrayOfObjects */
  ArrayOfObjects = "arrayOfObjects"
}

/**
 * Defines values for JsonWriteFilePattern. \
 * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **setOfObjects** \
 * **arrayOfObjects**
 */
export type JsonWriteFilePattern = string;

/** Known values of {@link AmazonRdsForOraclePartitionOption} that the service accepts. */
export enum KnownAmazonRdsForOraclePartitionOption {
  /** None */
  None = "None",
  /** PhysicalPartitionsOfTable */
  PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
  /** DynamicRange */
  DynamicRange = "DynamicRange"
}

/**
 * Defines values for AmazonRdsForOraclePartitionOption. \
 * {@link KnownAmazonRdsForOraclePartitionOption} can be used interchangeably with AmazonRdsForOraclePartitionOption,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **None** \
 * **PhysicalPartitionsOfTable** \
 * **DynamicRange**
 */
export type AmazonRdsForOraclePartitionOption = string;

/** Known values of {@link CopyBehaviorType} that the service accepts. */
export enum KnownCopyBehaviorType {
  /** PreserveHierarchy */
  PreserveHierarchy = "PreserveHierarchy",
  /** FlattenHierarchy */
  FlattenHierarchy = "FlattenHierarchy",
  /** MergeFiles */
  MergeFiles = "MergeFiles"
}

/**
 * Defines values for CopyBehaviorType. \
 * {@link KnownCopyBehaviorType} can be used interchangeably with CopyBehaviorType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **PreserveHierarchy** \
 * **FlattenHierarchy** \
 * **MergeFiles**
 */
export type CopyBehaviorType = string;

/** Known values of {@link SqlPartitionOption} that the service accepts. */
export enum KnownSqlPartitionOption {
  /** None */
  None = "None",
  /** PhysicalPartitionsOfTable */
  PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
  /** DynamicRange */
  DynamicRange = "DynamicRange"
}

/**
 * Defines values for SqlPartitionOption. \
 * {@link KnownSqlPartitionOption} can be used interchangeably with SqlPartitionOption,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **None** \
 * **PhysicalPartitionsOfTable** \
 * **DynamicRange**
 */
export type SqlPartitionOption = string;
/** Defines values for ResourceIdentityType. */
export type ResourceIdentityType = "None" | "SystemAssigned";
/** Defines values for DayOfWeek. 
*/
export type DayOfWeek =
  | "Sunday"
  | "Monday"
  | "Tuesday"
  | "Wednesday"
  | "Thursday"
  | "Friday"
  | "Saturday";

/** Optional parameters. */
export interface LinkConnectionListLinkConnectionsByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listLinkConnectionsByWorkspace operation. */
export type LinkConnectionListLinkConnectionsByWorkspaceResponse = LinkConnectionListResponse;

/** Optional parameters. */
export interface LinkConnectionCreateOrUpdateLinkConnectionOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the createOrUpdateLinkConnection operation. */
export type LinkConnectionCreateOrUpdateLinkConnectionResponse = LinkConnectionResource;

/** Optional parameters. */
export interface LinkConnectionGetLinkConnectionOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getLinkConnection operation. */
export type LinkConnectionGetLinkConnectionResponse = LinkConnectionResource;

/** Optional parameters. */
export interface LinkConnectionDeleteLinkConnectionOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface LinkConnectionEditTablesOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface LinkConnectionStartOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface LinkConnectionStopOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface LinkConnectionGetDetailedStatusOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDetailedStatus operation. */
export type LinkConnectionGetDetailedStatusResponse = LinkConnectionDetailedStatus;

/** Optional parameters. */
export interface LinkConnectionListLinkTablesOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listLinkTables operation. */
export type LinkConnectionListLinkTablesResponse = LinkTableListResponse;

/** Optional parameters. */
export interface LinkConnectionQueryTableStatusOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the queryTableStatus operation. */
export type LinkConnectionQueryTableStatusResponse = LinkConnectionQueryTableStatus;

/** Optional parameters. */
export interface LinkConnectionUpdateLandingZoneCredentialOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface LinkConnectionListLinkConnectionsByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listLinkConnectionsByWorkspaceNext operation. */
export type LinkConnectionListLinkConnectionsByWorkspaceNextResponse = LinkConnectionListResponse;

/** Optional parameters. */
export interface KqlScriptsGetAllOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getAll operation. */
export type KqlScriptsGetAllResponse = KqlScriptsResourceCollectionResponse;

/** Optional parameters. */
export interface KqlScriptsGetAllNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getAllNext operation. */
export type KqlScriptsGetAllNextResponse = KqlScriptsResourceCollectionResponse;

/** Optional parameters. */
export interface KqlScriptCreateOrUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdate operation. */
export type KqlScriptCreateOrUpdateResponse = KqlScriptResource;

/** Optional parameters. */
export interface KqlScriptGetByNameOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getByName operation. */
export type KqlScriptGetByNameResponse = KqlScriptResource;

/** Optional parameters. */
export interface KqlScriptDeleteByNameOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface KqlScriptRenameOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface MetastoreRegisterOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the register operation. */
export type MetastoreRegisterResponse = MetastoreRegistrationResponse;

/** Optional parameters. */
export interface MetastoreGetDatabaseOperationsOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDatabaseOperations operation. */
export type MetastoreGetDatabaseOperationsResponse = MetastoreRequestSuccessResponse;

/** Optional parameters. */
export interface MetastoreUpdateOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the update operation. */
export type MetastoreUpdateResponse = MetastoreUpdationResponse;

/** Optional parameters. */
export interface MetastoreDeleteOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. 
*/
export interface SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSparkConfigurationsByWorkspace operation. */
export type SparkConfigurationGetSparkConfigurationsByWorkspaceResponse = SparkConfigurationListResponse;

/** Optional parameters. */
export interface SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the sparkConfiguration entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
  ifMatch?: string;
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdateSparkConfiguration operation. */
export type SparkConfigurationCreateOrUpdateSparkConfigurationResponse = SparkConfigurationResource;

/** Optional parameters. */
export interface SparkConfigurationGetSparkConfigurationOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the sparkConfiguration entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
  ifNoneMatch?: string;
}

/** Contains response data for the getSparkConfiguration operation. */
export type SparkConfigurationGetSparkConfigurationResponse = SparkConfigurationResource;

/** Optional parameters. */
export interface SparkConfigurationDeleteSparkConfigurationOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SparkConfigurationRenameSparkConfigurationOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getSparkConfigurationsByWorkspaceNext operation. */
export type SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse = SparkConfigurationListResponse;

/** Optional parameters. */
export interface BigDataPoolsListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult;

/** Optional parameters. */
export interface BigDataPoolsGetOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type BigDataPoolsGetResponse = BigDataPoolResourceInfo;

/** Optional parameters. */
export interface DataFlowCreateOrUpdateDataFlowOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
  ifMatch?: string;
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdateDataFlow operation. */
export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource;

/** Optional parameters. */
export interface DataFlowGetDataFlowOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
  ifNoneMatch?: string;
}

/** Contains response data for the getDataFlow operation. */
export type DataFlowGetDataFlowResponse = DataFlowResource;

/** Optional parameters. */
export interface DataFlowDeleteDataFlowOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DataFlowRenameDataFlowOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DataFlowGetDataFlowsByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDataFlowsByWorkspace operation. */
export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse;

/** Optional parameters. */
export interface DataFlowGetDataFlowsByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDataFlowsByWorkspaceNext operation. */
export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse;

/** Optional parameters. */
export interface DataFlowDebugSessionCreateDataFlowDebugSessionOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createDataFlowDebugSession operation. */
export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse;

/** Optional parameters. */
export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the queryDataFlowDebugSessionsByWorkspace operation. */
export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse;

/** Optional parameters. */
export interface DataFlowDebugSessionAddDataFlowOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the addDataFlow operation. */
export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse;

/** Optional parameters. */
export interface DataFlowDebugSessionDeleteDataFlowDebugSessionOptionalParams
  extends coreClient.OperationOptions {}

/** Optional parameters. */
export interface DataFlowDebugSessionExecuteCommandOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the executeCommand operation. */
export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse;

/** Optional parameters. */
export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation. 
*/
export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse;

/** Optional parameters. */
export interface DatasetGetDatasetsByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDatasetsByWorkspace operation. */
export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse;

/** Optional parameters. */
export interface DatasetCreateOrUpdateDatasetOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
  ifMatch?: string;
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdateDataset operation. */
export type DatasetCreateOrUpdateDatasetResponse = DatasetResource;

/** Optional parameters. */
export interface DatasetGetDatasetOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
  ifNoneMatch?: string;
}

/** Contains response data for the getDataset operation. */
export type DatasetGetDatasetResponse = DatasetResource;

/** Optional parameters. */
export interface DatasetDeleteDatasetOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatasetRenameDatasetOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface DatasetGetDatasetsByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getDatasetsByWorkspaceNext operation. */
export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse;

/** Optional parameters. */
export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams
  extends coreClient.OperationOptions {
  /** Can provide a guid, which is helpful for debugging and to provide better customer support */
  clientRequestId?: string;
}

/** Contains response data for the getGitHubAccessToken operation. */
export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse;

/** Optional parameters. */
export interface IntegrationRuntimesListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse;

/** Optional parameters. */
export interface IntegrationRuntimesGetOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource;

/** Optional parameters. */
export interface LibraryListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type LibraryListOperationResponse = LibraryListResponse;

/** Optional parameters. */
export interface LibraryFlushOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface LibraryGetOperationResultOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getOperationResult operation. */
export type LibraryGetOperationResultResponse = LibraryResource;

/** Optional parameters. */
export interface LibraryDeleteOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface LibraryGetOptionalParams extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type LibraryGetResponse = LibraryResource;

/** Optional parameters. */
export interface LibraryCreateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface LibraryAppendOptionalParams
  extends coreClient.OperationOptions {
  /** Set this header to a byte offset at which the block is expected to be appended. The request succeeds only if the current offset matches this value. Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 – Precondition Failed) */
  blobConditionAppendPosition?: number;
}

/** Optional parameters. */
export interface LibraryListNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listNext operation. */
export type LibraryListNextResponse = LibraryListResponse;

/** Optional parameters. */
export interface LinkedServiceGetLinkedServicesByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getLinkedServicesByWorkspace operation. */
export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse;

/** Optional parameters. */
export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
  ifMatch?: string;
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdateLinkedService operation. */
export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource;

/** Optional parameters. */
export interface LinkedServiceGetLinkedServiceOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
  ifNoneMatch?: string;
}

/** Contains response data for the getLinkedService operation. */
export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource;

/** Optional parameters. */
export interface LinkedServiceDeleteLinkedServiceOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface LinkedServiceRenameLinkedServiceOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Optional parameters. */
export interface LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getLinkedServicesByWorkspaceNext operation. */
export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse;

/** Optional parameters. */
export interface NotebookGetNotebooksByWorkspaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getNotebooksByWorkspace operation. */
export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse;

/** Optional parameters. */
export interface NotebookGetNotebookSummaryByWorkSpaceOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the getNotebookSummaryByWorkSpace operation. */
export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse;

/** Optional parameters. */
export interface NotebookCreateOrUpdateNotebookOptionalParams
  extends coreClient.OperationOptions {
  /** ETag of the notebook entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
  ifMatch?: string;
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdateNotebook operation. */
export type NotebookCreateOrUpdateNotebookResponse = NotebookResource;

/** Optional parameters. 
*/\nexport interface NotebookGetNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getNotebook operation. */\nexport type NotebookGetNotebookResponse = NotebookResource;\n\n/** Optional parameters. */\nexport interface NotebookDeleteNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface NotebookRenameNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface NotebookGetNotebooksByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebooksByWorkspaceNext operation. */\nexport type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse;\n\n/** Optional parameters. */\nexport interface NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebookSummaryByWorkSpaceNext operation. */\nexport type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse;\n\n/** Optional parameters. */\nexport interface NotebookOperationResultGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. 
*/\nexport interface PipelineGetPipelinesByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelinesByWorkspace operation. */\nexport type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse;\n\n/** Optional parameters. */\nexport interface PipelineCreateOrUpdatePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdatePipeline operation. */\nexport type PipelineCreateOrUpdatePipelineResponse = PipelineResource;\n\n/** Optional parameters. */\nexport interface PipelineGetPipelineOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getPipeline operation. */\nexport type PipelineGetPipelineResponse = PipelineResource;\n\n/** Optional parameters. */\nexport interface PipelineDeletePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface PipelineRenamePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. 
*/\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface PipelineCreatePipelineRunOptionalParams\n extends coreClient.OperationOptions {\n /** Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */\n parameters?: { [propertyName: string]: any };\n /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */\n referencePipelineRunId?: string;\n /** Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId. */\n isRecovery?: boolean;\n /** In recovery mode, the rerun will start from this activity. If not specified, all activities will run. */\n startActivityName?: string;\n}\n\n/** Contains response data for the createPipelineRun operation. */\nexport type PipelineCreatePipelineRunResponse = CreateRunResponse;\n\n/** Optional parameters. */\nexport interface PipelineGetPipelinesByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelinesByWorkspaceNext operation. */\nexport type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse;\n\n/** Optional parameters. */\nexport interface PipelineRunQueryPipelineRunsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryPipelineRunsByWorkspace operation. */\nexport type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface PipelineRunGetPipelineRunOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelineRun operation. */\nexport type PipelineRunGetPipelineRunResponse = PipelineRun;\n\n/** Optional parameters. 
*/\nexport interface PipelineRunQueryActivityRunsOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryActivityRuns operation. */\nexport type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface PipelineRunCancelPipelineRunOptionalParams\n extends coreClient.OperationOptions {\n /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */\n isRecursive?: boolean;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkJobDefinitionsByWorkspace operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Spark Job Definition entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateSparkJobDefinition operation. */\nexport type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Spark Job Definition entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getSparkJobDefinition operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the executeSparkJobDefinition operation. */\nexport type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionRenameSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionDebugSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the debugSparkJobDefinition operation. */\nexport type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob;\n\n/** Optional parameters. 
*/\nexport interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkJobDefinitionsByWorkspaceNext operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse;\n\n/** Optional parameters. */\nexport interface SqlPoolsListOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the list operation. */\nexport type SqlPoolsListResponse = SqlPoolInfoListResult;\n\n/** Optional parameters. */\nexport interface SqlPoolsGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type SqlPoolsGetResponse = SqlPool;\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSqlScriptsByWorkspace operation. */\nexport type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse;\n\n/** Optional parameters. */\nexport interface SqlScriptCreateOrUpdateSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateSqlScript operation. */\nexport type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource;\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the sql compute entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getSqlScript operation. */\nexport type SqlScriptGetSqlScriptResponse = SqlScriptResource;\n\n/** Optional parameters. */\nexport interface SqlScriptDeleteSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SqlScriptRenameSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSqlScriptsByWorkspaceNext operation. */\nexport type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse;\n\n/** Optional parameters. */\nexport interface TriggerGetTriggersByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getTriggersByWorkspace operation. */\nexport type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse;\n\n/** Optional parameters. */\nexport interface TriggerCreateOrUpdateTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. 
*/\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateTrigger operation. */\nexport type TriggerCreateOrUpdateTriggerResponse = TriggerResource;\n\n/** Optional parameters. */\nexport interface TriggerGetTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getTrigger operation. */\nexport type TriggerGetTriggerResponse = TriggerResource;\n\n/** Optional parameters. */\nexport interface TriggerDeleteTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface TriggerSubscribeTriggerToEventsOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the subscribeTriggerToEvents operation. */\nexport type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. */\nexport interface TriggerGetEventSubscriptionStatusOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getEventSubscriptionStatus operation. */\nexport type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. 
*/\nexport interface TriggerUnsubscribeTriggerFromEventsOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the unsubscribeTriggerFromEvents operation. */\nexport type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. */\nexport interface TriggerStartTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface TriggerStopTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface TriggerGetTriggersByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getTriggersByWorkspaceNext operation. */\nexport type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse;\n\n/** Optional parameters. */\nexport interface TriggerRunRerunTriggerInstanceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface TriggerRunCancelTriggerInstanceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface TriggerRunQueryTriggerRunsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryTriggerRunsByWorkspace operation. 
*/\nexport type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface WorkspaceGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type WorkspaceGetResponse = Workspace;\n\n/** Optional parameters. */\nexport interface ArtifactsClientOptionalParams\n extends coreClient.ServiceClientOptions {\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]}
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AA6qUH,6DAA6D;AAC7D,MAAM,CAAN,IAAY,SAGX;AAHD,WAAY,SAAS;IACnB,6BAA6B;IAC7B,8DAAiD,CAAA;AACnD,CAAC,EAHW,SAAS,KAAT,SAAS,QAGpB;AAWD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAOX;AAPD,WAAY,kBAAkB;IAC5B,cAAc;IACd,yCAAmB,CAAA;IACnB,gBAAgB;IAChB,6CAAuB,CAAA;IACvB,aAAa;IACb,uCAAiB,CAAA;AACnB,CAAC,EAPW,kBAAkB,KAAlB,kBAAkB,QAO7B;AAaD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,eAAe;IACf,4CAAqB,CAAA;IACrB,cAAc;IACd,0CAAmB,CAAA;IACnB,aAAa;IACb,wCAAiB,CAAA;AACnB,CAAC,EAPW,mBAAmB,KAAnB,mBAAmB,QAO9B;AAaD,iEAAiE;AACjE,MAAM,CAAN,IAAY,aAeX;AAfD,WAAY,aAAa;IACvB,WAAW;IACX,8BAAa,CAAA;IACb,YAAY;IACZ,gCAAe,CAAA;IACf,aAAa;IACb,kCAAiB,CAAA;IACjB,YAAY;IACZ,gCAAe,CAAA;IACf,aAAa;IACb,kCAAiB,CAAA;IACjB,cAAc;IACd,oCAAmB,CAAA;IACnB,eAAe;IACf,sCAAqB,CAAA;AACvB,CAAC,EAfW,aAAa,KAAb,aAAa,QAexB;AAiBD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAKX;AALD,WAAY,mBAAmB;IAC7B,WAAW;IACX,oCAAa,CAAA;IACb,sBAAsB;IACtB,0DAAmC,CAAA;AACrC,CAAC,EALW,mBAAmB,KAAnB,mBAAmB,QAK9B;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,cAAc;IACd,kDAAmB,CAAA;IACnB,iBAAiB;IACjB,wDAAyB,CAAA;AAC3B,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAeX;AAfD,WAAY,kBAAkB;IAC5B,aAAa;IACb,uCAAiB,CAAA;IACjB,aAAa;IACb,uCAAiB,CAAA;IACjB,UAAU;IACV,iCAAW,CAAA;IACX,YAAY;IACZ,qCAAe,CAAA;IACf,WAAW;IACX,mCAAa,CAAA;IACb,YAAY;IACZ,qCAAe,CAAA;IACf,mBAAmB;IACnB,mDAA6B,CAAA;AAC/B,CAAC,EAfW,kBAAkB,KAAlB,kBAAkB,QAe7B;AAiBD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAGX;AAHD,WAAY,oCAAoC;IAC9C,kCAAkC;IAClC,mGAA2D,CAAA;AAC7D,CAAC,EAHW,oCAAoC,KAApC,oCAAoC,QAG/C;AAWD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAOX;AAPD,WAAY,6BAA6B;IACvC,0BAA0B;IAC1B,4EAA2C,CAAA;IAC3C,6BAA6B;IAC7B,kFAAiD,CAAA;IACjD,6BAA6B;IAC7B,kFAAiD,CAAA;AACnD,CAAC,EAPW,6BAA6B,KAA7B,6BAA6B,QAOxC;AAaD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAGX;AAHD,WAAY,6BAA6B;IACvC,2BAA2B;IAC3B,8EAA6C,CAAA;AAC/C,CAAC,EAHW,6BAA6B,KAA7B,6BAA6B,QAGxC;AAWD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAGX;AAHD,WAAY,o
CAAoC;IAC9C,kCAAkC;IAClC,mGAA2D,CAAA;AAC7D,CAAC,EAHW,oCAAoC,KAApC,oCAAoC,QAG/C;AAWD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBASX;AATD,WAAY,mBAAmB;IAC7B,oBAAoB;IACpB,uDAAgC,CAAA;IAChC,kBAAkB;IAClB,mDAA4B,CAAA;IAC5B,aAAa;IACb,wCAAiB,CAAA;IACjB,YAAY;IACZ,sCAAe,CAAA;AACjB,CAAC,EATW,mBAAmB,KAAnB,mBAAmB,QAS9B;AAcD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBASX;AATD,WAAY,wBAAwB;IAClC,gBAAgB;IAChB,mDAAuB,CAAA;IACvB,aAAa;IACb,6CAAiB,CAAA;IACjB,cAAc;IACd,+CAAmB,CAAA;IACnB,gBAAgB;IAChB,mDAAuB,CAAA;AACzB,CAAC,EATW,wBAAwB,KAAxB,wBAAwB,QASnC;AAcD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBASX;AATD,WAAY,iBAAiB;IAC3B,aAAa;IACb,sCAAiB,CAAA;IACjB,WAAW;IACX,kCAAa,CAAA;IACb,cAAc;IACd,wCAAmB,CAAA;IACnB,YAAY;IACZ,oCAAe,CAAA;AACjB,CAAC,EATW,iBAAiB,KAAjB,iBAAiB,QAS5B;AAcD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAyBX;AAzBD,WAAY,0BAA0B;IACpC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,aAAa;IACb,+CAAiB,CAAA;IACjB,eAAe;IACf,mDAAqB,CAAA;IACrB,aAAa;IACb,+CAAiB,CAAA;IACjB,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,uBAAuB;IACvB,mEAAqC,CAAA;IACrC,qBAAqB;IACrB,+DAAiC,CAAA;IACjC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,kBAAkB;IAClB,yDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,yEAA2C,CAAA;IAC3C,iBAAiB;IACjB,uDAAyB,CAAA;IACzB,iBAAiB;IACjB,uDAAyB,CAAA;AAC3B,CAAC,EAzBW,0BAA0B,KAA1B,0BAA0B,QAyBrC;AAsBD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BASX;AATD,WAAY,2BAA2B;IACrC,aAAa;IACb,gDAAiB,CAAA;IACjB,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,SAAS;IACT,wCAAS,CAAA;IACT,YAAY;IACZ,8CAAe,CAAA;AACjB,CAAC,EATW,2BAA2B,KAA3B,2BAA2B,QAStC;AAcD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAmBX;AAnBD,WAAY,yBAAyB;IACnC,eAAe;IACf,kDAAqB,CAAA;IACrB,aAAa;IACb,8CAAiB,CAAA;IACjB,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,aAAa;IACb,8CAAiB,CAAA;IACjB,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,uBAAuB;IACvB,kEAAqC,CAAA;IACrC,qBAAqB;IACrB,8DAAiC,CAAA;IACjC,kBAAkB;IAClB,wDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,wEAA2C,CAAA;AAC7C,CAAC,EAnBW,yBAAyB,KAAzB,yBAAyB,QAmBpC;AAmBD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAKX;AALD,WAAY,kBAAkB;IAC5B,UAAU;IACV,iCAAW,CAAA;IACX,WAAW;IACX,mCAAa,CAAA;AACf,CAAC,EALW,kBAAkB,KAAlB,kBAAkB,QAK7B;AAYD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBAKX;AALD,WAAY,iBAAiB;IAC3B,iBAAiB;IACjB,8CAAyB,CA
AA;IACzB,mBAAmB;IACnB,kDAA6B,CAAA;AAC/B,CAAC,EALW,iBAAiB,KAAjB,iBAAiB,QAK5B;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,aAAa;IACb,iDAAiB,CAAA;IACjB,gBAAgB;IAChB,uDAAuB,CAAA;AACzB,CAAC,EATW,4BAA4B,KAA5B,4BAA4B,QASvC;AAcD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,aAAa;IACb,+CAAiB,CAAA;IACjB,gBAAgB;IAChB,qDAAuB,CAAA;IACvB,YAAY;IACZ,6CAAe,CAAA;AACjB,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,2EAA2E;AAC3E,MAAM,CAAN,IAAY,uBAeX;AAfD,WAAY,uBAAuB;IACjC,kBAAkB;IAClB,sDAA2B,CAAA;IAC3B,0BAA0B;IAC1B,sEAA2C,CAAA;IAC3C,aAAa;IACb,4CAAiB,CAAA;IACjB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,iBAAiB;IACjB,oDAAyB,CAAA;IACzB,cAAc;IACd,8CAAmB,CAAA;IACnB,YAAY;IACZ,0CAAe,CAAA;AACjB,CAAC,EAfW,uBAAuB,KAAvB,uBAAuB,QAelC;AAiBD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBASX;AATD,WAAY,qBAAqB;IAC/B,aAAa;IACb,0CAAiB,CAAA;IACjB,WAAW;IACX,sCAAa,CAAA;IACb,cAAc;IACd,4CAAmB,CAAA;IACnB,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EATW,qBAAqB,KAArB,qBAAqB,QAShC;AAcD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eAuBX;AAvBD,WAAY,eAAe;IACzB,iBAAiB;IACjB,6CAA0B,CAAA;IAC1B,eAAe;IACf,wCAAqB,CAAA;IACrB,WAAW;IACX,gCAAa,CAAA;IACb,WAAW;IACX,gCAAa,CAAA;IACb,mBAAmB;IACnB,iDAA8B,CAAA;IAC9B,YAAY;IACZ,kCAAe,CAAA;IACf,WAAW;IACX,gCAAa,CAAA;IACb,aAAa;IACb,oCAAiB,CAAA;IACjB,cAAc;IACd,sCAAmB,CAAA;IACnB,cAAc;IACd,sCAAmB,CAAA;IACnB,iBAAiB;IACjB,4CAAyB,CAAA;AAC3B,CAAC,EAvBW,eAAe,KAAf,eAAe,QAuB1B;AAqBD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eASX;AATD,WAAY,eAAe;IACzB,cAAc;IACd,sCAAmB,CAAA;IACnB,yBAAyB;IACzB,4DAAyC,CAAA;IACzC,eAAe;IACf,wCAAqB,CAAA;IACrB,cAAc;IACd,sCAAmB,CAAA;AACrB,CAAC,EATW,eAAe,KAAf,eAAe,QAS1B;AAcD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAGX;AAHD,WAAY,kBAAkB;IAC5B,eAAe;IACf,2CAAqB,CAAA;AACvB,CAAC,EAHW,kBAAkB,KAAlB,kBAAkB,QAG7B;AAWD,0EAA0E;AAC1E,MAAM,CAAN,IAAY,sBAKX;AALD,WAAY,sBAAsB;IAChC,kBAAkB;IAClB,qDAA2B,CAAA;IAC3B,cAAc;IACd,6CAAmB,CAAA;AACrB,CAAC,EALW,sBAAsB,KAAtB,sBAAsB,QAKjC;AAYD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAOX;AAPD,WAAY,wBAAwB;IAClC,cAAc;IACd,+CAAmB,CAAA;IACnB,cAAc;IACd,+CAAmB,CAAA;
IACnB,eAAe;IACf,iDAAqB,CAAA;AACvB,CAAC,EAPW,wBAAwB,KAAxB,wBAAwB,QAOnC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAWX;AAXD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,mBAAmB;IACnB,6DAA6B,CAAA;IAC7B,qBAAqB;IACrB,iEAAiC,CAAA;IACjC,eAAe;IACf,qDAAqB,CAAA;IACrB,cAAc;IACd,mDAAmB,CAAA;AACrB,CAAC,EAXW,4BAA4B,KAA5B,4BAA4B,QAWvC;AAeD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBAOX;AAPD,WAAY,qBAAqB;IAC/B,gBAAgB;IAChB,gDAAuB,CAAA;IACvB,aAAa;IACb,0CAAiB,CAAA;IACjB,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EAPW,qBAAqB,KAArB,qBAAqB,QAOhC;AAaD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAGX;AAHD,WAAY,mBAAmB;IAC7B,iBAAiB;IACjB,gDAAyB,CAAA;AAC3B,CAAC,EAHW,mBAAmB,KAAnB,mBAAmB,QAG9B;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,kCAAkC;IAClC,yFAA2D,CAAA;AAC7D,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,YAAY;IACZ,gDAAe,CAAA;IACf,cAAc;IACd,oDAAmB,CAAA;AACrB,CAAC,EALW,6BAA6B,KAA7B,6BAA6B,QAKxC;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;AACjB,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,cAAc;IACd,sDAAmB,CAAA;AACrB,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAWX;AAXD,WAAY,4BAA4B;IACtC,YAAY;IACZ,+CAAe,CAAA;IACf,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,cAAc;IACd,mDAAmB,CAAA;IACnB,0BAA0B;IAC1B,2EAA2C
,CAAA;IAC3C,6BAA6B;IAC7B,iFAAiD,CAAA;AACnD,CAAC,EAXW,4BAA4B,KAA5B,4BAA4B,QAWvC;AAeD,+FAA+F;AAC/F,MAAM,CAAN,IAAY,2CAKX;AALD,WAAY,2CAA2C;IACrD,0BAA0B;IAC1B,0FAA2C,CAAA;IAC3C,2BAA2B;IAC3B,4FAA6C,CAAA;AAC/C,CAAC,EALW,2CAA2C,KAA3C,2CAA2C,QAKtD;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;IACf,gBAAgB;IAChB,qDAAuB,CAAA;IACvB,wBAAwB;IACxB,qEAAuC,CAAA;AACzC,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,gBAAgB;IAChB,yDAAuB,CAAA;AACzB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,sFAAsF;AACtF,MAAM,CAAN,IAAY,kCAWX;AAXD,WAAY,kCAAkC;IAC5C,gBAAgB;IAChB,6DAAuB,CAAA;IACvB,YAAY;IACZ,qDAAe,CAAA;IACf,0BAA0B;IAC1B,iFAA2C,CAAA;IAC3C,6BAA6B;IAC7B,uFAAiD,CAAA;IACjD,6BAA6B;IAC7B,uFAAiD,CAAA;AACnD,CAAC,EAXW,kCAAkC,KAAlC,kCAAkC,QAW7C;AAeD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,YAAY;IACZ,kDAAe,CAAA;AACjB,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,YAAY;IACZ,iDAAe,CAAA;AACjB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAWX;AAXD,WAAY,2BAA2B;IACrC,YAAY;IACZ,8CAAe,CAAA;IACf,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,aAAa;IACb,gDAAiB,CAAA;IACjB,cAAc;IACd,kDAAmB,CAAA;IACnB,wBAAwB;IACxB,sEAAuC,CAAA;AACzC,CAAC,EAXW,2BAA2B,KAA3B,2BAA2B,QAWtC;AAeD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAKX;AALD,WAAY,0BAA0B;IACpC,YAAY;IACZ,6CAAe,CAAA;IACf,gBAAgB;IAChB,qDAAuB,CAAA;AACzB,CAAC,EALW,0BAA0B,KAA1B,0BAA0B,QAKrC;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,YAAY;IACZ,8CAAe,CAAA;IACf,mBAAmB;IACnB,4DAA6B,CAAA;AAC/B,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,YAAY;IACZ,iDAAe,CAAA;IACf,cAAc;IACd,qDAAmB,CAAA;AACrB,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAKX;AALD,WAAY,qCAAqC;IAC/C,4BAA4B;IAC5B,wFAA+C,CAAA;IAC/C,yBAAyB;IACzB,kFAAyC,CAAA;AAC3C,CAAC,EALW,qCAAqC,KAArC,qCAAqC,QAKhD;AAYD,g
FAAgF;AAChF,MAAM,CAAN,IAAY,4BAKX;AALD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,YAAY;IACZ,+CAAe,CAAA;AACjB,CAAC,EALW,4BAA4B,KAA5B,4BAA4B,QAKvC;AAYD,uEAAuE;AACvE,MAAM,CAAN,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,kBAAkB;IAClB,kDAA2B,CAAA;IAC3B,kBAAkB;IAClB,kDAA2B,CAAA;IAC3B,uBAAuB;IACvB,4DAAqC,CAAA;AACvC,CAAC,EAPW,mBAAmB,KAAnB,mBAAmB,QAO9B;AAaD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAOX;AAPD,WAAY,gCAAgC;IAC1C,aAAa;IACb,qDAAiB,CAAA;IACjB,WAAW;IACX,iDAAa,CAAA;IACb,WAAW;IACX,kDAAc,CAAA;AAChB,CAAC,EAPW,gCAAgC,KAAhC,gCAAgC,QAO3C;AAaD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BASX;AATD,WAAY,2BAA2B;IACrC,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,eAAe;IACf,oDAAqB,CAAA;IACrB,0BAA0B;IAC1B,0EAA2C,CAAA;IAC3C,mCAAmC;IACnC,4FAA6D,CAAA;AAC/D,CAAC,EATW,2BAA2B,KAA3B,2BAA2B,QAStC;AAcD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAOX;AAPD,WAAY,6BAA6B;IACvC,gBAAgB;IAChB,wDAAuB,CAAA;IACvB,mBAAmB;IACnB,8DAA6B,CAAA;IAC7B,0BAA0B;IAC1B,4EAA2C,CAAA;AAC7C,CAAC,EAPW,6BAA6B,KAA7B,6BAA6B,QAOxC;AAaD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAOX;AAPD,WAAY,8BAA8B;IACxC,gBAAgB;IAChB,yDAAuB,CAAA;IACvB,0BAA0B;IAC1B,6EAA2C,CAAA;IAC3C,mCAAmC;IACnC,+FAA6D,CAAA;AAC/D,CAAC,EAPW,8BAA8B,KAA9B,8BAA8B,QAOzC;AAaD,iFAAiF;AACjF,MAAM,CAAN,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,gBAAgB;IAChB,wDAAuB,CAAA;IACvB,WAAW;IACX,8CAAa,CAAA;AACf,CAAC,EALW,6BAA6B,KAA7B,6BAA6B,QAKxC;AAYD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,YAAY;IACZ,oDAAe,CAAA;IACf,aAAa;IACb,sDAAiB,CAAA;AACnB,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,wEAAwE;AACxE,MAAM,CAAN,IAAY,oBAOX;AAPD,WAAY,oBAAoB;IAC9B,kBAAkB;IAClB,mDAA2B,CAAA;IAC3B,mBAAmB;IACnB,qDAA6B,CAAA;IAC7B,wBAAwB;IACxB,+DAAuC,CAAA;AACzC,CAAC,EAPW,oBAAoB,KAApB,oBAAoB,QAO/B;AAaD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAOX;AAPD,WAAY,iCAAiC;IAC3C,aAAa;IACb,sDAAiB,CAAA;IACjB,WAAW;IACX,kDAAa,CAAA;IACb,WAAW;IACX,mDAAc,CAAA;AAChB,CAAC,EAPW,iCAAiC,KAAjC,iCAAiC,QAO5C;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,gBAAgB;IAChB,uDAAuB,CAAA;IACvB,eAAe;IACf,qDAAqB,CAAA;IACrB,0BAA0B;IAC1B,2EAA2C,CAAA;IAC3C,mCAAmC;IACnC,6FAA6D,CAAA;AAC/D,CAAC,EATW,4BAA4B,KA
A5B,4BAA4B,QASvC;AAcD,wFAAwF;AACxF,MAAM,CAAN,IAAY,oCAKX;AALD,WAAY,oCAAoC;IAC9C,4BAA4B;IAC5B,uFAA+C,CAAA;IAC/C,yBAAyB;IACzB,iFAAyC,CAAA;AAC3C,CAAC,EALW,oCAAoC,KAApC,oCAAoC,QAK/C;AAYD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,YAAY;IACZ,oDAAe,CAAA;IACf,eAAe;IACf,0DAAqB,CAAA;AACvB,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAOX;AAPD,WAAY,2BAA2B;IACrC,WAAW;IACX,4CAAa,CAAA;IACb,gCAAgC;IAChC,sFAAuD,CAAA;IACvD,0BAA0B;IAC1B,0EAA2C,CAAA;AAC7C,CAAC,EAPW,2BAA2B,KAA3B,2BAA2B,QAOtC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAaX;AAbD,WAAY,4BAA4B;IACtC,WAAW;IACX,6CAAa,CAAA;IACb,qBAAqB;IACrB,iEAAiC,CAAA;IACjC,8BAA8B;IAC9B,mFAAmD,CAAA;IACnD,+BAA+B;IAC/B,qFAAqD,CAAA;IACrD,8BAA8B;IAC9B,mFAAmD,CAAA;IACnD,sBAAsB;IACtB,mEAAmC,CAAA;AACrC,CAAC,EAbW,4BAA4B,KAA5B,4BAA4B,QAavC;AAgBD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAeX;AAfD,WAAY,iCAAiC;IAC3C,aAAa;IACb,sDAAiB,CAAA;IACjB,UAAU;IACV,gDAAW,CAAA;IACX,YAAY;IACZ,oDAAe,CAAA;IACf,cAAc;IACd,wDAAmB,CAAA;IACnB,WAAW;IACX,kDAAa,CAAA;IACb,cAAc;IACd,wDAAmB,CAAA;IACnB,WAAW;IACX,kDAAa,CAAA;AACf,CAAC,EAfW,iCAAiC,KAAjC,iCAAiC,QAe5C;AAiBD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAOX;AAPD,WAAY,0BAA0B;IACpC,WAAW;IACX,2CAAa,CAAA;IACb,gCAAgC;IAChC,qFAAuD,CAAA;IACvD,mBAAmB;IACnB,2DAA6B,CAAA;AAC/B,CAAC,EAPW,0BAA0B,KAA1B,0BAA0B,QAOrC;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAOX;AAPD,WAAY,4BAA4B;IACtC,WAAW;IACX,6CAAa,CAAA;IACb,WAAW;IACX,6CAAa,CAAA;IACb,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EAPW,4BAA4B,KAA5B,4BAA4B,QAOvC;AAaD,6FAA6F;AAC7F,MAAM,CAAN,IAAY,yCAqBX;AArBD,WAAY,yCAAyC;IACnD,UAAU;IACV,wDAAW,CAAA;IACX,iBAAiB;IACjB,uEAA0B,CAAA;IAC1B,aAAa;IACb,8DAAiB,CAAA;IACjB,kBAAkB;IAClB,yEAA4B,CAAA;IAC5B,UAAU;IACV,wDAAW,CAAA;IACX,UAAU;IACV,wDAAW,CAAA;IACX,YAAY;IACZ,4DAAe,CAAA;IACf,eAAe;IACf,mEAAsB,CAAA;IACtB,aAAa;IACb,8DAAiB,CAAA;IACjB,kBAAkB;IAClB,yEAA4B,CAAA;AAC9B,CAAC,EArBW,yCAAyC,KAAzC,yCAAyC,QAqBpD;AAoBD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAOX;AAPD,WAAY,2BAA2B;IACrC,WAAW;IACX,4CAAa,CAAA;IACb,gBAAgB;IAChB,sDAAuB,CAAA;IACvB,mBAAmB;IACnB,4DAA6B,CAAA;AAC/B,CAAC,EAPW,2BAA2B,
KAA3B,2BAA2B,QAOtC;AAaD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BASX;AATD,WAAY,0BAA0B;IACpC,aAAa;IACb,+CAAiB,CAAA;IACjB,UAAU;IACV,yCAAW,CAAA;IACX,YAAY;IACZ,6CAAe,CAAA;IACf,WAAW;IACX,2CAAa,CAAA;AACf,CAAC,EATW,0BAA0B,KAA1B,0BAA0B,QASrC;AAcD,6FAA6F;AAC7F,MAAM,CAAN,IAAY,yCAKX;AALD,WAAY,yCAAyC;IACnD,aAAa;IACb,8DAAiB,CAAA;IACjB,aAAa;IACb,8DAAiB,CAAA;AACnB,CAAC,EALW,yCAAyC,KAAzC,yCAAyC,QAKpD;AAYD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAKX;AALD,WAAY,+BAA+B;IACzC,YAAY;IACZ,kDAAe,CAAA;IACf,iBAAiB;IACjB,4DAAyB,CAAA;AAC3B,CAAC,EALW,+BAA+B,KAA/B,+BAA+B,QAK1C;AAYD,0FAA0F;AAC1F,MAAM,CAAN,IAAY,sCAKX;AALD,WAAY,sCAAsC;IAChD,YAAY;IACZ,yDAAe,CAAA;IACf,aAAa;IACb,2DAAiB,CAAA;AACnB,CAAC,EALW,sCAAsC,KAAtC,sCAAsC,QAKjD;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAGX;AAHD,WAAY,8BAA8B;IACxC,aAAa;IACb,mDAAiB,CAAA;AACnB,CAAC,EAHW,8BAA8B,KAA9B,8BAA8B,QAGzC;AAWD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAKX;AALD,WAAY,gCAAgC;IAC1C,aAAa;IACb,qDAAiB,CAAA;IACjB,aAAa;IACb,qDAAiB,CAAA;AACnB,CAAC,EALW,gCAAgC,KAAhC,gCAAgC,QAK3C;AAYD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAOX;AAPD,WAAY,qCAAqC;IAC/C,WAAW;IACX,sDAAa,CAAA;IACb,aAAa;IACb,0DAAiB,CAAA;IACjB,cAAc;IACd,4DAAmB,CAAA;AACrB,CAAC,EAPW,qCAAqC,KAArC,qCAAqC,QAOhD;AAaD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BASX;AATD,WAAY,4BAA4B;IACtC,aAAa;IACb,iDAAiB,CAAA;IACjB,WAAW;IACX,6CAAa,CAAA;IACb,oBAAoB;IACpB,+DAA+B,CAAA;IAC/B,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EATW,4BAA4B,KAA5B,4BAA4B,QASvC;AAcD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAGX;AAHD,WAAY,wBAAwB;IAClC,WAAW;IACX,yCAAa,CAAA;AACf,CAAC,EAHW,wBAAwB,KAAxB,wBAAwB,QAGnC;AAWD,0EAA0E;AAC1E,MAAM,CAAN,IAAY,sBASX;AATD,WAAY,sBAAsB;IAChC,UAAU;IACV,qCAAW,CAAA;IACX,WAAW;IACX,uCAAa,CAAA;IACb,UAAU;IACV,qCAAW,CAAA;IACX,aAAa;IACb,2CAAiB,CAAA;AACnB,CAAC,EATW,sBAAsB,KAAtB,sBAAsB,QASjC;AAcD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAeX;AAfD,WAAY,gCAAgC;IAC1C,UAAU;IACV,+CAAW,CAAA;IACX,WAAW;IACX,iDAAa,CAAA;IACb,UAAU;IACV,+CAAW,CAAA;IACX,aAAa;IACb,qDAAiB,CAAA;IACjB,cAAc;IACd,uDAAmB,CAAA;IACnB,WAAW;IACX,iDAAa,CAAA;IACb,YAAY;IACZ,mDAAe,CAAA;AACjB,CAAC,EAfW,gCAAgC,KAAhC,gCAAgC,QAe3C;AAiBD,8EAA8E;AAC9E,MAAM,CAAN,
IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,WAAW;IACX,2CAAa,CAAA;AACf,CAAC,EAHW,0BAA0B,KAA1B,0BAA0B,QAGrC;AAWD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAOX;AAPD,WAAY,wBAAwB;IAClC,cAAc;IACd,+CAAmB,CAAA;IACnB,sBAAsB;IACtB,+DAAmC,CAAA;IACnC,uBAAuB;IACvB,iEAAqC,CAAA;AACvC,CAAC,EAPW,wBAAwB,KAAxB,wBAAwB,QAOnC;AAaD,mEAAmE;AACnE,MAAM,CAAN,IAAY,eAKX;AALD,WAAY,eAAe;IACzB,YAAY;IACZ,kCAAe,CAAA;IACf,eAAe;IACf,wCAAqB,CAAA;AACvB,CAAC,EALW,eAAe,KAAf,eAAe,QAK1B;AAYD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCAyBX;AAzBD,WAAY,gCAAgC;IAC1C,cAAc;IACd,uDAAmB,CAAA;IACnB,eAAe;IACf,yDAAqB,CAAA;IACrB,qBAAqB;IACrB,qEAAiC,CAAA;IACjC,cAAc;IACd,uDAAmB,CAAA;IACnB,aAAa;IACb,qDAAiB,CAAA;IACjB,WAAW;IACX,iDAAa,CAAA;IACb,YAAY;IACZ,mDAAe,CAAA;IACf,YAAY;IACZ,mDAAe,CAAA;IACf,YAAY;IACZ,mDAAe,CAAA;IACf,aAAa;IACb,qDAAiB,CAAA;IACjB,aAAa;IACb,qDAAiB,CAAA;IACjB,eAAe;IACf,yDAAqB,CAAA;AACvB,CAAC,EAzBW,gCAAgC,KAAhC,gCAAgC,QAyB3C;AAsBD,yFAAyF;AACzF,MAAM,CAAN,IAAY,qCAOX;AAPD,WAAY,qCAAqC;IAC/C,YAAY;IACZ,wDAAe,CAAA;IACf,aAAa;IACb,0DAAiB,CAAA;IACjB,kBAAkB;IAClB,oEAA2B,CAAA;AAC7B,CAAC,EAPW,qCAAqC,KAArC,qCAAqC,QAOhD;AAaD,qFAAqF;AACrF,MAAM,CAAN,IAAY,iCAKX;AALD,WAAY,iCAAiC;IAC3C,qBAAqB;IACrB,sEAAiC,CAAA;IACjC,oBAAoB;IACpB,oEAA+B,CAAA;AACjC,CAAC,EALW,iCAAiC,KAAjC,iCAAiC,QAK5C;AAYD,0EAA0E;AAC1E,MAAM,CAAN,IAAY,sBAOX;AAPD,WAAY,sBAAsB;IAChC,cAAc;IACd,6CAAmB,CAAA;IACnB,iBAAiB;IACjB,mDAAyB,CAAA;IACzB,eAAe;IACf,+CAAqB,CAAA;AACvB,CAAC,EAPW,sBAAsB,KAAtB,sBAAsB,QAOjC;AAaD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBAeX;AAfD,WAAY,wBAAwB;IAClC,mBAAmB;IACnB,yDAA6B,CAAA;IAC7B,aAAa;IACb,6CAAiB,CAAA;IACjB,WAAW;IACX,yCAAa,CAAA;IACb,UAAU;IACV,uCAAW,CAAA;IACX,WAAW;IACX,yCAAa,CAAA;IACb,YAAY;IACZ,2CAAe,CAAA;IACf,WAAW;IACX,yCAAa,CAAA;AACf,CAAC,EAfW,wBAAwB,KAAxB,wBAAwB,QAenC;AAiBD,sEAAsE;AACtE,MAAM,CAAN,IAAY,kBAKX;AALD,WAAY,kBAAkB;IAC5B,kCAAkC;IAClC,mFAA6D,CAAA;IAC7D,kCAAkC;IAClC,mFAA6D,CAAA;AAC/D,CAAC,EALW,kBAAkB,KAAlB,kBAAkB,QAK7B;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAOX;AAPD,WAAY,4BAA4B;IACtC,aAAa;IACb,iDAAiB,CAAA;IACjB,WAAW;IACX,6CAAa,CAAA;IACb,YAAY;IACZ,+CAAe,CAAA;AACjB,CAAC,EAPW,4BAA4B,KA
A5B,4BAA4B,QAOvC;AAaD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAGX;AAHD,WAAY,yBAAyB;IACnC,uBAAuB;IACvB,kEAAqC,CAAA;AACvC,CAAC,EAHW,yBAAyB,KAAzB,yBAAyB,QAGpC;AAWD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAqBX;AArBD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,eAAe;IACf,qDAAqB,CAAA;IACrB,eAAe;IACf,qDAAqB,CAAA;IACrB,uBAAuB;IACvB,qEAAqC,CAAA;IACrC,aAAa;IACb,iDAAiB,CAAA;IACjB,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;IACnB,mBAAmB;IACnB,6DAA6B,CAAA;AAC/B,CAAC,EArBW,4BAA4B,KAA5B,4BAA4B,QAqBvC;AAoBD,iGAAiG;AACjG,MAAM,CAAN,IAAY,6CASX;AATD,WAAY,6CAA6C;IACvD,YAAY;IACZ,gEAAe,CAAA;IACf,eAAe;IACf,sEAAqB,CAAA;IACrB,cAAc;IACd,oEAAmB,CAAA;IACnB,gBAAgB;IAChB,wEAAuB,CAAA;AACzB,CAAC,EATW,6CAA6C,KAA7C,6CAA6C,QASxD;AAcD,sFAAsF;AACtF,MAAM,CAAN,IAAY,kCAKX;AALD,WAAY,kCAAkC;IAC5C,gBAAgB;IAChB,6DAAuB,CAAA;IACvB,sBAAsB;IACtB,yEAAmC,CAAA;AACrC,CAAC,EALW,kCAAkC,KAAlC,kCAAkC,QAK7C;AAYD,8FAA8F;AAC9F,MAAM,CAAN,IAAY,0CAKX;AALD,WAAY,0CAA0C;IACpD,kCAAkC;IAClC,yGAA2D,CAAA;IAC3D,6BAA6B;IAC7B,+FAAiD,CAAA;AACnD,CAAC,EALW,0CAA0C,KAA1C,0CAA0C,QAKrD;AAYD,kFAAkF;AAClF,MAAM,CAAN,IAAY,8BAKX;AALD,WAAY,8BAA8B;IACxC,eAAe;IACf,uDAAqB,CAAA;IACrB,iBAAiB;IACjB,2DAAyB,CAAA;AAC3B,CAAC,EALW,8BAA8B,KAA9B,8BAA8B,QAKzC;AAYD,8EAA8E;AAC9E,MAAM,CAAN,IAAY,0BAKX;AALD,WAAY,0BAA0B;IACpC,mBAAmB;IACnB,2DAA6B,CAAA;IAC7B,qBAAqB;IACrB,+DAAiC,CAAA;AACnC,CAAC,EALW,0BAA0B,KAA1B,0BAA0B,QAKrC;AAYD,gFAAgF;AAChF,MAAM,CAAN,IAAY,4BAKX;AALD,WAAY,4BAA4B;IACtC,cAAc;IACd,mDAAmB,CAAA;IACnB,cAAc;IACd,mDAAmB,CAAA;AACrB,CAAC,EALW,4BAA4B,KAA5B,4BAA4B,QAKvC;AAYD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAWX;AAXD,WAAY,yBAAyB;IACnC,WAAW;IACX,0CAAa,CAAA;IACb,cAAc;IACd,gDAAmB,CAAA;IACnB,aAAa;IACb,8CAAiB,CAAA;IACjB,SAAS;IACT,sCAAS,CAAA;IACT,YAAY;IACZ,4CAAe,CAAA;AACjB,CAAC,EAXW,yBAAyB,KAAzB,yBAAyB,QAWpC;AAeD,oFAAoF;AACpF,MAAM,CAAN,IAAY,gCASX;AATD,WAAY,gCAAgC;IAC1C,WAAW;IACX,iDAAa,CAAA;IACb,WAAW;IACX,iDAAa,CAAA;IACb,aAAa;IACb,qDAAiB,CAAA;IACjB,UAAU;IACV,+CAAW,CAAA;AACb,CAAC,EATW,gCAAgC,KAAhC,gCAAgC,QAS3C;AAcD,yEAAyE;AACzE,MAAM,CAA
N,IAAY,qBAqBX;AArBD,WAAY,qBAAqB;IAC/B,WAAW;IACX,sCAAa,CAAA;IACb,UAAU;IACV,oCAAW,CAAA;IACX,YAAY;IACZ,wCAAe,CAAA;IACf,WAAW;IACX,sCAAa,CAAA;IACb,cAAc;IACd,4CAAmB,CAAA;IACnB,iBAAiB;IACjB,kDAAyB,CAAA;IACzB,aAAa;IACb,0CAAiB,CAAA;IACjB,UAAU;IACV,oCAAW,CAAA;IACX,UAAU;IACV,oCAAW,CAAA;IACX,cAAc;IACd,4CAAmB,CAAA;AACrB,CAAC,EArBW,qBAAqB,KAArB,qBAAqB,QAqBhC;AAoBD,4EAA4E;AAC5E,MAAM,CAAN,IAAY,wBASX;AATD,WAAY,wBAAwB;IAClC,WAAW;IACX,yCAAa,CAAA;IACb,WAAW;IACX,yCAAa,CAAA;IACb,aAAa;IACb,6CAAiB,CAAA;IACjB,UAAU;IACV,uCAAW,CAAA;AACb,CAAC,EATW,wBAAwB,KAAxB,wBAAwB,QASnC;AAcD,+EAA+E;AAC/E,MAAM,CAAN,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,aAAa;IACb,gDAAiB,CAAA;IACjB,wBAAwB;IACxB,sEAAuC,CAAA;AACzC,CAAC,EALW,2BAA2B,KAA3B,2BAA2B,QAKtC;AAYD,mFAAmF;AACnF,MAAM,CAAN,IAAY,+BAOX;AAPD,WAAY,+BAA+B;IACzC,gBAAgB;IAChB,0DAAuB,CAAA;IACvB,UAAU;IACV,8CAAW,CAAA;IACX,0BAA0B;IAC1B,8EAA2C,CAAA;AAC7C,CAAC,EAPW,+BAA+B,KAA/B,+BAA+B,QAO1C;AAaD,+FAA+F;AAC/F,MAAM,CAAN,IAAY,2CAKX;AALD,WAAY,2CAA2C;IACrD,0BAA0B;IAC1B,0FAA2C,CAAA;IAC3C,2BAA2B;IAC3B,4FAA6C,CAAA;AAC/C,CAAC,EALW,2CAA2C,KAA3C,2CAA2C,QAKtD;AAYD,qEAAqE;AACrE,MAAM,CAAN,IAAY,iBAOX;AAPD,WAAY,iBAAiB;IAC3B,eAAe;IACf,0CAAqB,CAAA;IACrB,iBAAiB;IACjB,8CAAyB,CAAA;IACzB,gBAAgB;IAChB,4CAAuB,CAAA;AACzB,CAAC,EAPW,iBAAiB,KAAjB,iBAAiB,QAO5B;AAaD,6EAA6E;AAC7E,MAAM,CAAN,IAAY,yBAKX;AALD,WAAY,yBAAyB;IACnC,mBAAmB;IACnB,0DAA6B,CAAA;IAC7B,qBAAqB;IACrB,8DAAiC,CAAA;AACnC,CAAC,EALW,yBAAyB,KAAzB,yBAAyB,QAKpC;AAYD,0FAA0F;AAC1F,MAAM,CAAN,IAAY,sCAOX;AAPD,WAAY,sCAAsC;IAChD,WAAW;IACX,uDAAa,CAAA;IACb,gCAAgC;IAChC,iGAAuD,CAAA;IACvD,mBAAmB;IACnB,uEAA6B,CAAA;AAC/B,CAAC,EAPW,sCAAsC,KAAtC,sCAAsC,QAOjD;AAaD,yEAAyE;AACzE,MAAM,CAAN,IAAY,qBAOX;AAPD,WAAY,qBAAqB;IAC/B,wBAAwB;IACxB,gEAAuC,CAAA;IACvC,uBAAuB;IACvB,8DAAqC,CAAA;IACrC,iBAAiB;IACjB,kDAAyB,CAAA;AAC3B,CAAC,EAPW,qBAAqB,KAArB,qBAAqB,QAOhC;AAaD,2EAA2E;AAC3E,MAAM,CAAN,IAAY,uBAOX;AAPD,WAAY,uBAAuB;IACjC,WAAW;IACX,wCAAa,CAAA;IACb,gCAAgC;IAChC,kFAAuD,CAAA;IACvD,mBAAmB;IACnB,wDAA6B,CAAA;AAC/B,CAAC,EAPW,uBAAuB,KAAvB,uBAAuB,QAOlC","sourcesContent":["/*\n * 
Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreClient from \"@azure/core-client\";\n\nexport type SecretBaseUnion =\n | SecretBase\n | SecureString\n | AzureKeyVaultSecretReference;\nexport type DataFlowUnion = DataFlow | MappingDataFlow | Flowlet;\nexport type IntegrationRuntimeUnion =\n | IntegrationRuntime\n | ManagedIntegrationRuntime\n | SelfHostedIntegrationRuntime;\nexport type DatasetUnion =\n | Dataset\n | AmazonS3Dataset\n | AvroDataset\n | ExcelDataset\n | ParquetDataset\n | DelimitedTextDataset\n | JsonDataset\n | XmlDataset\n | OrcDataset\n | BinaryDataset\n | AzureBlobDataset\n | AzureTableDataset\n | AzureSqlTableDataset\n | AzureSqlMITableDataset\n | AzureSqlDWTableDataset\n | CassandraTableDataset\n | CustomDataset\n | CosmosDbSqlApiCollectionDataset\n | DocumentDbCollectionDataset\n | DynamicsEntityDataset\n | DynamicsCrmEntityDataset\n | CommonDataServiceForAppsEntityDataset\n | AzureDataLakeStoreDataset\n | AzureBlobFSDataset\n | Office365Dataset\n | FileShareDataset\n | MongoDbCollectionDataset\n | MongoDbAtlasCollectionDataset\n | MongoDbV2CollectionDataset\n | CosmosDbMongoDbApiCollectionDataset\n | ODataResourceDataset\n | OracleTableDataset\n | AmazonRdsForOracleTableDataset\n | TeradataTableDataset\n | AzureMySqlTableDataset\n | AmazonRedshiftTableDataset\n | Db2TableDataset\n | RelationalTableDataset\n | InformixTableDataset\n | OdbcTableDataset\n | MySqlTableDataset\n | PostgreSqlTableDataset\n | MicrosoftAccessTableDataset\n | SalesforceObjectDataset\n | SalesforceServiceCloudObjectDataset\n | SybaseTableDataset\n | SapBwCubeDataset\n | SapCloudForCustomerResourceDataset\n | SapEccResourceDataset\n | SapHanaTableDataset\n | SapOpenHubTableDataset\n | SqlServerTableDataset\n | AmazonRdsForSqlServerTableDataset\n | RestResourceDataset\n | 
SapTableResourceDataset\n | SapOdpResourceDataset\n | WebTableDataset\n | AzureSearchIndexDataset\n | HttpDataset\n | AmazonMWSObjectDataset\n | AzurePostgreSqlTableDataset\n | ConcurObjectDataset\n | CouchbaseTableDataset\n | DrillTableDataset\n | EloquaObjectDataset\n | GoogleBigQueryObjectDataset\n | GreenplumTableDataset\n | HBaseObjectDataset\n | HiveObjectDataset\n | HubspotObjectDataset\n | ImpalaObjectDataset\n | JiraObjectDataset\n | MagentoObjectDataset\n | MariaDBTableDataset\n | AzureMariaDBTableDataset\n | MarketoObjectDataset\n | PaypalObjectDataset\n | PhoenixObjectDataset\n | PrestoObjectDataset\n | QuickBooksObjectDataset\n | ServiceNowObjectDataset\n | ShopifyObjectDataset\n | SparkObjectDataset\n | SquareObjectDataset\n | XeroObjectDataset\n | ZohoObjectDataset\n | NetezzaTableDataset\n | VerticaTableDataset\n | SalesforceMarketingCloudObjectDataset\n | ResponsysObjectDataset\n | DynamicsAXResourceDataset\n | OracleServiceCloudObjectDataset\n | AzureDataExplorerTableDataset\n | GoogleAdWordsObjectDataset\n | SnowflakeDataset\n | SharePointOnlineListResourceDataset\n | AzureDatabricksDeltaLakeDataset;\nexport type LinkedServiceUnion =\n | LinkedService\n | AzureStorageLinkedService\n | AzureBlobStorageLinkedService\n | AzureTableStorageLinkedService\n | AzureSqlDWLinkedService\n | SqlServerLinkedService\n | AmazonRdsForSqlServerLinkedService\n | AzureSqlDatabaseLinkedService\n | AzureSqlMILinkedService\n | AzureBatchLinkedService\n | AzureKeyVaultLinkedService\n | CosmosDbLinkedService\n | DynamicsLinkedService\n | DynamicsCrmLinkedService\n | CommonDataServiceForAppsLinkedService\n | HDInsightLinkedService\n | FileServerLinkedService\n | AzureFileStorageLinkedService\n | GoogleCloudStorageLinkedService\n | OracleLinkedService\n | AmazonRdsForOracleLinkedService\n | AzureMySqlLinkedService\n | MySqlLinkedService\n | PostgreSqlLinkedService\n | SybaseLinkedService\n | Db2LinkedService\n | TeradataLinkedService\n | AzureMLLinkedService\n | 
AzureMLServiceLinkedService\n | OdbcLinkedService\n | InformixLinkedService\n | MicrosoftAccessLinkedService\n | HdfsLinkedService\n | ODataLinkedService\n | WebLinkedService\n | CassandraLinkedService\n | MongoDbLinkedService\n | MongoDbAtlasLinkedService\n | MongoDbV2LinkedService\n | CosmosDbMongoDbApiLinkedService\n | AzureDataLakeStoreLinkedService\n | AzureBlobFSLinkedService\n | Office365LinkedService\n | SalesforceLinkedService\n | SalesforceServiceCloudLinkedService\n | SapCloudForCustomerLinkedService\n | SapEccLinkedService\n | SapOpenHubLinkedService\n | SapOdpLinkedService\n | RestServiceLinkedService\n | TeamDeskLinkedService\n | QuickbaseLinkedService\n | SmartsheetLinkedService\n | ZendeskLinkedService\n | DataworldLinkedService\n | AppFiguresLinkedService\n | AsanaLinkedService\n | TwilioLinkedService\n | GoogleSheetsLinkedService\n | AmazonS3LinkedService\n | AmazonRedshiftLinkedService\n | CustomDataSourceLinkedService\n | AzureSearchLinkedService\n | HttpLinkedService\n | FtpServerLinkedService\n | SftpServerLinkedService\n | SapBWLinkedService\n | SapHanaLinkedService\n | AmazonMWSLinkedService\n | AzurePostgreSqlLinkedService\n | ConcurLinkedService\n | CouchbaseLinkedService\n | DrillLinkedService\n | EloquaLinkedService\n | GoogleBigQueryLinkedService\n | GreenplumLinkedService\n | HBaseLinkedService\n | HiveLinkedService\n | HubspotLinkedService\n | ImpalaLinkedService\n | JiraLinkedService\n | MagentoLinkedService\n | MariaDBLinkedService\n | AzureMariaDBLinkedService\n | MarketoLinkedService\n | PaypalLinkedService\n | PhoenixLinkedService\n | PrestoLinkedService\n | QuickBooksLinkedService\n | ServiceNowLinkedService\n | ShopifyLinkedService\n | SparkLinkedService\n | SquareLinkedService\n | XeroLinkedService\n | ZohoLinkedService\n | VerticaLinkedService\n | NetezzaLinkedService\n | SalesforceMarketingCloudLinkedService\n | HDInsightOnDemandLinkedService\n | AzureDataLakeAnalyticsLinkedService\n | AzureDatabricksLinkedService\n | 
AzureDatabricksDeltaLakeLinkedService\n | ResponsysLinkedService\n | DynamicsAXLinkedService\n | OracleServiceCloudLinkedService\n | GoogleAdWordsLinkedService\n | SapTableLinkedService\n | AzureDataExplorerLinkedService\n | AzureFunctionLinkedService\n | SnowflakeLinkedService\n | SharePointOnlineListLinkedService\n | AzureSynapseArtifactsLinkedService\n | PowerBIWorkspaceLinkedService;\nexport type ActivityUnion =\n | Activity\n | ControlActivityUnion\n | ExecutionActivityUnion\n | SqlPoolStoredProcedureActivity;\nexport type TriggerUnion =\n | Trigger\n | RerunTumblingWindowTrigger\n | MultiplePipelineTriggerUnion\n | TumblingWindowTrigger\n | ChainingTrigger;\nexport type DatasetLocationUnion =\n | DatasetLocation\n | AzureBlobStorageLocation\n | AzureBlobFSLocation\n | AzureDataLakeStoreLocation\n | AmazonS3Location\n | FileServerLocation\n | AzureFileStorageLocation\n | GoogleCloudStorageLocation\n | FtpServerLocation\n | SftpLocation\n | HttpServerLocation\n | HdfsLocation;\nexport type DatasetStorageFormatUnion =\n | DatasetStorageFormat\n | TextFormat\n | JsonFormat\n | AvroFormat\n | OrcFormat\n | ParquetFormat;\nexport type WebLinkedServiceTypePropertiesUnion =\n | WebLinkedServiceTypeProperties\n | WebAnonymousAuthentication\n | WebBasicAuthentication\n | WebClientCertificateAuthentication;\nexport type StoreReadSettingsUnion =\n | StoreReadSettings\n | AzureBlobStorageReadSettings\n | AzureBlobFSReadSettings\n | AzureDataLakeStoreReadSettings\n | AmazonS3ReadSettings\n | FileServerReadSettings\n | AzureFileStorageReadSettings\n | GoogleCloudStorageReadSettings\n | FtpReadSettings\n | SftpReadSettings\n | HttpReadSettings\n | HdfsReadSettings;\nexport type StoreWriteSettingsUnion =\n | StoreWriteSettings\n | SftpWriteSettings\n | AzureBlobStorageWriteSettings\n | AzureBlobFSWriteSettings\n | AzureDataLakeStoreWriteSettings\n | FileServerWriteSettings\n | AzureFileStorageWriteSettings;\nexport type FormatReadSettingsUnion =\n | FormatReadSettings\n | 
DelimitedTextReadSettings\n | JsonReadSettings\n | XmlReadSettings\n | BinaryReadSettings;\nexport type CompressionReadSettingsUnion =\n | CompressionReadSettings\n | ZipDeflateReadSettings\n | TarReadSettings\n | TarGZipReadSettings;\nexport type FormatWriteSettingsUnion =\n | FormatWriteSettings\n | AvroWriteSettings\n | OrcWriteSettings\n | ParquetWriteSettings\n | DelimitedTextWriteSettings\n | JsonWriteSettings;\nexport type CopySourceUnion =\n | CopySource\n | AvroSource\n | ExcelSource\n | ParquetSource\n | DelimitedTextSource\n | JsonSource\n | XmlSource\n | OrcSource\n | BinarySource\n | TabularSourceUnion\n | BlobSource\n | DocumentDbCollectionSource\n | CosmosDbSqlApiSource\n | DynamicsSource\n | DynamicsCrmSource\n | CommonDataServiceForAppsSource\n | RelationalSource\n | MicrosoftAccessSource\n | ODataSource\n | SalesforceServiceCloudSource\n | RestSource\n | FileSystemSource\n | HdfsSource\n | AzureDataExplorerSource\n | OracleSource\n | AmazonRdsForOracleSource\n | WebSource\n | MongoDbSource\n | MongoDbAtlasSource\n | MongoDbV2Source\n | CosmosDbMongoDbApiSource\n | Office365Source\n | AzureDataLakeStoreSource\n | AzureBlobFSSource\n | HttpSource\n | SnowflakeSource\n | AzureDatabricksDeltaLakeSource\n | SharePointOnlineListSource;\nexport type CopySinkUnion =\n | CopySink\n | DelimitedTextSink\n | JsonSink\n | OrcSink\n | RestSink\n | AzurePostgreSqlSink\n | AzureMySqlSink\n | AzureDatabricksDeltaLakeSink\n | SapCloudForCustomerSink\n | AzureQueueSink\n | AzureTableSink\n | AvroSink\n | ParquetSink\n | BinarySink\n | BlobSink\n | FileSystemSink\n | DocumentDbCollectionSink\n | CosmosDbSqlApiSink\n | SqlSink\n | SqlServerSink\n | AzureSqlSink\n | SqlMISink\n | SqlDWSink\n | SnowflakeSink\n | OracleSink\n | AzureDataLakeStoreSink\n | AzureBlobFSSink\n | AzureSearchIndexSink\n | OdbcSink\n | InformixSink\n | MicrosoftAccessSink\n | DynamicsSink\n | DynamicsCrmSink\n | CommonDataServiceForAppsSink\n | AzureDataExplorerSink\n | SalesforceSink\n | 
SalesforceServiceCloudSink\n | CosmosDbMongoDbApiSink;\nexport type ExportSettingsUnion =\n | ExportSettings\n | SnowflakeExportCopyCommand\n | AzureDatabricksDeltaLakeExportCommand;\nexport type ImportSettingsUnion =\n | ImportSettings\n | AzureDatabricksDeltaLakeImportCommand\n | SnowflakeImportCopyCommand;\nexport type CopyTranslatorUnion = CopyTranslator | TabularTranslator;\nexport type DependencyReferenceUnion =\n | DependencyReference\n | TriggerDependencyReferenceUnion\n | SelfDependencyTumblingWindowTriggerReference;\nexport type LinkedIntegrationRuntimeTypeUnion =\n | LinkedIntegrationRuntimeType\n | LinkedIntegrationRuntimeKeyAuthorization\n | LinkedIntegrationRuntimeRbacAuthorization;\nexport type ControlActivityUnion =\n | ControlActivity\n | ExecutePipelineActivity\n | IfConditionActivity\n | SwitchActivity\n | ForEachActivity\n | WaitActivity\n | FailActivity\n | UntilActivity\n | ValidationActivity\n | FilterActivity\n | SetVariableActivity\n | AppendVariableActivity\n | WebHookActivity;\nexport type ExecutionActivityUnion =\n | ExecutionActivity\n | CopyActivity\n | HDInsightHiveActivity\n | HDInsightPigActivity\n | HDInsightMapReduceActivity\n | HDInsightStreamingActivity\n | HDInsightSparkActivity\n | ExecuteSsisPackageActivity\n | CustomActivity\n | SqlServerStoredProcedureActivity\n | DeleteActivity\n | AzureDataExplorerCommandActivity\n | LookupActivity\n | WebActivity\n | GetMetadataActivity\n | AzureMLBatchExecutionActivity\n | AzureMLUpdateResourceActivity\n | AzureMLExecutePipelineActivity\n | DataLakeAnalyticsUsqlActivity\n | DatabricksNotebookActivity\n | DatabricksSparkJarActivity\n | DatabricksSparkPythonActivity\n | AzureFunctionActivity\n | ExecuteDataFlowActivity\n | ScriptActivity\n | SynapseNotebookActivity\n | SynapseSparkJobDefinitionActivity;\nexport type MultiplePipelineTriggerUnion =\n | MultiplePipelineTrigger\n | ScheduleTrigger\n | BlobTrigger\n | BlobEventsTrigger\n | CustomEventsTrigger;\nexport type TabularSourceUnion 
=\n | TabularSource\n | AzureTableSource\n | InformixSource\n | Db2Source\n | OdbcSource\n | MySqlSource\n | PostgreSqlSource\n | SybaseSource\n | SapBwSource\n | SalesforceSource\n | SapCloudForCustomerSource\n | SapEccSource\n | SapHanaSource\n | SapOpenHubSource\n | SapOdpSource\n | SapTableSource\n | SqlSource\n | SqlServerSource\n | AmazonRdsForSqlServerSource\n | AzureSqlSource\n | SqlMISource\n | SqlDWSource\n | AzureMySqlSource\n | TeradataSource\n | CassandraSource\n | AmazonMWSSource\n | AzurePostgreSqlSource\n | ConcurSource\n | CouchbaseSource\n | DrillSource\n | EloquaSource\n | GoogleBigQuerySource\n | GreenplumSource\n | HBaseSource\n | HiveSource\n | HubspotSource\n | ImpalaSource\n | JiraSource\n | MagentoSource\n | MariaDBSource\n | AzureMariaDBSource\n | MarketoSource\n | PaypalSource\n | PhoenixSource\n | PrestoSource\n | QuickBooksSource\n | ServiceNowSource\n | ShopifySource\n | SparkSource\n | SquareSource\n | XeroSource\n | ZohoSource\n | NetezzaSource\n | VerticaSource\n | SalesforceMarketingCloudSource\n | ResponsysSource\n | DynamicsAXSource\n | OracleServiceCloudSource\n | GoogleAdWordsSource\n | AmazonRedshiftSource;\nexport type TriggerDependencyReferenceUnion =\n | TriggerDependencyReference\n | TumblingWindowTriggerDependencyReference;\n\nexport interface LinkConnectionListResponse {\n /** List link connection value */\n value: LinkConnectionResource[];\n /** List link connections next link */\n nextLink?: string;\n}\n\nexport interface LinkConnectionResource {\n /** Link connection id */\n id?: string;\n /** Link connection name */\n name?: string;\n /** Link connection type */\n type?: string;\n /** Properties of link connection */\n properties: LinkConnection;\n /** Link connection description */\n description?: string;\n}\n\nexport interface LinkConnection {\n /** Properties of link connection's source database */\n sourceDatabase?: LinkConnectionSourceDatabase;\n /** Properties of link connection's target database */\n 
targetDatabase?: LinkConnectionTargetDatabase;\n /** Properties of link connection's landing zone */\n landingZone?: LinkConnectionLandingZone;\n /** Properties of link connection's compute */\n compute?: LinkConnectionCompute;\n}\n\nexport interface LinkConnectionSourceDatabase {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Source database type properties */\n typeProperties?: LinkConnectionSourceDatabaseTypeProperties;\n}\n\n/** Linked service reference type. */\nexport interface LinkedServiceReference {\n /** Linked service reference type. */\n type: Type;\n /** Reference LinkedService name. */\n referenceName: string;\n /** Arguments for LinkedService. */\n parameters?: { [propertyName: string]: any };\n}\n\nexport interface LinkConnectionSourceDatabaseTypeProperties {\n /** Link connection source database server's resource id */\n resourceId?: string;\n /** Link connection source database server's principal id */\n principalId?: string;\n}\n\nexport interface LinkConnectionTargetDatabase {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Target database type properties */\n typeProperties?: LinkConnectionTargetDatabaseTypeProperties;\n}\n\nexport interface LinkConnectionTargetDatabaseTypeProperties {\n /** Enable cross table transaction consistency on target database */\n crossTableTransaction?: boolean;\n /** Drop and recreate same existing target table on link connection target database */\n dropExistingTargetTableOnStart?: boolean;\n}\n\nexport interface LinkConnectionLandingZone {\n /** Linked service reference */\n linkedService?: LinkedServiceReference;\n /** Landing zone's file system name */\n fileSystem?: string;\n /** Landing zone's folder path name */\n folderPath?: string;\n /** Landing zone's sas token */\n sasToken?: SecureString;\n}\n\n/** The base definition of a secret type. 
*/\nexport interface SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SecureString\" | \"AzureKeyVaultSecret\";\n}\n\nexport interface LinkConnectionCompute {\n /** Compute core count used by the link connection */\n coreCount?: number;\n /** Link connection's compute type */\n computeType?: string;\n /** Link connection's data process interval in minutes */\n dataProcessIntervalMinutes?: number;\n}\n\n/** The object that defines the structure of an Azure Synapse error response. */\nexport interface CloudError {\n /** Error code. */\n code: string;\n /** Error message. */\n message: string;\n /** Property name/path in request associated with error. */\n target?: string;\n /** Array with additional error details. */\n details?: CloudError[];\n}\n\nexport interface EditTablesRequest {\n /** Edit link tables request */\n linkTables?: LinkTableRequest[];\n}\n\nexport interface LinkTableRequest {\n /** Link table id */\n id?: string;\n /** Source table properties for link table request */\n source?: LinkTableRequestSource;\n /** Target table properties for link table request */\n target?: LinkTableRequestTarget;\n /** Link table operation type */\n operationType?: string;\n}\n\nexport interface LinkTableRequestSource {\n /** Source table table name */\n tableName?: string;\n /** Source table schema name */\n schemaName?: string;\n}\n\nexport interface LinkTableRequestTarget {\n /** Target table table name */\n tableName?: string;\n /** Target table schema name */\n schemaName?: string;\n /** Target table distribution options for link table request */\n distributionOptions?: LinkTableRequestTargetDistributionOptions;\n /** Target table structure options for link table request */\n structureOptions?: LinkTableRequestTargetStructureOptions;\n}\n\nexport interface LinkTableRequestTargetDistributionOptions {\n /** Target table distribution type */\n type?: string;\n /** Target table distribution column */\n 
distributionColumn?: string;\n}\n\nexport interface LinkTableRequestTargetStructureOptions {\n /** Target table structure type */\n type?: string;\n}\n\nexport interface LinkConnectionDetailedStatus {\n /** Link connection id */\n id?: string;\n /** Link connection name */\n name?: string;\n /** Is link connection applying changes */\n isApplyingChanges?: boolean;\n /** Is link connection partially failed */\n isPartiallyFailed?: boolean;\n /** Link connection start time */\n startTime?: any;\n /** Link connection stop time */\n stopTime?: any;\n /** Link connection status, please refer to this [articles](https://learn.microsoft.com/azure/synapse-analytics/synapse-link/sql-database-synapse-link#monitoring) for details. */\n status?: string;\n /** Link connection's corresponding continuous run id */\n continuousRunId?: string;\n /** Link connection error */\n error?: any;\n /** Link connection refresh status */\n refreshStatus?: LinkConnectionRefreshStatus;\n /** Link connection landing zone credential expire time */\n landingZoneCredentialExpireTime?: Date;\n}\n\nexport interface LinkConnectionRefreshStatus {\n /**\n * Link connection refresh status\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly refreshStatus?: string;\n /**\n * Link connection refresh error message\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly errorMessage?: string;\n}\n\nexport interface LinkTableListResponse {\n /** List link table value */\n value?: LinkTableResource[];\n}\n\nexport interface LinkTableResource {\n /** Link table id */\n id?: string;\n /** Link table name */\n name?: string;\n /** Source table properties for link table request */\n source?: LinkTableRequestSource;\n /** Target table properties for link table request */\n target?: LinkTableRequestTarget;\n}\n\nexport interface QueryTableStatusRequest {\n /** Max segment count to query table status */\n maxSegmentCount?: number;\n /** Continuation token to query table status */\n continuationToken?: any;\n}\n\nexport interface LinkConnectionQueryTableStatus {\n /** Link tables' status */\n value?: LinkTableStatus[];\n /** Continuation token to query table status */\n continuationToken?: any;\n}\n\nexport interface LinkTableStatus {\n /** ID provided by the client */\n id?: string;\n /** Link table status, please refer to this [articles](https://learn.microsoft.com/azure/synapse-analytics/synapse-link/sql-database-synapse-link#monitoring) for details. */\n status?: string;\n /** Link table error message */\n errorMessage?: string;\n /** Link table start time */\n startTime?: any;\n /** Link table stop time */\n stopTime?: any;\n /** Link table ID */\n linkTableId?: string;\n /** Link table error code */\n errorCode?: string;\n /** Link table last processed data time */\n lastProcessedData?: Date;\n /** Link table last transaction commit time */\n lastTransactionCommitTime?: Date;\n}\n\nexport interface UpdateLandingZoneCredential {\n /** Landing zone's sas token */\n sasToken?: SecureString;\n}\n\nexport interface KqlScriptsResourceCollectionResponse {\n value?: KqlScriptResource[];\n nextLink?: string;\n}\n\nexport interface KqlScriptResource {\n id?: string;\n name?: string;\n type?: string;\n /** Properties of sql script. 
*/\n properties?: KqlScript;\n}\n\nexport interface KqlScript {\n content?: KqlScriptContent;\n}\n\nexport interface KqlScriptContent {\n query?: string;\n metadata?: KqlScriptContentMetadata;\n currentConnection?: KqlScriptContentCurrentConnection;\n}\n\nexport interface KqlScriptContentMetadata {\n language?: string;\n}\n\nexport interface KqlScriptContentCurrentConnection {\n name?: string;\n poolName?: string;\n databaseName?: string;\n type?: string;\n}\n\n/** Contains details when the response code indicates an error. */\nexport interface ErrorContract {\n /** The error details. */\n error?: ErrorResponse;\n}\n\n/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.) */\nexport interface ErrorResponse {\n /**\n * The error code.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly code?: string;\n /**\n * The error message.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly message?: string;\n /**\n * The error target.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly target?: string;\n /**\n * The error details.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly details?: ErrorResponse[];\n /**\n * The error additional info.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly additionalInfo?: ErrorAdditionalInfo[];\n}\n\n/** The resource management error additional info. */\nexport interface ErrorAdditionalInfo {\n /**\n * The additional info type.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * The additional info.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly info?: any;\n}\n\n/** Request body structure for rename artifact. */\nexport interface ArtifactRenameRequest {\n /** New name of the artifact. */\n newName?: string;\n}\n\nexport interface MetastoreRegisterObject {\n /** The input folder containing CDM files. */\n inputFolder: string;\n}\n\nexport interface MetastoreRegistrationResponse {\n /** Enumerates possible request statuses. */\n status?: RequestStatus;\n}\n\nexport interface MetastoreRequestSuccessResponse {\n /** Enumerates possible Status of the resource. */\n status?: ResourceStatus;\n}\n\nexport interface MetastoreUpdateObject {\n /** The input folder containing CDM files. */\n inputFolder: string;\n}\n\nexport interface MetastoreUpdationResponse {\n /** Enumerates possible request statuses. */\n status?: RequestStatus;\n}\n\n/** A list of sparkconfiguration resources. */\nexport interface SparkConfigurationListResponse {\n /** List of sparkconfigurations. */\n value: SparkConfigurationResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** SparkConfiguration Artifact information */\nexport interface SparkConfiguration {\n /** Description about the SparkConfiguration. */\n description?: string;\n /** SparkConfiguration configs. */\n configs: { [propertyName: string]: string };\n /** Annotations for SparkConfiguration. */\n annotations?: string[];\n /** additional Notes. */\n notes?: string;\n /** The identity that created the resource. */\n createdBy?: string;\n /** The timestamp of resource creation. */\n created?: Date;\n /** SparkConfiguration configMergeRule. */\n configMergeRule?: { [propertyName: string]: string };\n}\n\n/** Common fields that are returned in the response for all Azure Resource Manager resources */\nexport interface Resource {\n /**\n * Fully qualified resource ID for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /**\n * The name of the resource\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\"\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n}\n\n/** Collection of Big Data pool information */\nexport interface BigDataPoolResourceInfoListResult {\n /** Link to the next page of results */\n nextLink?: string;\n /** List of Big Data pools */\n value?: BigDataPoolResourceInfo[];\n}\n\n/** Auto-scaling properties of a Big Data pool powered by Apache Spark */\nexport interface AutoScaleProperties {\n /** The minimum number of nodes the Big Data pool can support. */\n minNodeCount?: number;\n /** Whether automatic scaling is enabled for the Big Data pool. */\n enabled?: boolean;\n /** The maximum number of nodes the Big Data pool can support. */\n maxNodeCount?: number;\n}\n\n/** Auto-pausing properties of a Big Data pool powered by Apache Spark */\nexport interface AutoPauseProperties {\n /** Number of minutes of idle time before the Big Data pool is automatically paused. */\n delayInMinutes?: number;\n /** Whether auto-pausing is enabled for the Big Data pool. */\n enabled?: boolean;\n}\n\n/** Dynamic Executor Allocation Properties */\nexport interface DynamicExecutorAllocation {\n /** Indicates whether Dynamic Executor Allocation is enabled or not. 
*/\n enabled?: boolean;\n}\n\n/** Library requirements for a Big Data pool powered by Apache Spark */\nexport interface LibraryRequirements {\n /**\n * The last update time of the library requirements file.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly time?: Date;\n /** The library requirements. */\n content?: string;\n /** The filename of the library requirements file. */\n filename?: string;\n}\n\n/** Library/package information of a Big Data pool powered by Apache Spark */\nexport interface LibraryInfo {\n /** Name of the library. */\n name?: string;\n /** Storage blob path of library. */\n path?: string;\n /** Storage blob container name. */\n containerName?: string;\n /** The last update time of the library. */\n uploadedTimestamp?: Date;\n /** Type of the library. */\n type?: string;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningStatus?: string;\n /**\n * Creator Id of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly creatorId?: string;\n}\n\n/** Azure Synapse nested object which contains a flow with data movements and transformations. */\nexport interface DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MappingDataFlow\" | \"Flowlet\";\n /** The description of the data flow. */\n description?: string;\n /** List of tags that can be used for describing the data flow. */\n annotations?: any[];\n /** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */\n folder?: DataFlowFolder;\n}\n\n/** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */\nexport interface DataFlowFolder {\n /** The name of the folder that this data flow is in. 
*/\n name?: string;\n}\n\n/** A list of data flow resources. */\nexport interface DataFlowListResponse {\n /** List of data flows. */\n value: DataFlowResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Request body structure for creating data flow debug session. */\nexport interface CreateDataFlowDebugSessionRequest {\n /** Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */\n computeType?: string;\n /** Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */\n coreCount?: number;\n /** Time to live setting of the cluster in minutes. */\n timeToLive?: number;\n /** Set to use integration runtime setting for data flow debug session. */\n integrationRuntime?: IntegrationRuntimeDebugResource;\n}\n\n/** Azure Synapse nested object which serves as a compute resource for activities. */\nexport interface IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Managed\" | \"SelfHosted\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Integration runtime description. */\n description?: string;\n}\n\n/** Azure Synapse nested debug resource. */\nexport interface SubResourceDebugResource {\n /** The resource name. */\n name?: string;\n}\n\n/** Response body structure for creating data flow debug session. */\nexport interface CreateDataFlowDebugSessionResponse {\n /** The ID of data flow debug session. */\n sessionId?: string;\n}\n\n/** A list of active debug sessions. */\nexport interface QueryDataFlowDebugSessionsResponse {\n /** Array with all active debug sessions. */\n value?: DataFlowDebugSessionInfo[];\n /** The link to the next page of results, if any remaining results exist. 
*/\n nextLink?: string;\n}\n\n/** Data flow debug session info. */\nexport interface DataFlowDebugSessionInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The name of the data flow. */\n dataFlowName?: string;\n /** Compute type of the cluster. */\n computeType?: string;\n /** Core count of the cluster. */\n coreCount?: number;\n /** Node count of the cluster. (deprecated property) */\n nodeCount?: number;\n /** Attached integration runtime name of data flow debug session. */\n integrationRuntimeName?: string;\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** Start time of data flow debug session. */\n startTime?: string;\n /** Compute type of the cluster. */\n timeToLiveInMinutes?: number;\n /** Last activity time of data flow debug session. */\n lastActivityTime?: string;\n}\n\n/** Request body structure for starting data flow debug session. */\nexport interface DataFlowDebugPackage {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** Data flow instance. */\n dataFlow?: DataFlowDebugResource;\n /** List of Data flows */\n dataFlows?: DataFlowDebugResource[];\n /** List of datasets. */\n datasets?: DatasetDebugResource[];\n /** List of linked services. */\n linkedServices?: LinkedServiceDebugResource[];\n /** Staging info for debug session. */\n staging?: DataFlowStagingInfo;\n /** Data flow debug settings. */\n debugSettings?: DataFlowDebugPackageDebugSettings;\n}\n\n/** The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. 
*/\nexport interface Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AmazonS3Object\"\n | \"Avro\"\n | \"Excel\"\n | \"Parquet\"\n | \"DelimitedText\"\n | \"Json\"\n | \"Xml\"\n | \"Orc\"\n | \"Binary\"\n | \"AzureBlob\"\n | \"AzureTable\"\n | \"AzureSqlTable\"\n | \"AzureSqlMITable\"\n | \"AzureSqlDWTable\"\n | \"CassandraTable\"\n | \"CustomDataset\"\n | \"CosmosDbSqlApiCollection\"\n | \"DocumentDbCollection\"\n | \"DynamicsEntity\"\n | \"DynamicsCrmEntity\"\n | \"CommonDataServiceForAppsEntity\"\n | \"AzureDataLakeStoreFile\"\n | \"AzureBlobFSFile\"\n | \"Office365Table\"\n | \"FileShare\"\n | \"MongoDbCollection\"\n | \"MongoDbAtlasCollection\"\n | \"MongoDbV2Collection\"\n | \"CosmosDbMongoDbApiCollection\"\n | \"ODataResource\"\n | \"OracleTable\"\n | \"AmazonRdsForOracleTable\"\n | \"TeradataTable\"\n | \"AzureMySqlTable\"\n | \"AmazonRedshiftTable\"\n | \"Db2Table\"\n | \"RelationalTable\"\n | \"InformixTable\"\n | \"OdbcTable\"\n | \"MySqlTable\"\n | \"PostgreSqlTable\"\n | \"MicrosoftAccessTable\"\n | \"SalesforceObject\"\n | \"SalesforceServiceCloudObject\"\n | \"SybaseTable\"\n | \"SapBwCube\"\n | \"SapCloudForCustomerResource\"\n | \"SapEccResource\"\n | \"SapHanaTable\"\n | \"SapOpenHubTable\"\n | \"SqlServerTable\"\n | \"AmazonRdsForSqlServerTable\"\n | \"RestResource\"\n | \"SapTableResource\"\n | \"SapOdpResource\"\n | \"WebTable\"\n | \"AzureSearchIndex\"\n | \"HttpFile\"\n | \"AmazonMWSObject\"\n | \"AzurePostgreSqlTable\"\n | \"ConcurObject\"\n | \"CouchbaseTable\"\n | \"DrillTable\"\n | \"EloquaObject\"\n | \"GoogleBigQueryObject\"\n | \"GreenplumTable\"\n | \"HBaseObject\"\n | \"HiveObject\"\n | \"HubspotObject\"\n | \"ImpalaObject\"\n | \"JiraObject\"\n | \"MagentoObject\"\n | \"MariaDBTable\"\n | \"AzureMariaDBTable\"\n | \"MarketoObject\"\n | \"PaypalObject\"\n | \"PhoenixObject\"\n | \"PrestoObject\"\n | \"QuickBooksObject\"\n | \"ServiceNowObject\"\n | \"ShopifyObject\"\n 
| \"SparkObject\"\n | \"SquareObject\"\n | \"XeroObject\"\n | \"ZohoObject\"\n | \"NetezzaTable\"\n | \"VerticaTable\"\n | \"SalesforceMarketingCloudObject\"\n | \"ResponsysObject\"\n | \"DynamicsAXResource\"\n | \"OracleServiceCloudObject\"\n | \"AzureDataExplorerTable\"\n | \"GoogleAdWordsObject\"\n | \"SnowflakeTable\"\n | \"SharePointOnlineListResource\"\n | \"AzureDatabricksDeltaLakeDataset\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Dataset description. */\n description?: string;\n /** Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. */\n structure?: any;\n /** Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. */\n schema?: any;\n /** Linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** Parameters for dataset. */\n parameters?: { [propertyName: string]: ParameterSpecification };\n /** List of tags that can be used for describing the Dataset. */\n annotations?: any[];\n /** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */\n folder?: DatasetFolder;\n}\n\n/** Definition of a single parameter for an entity. */\nexport interface ParameterSpecification {\n /** Parameter type. */\n type: ParameterType;\n /** Default value of parameter. */\n defaultValue?: any;\n}\n\n/** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */\nexport interface DatasetFolder {\n /** The name of the folder that this Dataset is in. */\n name?: string;\n}\n\n/** The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. 
*/\nexport interface LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AzureStorage\"\n | \"AzureBlobStorage\"\n | \"AzureTableStorage\"\n | \"AzureSqlDW\"\n | \"SqlServer\"\n | \"AmazonRdsForSqlServer\"\n | \"AzureSqlDatabase\"\n | \"AzureSqlMI\"\n | \"AzureBatch\"\n | \"AzureKeyVault\"\n | \"CosmosDb\"\n | \"Dynamics\"\n | \"DynamicsCrm\"\n | \"CommonDataServiceForApps\"\n | \"HDInsight\"\n | \"FileServer\"\n | \"AzureFileStorage\"\n | \"GoogleCloudStorage\"\n | \"Oracle\"\n | \"AmazonRdsForOracle\"\n | \"AzureMySql\"\n | \"MySql\"\n | \"PostgreSql\"\n | \"Sybase\"\n | \"Db2\"\n | \"Teradata\"\n | \"AzureML\"\n | \"AzureMLService\"\n | \"Odbc\"\n | \"Informix\"\n | \"MicrosoftAccess\"\n | \"Hdfs\"\n | \"OData\"\n | \"Web\"\n | \"Cassandra\"\n | \"MongoDb\"\n | \"MongoDbAtlas\"\n | \"MongoDbV2\"\n | \"CosmosDbMongoDbApi\"\n | \"AzureDataLakeStore\"\n | \"AzureBlobFS\"\n | \"Office365\"\n | \"Salesforce\"\n | \"SalesforceServiceCloud\"\n | \"SapCloudForCustomer\"\n | \"SapEcc\"\n | \"SapOpenHub\"\n | \"SapOdp\"\n | \"RestService\"\n | \"TeamDesk\"\n | \"Quickbase\"\n | \"Smartsheet\"\n | \"Zendesk\"\n | \"Dataworld\"\n | \"AppFigures\"\n | \"Asana\"\n | \"Twilio\"\n | \"GoogleSheets\"\n | \"AmazonS3\"\n | \"AmazonRedshift\"\n | \"CustomDataSource\"\n | \"AzureSearch\"\n | \"HttpServer\"\n | \"FtpServer\"\n | \"Sftp\"\n | \"SapBW\"\n | \"SapHana\"\n | \"AmazonMWS\"\n | \"AzurePostgreSql\"\n | \"Concur\"\n | \"Couchbase\"\n | \"Drill\"\n | \"Eloqua\"\n | \"GoogleBigQuery\"\n | \"Greenplum\"\n | \"HBase\"\n | \"Hive\"\n | \"Hubspot\"\n | \"Impala\"\n | \"Jira\"\n | \"Magento\"\n | \"MariaDB\"\n | \"AzureMariaDB\"\n | \"Marketo\"\n | \"Paypal\"\n | \"Phoenix\"\n | \"Presto\"\n | \"QuickBooks\"\n | \"ServiceNow\"\n | \"Shopify\"\n | \"Spark\"\n | \"Square\"\n | \"Xero\"\n | \"Zoho\"\n | \"Vertica\"\n | \"Netezza\"\n | \"SalesforceMarketingCloud\"\n | \"HDInsightOnDemand\"\n | 
\"AzureDataLakeAnalytics\"\n | \"AzureDatabricks\"\n | \"AzureDatabricksDeltaLake\"\n | \"Responsys\"\n | \"DynamicsAX\"\n | \"OracleServiceCloud\"\n | \"GoogleAdWords\"\n | \"SapTable\"\n | \"AzureDataExplorer\"\n | \"AzureFunction\"\n | \"Snowflake\"\n | \"SharePointOnlineList\"\n | \"AzureSynapseArtifacts\"\n | \"PowerBIWorkspace\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The integration runtime reference. */\n connectVia?: IntegrationRuntimeReference;\n /** Linked service description. */\n description?: string;\n /** Parameters for linked service. */\n parameters?: { [propertyName: string]: ParameterSpecification };\n /** List of tags that can be used for describing the linked service. */\n annotations?: any[];\n}\n\n/** Integration runtime reference type. */\nexport interface IntegrationRuntimeReference {\n /** Type of integration runtime. */\n type: IntegrationRuntimeReferenceType;\n /** Reference integration runtime name. */\n referenceName: string;\n /** Arguments for integration runtime. */\n parameters?: { [propertyName: string]: any };\n}\n\n/** Staging info for execute data flow activity. */\nexport interface DataFlowStagingInfo {\n /** Staging linked service reference. */\n linkedService?: LinkedServiceReference;\n /** Folder path for staging blob. */\n folderPath?: any;\n}\n\n/** Data flow debug settings. */\nexport interface DataFlowDebugPackageDebugSettings {\n /** Source setting for data flow debug. */\n sourceSettings?: DataFlowSourceSetting[];\n /** Data flow parameters. */\n parameters?: { [propertyName: string]: any };\n /** Parameters for dataset. */\n datasetParameters?: any;\n}\n\n/** Definition of data flow source setting for debug. */\nexport interface DataFlowSourceSetting {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The data flow source name. 
*/\n sourceName?: string;\n /** Defines the row limit of data flow source in debug. */\n rowLimit?: number;\n}\n\n/** Response body structure for starting data flow debug session. */\nexport interface AddDataFlowToDebugSessionResponse {\n /** The ID of data flow debug job version. */\n jobVersion?: string;\n}\n\n/** Request body structure for deleting data flow debug session. */\nexport interface DeleteDataFlowDebugSessionRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The data flow which contains the debug session. */\n dataFlowName?: string;\n}\n\n/** Request body structure for data flow expression preview. */\nexport interface DataFlowDebugCommandRequest {\n /** The ID of data flow debug session. */\n sessionId?: string;\n /** The command type. */\n command?: DataFlowDebugCommandType;\n /** The command payload object. */\n commandPayload?: DataFlowDebugCommandPayload;\n}\n\n/** Structure of command payload. */\nexport interface DataFlowDebugCommandPayload {\n /** The stream name which is used for preview. */\n streamName: string;\n /** Row limits for preview response. */\n rowLimits?: number;\n /** Array of column names. */\n columns?: string[];\n /** The expression which is used for preview. */\n expression?: string;\n}\n\n/** Response body structure of data flow result for data preview, statistics or expression preview. */\nexport interface DataFlowDebugCommandResponse {\n /** The run status of data preview, statistics or expression preview. */\n status?: string;\n /** The result data of data preview, statistics or expression preview. */\n data?: string;\n}\n\n/** A list of dataset resources. */\nexport interface DatasetListResponse {\n /** List of datasets. */\n value: DatasetResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\nexport interface GitHubAccessTokenRequest {\n /** The GitHub Client Id. */\n gitHubClientId: string;\n /** The GitHub Access code. 
*/\n gitHubAccessCode: string;\n /** The GitHub access token base URL. */\n gitHubAccessTokenBaseUrl: string;\n}\n\nexport interface GitHubAccessTokenResponse {\n gitHubAccessToken?: string;\n}\n\n/** A list of integration runtime resources. */\nexport interface IntegrationRuntimeListResponse {\n /** List of integration runtimes. */\n value: IntegrationRuntimeResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** A list of Library resources. */\nexport interface LibraryListResponse {\n /** List of Library. */\n value: LibraryResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Library/package properties */\nexport interface LibraryResourceProperties {\n /**\n * Name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * Location of library/package in storage account.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly path?: string;\n /**\n * Container name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly containerName?: string;\n /**\n * The last update time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly uploadedTimestamp?: string;\n /**\n * Type of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningStatus?: string;\n /**\n * Creator Id of the library/package.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly creatorId?: string;\n}\n\n/** Library resource info */\nexport interface LibraryResourceInfo {\n /**\n * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /**\n * record Id of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly recordId?: number;\n /**\n * Provisioning status of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly state?: string;\n /**\n * The creation time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly created?: string;\n /**\n * The last updated time of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly changed?: string;\n /**\n * The type of the resource. E.g. LibraryArtifact\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Name of the library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly name?: string;\n /**\n * Operation Id of the operation performed on library/package.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly operationId?: string;\n /**\n * artifact Id of the library/package.\n * NOTE: This property will not be serialized. 
It can only be populated by the server.\n */\n readonly artifactId?: string;\n}\n\n/** Operation status for the operation */\nexport interface OperationResult {\n /**\n * Operation status\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly status?: string;\n /** Error code. */\n code?: string;\n /** Error message. */\n message?: string;\n /** Property name/path in request associated with error. */\n target?: string;\n /** Array with additional error details. */\n details?: CloudError[];\n}\n\n/** A list of linked service resources. */\nexport interface LinkedServiceListResponse {\n /** List of linked services. */\n value: LinkedServiceResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** A list of Notebook resources. */\nexport interface NotebookListResponse {\n /** List of Notebooks. */\n value: NotebookResource[];\n /** The link to the next page of results, if any remaining results exist. */\n nextLink?: string;\n}\n\n/** Notebook resource type. */\nexport interface NotebookResource {\n /**\n * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly id?: string;\n /** The name of the resource */\n name: string;\n /**\n * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly type?: string;\n /**\n * Resource Etag.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly etag?: string;\n /** Properties of Notebook. */\n properties: Notebook;\n}\n\n/** Notebook. 
*/\nexport interface Notebook {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The description of the notebook. */\n description?: string;\n /** Big data pool reference. */\n bigDataPool?: BigDataPoolReference;\n /** The spark configuration of the spark job. */\n targetSparkConfiguration?: SparkConfigurationReference;\n /** Session properties. */\n sessionProperties?: NotebookSessionProperties;\n /** Notebook root-level metadata. */\n metadata: NotebookMetadata;\n /** Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. */\n nbformat: number;\n /** Notebook format (minor number). Incremented for backward compatible changes to the notebook format. */\n nbformatMinor: number;\n /** Array of cells of the current notebook. */\n cells: NotebookCell[];\n /** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */\n folder?: NotebookFolder;\n}\n\n/** Big data pool reference. */\nexport interface BigDataPoolReference {\n /** Big data pool reference type. */\n type: BigDataPoolReferenceType;\n /** Reference big data pool name. */\n referenceName: string;\n}\n\n/** Spark configuration reference. */\nexport interface SparkConfigurationReference {\n /** Spark configuration reference type. */\n type: SparkConfigurationReferenceType;\n /** Reference spark configuration name. */\n referenceName: string;\n}\n\n/** Session properties. */\nexport interface NotebookSessionProperties {\n /** Amount of memory to use for the driver process. */\n driverMemory: string;\n /** Number of cores to use for the driver. */\n driverCores: number;\n /** Amount of memory to use per executor process. */\n executorMemory: string;\n /** Number of cores to use for each executor. */\n executorCores: number;\n /** Number of executors to launch for this session. 
*/\n numExecutors: number;\n}\n\n/** Notebook root-level metadata. */\nexport interface NotebookMetadata {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Kernel information. */\n kernelspec?: NotebookKernelSpec;\n /** Language info. */\n languageInfo?: NotebookLanguageInfo;\n}\n\n/** Kernel information. */\nexport interface NotebookKernelSpec {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Name of the kernel specification. */\n name: string;\n /** Name to display in UI. */\n displayName: string;\n}\n\n/** Language info. */\nexport interface NotebookLanguageInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The programming language which this kernel runs. */\n name: string;\n /** The codemirror mode to use for code in this language. */\n codemirrorMode?: string;\n}\n\n/** Notebook cell. */\nexport interface NotebookCell {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** String identifying the type of cell. */\n cellType: string;\n /** Cell-level metadata. */\n metadata: any;\n /** Contents of the cell, represented as an array of lines. */\n source: string[];\n /** Attachments associated with the cell. */\n attachments?: any;\n /** Cell-level output items. */\n outputs?: NotebookCellOutputItem[];\n}\n\n/** An item of the notebook cell execution output. */\nexport interface NotebookCellOutputItem {\n /** For output_type=stream, determines the name of stream (stdout / stderr). */\n name?: string;\n /** Execution sequence number. */\n executionCount?: number;\n /** Execution, display, or stream outputs. */\n outputType: CellOutputType;\n /** For output_type=stream, the stream's text output, represented as a string or an array of strings. 
*/
  text?: any;
  /** Output data. Use MIME type as key, and content as value. */
  data?: any;
  /** Metadata for the output item. */
  metadata?: any;
}

/** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */
export interface NotebookFolder {
  /** The name of the folder that this notebook is in. */
  name?: string;
}

/** A list of pipeline resources. */
export interface PipelineListResponse {
  /** List of pipelines. */
  value: PipelineResource[];
  /** The link to the next page of results, if any remaining results exist. */
  nextLink?: string;
}

/** A pipeline activity. */
export interface Activity {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type:
    | "Container"
    | "Execution"
    | "Copy"
    | "HDInsightHive"
    | "HDInsightPig"
    | "HDInsightMapReduce"
    | "HDInsightStreaming"
    | "HDInsightSpark"
    | "ExecuteSSISPackage"
    | "Custom"
    | "SqlServerStoredProcedure"
    | "ExecutePipeline"
    | "Delete"
    | "AzureDataExplorerCommand"
    | "Lookup"
    | "WebActivity"
    | "GetMetadata"
    | "IfCondition"
    | "Switch"
    | "ForEach"
    | "AzureMLBatchExecution"
    | "AzureMLUpdateResource"
    | "AzureMLExecutePipeline"
    | "DataLakeAnalyticsU-SQL"
    | "Wait"
    | "Fail"
    | "Until"
    | "Validation"
    | "Filter"
    | "DatabricksNotebook"
    | "DatabricksSparkJar"
    | "DatabricksSparkPython"
    | "SetVariable"
    | "AppendVariable"
    | "AzureFunctionActivity"
    | "WebHook"
    | "ExecuteDataFlow"
    | "Script"
    | "SynapseNotebook"
    | "SparkJob"
    | "SqlPoolStoredProcedure";
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Activity name. */
  name: string;
  /** Activity description. */
  description?: string;
  /** Activity depends on condition. */
  dependsOn?: ActivityDependency[];
  /** Activity user properties. */
  userProperties?: UserProperty[];
}

/** Activity dependency information. */
export interface ActivityDependency {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Activity name. */
  activity: string;
  /** Match-Condition for the dependency. */
  dependencyConditions: DependencyCondition[];
}

/** User property. */
export interface UserProperty {
  /** User property name. */
  name: string;
  /** User property value. Type: string (or Expression with resultType string). */
  value: any;
}

/** Definition of a single variable for a Pipeline. */
export interface VariableSpecification {
  /** Variable type. */
  type: VariableType;
  /** Default value of variable. */
  defaultValue?: any;
}

/** The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */
export interface PipelineFolder {
  /** The name of the folder that this Pipeline is in. */
  name?: string;
}

/** Response body with a run identifier. */
export interface CreateRunResponse {
  /** Identifier of a run. */
  runId: string;
}

/** Query parameters for listing runs. */
export interface RunFilterParameters {
  /** The continuation token for getting the next page of results. Null for first page. */
  continuationToken?: string;
  /** The time at or after which the run event was updated in 'ISO 8601' format. */
  lastUpdatedAfter: Date;
  /** The time at or before which the run event was updated in 'ISO 8601' format. */
  lastUpdatedBefore: Date;
  /** List of filters. */
  filters?: RunQueryFilter[];
  /** List of OrderBy option. */
  orderBy?: RunQueryOrderBy[];
}

/** Query filter option for listing runs. */
export interface RunQueryFilter {
  /** Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. */
  operand: RunQueryFilterOperand;
  /** Operator to be used for filter. */
  operator: RunQueryFilterOperator;
  /** List of filter values. */
  values: string[];
}

/** An object to provide order by options for listing runs. */
export interface RunQueryOrderBy {
  /** Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. */
  orderBy: RunQueryOrderByField;
  /** Sorting order of the parameter. */
  order: RunQueryOrder;
}

/** A list of pipeline runs. */
export interface PipelineRunsQueryResponse {
  /** List of pipeline runs. */
  value: PipelineRun[];
  /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */
  continuationToken?: string;
}

/** Information about a pipeline run. */
export interface PipelineRun {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /**
   * Identifier of a run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly runId?: string;
  /**
   * Identifier that correlates all the recovery runs of a pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly runGroupId?: string;
  /**
   * Indicates if the recovered pipeline run is the latest in its group.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly isLatest?: boolean;
  /**
   * The pipeline name.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly pipelineName?: string;
  /**
   * The full or partial list of parameter name, value pair used in the pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly parameters?: { [propertyName: string]: string };
  /**
   * Entity that started the pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly invokedBy?: PipelineRunInvokedBy;
  /**
   * The last updated timestamp for the pipeline run event in ISO8601 format.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly lastUpdated?: Date;
  /**
   * The start time of a pipeline run in ISO8601 format.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly runStart?: Date;
  /**
   * The end time of a pipeline run in ISO8601 format.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly runEnd?: Date;
  /**
   * The duration of a pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly durationInMs?: number;
  /**
   * The status of a pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: string;
  /**
   * The message from a pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly message?: string;
}

/** Provides entity name and id that started the pipeline run. */
export interface PipelineRunInvokedBy {
  /**
   * Name of the entity that started the pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: string;
  /**
   * The ID of the entity that started the run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /**
   * The type of the entity that started the run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly invokedByType?: string;
}

/** A list of activity runs. */
export interface ActivityRunsQueryResponse {
  /** List of activity runs. */
  value: ActivityRun[];
  /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */
  continuationToken?: string;
}

/** Information about an activity run in a pipeline. */
export interface ActivityRun {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /**
   * The name of the pipeline.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly pipelineName?: string;
  /**
   * The id of the pipeline run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly pipelineRunId?: string;
  /**
   * The name of the activity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly activityName?: string;
  /**
   * The type of the activity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly activityType?: string;
  /**
   * The id of the activity run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly activityRunId?: string;
  /**
   * The name of the compute linked service.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly linkedServiceName?: string;
  /**
   * The status of the activity run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: string;
  /**
   * The start time of the activity run in 'ISO 8601' format.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly activityRunStart?: Date;
  /**
   * The end time of the activity run in 'ISO 8601' format.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly activityRunEnd?: Date;
  /**
   * The duration of the activity run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly durationInMs?: number;
  /**
   * The input for the activity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly input?: any;
  /**
   * The output for the activity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly output?: any;
  /**
   * The error if any from the activity run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly error?: any;
}

/** A list of spark job definitions resources. */
export interface SparkJobDefinitionsListResponse {
  /** List of spark job definitions. */
  value: SparkJobDefinitionResource[];
  /** The link to the next page of results, if any remaining results exist. */
  nextLink?: string;
}

/** Spark job definition. */
export interface SparkJobDefinition {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The description of the Spark job definition. */
  description?: string;
  /** Big data pool reference. */
  targetBigDataPool: BigDataPoolReference;
  /** The spark configuration of the spark job. */
  targetSparkConfiguration?: SparkConfigurationReference;
  /** The required Spark version of the application. */
  requiredSparkVersion?: string;
  /** The language of the Spark application. */
  language?: string;
  /** The properties of the Spark job. */
  jobProperties: SparkJobProperties;
  /** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. */
  folder?: SparkJobDefinitionFolder;
}

/** The properties of the Spark job. */
export interface SparkJobProperties {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The name of the job. */
  name?: string;
  /** File containing the application to execute. */
  file: string;
  /** Main class for Java/Scala application. */
  className?: string;
  /** Spark configuration properties. */
  conf?: any;
  /** Command line arguments for the application. */
  args?: string[];
  /** Jars to be used in this job. */
  jars?: string[];
  /** Files to be used in this job. */
  files?: string[];
  /** Archives to be used in this job. */
  archives?: string[];
  /** Amount of memory to use for the driver process. */
  driverMemory: string;
  /** Number of cores to use for the driver. */
  driverCores: number;
  /** Amount of memory to use per executor process. */
  executorMemory: string;
  /** Number of cores to use for each executor. */
  executorCores: number;
  /** Number of executors to launch for this job. */
  numExecutors: number;
}

/** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. */
export interface SparkJobDefinitionFolder {
  /** The name of the folder that this Spark job definition is in. */
  name?: string;
}

/** A Spark batch job. */
export interface SparkBatchJob {
  /** Livy state-transition information for the batch job. */
  livyInfo?: SparkBatchJobState;
  /** The batch name. */
  name?: string;
  /** The workspace name. */
  workspaceName?: string;
  /** The Spark pool name. */
  sparkPoolName?: string;
  /** The submitter name. */
  submitterName?: string;
  /** The submitter identifier. */
  submitterId?: string;
  /** The artifact identifier. */
  artifactId?: string;
  /** The job type. */
  jobType?: SparkJobType;
  /** The Spark batch job result. */
  result?: SparkBatchJobResultType;
  /** The scheduler information. */
  scheduler?: SparkScheduler;
  /** The plugin information. */
  plugin?: SparkServicePlugin;
  /** The error information. */
  errors?: SparkServiceError[];
  /** The tags. */
  tags?: { [propertyName: string]: string };
  /** The session Id. */
  id: number;
  /** The application id of this session */
  appId?: string;
  /** The detailed application info. */
  appInfo?: { [propertyName: string]: string };
  /** The batch state */
  state?: LivyStates;
  /** The log lines. */
  logLines?: string[];
}

/** Timestamps at which each Livy state of a Spark batch job was first observed. */
export interface SparkBatchJobState {
  /** The time at which the "not_started" Livy state was first seen. */
  notStartedAt?: Date;
  /** The time at which the "starting" Livy state was first seen. */
  startingAt?: Date;
  /** The time at which the "running" Livy state was first seen. */
  runningAt?: Date;
  /** The time at which the "dead" Livy state was first seen. */
  deadAt?: Date;
  /** The time at which the "success" Livy state was first seen. */
  successAt?: Date;
  /** The time at which the "killed" Livy state was first seen. */
  terminatedAt?: Date;
  /** The time at which the "recovering" Livy state was first seen. */
  recoveringAt?: Date;
  /** The Spark job state. */
  currentState?: string;
  /** The request that created this job. */
  jobCreationRequest?: SparkRequest;
}

/** The submission request of a Spark job (see SparkBatchJobState.jobCreationRequest). */
export interface SparkRequest {
  /** The job name. */
  name?: string;
  /** File containing the application to execute. */
  file?: string;
  /** Main class for Java/Scala application. */
  className?: string;
  /** Command line arguments for the application. */
  arguments?: string[];
  /** Jars to be used in this job. */
  jars?: string[];
  /** Python files to be used in this job. */
  pythonFiles?: string[];
  /** Files to be used in this job. */
  files?: string[];
  /** Archives to be used in this job. */
  archives?: string[];
  /** Dictionary of <string> */
  configuration?: { [propertyName: string]: string };
  /** Amount of memory to use for the driver process. */
  driverMemory?: string;
  /** Number of cores to use for the driver. */
  driverCores?: number;
  /** Amount of memory to use per executor process. */
  executorMemory?: string;
  /** Number of cores to use for each executor. */
  executorCores?: number;
  /** Number of executors to launch for this job. */
  executorCount?: number;
}

/** Scheduler information for a Spark job. */
export interface SparkScheduler {
  submittedAt?: Date;
  scheduledAt?: Date;
  endedAt?: Date;
  cancellationRequestedAt?: Date;
  currentState?: SchedulerCurrentState;
}

/** Plugin information for a Spark job. */
export interface SparkServicePlugin {
  preparationStartedAt?: Date;
  resourceAcquisitionStartedAt?: Date;
  submissionStartedAt?: Date;
  monitoringStartedAt?: Date;
  cleanupStartedAt?: Date;
  currentState?: PluginCurrentState;
}

/** Error information from the Spark service. */
export interface SparkServiceError {
  message?: string;
  errorCode?: string;
  source?: SparkErrorSource;
}

/** List of SQL pools */
export interface SqlPoolInfoListResult {
  /** Link to the next page of results */
  nextLink?: string;
  /** List of SQL pools */
  value?: SqlPool[];
}

/** SQL pool SKU */
export interface Sku {
  /** The service tier */
  tier?: string;
  /** The SKU name */
  name?: string;
  /** If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. */
  capacity?: number;
}

/** A list of sql scripts resources. */
export interface SqlScriptsListResponse {
  /** List of sql scripts. */
  value: SqlScriptResource[];
  /** The link to the next page of results, if any remaining results exist. */
  nextLink?: string;
}

/** Sql Script resource type. */
export interface SqlScriptResource {
  /**
   * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /** The name of the resource */
  name: string;
  /**
   * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly type?: string;
  /**
   * Resource Etag.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly etag?: string;
  /** Properties of sql script. */
  properties: SqlScript;
}

/** SQL script. */
export interface SqlScript {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The description of the SQL script. */
  description?: string;
  /** The type of the SQL script. */
  type?: SqlScriptType;
  /** The content of the SQL script. */
  content: SqlScriptContent;
  /** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */
  folder?: SqlScriptFolder;
}

/** The content of the SQL script. */
export interface SqlScriptContent {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** SQL query to execute. */
  query: string;
  /** The connection used to execute the SQL script. */
  currentConnection?: SqlConnection;
  /** Limit of results, '-1' for no limit. */
  resultLimit?: number;
  /** The metadata of the SQL script. */
  metadata?: SqlScriptMetadata;
}

/** The connection used to execute the SQL script. */
export interface SqlConnection {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The type of the connection. */
  type?: SqlConnectionType;
  /** The identifier of the connection. */
  name?: string;
  /** The associated SQL pool name (supported by SQL pool v3) */
  poolName?: string;
  /** The associated database name (supported by SQL pool v3) */
  databaseName?: string;
}

/** The metadata of the SQL script. */
export interface SqlScriptMetadata {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** The language of the SQL script. */
  language?: string;
}

/** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */
export interface SqlScriptFolder {
  /** The name of the folder that this SQL script is in. */
  name?: string;
}

/** A list of trigger resources. */
export interface TriggerListResponse {
  /** List of triggers. */
  value: TriggerResource[];
  /** The link to the next page of results, if any remaining results exist. */
  nextLink?: string;
}

/** Azure Synapse nested object which contains information about creating pipeline run */
export interface Trigger {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type:
    | "RerunTumblingWindowTrigger"
    | "MultiplePipelineTrigger"
    | "ScheduleTrigger"
    | "BlobTrigger"
    | "BlobEventsTrigger"
    | "CustomEventsTrigger"
    | "TumblingWindowTrigger"
    | "ChainingTrigger";
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Trigger description. */
  description?: string;
  /**
   * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly runtimeState?: TriggerRuntimeState;
  /** List of tags that can be used for describing the trigger. */
  annotations?: any[];
}

/** Defines the response of a trigger subscription operation. */
export interface TriggerSubscriptionOperationStatus {
  /**
   * Trigger name.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggerName?: string;
  /**
   * Event Subscription Status.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: EventSubscriptionStatus;
}

/** A list of trigger runs. */
export interface TriggerRunsQueryResponse {
  /** List of trigger runs. */
  value: TriggerRun[];
  /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */
  continuationToken?: string;
}

/** Trigger runs. */
export interface TriggerRun {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /**
   * Trigger run id.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggerRunId?: string;
  /**
   * Trigger name.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggerName?: string;
  /**
   * Trigger type.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggerType?: string;
  /**
   * Trigger run start time.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggerRunTimestamp?: Date;
  /**
   * Trigger run status.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: TriggerRunStatus;
  /**
   * Trigger error message.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly message?: string;
  /**
   * List of property name and value related to trigger run. Name, value pair depends on type of trigger.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly properties?: { [propertyName: string]: string };
  /**
   * List of pipeline name and run Id triggered by the trigger run.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly triggeredPipelines?: { [propertyName: string]: string };
}

/** Details of the data lake storage account associated with the workspace */
export interface DataLakeStorageAccountDetails {
  /** Account URL */
  accountUrl?: string;
  /** Filesystem name */
  filesystem?: string;
}

/** Virtual Network Profile */
export interface VirtualNetworkProfile {
  /** Subnet ID used for computes in workspace */
  computeSubnetId?: string;
}

/** Private endpoint details */
export interface PrivateEndpoint {
  /**
   * Resource id of the private endpoint.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
}

/** Connection state details of the private endpoint */
export interface PrivateLinkServiceConnectionState {
  /** The private link service connection status. */
  status?: string;
  /** The private link service connection description. */
  description?: string;
  /**
   * The actions required for private link service connection.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly actionsRequired?: string;
}

/** Details of the encryption associated with the workspace */
export interface EncryptionDetails {
  /**
   * Double Encryption enabled
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly doubleEncryptionEnabled?: boolean;
  /** Customer Managed Key Details */
  cmk?: CustomerManagedKeyDetails;
}

/** Details of the customer managed key associated with the workspace */
export interface CustomerManagedKeyDetails {
  /**
   * The customer managed key status on the workspace
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly status?: string;
  /** The key object of the workspace */
  key?: WorkspaceKeyDetails;
}

/** Details of the customer managed key associated with the workspace */
export interface WorkspaceKeyDetails {
  /** Workspace Key sub-resource name */
  name?: string;
  /** Workspace Key sub-resource key vault url */
  keyVaultUrl?: string;
}

/** Managed Virtual Network Settings */
export interface ManagedVirtualNetworkSettings {
  /** Prevent Data Exfiltration */
  preventDataExfiltration?: boolean;
  /** Linked Access Check On Target Resource */
  linkedAccessCheckOnTargetResource?: boolean;
  /** Allowed Aad Tenant Ids For Linking */
  allowedAadTenantIdsForLinking?: string[];
}

/** Git integration settings */
export interface WorkspaceRepositoryConfiguration {
  /** Type of workspace repositoryID configuration. Example WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration */
  type?: string;
  /** GitHub Enterprise host name. For example: https://github.mydomain.com */
  hostName?: string;
  /** Account name */
  accountName?: string;
  /** VSTS project name */
  projectName?: string;
  /** Repository name */
  repositoryName?: string;
  /** Collaboration branch */
  collaborationBranch?: string;
  /** Root folder to use in the repository */
  rootFolder?: string;
  /** The last commit ID */
  lastCommitId?: string;
  /** The VSTS tenant ID */
  tenantId?: string;
  /** GitHub bring your own app client id */
  clientId?: string;
  /** GitHub bring your own app client secret information. */
  clientSecret?: GitHubClientSecret;
}

/** Client secret information for factory's bring your own app repository configuration */
export interface GitHubClientSecret {
  /** Bring your own app client secret AKV URL */
  byoaSecretAkvUrl?: string;
  /** Bring your own app client secret name in AKV */
  byoaSecretName?: string;
}

/** Purview Configuration */
export interface PurviewConfiguration {
  /** Purview Resource ID */
  purviewResourceId?: string;
}

/** The workspace managed identity */
export interface ManagedIdentity {
  /**
   * The principal ID of the workspace managed identity
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly principalId?: string;
  /**
   * The tenant ID of the workspace managed identity
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly tenantId?: string;
  /** The type of managed identity for the workspace */
  type?: ResourceIdentityType;
}

/** Azure Synapse expression definition. */
export interface Expression {
  /** Expression type. */
  type: ExpressionType;
  /** Expression value. */
  value: string;
}

/** Defines the response of a provision trigger dependency operation. */
export interface TriggerDependencyProvisioningStatus {
  /** Trigger name. */
  triggerName: string;
  /** Provisioning status. */
  provisioningStatus: string;
}

/** Pipeline reference type. */
export interface PipelineReference {
  /** Pipeline reference type. */
  type: PipelineReferenceType;
  /** Reference pipeline name. */
  referenceName: string;
  /** Reference name. */
  name?: string;
}

/** Pipeline that needs to be triggered with the given parameters. */
export interface TriggerPipelineReference {
  /** Pipeline reference. */
  pipelineReference?: PipelineReference;
  /** Pipeline parameters. */
  parameters?: { [propertyName: string]: any };
}

/** Parameters for updating a workspace resource. */
export interface WorkspaceUpdateParameters {
  /** The resource tags. */
  tags?: { [propertyName: string]: string };
  /** Managed service identity of the workspace. */
  identity?: WorkspaceIdentity;
}

/** Identity properties of the workspace resource. */
export interface WorkspaceIdentity {
  /** The identity type. Currently the only supported type is 'SystemAssigned'. */
  type: "SystemAssigned";
  /**
   * The principal id of the identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly principalId?: string;
  /**
   * The client tenant id of the identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly tenantId?: string;
}

/** Dataset reference type. */
export interface DatasetReference {
  /** Dataset reference type. */
  type: DatasetReferenceType;
  /** Reference dataset name. */
  referenceName: string;
  /** Arguments for dataset. */
  parameters?: { [propertyName: string]: any };
}

/** Data flow reference type. */
export interface DataFlowReference {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Data flow reference type. */
  type: DataFlowReferenceType;
  /** Reference data flow name. */
  referenceName: string;
  /** Reference data flow parameters from dataset. */
  datasetParameters?: any;
  /** Data flow parameters */
  parameters?: { [propertyName: string]: any };
}

/** Rerun tumbling window trigger Parameters. */
export interface RerunTumblingWindowTriggerActionParameters {
  /** The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */
  startTime: Date;
  /** The end time for the time period for which restatement is initiated. Only UTC time is currently supported. */
  endTime: Date;
  /** The max number of parallel time windows (ready for execution) for which a rerun is triggered. */
  maxConcurrency: number;
}

/** The request payload of get SSIS object metadata. */
export interface GetSsisObjectMetadataRequest {
  /** Metadata path. */
  metadataPath?: string;
}

/** The status of the operation. */
export interface SsisObjectMetadataStatusResponse {
  /** The status of the operation. */
  status?: string;
  /** The operation name. */
  name?: string;
  /** The operation properties. */
  properties?: string;
  /** The operation error message. */
  error?: string;
}

/** The exposure control request. */
export interface ExposureControlRequest {
  /** The feature name. */
  featureName?: string;
  /** The feature type. */
  featureType?: string;
}

/** The exposure control response. */
export interface ExposureControlResponse {
  /**
   * The feature name.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly featureName?: string;
  /**
   * The feature value.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: string;
}

/** Synapse notebook reference type. */
export interface SynapseNotebookReference {
  /** Synapse notebook reference type. */
  type: NotebookReferenceType;
  /** Reference notebook name. Type: string (or Expression with resultType string). */
  referenceName: any;
}

/** Synapse spark job reference type. */
export interface SynapseSparkJobReference {
  /** Synapse spark job reference type. */
  type: SparkJobReferenceType;
  /** Reference spark job name. */
  referenceName: string;
}

/** SQL pool reference type. */
export interface SqlPoolReference {
  /** SQL pool reference type. */
  type: SqlPoolReferenceType;
  /** Reference SQL pool name. */
  referenceName: string;
}

/** Big data pool reference type. */
export interface BigDataPoolParametrizationReference {
  /** Big data pool reference type. */
  type: BigDataPoolReferenceType;
  /** Reference big data pool name. Type: string (or Expression with resultType string). */
  referenceName: any;
}

/** Request body structure for starting data flow debug session. */
export interface StartDataFlowDebugSessionRequest {
  /** The ID of data flow debug session. */
  sessionId?: string;
  /** Data flow instance. */
  dataFlow?: DataFlowResource;
  /** List of Data flows */
  dataFlows?: DataFlowResource[];
  /** List of datasets. */
  datasets?: DatasetResource[];
  /** List of linked services. */
  linkedServices?: LinkedServiceResource[];
  /** Staging info for debug session. */
  staging?: any;
  /** Data flow debug settings. */
  debugSettings?: any;
  /** Presumably enables incremental debug for the session. NOTE(review): the generated comment read "The type of new Databricks cluster.", which does not match this boolean property — verify against the service specification. */
  incrementalDebug?: boolean;
}

/** Response body structure for starting data flow debug session. */
export interface StartDataFlowDebugSessionResponse {
  /** The ID of data flow debug job version. */
  jobVersion?: string;
}

/** Request body structure for data flow preview data. */
export interface DataFlowDebugPreviewDataRequest {
  /** The ID of data flow debug session. */
  sessionId?: string;
  /** The data flow which contains the debug session. */
  dataFlowName?: string;
  /** The output stream name. */
  streamName?: string;
  /** The row limit for preview request. */
  rowLimits?: number;
}

/** Request body structure for data flow statistics. */
export interface DataFlowDebugStatisticsRequest {
  /** The ID of data flow debug session. */
  sessionId?: string;
  /** The data flow which contains the debug session. */
  dataFlowName?: string;
  /** The output stream name. */
  streamName?: string;
  /** List of column names. */
  columns?: string[];
}

/** Request body structure for data flow expression preview. */
export interface EvaluateDataFlowExpressionRequest {
  /** The ID of data flow debug session. */
  sessionId?: string;
  /** The data flow which contains the debug session. */
  dataFlowName?: string;
  /** The output stream name. */
  streamName?: string;
  /** The row limit for preview request. */
  rowLimits?: number;
  /** The expression for preview. */
  expression?: string;
}

/** Response body structure of data flow query for data preview, statistics or expression preview. */
export interface DataFlowDebugQueryResponse {
  /** The run ID of data flow debug session. */
  runId?: string;
}

/** Response body structure of data flow result for data preview, statistics or expression preview. */
export interface DataFlowDebugResultResponse {
  /** The run status of data preview, statistics or expression preview. */
  status?: string;
  /** The result data of data preview, statistics or expression preview. */
  data?: string;
}

/** A list of rerun triggers. */
export interface RerunTriggerListResponse {
  /** List of rerun triggers. */
  value: RerunTriggerResource[];
  /**
   * The continuation token for getting the next page of results, if any remaining results exist, null otherwise.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly nextLink?: string;
}

/** A data flow transformation. */
export interface Transformation {
  /** Transformation name. */
  name: string;
  /** Transformation description. */
  description?: string;
  /** Dataset reference. */
  dataset?: DatasetReference;
  /** Linked service reference. */
  linkedService?: LinkedServiceReference;
  /** Flowlet Reference */
  flowlet?: DataFlowReference;
}

/** Dataset location. */
export interface DatasetLocation {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type:
    | "AzureBlobStorageLocation"
    | "AzureBlobFSLocation"
    | "AzureDataLakeStoreLocation"
    | "AmazonS3Location"
    | "FileServerLocation"
    | "AzureFileStorageLocation"
    | "GoogleCloudStorageLocation"
    | "FtpServerLocation"
    | "SftpLocation"
    | "HttpServerLocation"
    | "HdfsLocation";
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Specify the folder path of dataset. Type: string (or Expression with resultType string) */
  folderPath?: any;
  /** Specify the file name of dataset. Type: string (or Expression with resultType string). */
  fileName?: any;
}

/** Columns that define the structure of the dataset. */
export interface DatasetDataElement {
  /** Name of the column. Type: string (or Expression with resultType string). */
  name?: any;
  /** Type of the column. Type: string (or Expression with resultType string). */
  type?: any;
}

/** Columns that define the physical type schema of the dataset. */
export interface DatasetSchemaDataElement {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Name of the schema column. Type: string (or Expression with resultType string). */
  name?: any;
  /** Type of the schema column. Type: string (or Expression with resultType string). */
  type?: any;
}

/** The format definition of a storage. */
export interface DatasetStorageFormat {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type:
    | "TextFormat"
    | "JsonFormat"
    | "AvroFormat"
    | "OrcFormat"
    | "ParquetFormat";
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Serializer. Type: string (or Expression with resultType string). */
  serializer?: any;
  /** Deserializer. Type: string (or Expression with resultType string). */
  deserializer?: any;
}

/** The compression method used on a dataset. */
export interface DatasetCompression {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Type of dataset compression. Type: string (or Expression with resultType string). */
  type: any;
  /** The dataset compression level. Type: string (or Expression with resultType string). */
  level?: any;
}

/** Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. */
export interface WebLinkedServiceTypeProperties {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  authenticationType: "Anonymous" | "Basic" | "ClientCertificate";
  /** The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). */
  url: any;
}

/** Custom script action to run on HDI ondemand cluster once it's up. */
export interface ScriptAction {
  /** The user provided name of the script action. */
  name: string;
  /** The URI for the script action. */
  uri: string;
  /** The node types on which the script action should be executed. */
  roles: any;
  /** The parameters for the script action. */
  parameters?: string;
}

/** Execution policy for an activity. */
export interface ActivityPolicy {
  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
  [property: string]: any;
  /** Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  timeout?: any;
  /** Maximum ordinary retry attempts. Default is 0. 
Type: integer (or Expression with resultType integer), minimum: 0. */\n retry?: any;\n /** Interval between each retry attempt (in seconds). The default is 30 sec. */\n retryIntervalInSeconds?: number;\n /** When set to true, Input from activity is considered as secure and will not be logged to monitoring. */\n secureInput?: boolean;\n /** When set to true, Output from activity is considered as secure and will not be logged to monitoring. */\n secureOutput?: boolean;\n}\n\n/** Connector read setting. */\nexport interface StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AzureBlobStorageReadSettings\"\n | \"AzureBlobFSReadSettings\"\n | \"AzureDataLakeStoreReadSettings\"\n | \"AmazonS3ReadSettings\"\n | \"FileServerReadSettings\"\n | \"AzureFileStorageReadSettings\"\n | \"GoogleCloudStorageReadSettings\"\n | \"FtpReadSettings\"\n | \"SftpReadSettings\"\n | \"HttpReadSettings\"\n | \"HdfsReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** Connector write settings. */\nexport interface StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"SftpWriteSettings\"\n | \"AzureBlobStorageWriteSettings\"\n | \"AzureBlobFSWriteSettings\"\n | \"AzureDataLakeStoreWriteSettings\"\n | \"FileServerWriteSettings\"\n | \"AzureFileStorageWriteSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n /** The type of copy behavior for copy sink. 
*/\n copyBehavior?: any;\n}\n\n/** Distcp settings. */\nexport interface DistcpSettings {\n /** Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). */\n resourceManagerEndpoint: any;\n /** Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). */\n tempScriptPath: any;\n /** Specifies the Distcp options. Type: string (or Expression with resultType string). */\n distcpOptions?: any;\n}\n\n/** Format read settings. */\nexport interface FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"DelimitedTextReadSettings\"\n | \"JsonReadSettings\"\n | \"XmlReadSettings\"\n | \"BinaryReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Compression read settings. */\nexport interface CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZipDeflateReadSettings\" | \"TarReadSettings\" | \"TarGZipReadSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Format write settings. */\nexport interface FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AvroWriteSettings\"\n | \"OrcWriteSettings\"\n | \"ParquetWriteSettings\"\n | \"DelimitedTextWriteSettings\"\n | \"JsonWriteSettings\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** A copy activity source. 
*/\nexport interface CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"AvroSource\"\n | \"ExcelSource\"\n | \"ParquetSource\"\n | \"DelimitedTextSource\"\n | \"JsonSource\"\n | \"XmlSource\"\n | \"OrcSource\"\n | \"BinarySource\"\n | \"TabularSource\"\n | \"AzureTableSource\"\n | \"BlobSource\"\n | \"DocumentDbCollectionSource\"\n | \"CosmosDbSqlApiSource\"\n | \"DynamicsSource\"\n | \"DynamicsCrmSource\"\n | \"CommonDataServiceForAppsSource\"\n | \"RelationalSource\"\n | \"InformixSource\"\n | \"MicrosoftAccessSource\"\n | \"Db2Source\"\n | \"OdbcSource\"\n | \"MySqlSource\"\n | \"PostgreSqlSource\"\n | \"SybaseSource\"\n | \"SapBwSource\"\n | \"ODataSource\"\n | \"SalesforceSource\"\n | \"SalesforceServiceCloudSource\"\n | \"SapCloudForCustomerSource\"\n | \"SapEccSource\"\n | \"SapHanaSource\"\n | \"SapOpenHubSource\"\n | \"SapOdpSource\"\n | \"SapTableSource\"\n | \"RestSource\"\n | \"SqlSource\"\n | \"SqlServerSource\"\n | \"AmazonRdsForSqlServerSource\"\n | \"AzureSqlSource\"\n | \"SqlMISource\"\n | \"SqlDWSource\"\n | \"FileSystemSource\"\n | \"HdfsSource\"\n | \"AzureMySqlSource\"\n | \"AzureDataExplorerSource\"\n | \"OracleSource\"\n | \"AmazonRdsForOracleSource\"\n | \"TeradataSource\"\n | \"WebSource\"\n | \"CassandraSource\"\n | \"MongoDbSource\"\n | \"MongoDbAtlasSource\"\n | \"MongoDbV2Source\"\n | \"CosmosDbMongoDbApiSource\"\n | \"Office365Source\"\n | \"AzureDataLakeStoreSource\"\n | \"AzureBlobFSSource\"\n | \"HttpSource\"\n | \"AmazonMWSSource\"\n | \"AzurePostgreSqlSource\"\n | \"ConcurSource\"\n | \"CouchbaseSource\"\n | \"DrillSource\"\n | \"EloquaSource\"\n | \"GoogleBigQuerySource\"\n | \"GreenplumSource\"\n | \"HBaseSource\"\n | \"HiveSource\"\n | \"HubspotSource\"\n | \"ImpalaSource\"\n | \"JiraSource\"\n | \"MagentoSource\"\n | \"MariaDBSource\"\n | \"AzureMariaDBSource\"\n | \"MarketoSource\"\n | \"PaypalSource\"\n | \"PhoenixSource\"\n | \"PrestoSource\"\n | 
\"QuickBooksSource\"\n | \"ServiceNowSource\"\n | \"ShopifySource\"\n | \"SparkSource\"\n | \"SquareSource\"\n | \"XeroSource\"\n | \"ZohoSource\"\n | \"NetezzaSource\"\n | \"VerticaSource\"\n | \"SalesforceMarketingCloudSource\"\n | \"ResponsysSource\"\n | \"DynamicsAXSource\"\n | \"OracleServiceCloudSource\"\n | \"GoogleAdWordsSource\"\n | \"AmazonRedshiftSource\"\n | \"SnowflakeSource\"\n | \"AzureDatabricksDeltaLakeSource\"\n | \"SharePointOnlineListSource\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Source retry count. Type: integer (or Expression with resultType integer). */\n sourceRetryCount?: any;\n /** Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n sourceRetryWait?: any;\n /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** A copy activity sink. 
*/\nexport interface CopySink {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"DelimitedTextSink\"\n | \"JsonSink\"\n | \"OrcSink\"\n | \"RestSink\"\n | \"AzurePostgreSqlSink\"\n | \"AzureMySqlSink\"\n | \"AzureDatabricksDeltaLakeSink\"\n | \"SapCloudForCustomerSink\"\n | \"AzureQueueSink\"\n | \"AzureTableSink\"\n | \"AvroSink\"\n | \"ParquetSink\"\n | \"BinarySink\"\n | \"BlobSink\"\n | \"FileSystemSink\"\n | \"DocumentDbCollectionSink\"\n | \"CosmosDbSqlApiSink\"\n | \"SqlSink\"\n | \"SqlServerSink\"\n | \"AzureSqlSink\"\n | \"SqlMISink\"\n | \"SqlDWSink\"\n | \"SnowflakeSink\"\n | \"OracleSink\"\n | \"AzureDataLakeStoreSink\"\n | \"AzureBlobFSSink\"\n | \"AzureSearchIndexSink\"\n | \"OdbcSink\"\n | \"InformixSink\"\n | \"MicrosoftAccessSink\"\n | \"DynamicsSink\"\n | \"DynamicsCrmSink\"\n | \"CommonDataServiceForAppsSink\"\n | \"AzureDataExplorerSink\"\n | \"SalesforceSink\"\n | \"SalesforceServiceCloudSink\"\n | \"CosmosDbMongoDbApiSink\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. */\n writeBatchSize?: any;\n /** Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n writeBatchTimeout?: any;\n /** Sink retry count. Type: integer (or Expression with resultType integer). */\n sinkRetryCount?: any;\n /** Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n sinkRetryWait?: any;\n /** The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). */\n maxConcurrentConnections?: any;\n}\n\n/** Staging settings. */\nexport interface StagingSettings {\n /** Describes unknown properties. 
The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Staging linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing the interim data. Type: string (or Expression with resultType string). */\n path?: any;\n /** Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableCompression?: any;\n}\n\n/** Redirect incompatible row settings */\nexport interface RedirectIncompatibleRowSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). */\n linkedServiceName: any;\n /** The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** (Deprecated. Please use LogSettings) Log storage settings. */\nexport interface LogStorageSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Log storage linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */\n path?: any;\n /** Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */\n logLevel?: any;\n /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */\n enableReliableLogging?: any;\n}\n\n/** Log settings. */\nexport interface LogSettings {\n /** Specifies whether to enable copy activity log. 
Type: boolean (or Expression with resultType boolean). */\n enableCopyActivityLog?: any;\n /** Specifies settings for copy activity log. */\n copyActivityLogSettings?: CopyActivityLogSettings;\n /** Log location settings customer needs to provide when enabling log. */\n logLocationSettings: LogLocationSettings;\n}\n\n/** Settings for copy activity log. */\nexport interface CopyActivityLogSettings {\n /** Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */\n logLevel?: any;\n /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */\n enableReliableLogging?: any;\n}\n\n/** Log location settings. */\nexport interface LogLocationSettings {\n /** Log storage linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** Skip error file. */\nexport interface SkipErrorFile {\n /** Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). */\n fileMissing?: any;\n /** Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). */\n dataInconsistency?: any;\n}\n\n/** The settings that will be leveraged for SAP HANA source partitioning. */\nexport interface SapHanaPartitionSettings {\n /** The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n}\n\n/** The settings that will be leveraged for SAP table source partitioning. */\nexport interface SapTablePartitionSettings {\n /** The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n /** The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). */\n maxPartitionsNumber?: any;\n}\n\n/** SQL stored procedure parameter. */\nexport interface StoredProcedureParameter {\n /** Stored procedure parameter value. Type: string (or Expression with resultType string). */\n value?: any;\n /** Stored procedure parameter type. */\n type?: StoredProcedureParameterType;\n}\n\n/** The settings that will be leveraged for Sql source partitioning. */\nexport interface SqlPartitionSettings {\n /** The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). 
*/\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for Oracle source partitioning. */\nexport interface OraclePartitionSettings {\n /** Names of the physical partitions of Oracle table. */\n partitionNames?: any;\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for AmazonRdsForOracle source partitioning. */\nexport interface AmazonRdsForOraclePartitionSettings {\n /** Names of the physical partitions of AmazonRdsForOracle table. */\n partitionNames?: any;\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The settings that will be leveraged for teradata source partitioning. */\nexport interface TeradataPartitionSettings {\n /** The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). 
*/\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** Cursor methods for Mongodb query */\nexport interface MongoDbCursorMethodsProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). */\n project?: any;\n /** Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). */\n sort?: any;\n /** Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). */\n skip?: any;\n /** Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). */\n limit?: any;\n}\n\n/** The settings that will be leveraged for Netezza source partitioning. */\nexport interface NetezzaPartitionSettings {\n /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionColumnName?: any;\n /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/\n partitionUpperBound?: any;\n /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */\n partitionLowerBound?: any;\n}\n\n/** The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. */\nexport interface RedshiftUnloadSettings {\n /** The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. */\n s3LinkedServiceName: LinkedServiceReference;\n /** The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). */\n bucketName: any;\n}\n\n/** Export command settings. */\nexport interface ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeExportCopyCommand\" | \"AzureDatabricksDeltaLakeExportCommand\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Import command settings. */\nexport interface ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeImportCommand\" | \"SnowflakeImportCopyCommand\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Notebook parameter. */\nexport interface NotebookParameter {\n /** Notebook parameter value. Type: string (or Expression with resultType string). */\n value?: any;\n /** Notebook parameter type. */\n type?: NotebookParameterType;\n}\n\n/** PolyBase settings. 
*/\nexport interface PolybaseSettings {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Reject type. */\n rejectType?: PolybaseSettingsRejectType;\n /** Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. */\n rejectValue?: any;\n /** Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. */\n rejectSampleValue?: any;\n /** Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). */\n useTypeDefault?: any;\n}\n\n/** DW Copy Command settings. */\nexport interface DWCopyCommandSettings {\n /** Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). */\n defaultValues?: DWCopyCommandDefaultValue[];\n /** Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalOptions\": { \"MAXERRORS\": \"1000\", \"DATEFORMAT\": \"'ymd'\" } */\n additionalOptions?: { [propertyName: string]: string };\n}\n\n/** Default value. */\nexport interface DWCopyCommandDefaultValue {\n /** Column name. Type: object (or Expression with resultType string). */\n columnName?: any;\n /** The default value of the column. Type: object (or Expression with resultType string). */\n defaultValue?: any;\n}\n\n/** Specify the column name and value of additional columns. */\nexport interface AdditionalColumns {\n /** Additional column name. 
Type: string (or Expression with resultType string). */\n name?: any;\n /** Additional column value. Type: string (or Expression with resultType string). */\n value?: any;\n}\n\n/** A copy activity translator. */\nexport interface CopyTranslator {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TabularTranslator\";\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n}\n\n/** Type conversion settings */\nexport interface TypeConversionSettings {\n /** Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). */\n allowDataTruncation?: any;\n /** Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). */\n treatBooleanAsNumber?: any;\n /** The format for DateTime values. Type: string (or Expression with resultType string). */\n dateTimeFormat?: any;\n /** The format for DateTimeOffset values. Type: string (or Expression with resultType string). */\n dateTimeOffsetFormat?: any;\n /** The format for TimeSpan values. Type: string (or Expression with resultType string). */\n timeSpanFormat?: any;\n /** The culture used to convert data from/to string. Type: string (or Expression with resultType string). */\n culture?: any;\n}\n\n/** SSIS package location. */\nexport interface SsisPackageLocation {\n /** The SSIS package path. Type: string (or Expression with resultType string). */\n packagePath?: any;\n /** The type of SSIS package location. */\n type?: SsisPackageLocationType;\n /** Password of the package. */\n packagePassword?: SecretBaseUnion;\n /** The package access credential. */\n accessCredential?: SsisAccessCredential;\n /** The configuration file of the package execution. Type: string (or Expression with resultType string). */\n configurationPath?: any;\n /** The configuration file access credential. 
*/\n configurationAccessCredential?: SsisAccessCredential;\n /** The package name. */\n packageName?: string;\n /** The embedded package content. Type: string (or Expression with resultType string). */\n packageContent?: any;\n /** The embedded package last modified date. */\n packageLastModifiedDate?: string;\n /** The embedded child package list. */\n childPackages?: SsisChildPackage[];\n}\n\n/** SSIS access credential. */\nexport interface SsisAccessCredential {\n /** Domain for windows authentication. */\n domain: any;\n /** UserName for windows authentication. */\n userName: any;\n /** Password for windows authentication. */\n password: SecretBaseUnion;\n}\n\n/** SSIS embedded child package. */\nexport interface SsisChildPackage {\n /** Path for embedded child package. Type: string (or Expression with resultType string). */\n packagePath: any;\n /** Name for embedded child package. */\n packageName?: string;\n /** Content for embedded child package. Type: string (or Expression with resultType string). */\n packageContent: any;\n /** Last modified date for embedded child package. */\n packageLastModifiedDate?: string;\n}\n\n/** SSIS package execution credential. */\nexport interface SsisExecutionCredential {\n /** Domain for windows authentication. */\n domain: any;\n /** UserName for windows authentication. */\n userName: any;\n /** Password for windows authentication. */\n password: SecureString;\n}\n\n/** SSIS execution parameter. */\nexport interface SsisExecutionParameter {\n /** SSIS package execution parameter value. Type: string (or Expression with resultType string). */\n value: any;\n}\n\n/** SSIS property override. */\nexport interface SsisPropertyOverride {\n /** SSIS package property override value. Type: string (or Expression with resultType string). */\n value: any;\n /** Whether SSIS package property override value is sensitive data. 
Value will be encrypted in SSISDB if it is true */\n isSensitive?: boolean;\n}\n\n/** SSIS package execution log location */\nexport interface SsisLogLocation {\n /** The SSIS package execution log path. Type: string (or Expression with resultType string). */\n logPath: any;\n /** The type of SSIS log location. */\n type: SsisLogLocationType;\n /** The package execution log access credential. */\n accessCredential?: SsisAccessCredential;\n /** Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n logRefreshInterval?: any;\n}\n\n/** Reference objects for custom activity */\nexport interface CustomActivityReferenceObject {\n /** Linked service references. */\n linkedServices?: LinkedServiceReference[];\n /** Dataset references. */\n datasets?: DatasetReference[];\n}\n\n/** Web activity authentication properties. */\nexport interface WebActivityAuthentication {\n /** Web activity authentication (Basic/ClientCertificate/MSI) */\n type: string;\n /** Base64-encoded contents of a PFX file. */\n pfx?: SecretBaseUnion;\n /** Web activity authentication user name for basic authentication. */\n username?: string;\n /** Password for the PFX file or basic authentication. */\n password?: SecretBaseUnion;\n /** Resource for which Azure Auth token will be requested when using MSI Authentication. */\n resource?: string;\n}\n\n/** Switch cases which have a value and corresponding activities. */\nexport interface SwitchCase {\n /** Expected value that satisfies the expression result of the 'on' property. */\n value?: string;\n /** List of activities to execute for satisfied case condition. */\n activities?: ActivityUnion[];\n}\n\n/** Azure ML WebService Input/Output file */\nexport interface AzureMLWebServiceFile {\n /** The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. 
Type: string (or Expression with resultType string). */\n filePath: any;\n /** Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. */\n linkedServiceName: LinkedServiceReference;\n}\n\n/** Compute properties for data flow activity. */\nexport interface ExecuteDataFlowActivityTypePropertiesCompute {\n /** Compute type of the cluster which will execute data flow job. */\n computeType?: DataFlowComputeType;\n /** Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. */\n coreCount?: number;\n}\n\n/** Script block of scripts. */\nexport interface ScriptActivityScriptBlock {\n /** The query text. Type: string (or Expression with resultType string). */\n text: any;\n /** The type of the query. Type: string. */\n type: ScriptType;\n /** Array of script parameters. Type: array. */\n parameters?: ScriptActivityParameter[];\n}\n\n/** Parameters of a script block. */\nexport interface ScriptActivityParameter {\n /** The name of the parameter. Type: string (or Expression with resultType string). */\n name?: any;\n /** The type of the parameter. */\n type?: ScriptActivityParameterType;\n /** The value of the parameter. */\n value?: any;\n /** The direction of the parameter. */\n direction?: ScriptActivityParameterDirection;\n /** The size of the output direction parameter. */\n size?: number;\n}\n\n/** Log settings of script activity. */\nexport interface ScriptActivityTypePropertiesLogSettings {\n /** The destination of logs. Type: string. */\n logDestination: ScriptActivityLogDestination;\n /** Log location settings customer needs to provide when enabling log. */\n logLocationSettings?: LogLocationSettings;\n}\n\n/** Spark configuration reference. */\nexport interface SparkConfigurationParametrizationReference {\n /** Spark configuration reference type. */\n type: SparkConfigurationReferenceType;\n /** Reference spark configuration name. 
Type: string (or Expression with resultType string). */\n referenceName: any;\n}\n\n/** The workflow trigger recurrence. */\nexport interface ScheduleTriggerRecurrence {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The frequency. */\n frequency?: RecurrenceFrequency;\n /** The interval. */\n interval?: number;\n /** The start time. */\n startTime?: Date;\n /** The end time. */\n endTime?: Date;\n /** The time zone. */\n timeZone?: string;\n /** The recurrence schedule. */\n schedule?: RecurrenceSchedule;\n}\n\n/** The recurrence schedule. */\nexport interface RecurrenceSchedule {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The minutes. */\n minutes?: number[];\n /** The hours. */\n hours?: number[];\n /** The days of the week. */\n weekDays?: DayOfWeek[];\n /** The month days. */\n monthDays?: number[];\n /** The monthly occurrences. */\n monthlyOccurrences?: RecurrenceScheduleOccurrence[];\n}\n\n/** The recurrence schedule occurrence. */\nexport interface RecurrenceScheduleOccurrence {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The day of the week. */\n day?: DayOfWeek;\n /** The occurrence. */\n occurrence?: number;\n}\n\n/** Execution policy for an activity. */\nexport interface RetryPolicy {\n /** Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */\n count?: any;\n /** Interval between retries in seconds. Default is 30. */\n intervalInSeconds?: number;\n}\n\n/** Referenced dependency. 
*/\nexport interface DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TriggerDependencyReference\"\n | \"TumblingWindowTriggerDependencyReference\"\n | \"SelfDependencyTumblingWindowTriggerReference\";\n}\n\n/** Trigger reference type. */\nexport interface TriggerReference {\n /** Trigger reference type. */\n type: TriggerReferenceType;\n /** Reference trigger name. */\n referenceName: string;\n}\n\n/** The compute resource properties for managed integration runtime. */\nexport interface IntegrationRuntimeComputeProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities */\n location?: string;\n /** The node size requirement to managed integration runtime. */\n nodeSize?: string;\n /** The required number of nodes for managed integration runtime. */\n numberOfNodes?: number;\n /** Maximum parallel executions count per node for managed integration runtime. */\n maxParallelExecutionsPerNode?: number;\n /** Data flow properties for managed integration runtime. */\n dataFlowProperties?: IntegrationRuntimeDataFlowProperties;\n /** VNet properties for managed integration runtime. */\n vNetProperties?: IntegrationRuntimeVNetProperties;\n}\n\n/** Data flow properties for managed integration runtime. */\nexport interface IntegrationRuntimeDataFlowProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Compute type of the cluster which will execute data flow job. */\n computeType?: DataFlowComputeType;\n /** Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. 
*/\n coreCount?: number;\n /** Time to live (in minutes) setting of the cluster which will execute data flow job. */\n timeToLive?: number;\n /** Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. */\n cleanup?: boolean;\n}\n\n/** VNet properties for managed integration runtime. */\nexport interface IntegrationRuntimeVNetProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The ID of the VNet that this integration runtime will join. */\n vNetId?: string;\n /** The name of the subnet this integration runtime will join. */\n subnet?: string;\n /** Resource IDs of the public IP addresses that this integration runtime will use. */\n publicIPs?: string[];\n}\n\n/** SSIS properties for managed integration runtime. */\nexport interface IntegrationRuntimeSsisProperties {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** Catalog information for managed dedicated integration runtime. */\n catalogInfo?: IntegrationRuntimeSsisCatalogInfo;\n /** License type for bringing your own license scenario. */\n licenseType?: IntegrationRuntimeLicenseType;\n /** Custom setup script properties for a managed dedicated integration runtime. */\n customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties;\n /** Data proxy properties for a managed dedicated integration runtime. */\n dataProxyProperties?: IntegrationRuntimeDataProxyProperties;\n /** The edition for the SSIS Integration Runtime */\n edition?: IntegrationRuntimeEdition;\n /** Custom setup without script properties for a SSIS integration runtime. */\n expressCustomSetupProperties?: CustomSetupBase[];\n}\n\n/** Catalog information for managed dedicated integration runtime. 
*/\nexport interface IntegrationRuntimeSsisCatalogInfo {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The catalog database server URL. */\n catalogServerEndpoint?: string;\n /** The administrator user name of catalog database. */\n catalogAdminUserName?: string;\n /** The password of the administrator user account of the catalog database. */\n catalogAdminPassword?: SecureString;\n /** The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ */\n catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier;\n}\n\n/** Custom setup script properties for a managed dedicated integration runtime. */\nexport interface IntegrationRuntimeCustomSetupScriptProperties {\n /** The URI of the Azure blob container that contains the custom setup script. */\n blobContainerUri?: string;\n /** The SAS token of the Azure blob container. */\n sasToken?: SecureString;\n}\n\n/** Data proxy properties for a managed dedicated integration runtime. */\nexport interface IntegrationRuntimeDataProxyProperties {\n /** The self-hosted integration runtime reference. */\n connectVia?: EntityReference;\n /** The staging linked service reference. */\n stagingLinkedService?: EntityReference;\n /** The path to contain the staged data in the Blob storage. */\n path?: string;\n}\n\n/** The entity reference. */\nexport interface EntityReference {\n /** The type of this referenced entity. */\n type?: IntegrationRuntimeEntityReferenceType;\n /** The name of this referenced entity. */\n referenceName?: string;\n}\n\n/** The base definition of the custom setup. */\nexport interface CustomSetupBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomSetupBase\";\n}\n\n/** Managed Virtual Network reference type. 
*/\nexport interface ManagedVirtualNetworkReference {\n /** Managed Virtual Network reference type. */\n type: \"ManagedVirtualNetworkReference\";\n /** Reference ManagedVirtualNetwork name. */\n referenceName: string;\n}\n\n/** The base definition of a linked integration runtime. */\nexport interface LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"Key\" | \"RBAC\";\n}\n\n/** Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. */\nexport interface SecureString extends SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SecureString\";\n /** Value of secure string. */\n value: string;\n}\n\n/** Azure Key Vault secret reference. */\nexport interface AzureKeyVaultSecretReference extends SecretBase {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureKeyVaultSecret\";\n /** The Azure Key Vault linked service reference. */\n store: LinkedServiceReference;\n /** The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). */\n secretName: any;\n /** The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). */\n secretVersion?: any;\n}\n\n/** The resource model definition for an Azure Resource Manager resource with an etag. */\nexport interface AzureEntityResource extends Resource {\n /**\n * Resource Etag.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly etag?: string;\n}\n\n/** The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location' */\nexport interface TrackedResource extends Resource {\n /** Resource tags. 
*/\n tags?: { [propertyName: string]: string };\n /** The geo-location where the resource lives */\n location: string;\n}\n\n/** The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location */\nexport interface ProxyResource extends Resource {}\n\n/** Mapping data flow. */\nexport interface MappingDataFlow extends DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MappingDataFlow\";\n /** List of sources in data flow. */\n sources?: DataFlowSource[];\n /** List of sinks in data flow. */\n sinks?: DataFlowSink[];\n /** List of transformations in data flow. */\n transformations?: Transformation[];\n /** DataFlow script. */\n script?: string;\n /** Data flow script lines. */\n scriptLines?: string[];\n}\n\n/** Data flow flowlet */\nexport interface Flowlet extends DataFlow {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Flowlet\";\n /** List of sources in Flowlet. */\n sources?: DataFlowSource[];\n /** List of sinks in Flowlet. */\n sinks?: DataFlowSink[];\n /** List of transformations in Flowlet. */\n transformations?: Transformation[];\n /** Flowlet script. */\n script?: string;\n /** Flowlet script lines. */\n scriptLines?: string[];\n}\n\n/** Managed integration runtime, including managed elastic and managed dedicated integration runtimes. */\nexport interface ManagedIntegrationRuntime extends IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Managed\";\n /**\n * Integration runtime state, only valid for managed dedicated integration runtime.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly state?: IntegrationRuntimeState;\n /** Managed Virtual Network reference. 
*/\n managedVirtualNetwork?: ManagedVirtualNetworkReference;\n /** The compute resource for managed integration runtime. */\n computeProperties?: IntegrationRuntimeComputeProperties;\n /** SSIS properties for managed integration runtime. */\n ssisProperties?: IntegrationRuntimeSsisProperties;\n}\n\n/** Self-hosted integration runtime. */\nexport interface SelfHostedIntegrationRuntime extends IntegrationRuntime {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SelfHosted\";\n /** Linked integration runtime type from data factory */\n linkedInfo?: LinkedIntegrationRuntimeTypeUnion;\n}\n\n/** Integration runtime debug resource. */\nexport interface IntegrationRuntimeDebugResource\n extends SubResourceDebugResource {\n /** Integration runtime properties. */\n properties: IntegrationRuntimeUnion;\n}\n\n/** Data flow debug resource. */\nexport interface DataFlowDebugResource extends SubResourceDebugResource {\n /** Data flow properties. */\n properties: DataFlowUnion;\n}\n\n/** Dataset debug resource. */\nexport interface DatasetDebugResource extends SubResourceDebugResource {\n /** Dataset properties. */\n properties: DatasetUnion;\n}\n\n/** Linked service debug resource. */\nexport interface LinkedServiceDebugResource extends SubResourceDebugResource {\n /** Properties of linked service. */\n properties: LinkedServiceUnion;\n}\n\n/** A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */\nexport interface AmazonS3Dataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3Object\";\n /** The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */\n bucketName: any;\n /** The key of the Amazon S3 object. Type: string (or Expression with resultType string). */\n key?: any;\n /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). 
*/\n prefix?: any;\n /** The version for the S3 object. Type: string (or Expression with resultType string). */\n version?: any;\n /** The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of S3 object's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of files. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the Amazon S3 object. */\n compression?: DatasetCompression;\n}\n\n/** Avro dataset. */\nexport interface AvroDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Avro\";\n /** The location of the avro storage. */\n location?: DatasetLocationUnion;\n /** A string from AvroCompressionCodecEnum or an expression */\n avroCompressionCodec?: any;\n /** The compression level for the avroCompressionCodec. */\n avroCompressionLevel?: number;\n}\n\n/** Excel dataset. */\nexport interface ExcelDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Excel\";\n /** The location of the excel storage. */\n location?: DatasetLocationUnion;\n /** The sheet name of excel file. Type: string (or Expression with resultType string). */\n sheetName?: any;\n /** The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer) */\n sheetIndex?: any;\n /** The partial data of one sheet. Type: string (or Expression with resultType string). */\n range?: any;\n /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */\n firstRowAsHeader?: any;\n /** The data compression method used for the excel dataset. */\n compression?: DatasetCompression;\n /** The null value string. 
Type: string (or Expression with resultType string). */\n nullValue?: any;\n}\n\n/** Parquet dataset. */\nexport interface ParquetDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Parquet\";\n /** The location of the parquet storage. */\n location?: DatasetLocationUnion;\n /** A string from ParquetCompressionCodecEnum or an expression */\n compressionCodec?: any;\n}\n\n/** Delimited text dataset. */\nexport interface DelimitedTextDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedText\";\n /** The location of the delimited text storage. */\n location?: DatasetLocationUnion;\n /** The column delimiter. Type: string (or Expression with resultType string). */\n columnDelimiter?: any;\n /** The row delimiter. Type: string (or Expression with resultType string). */\n rowDelimiter?: any;\n /** The code page name of the preferred encoding. If missing, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The data compressionCodec. Type: string (or Expression with resultType string). */\n compressionCodec?: any;\n /** The data compression level used for DelimitedText. */\n compressionLevel?: any;\n /** The quote character. Type: string (or Expression with resultType string). */\n quoteChar?: any;\n /** The escape character. Type: string (or Expression with resultType string). */\n escapeChar?: any;\n /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). 
*/\n firstRowAsHeader?: any;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n}\n\n/** Json dataset. */\nexport interface JsonDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Json\";\n /** The location of the json data storage. */\n location?: DatasetLocationUnion;\n /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The data compression method used for the json dataset. */\n compression?: DatasetCompression;\n}\n\n/** Xml dataset. */\nexport interface XmlDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Xml\";\n /** The location of the xml data storage. */\n location?: DatasetLocationUnion;\n /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n /** The data compression method used for the xml dataset. */\n compression?: DatasetCompression;\n}\n\n/** ORC dataset. */\nexport interface OrcDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Orc\";\n /** The location of the ORC data storage. 
*/\n location?: DatasetLocationUnion;\n /** The data orcCompressionCodec. Type: string (or Expression with resultType string). */\n orcCompressionCodec?: any;\n}\n\n/** Binary dataset. */\nexport interface BinaryDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Binary\";\n /** The location of the Binary storage. */\n location?: DatasetLocationUnion;\n /** The data compression method used for the binary dataset. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Blob storage. */\nexport interface AzureBlobDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlob\";\n /** The path of the Azure Blob storage. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The root of blob path. Type: string (or Expression with resultType string). */\n tableRootLocation?: any;\n /** The name of the Azure Blob. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of the Azure Blob storage. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the blob storage. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Table storage dataset. */\nexport interface AzureTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureTable\";\n /** The table name of the Azure Table storage. Type: string (or Expression with resultType string). */\n tableName: any;\n}\n\n/** The Azure SQL Server database dataset. 
*/\nexport interface AzureSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure SQL Managed Instance dataset. */\nexport interface AzureSqlMITableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlMITable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure SQL Data Warehouse dataset. */\nexport interface AzureSqlDWTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlDWTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Cassandra database dataset. 
*/\nexport interface CassandraTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CassandraTable\";\n /** The table name of the Cassandra database. Type: string (or Expression with resultType string). */\n tableName?: any;\n /** The keyspace of the Cassandra database. Type: string (or Expression with resultType string). */\n keyspace?: any;\n}\n\n/** The custom dataset. */\nexport interface CustomDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomDataset\";\n /** Custom dataset properties. */\n typeProperties?: any;\n}\n\n/** Microsoft Azure CosmosDB (SQL API) Collection dataset. */\nexport interface CosmosDbSqlApiCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbSqlApiCollection\";\n /** CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** Microsoft Azure Document Database Collection dataset. */\nexport interface DocumentDbCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DocumentDbCollection\";\n /** Document Database collection name. Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** The Dynamics entity dataset. */\nexport interface DynamicsEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** The Dynamics CRM entity dataset. 
*/\nexport interface DynamicsCrmEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsCrmEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** The Common Data Service for Apps entity dataset. */\nexport interface CommonDataServiceForAppsEntityDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CommonDataServiceForAppsEntity\";\n /** The logical name of the entity. Type: string (or Expression with resultType string). */\n entityName?: any;\n}\n\n/** Azure Data Lake Store dataset. */\nexport interface AzureDataLakeStoreDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreFile\";\n /** Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The format of the Data Lake Store. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the item(s) in the Azure Data Lake Store. */\n compression?: DatasetCompression;\n}\n\n/** The Azure Data Lake Storage Gen2 storage. */\nexport interface AzureBlobFSDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSFile\";\n /** The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The format of the Azure Data Lake Storage Gen2 storage. 
*/\n format?: DatasetStorageFormatUnion;\n /** The data compression method used for the blob storage. */\n compression?: DatasetCompression;\n}\n\n/** The Office365 account. */\nexport interface Office365Dataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Office365Table\";\n /** Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). */\n tableName: any;\n /** A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). */\n predicate?: any;\n}\n\n/** An on-premises file system dataset. */\nexport interface FileShareDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileShare\";\n /** The path of the on-premises file system. Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** The name of the on-premises file system. Type: string (or Expression with resultType string). */\n fileName?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** The format of the files. */\n format?: DatasetStorageFormatUnion;\n /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */\n fileFilter?: any;\n /** The data compression method used for the file system. */\n compression?: DatasetCompression;\n}\n\n/** The MongoDB database dataset. */\nexport interface MongoDbCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbCollection\";\n /** The table name of the MongoDB database. 
Type: string (or Expression with resultType string). */\n collectionName: any;\n}\n\n/** The MongoDB Atlas database dataset. */\nexport interface MongoDbAtlasCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbAtlasCollection\";\n /** The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The MongoDB database dataset. */\nexport interface MongoDbV2CollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MongoDbV2Collection\";\n /** The collection name of the MongoDB database. Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The CosmosDB (MongoDB API) database dataset. */\nexport interface CosmosDbMongoDbApiCollectionDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbMongoDbApiCollection\";\n /** The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). */\n collection: any;\n}\n\n/** The Open Data Protocol (OData) resource dataset. */\nexport interface ODataResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ODataResource\";\n /** The OData resource path. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** The on-premises Oracle database dataset. */\nexport interface OracleTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the on-premises Oracle database. 
Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The AmazonRdsForOracle database dataset. */\nexport interface AmazonRdsForOracleTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForOracleTable\";\n /** The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Teradata database dataset. */\nexport interface TeradataTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TeradataTable\";\n /** The database name of Teradata. Type: string (or Expression with resultType string). */\n database?: any;\n /** The table name of Teradata. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Azure MySQL database dataset. */\nexport interface AzureMySqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMySqlTable\";\n /** The Azure MySQL database table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n /** The name of Azure MySQL database table. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Amazon Redshift table dataset. */\nexport interface AmazonRedshiftTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRedshiftTable\";\n /** This property will be retired. Please consider using schema + table properties instead. 
*/\n tableName?: any;\n /** The Amazon Redshift table name. Type: string (or Expression with resultType string). */\n table?: any;\n /** The Amazon Redshift schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** The Db2 table dataset. */\nexport interface Db2TableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Db2Table\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The Db2 schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The Db2 table name. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The relational table dataset. */\nexport interface RelationalTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RelationalTable\";\n /** The relational table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The Informix table dataset. */\nexport interface InformixTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"InformixTable\";\n /** The Informix table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The ODBC table dataset. */\nexport interface OdbcTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OdbcTable\";\n /** The ODBC table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The MySQL table dataset. */\nexport interface MySqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MySqlTable\";\n /** The MySQL table name. 
Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The PostgreSQL table dataset. */\nexport interface PostgreSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PostgreSqlTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The PostgreSQL table name. Type: string (or Expression with resultType string). */\n table?: any;\n /** The PostgreSQL schema name. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** The Microsoft Access table dataset. */\nexport interface MicrosoftAccessTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MicrosoftAccessTable\";\n /** The Microsoft Access table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The Salesforce object dataset. */\nexport interface SalesforceObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceObject\";\n /** The Salesforce object API name. Type: string (or Expression with resultType string). */\n objectApiName?: any;\n}\n\n/** The Salesforce Service Cloud object dataset. */\nexport interface SalesforceServiceCloudObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceServiceCloudObject\";\n /** The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). */\n objectApiName?: any;\n}\n\n/** The Sybase table dataset. */\nexport interface SybaseTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SybaseTable\";\n /** The Sybase table name. 
Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The SAP BW cube dataset. */\nexport interface SapBwCubeDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapBwCube\";\n}\n\n/** The path of the SAP Cloud for Customer OData entity. */\nexport interface SapCloudForCustomerResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapCloudForCustomerResource\";\n /** The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). */\n path: any;\n}\n\n/** The path of the SAP ECC OData entity. */\nexport interface SapEccResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapEccResource\";\n /** The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). */\n path: any;\n}\n\n/** SAP HANA Table properties. */\nexport interface SapHanaTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapHanaTable\";\n /** The schema name of SAP HANA. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of SAP HANA. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** Sap Business Warehouse Open Hub Destination Table properties. */\nexport interface SapOpenHubTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOpenHubTable\";\n /** The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). */\n openHubDestinationName: any;\n /** Whether to exclude the records of the last request. The default value is true. 
Type: boolean (or Expression with resultType boolean). */\n excludeLastRequest?: any;\n /** The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */\n baseRequestId?: any;\n}\n\n/** The on-premises SQL Server dataset. */\nexport interface SqlServerTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The Amazon RDS for SQL Server dataset. */\nexport interface AmazonRdsForSqlServerTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForSqlServerTable\";\n /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** A Rest service dataset. */\nexport interface RestResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RestResource\";\n /** The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). */\n relativeUrl?: any;\n /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
*/\n requestMethod?: any;\n /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */\n paginationRules?: any;\n}\n\n/** SAP Table Resource properties. */\nexport interface SapTableResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapTableResource\";\n /** The name of the SAP Table. Type: string (or Expression with resultType string). */\n tableName: any;\n}\n\n/** SAP ODP Resource properties. */\nexport interface SapOdpResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOdpResource\";\n /** The context of the SAP ODP Object. Type: string (or Expression with resultType string). */\n context: any;\n /** The name of the SAP ODP Object. Type: string (or Expression with resultType string). */\n objectName: any;\n}\n\n/** The dataset points to a HTML table in the web page. */\nexport interface WebTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebTable\";\n /** The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. */\n index: any;\n /** The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). */\n path?: any;\n}\n\n/** The Azure Search Index. 
*/\nexport interface AzureSearchIndexDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSearchIndex\";\n /** The name of the Azure Search Index. Type: string (or Expression with resultType string). */\n indexName: any;\n}\n\n/** A file in an HTTP web server. */\nexport interface HttpDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpFile\";\n /** The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). */\n relativeUrl?: any;\n /** The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The body for the HTTP request. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /**\n * The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1\n * ...\n * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string).\n */\n additionalHeaders?: any;\n /** The format of files. */\n format?: DatasetStorageFormatUnion;\n /** The data compression method used on files. */\n compression?: DatasetCompression;\n}\n\n/** Amazon Marketplace Web Service dataset. */\nexport interface AmazonMWSObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonMWSObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Azure PostgreSQL dataset. */\nexport interface AzurePostgreSqlTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzurePostgreSqlTable\";\n /** The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). 
*/\n tableName?: any;\n /** The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Concur Service dataset. */\nexport interface ConcurObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ConcurObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Couchbase server dataset. */\nexport interface CouchbaseTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CouchbaseTable\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Drill server dataset. */\nexport interface DrillTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DrillTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Drill. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Drill. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Eloqua server dataset. */\nexport interface EloquaObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"EloquaObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Google BigQuery service dataset. 
*/\nexport interface GoogleBigQueryObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleBigQueryObject\";\n /** This property will be retired. Please consider using database + table properties instead. */\n tableName?: any;\n /** The table name of the Google BigQuery. Type: string (or Expression with resultType string). */\n table?: any;\n /** The database name of the Google BigQuery. Type: string (or Expression with resultType string). */\n dataset?: any;\n}\n\n/** Greenplum Database dataset. */\nexport interface GreenplumTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GreenplumTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of Greenplum. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of Greenplum. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** HBase server dataset. */\nexport interface HBaseObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HBaseObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Hive Server dataset. */\nexport interface HiveObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HiveObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Hive. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Hive. Type: string (or Expression with resultType string). 
*/\n schemaTypePropertiesSchema?: any;\n}\n\n/** Hubspot Service dataset. */\nexport interface HubspotObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HubspotObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Impala server dataset. */\nexport interface ImpalaObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ImpalaObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Impala. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Impala. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Jira Service dataset. */\nexport interface JiraObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JiraObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Magento server dataset. */\nexport interface MagentoObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MagentoObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** MariaDB server dataset. */\nexport interface MariaDBTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MariaDBTable\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Azure Database for MariaDB dataset. 
*/\nexport interface AzureMariaDBTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMariaDBTable\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Marketo server dataset. */\nexport interface MarketoObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MarketoObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Paypal Service dataset. */\nexport interface PaypalObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PaypalObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Phoenix server dataset. */\nexport interface PhoenixObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PhoenixObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Phoenix. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Phoenix. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Presto server dataset. */\nexport interface PrestoObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PrestoObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Presto. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Presto. Type: string (or Expression with resultType string). 
*/\n schemaTypePropertiesSchema?: any;\n}\n\n/** QuickBooks server dataset. */\nexport interface QuickBooksObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"QuickBooksObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** ServiceNow server dataset. */\nexport interface ServiceNowObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ServiceNowObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Shopify Service dataset. */\nexport interface ShopifyObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ShopifyObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Spark Server dataset. */\nexport interface SparkObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SparkObject\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Spark. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Spark. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Square Service dataset. */\nexport interface SquareObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SquareObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Xero Service dataset. 
*/\nexport interface XeroObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XeroObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Zoho server dataset. */\nexport interface ZohoObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZohoObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Netezza dataset. */\nexport interface NetezzaTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"NetezzaTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Netezza. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Netezza. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Vertica dataset. */\nexport interface VerticaTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"VerticaTable\";\n /** This property will be retired. Please consider using schema + table properties instead. */\n tableName?: any;\n /** The table name of the Vertica. Type: string (or Expression with resultType string). */\n table?: any;\n /** The schema name of the Vertica. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n}\n\n/** Salesforce Marketing Cloud dataset. */\nexport interface SalesforceMarketingCloudObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceMarketingCloudObject\";\n /** The table name. 
Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** Responsys dataset. */\nexport interface ResponsysObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ResponsysObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The path of the Dynamics AX OData entity. */\nexport interface DynamicsAXResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsAXResource\";\n /** The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). */\n path: any;\n}\n\n/** Oracle Service Cloud dataset. */\nexport interface OracleServiceCloudObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleServiceCloudObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The Azure Data Explorer (Kusto) dataset. */\nexport interface AzureDataExplorerTableDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorerTable\";\n /** The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** Google AdWords service dataset. */\nexport interface GoogleAdWordsObjectDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleAdWordsObject\";\n /** The table name. Type: string (or Expression with resultType string). */\n tableName?: any;\n}\n\n/** The snowflake dataset. 
*/\nexport interface SnowflakeDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeTable\";\n /** The schema name of the Snowflake database. Type: string (or Expression with resultType string). */\n schemaTypePropertiesSchema?: any;\n /** The table name of the Snowflake database. Type: string (or Expression with resultType string). */\n table?: any;\n}\n\n/** The sharepoint online list resource dataset. */\nexport interface SharePointOnlineListResourceDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SharePointOnlineListResource\";\n /** The name of the SharePoint Online list. Type: string (or Expression with resultType string). */\n listName?: any;\n}\n\n/** Azure Databricks Delta Lake dataset. */\nexport interface AzureDatabricksDeltaLakeDataset extends Dataset {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeDataset\";\n /** The name of delta table. Type: string (or Expression with resultType string). */\n table?: any;\n /** The database name of delta table. Type: string (or Expression with resultType string). */\n database?: any;\n}\n\n/** The storage account linked service. */\nexport interface AzureStorageLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureStorage\";\n /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of accountKey in connection string. */\n accountKey?: AzureKeyVaultSecretReference;\n /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/\n sasUri?: any;\n /** The Azure key vault secret reference of sasToken in sas uri. */\n sasToken?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: string;\n}\n\n/** The azure blob storage linked service. */\nexport interface AzureBlobStorageLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorage\";\n /** The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of accountKey in connection string. */\n accountKey?: AzureKeyVaultSecretReference;\n /** SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n sasUri?: any;\n /** The Azure key vault secret reference of sasToken in sas uri. */\n sasToken?: AzureKeyVaultSecretReference;\n /** Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. */\n serviceEndpoint?: string;\n /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. 
Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). */\n accountKind?: string;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: string;\n}\n\n/** The azure table storage linked service. */\nexport interface AzureTableStorageLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureTableStorage\";\n /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of accountKey in connection string. */\n accountKey?: AzureKeyVaultSecretReference;\n /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n sasUri?: any;\n /** The Azure key vault secret reference of sasToken in sas uri. */\n sasToken?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: string;\n}\n\n/** Azure SQL Data Warehouse linked service. */\nexport interface AzureSqlDWLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlDW\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SQL Server linked service. */\nexport interface SqlServerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServer\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */\n userName?: any;\n /** The on-premises Windows authentication password. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Amazon RDS for SQL Server linked service. 
*/\nexport interface AmazonRdsForSqlServerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForSqlServer\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */\n userName?: any;\n /** The on-premises Windows authentication password. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Microsoft Azure SQL Database linked service. */\nexport interface AzureSqlDatabaseLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlDatabase\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Azure SQL Database. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). 
*/\n azureCloudType?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure SQL Managed Instance linked service. */\nexport interface AzureSqlMILinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlMI\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Azure SQL Managed Instance. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */\n azureCloudType?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Batch linked service. */\nexport interface AzureBatchLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBatch\";\n /** The Azure Batch account name. Type: string (or Expression with resultType string). 
*/\n accountName: any;\n /** The Azure Batch account access key. */\n accessKey?: SecretBaseUnion;\n /** The Azure Batch URI. Type: string (or Expression with resultType string). */\n batchUri: any;\n /** The Azure Batch pool name. Type: string (or Expression with resultType string). */\n poolName: any;\n /** The Azure Storage linked service reference. */\n linkedServiceName: LinkedServiceReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Key Vault linked service. */\nexport interface AzureKeyVaultLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureKeyVault\";\n /** The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). */\n baseUrl: any;\n}\n\n/** Microsoft Azure Cosmos Database (CosmosDB) linked service. */\nexport interface CosmosDbLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDb\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) */\n accountEndpoint?: any;\n /** The name of the database. Type: string (or Expression with resultType string) */\n database?: any;\n /** The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. */\n accountKey?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Dynamics linked service. 
*/\nexport interface DynamicsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Dynamics\";\n /** The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). */\n deploymentType: any;\n /** The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */\n hostName?: any;\n /** The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */\n serviceUri?: any;\n /** The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). */\n organizationName?: any;\n /** The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */\n authenticationType: any;\n /** User name to access the Dynamics instance. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password to access the Dynamics instance. */\n password?: SecretBaseUnion;\n /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). 
*/\n servicePrincipalId?: any;\n /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */\n servicePrincipalCredentialType?: any;\n /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */\n servicePrincipalCredential?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Dynamics CRM linked service. */\nexport interface DynamicsCrmLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsCrm\";\n /** The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). */\n deploymentType: any;\n /** The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */\n hostName?: any;\n /** The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). 
*/\n serviceUri?: any;\n /** The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). */\n organizationName?: any;\n /** The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */\n authenticationType: any;\n /** User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password to access the Dynamics CRM instance. */\n password?: SecretBaseUnion;\n /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** A string from ServicePrincipalCredentialEnum or an expression */\n servicePrincipalCredentialType?: any;\n /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */\n servicePrincipalCredential?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Common Data Service for Apps linked service. 
*/\nexport interface CommonDataServiceForAppsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CommonDataServiceForApps\";\n /** The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). */\n deploymentType: any;\n /** The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */\n hostName?: any;\n /** The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */\n serviceUri?: any;\n /** The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). */\n organizationName?: any;\n /** The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */\n authenticationType: any;\n /** User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password to access the Common Data Service for Apps instance. 
*/\n password?: SecretBaseUnion;\n /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** A string from ServicePrincipalCredentialEnum or an expression */\n servicePrincipalCredentialType?: any;\n /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */\n servicePrincipalCredential?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** HDInsight linked service. */\nexport interface HDInsightLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsight\";\n /** HDInsight cluster URI. Type: string (or Expression with resultType string). */\n clusterUri: any;\n /** HDInsight cluster user name. Type: string (or Expression with resultType string). */\n userName?: any;\n /** HDInsight cluster password. */\n password?: SecretBaseUnion;\n /** The Azure Storage linked service reference. */\n linkedServiceName?: LinkedServiceReference;\n /** A reference to the Azure SQL linked service that points to the HCatalog database. */\n hcatalogLinkedServiceName?: LinkedServiceReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** Specify if the HDInsight is created with ESP (Enterprise Security Package). 
Type: Boolean. */\n isEspEnabled?: any;\n /** Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). */\n fileSystem?: any;\n}\n\n/** File system linked service. */\nexport interface FileServerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServer\";\n /** Host name of the server. Type: string (or Expression with resultType string). */\n host: any;\n /** User ID to logon the server. Type: string (or Expression with resultType string). */\n userId?: any;\n /** Password to logon the server. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure File Storage linked service. */\nexport interface AzureFileStorageLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorage\";\n /** Host name of the server. Type: string (or Expression with resultType string). */\n host: any;\n /** User ID to logon the server. Type: string (or Expression with resultType string). */\n userId?: any;\n /** Password to logon the server. */\n password?: SecretBaseUnion;\n /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of accountKey in connection string. */\n accountKey?: AzureKeyVaultSecretReference;\n /** SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n sasUri?: any;\n /** The Azure key vault secret reference of sasToken in sas uri. 
*/\n sasToken?: AzureKeyVaultSecretReference;\n /** The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). */\n fileShare?: any;\n /** The azure file share snapshot version. Type: string (or Expression with resultType string). */\n snapshot?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Google Cloud Storage. */\nexport interface GoogleCloudStorageLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleCloudStorage\";\n /** The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */\n accessKeyId?: any;\n /** The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */\n secretAccessKey?: SecretBaseUnion;\n /** This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */\n serviceUrl?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Oracle database. */\nexport interface OracleLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Oracle\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** AmazonRdsForOracle database. */\nexport interface AmazonRdsForOracleLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForOracle\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure MySQL database linked service. */\nexport interface AzureMySqlLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMySql\";\n /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for MySQL data source. */\nexport interface MySqlLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MySql\";\n /** The connection string. 
*/\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for PostgreSQL data source. */\nexport interface PostgreSqlLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PostgreSql\";\n /** The connection string. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Sybase data source. */\nexport interface SybaseLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Sybase\";\n /** Server name for connection. Type: string (or Expression with resultType string). */\n server: any;\n /** Database name for connection. Type: string (or Expression with resultType string). */\n database: any;\n /** Schema name for connection. Type: string (or Expression with resultType string). */\n schema?: any;\n /** AuthenticationType to be used for connection. */\n authenticationType?: SybaseAuthenticationType;\n /** Username for authentication. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for DB2 data source. */\nexport interface Db2LinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Db2\";\n /** The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n server: any;\n /** Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n database: any;\n /** AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */\n authenticationType?: Db2AuthenticationType;\n /** Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n packageCollection?: any;\n /** Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n certificateCommonName?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Teradata data source. 
*/\nexport interface TeradataLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Teradata\";\n /** Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** Server name for connection. Type: string (or Expression with resultType string). */\n server?: any;\n /** AuthenticationType to be used for connection. */\n authenticationType?: TeradataAuthenticationType;\n /** Username for authentication. Type: string (or Expression with resultType string). */\n username?: any;\n /** Password for authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure ML Studio Web Service linked service. */\nexport interface AzureMLLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureML\";\n /** The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */\n mlEndpoint: any;\n /** The API key for accessing the Azure ML model endpoint. */\n apiKey: SecretBaseUnion;\n /** The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */\n updateResourceEndpoint?: any;\n /** The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. 
*/\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure ML Service linked service. */\nexport interface AzureMLServiceLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLService\";\n /** Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). */\n subscriptionId: any;\n /** Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). */\n resourceGroupName: any;\n /** Azure ML Service workspace name. Type: string (or Expression with resultType string). */\n mlWorkspaceName: any;\n /** The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Open Database Connectivity (ODBC) linked service. 
*/\nexport interface OdbcLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Odbc\";\n /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The access credential portion of the connection string specified in driver-specific property-value format. */\n credential?: SecretBaseUnion;\n /** User name for Basic authentication. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Informix linked service. */\nexport interface InformixLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Informix\";\n /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString: any;\n /** Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */\n authenticationType?: any;\n /** The access credential portion of the connection string specified in driver-specific property-value format. */\n credential?: SecretBaseUnion;\n /** User name for Basic authentication. Type: string (or Expression with resultType string). 
*/
  userName?: any;
  /** Password for Basic authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Microsoft Access linked service. */
export interface MicrosoftAccessLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MicrosoftAccess";
  /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */
  authenticationType?: any;
  /** The access credential portion of the connection string specified in driver-specific property-value format. */
  credential?: SecretBaseUnion;
  /** User name for Basic authentication. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password for Basic authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Hadoop Distributed File System (HDFS) linked service. */
export interface HdfsLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Hdfs";
  /** The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). */
  url: any;
  /** Type of authentication used to connect to the HDFS. 
Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). */
  authenticationType?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** User name for Windows authentication. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password for Windows authentication. */
  password?: SecretBaseUnion;
}

/** Open Data Protocol (OData) linked service. */
export interface ODataLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OData";
  /** The URL of the OData service endpoint. Type: string (or Expression with resultType string). */
  url: any;
  /** Type of authentication used to connect to the OData service. */
  authenticationType?: ODataAuthenticationType;
  /** User name of the OData service. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password of the OData service. */
  password?: SecretBaseUnion;
  /** Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** Specify the resource you are requesting authorization to use. Type: string (or Expression with resultType string). 
*/
  aadResourceId?: any;
  /** Specify the credential type (key or cert) that is used for service principal. */
  aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType;
  /** Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */
  servicePrincipalKey?: SecretBaseUnion;
  /** Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */
  servicePrincipalEmbeddedCert?: SecretBaseUnion;
  /** Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). */
  servicePrincipalEmbeddedCertPassword?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Web linked service. */
export interface WebLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Web";
  /** Web linked service properties. */
  typeProperties: WebLinkedServiceTypePropertiesUnion;
}

/** Linked service for Cassandra data source. */
export interface CassandraLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Cassandra";
  /** Host name for connection. Type: string (or Expression with resultType string). */
  host: any;
  /** AuthenticationType to be used for connection. Type: string (or Expression with resultType string). */
  authenticationType?: any;
  /** The port for the connection. Type: integer (or Expression with resultType integer). */
  port?: any;
  /** Username for authentication. 
Type: string (or Expression with resultType string). */
  username?: any;
  /** Password for authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for MongoDb data source. */
export interface MongoDbLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDb";
  /** The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). */
  server: any;
  /** The authentication type to be used to connect to the MongoDB database. */
  authenticationType?: MongoDbAuthenticationType;
  /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */
  databaseName: any;
  /** Username for authentication. Type: string (or Expression with resultType string). */
  username?: any;
  /** Password for authentication. */
  password?: SecretBaseUnion;
  /** Database to verify the username and password. Type: string (or Expression with resultType string). */
  authSource?: any;
  /** The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. */
  port?: any;
  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). */
  enableSsl?: any;
  /** Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). */
  allowSelfSignedServerCert?: any;
  /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for MongoDB Atlas data source. */
export interface MongoDbAtlasLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDbAtlas";
  /** The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */
  database: any;
}

/** Linked service for MongoDB data source. */
export interface MongoDbV2LinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDbV2";
  /** The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */
  database: any;
}

/** Linked service for CosmosDB (MongoDB API) data source. */
export interface CosmosDbMongoDbApiLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CosmosDbMongoDbApi";
  /** The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
  connectionString: any;
  /** The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). */
  database: any;
}

/** Azure Data Lake Store linked service. 
*/
export interface AzureDataLakeStoreLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataLakeStore";
  /** Data Lake Store service URI. Type: string (or Expression with resultType string). */
  dataLakeStoreUri: any;
  /** The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The Key of the application used to authenticate against the Azure Data Lake Store account. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** Data Lake Store account name. Type: string (or Expression with resultType string). */
  accountName?: any;
  /** Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */
  subscriptionId?: any;
  /** Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */
  resourceGroupName?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Azure Data Lake Storage Gen2 linked service. 
*/
export interface AzureBlobFSLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureBlobFS";
  /** Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */
  url: any;
  /** Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */
  accountKey?: any;
  /** The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). */
  servicePrincipalId?: any;
  /** The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */
  servicePrincipalCredentialType?: any;
  /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */
  servicePrincipalCredential?: SecretBaseUnion;
  /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Office365 linked service. */
export interface Office365LinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Office365";
  /** Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). */
  office365TenantId: any;
  /** Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). */
  servicePrincipalTenantId: any;
  /** Specify the application's client ID. Type: string (or Expression with resultType string). */
  servicePrincipalId: any;
  /** Specify the application's key. */
  servicePrincipalKey: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Salesforce. */
export interface SalesforceLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Salesforce";
  /** The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */
  environmentUrl?: any;
  /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */
  username?: any;
  /** The password for Basic authentication of the Salesforce instance. */
  password?: SecretBaseUnion;
  /** The security token is optional to remotely access Salesforce instance. 
*/
  securityToken?: SecretBaseUnion;
  /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */
  apiVersion?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Salesforce Service Cloud. */
export interface SalesforceServiceCloudLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceServiceCloud";
  /** The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */
  environmentUrl?: any;
  /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */
  username?: any;
  /** The password for Basic authentication of the Salesforce instance. */
  password?: SecretBaseUnion;
  /** The security token is optional to remotely access Salesforce instance. */
  securityToken?: SecretBaseUnion;
  /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */
  apiVersion?: any;
  /** Extended properties appended to the connection string. Type: string (or Expression with resultType string). */
  extendedProperties?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for SAP Cloud for Customer. 
*/
export interface SapCloudForCustomerLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapCloudForCustomer";
  /** The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */
  url: any;
  /** The username for Basic authentication. Type: string (or Expression with resultType string). */
  username?: any;
  /** The password for Basic authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for SAP ERP Central Component(SAP ECC). */
export interface SapEccLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapEcc";
  /** The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). */
  url: string;
  /** The username for Basic authentication. Type: string (or Expression with resultType string). */
  username?: string;
  /** The password for Basic authentication. */
  password?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */
  encryptedCredential?: string;
}

/** SAP Business Warehouse Open Hub Destination Linked Service. 
*/
export interface SapOpenHubLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOpenHub";
  /** Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). */
  server: any;
  /** System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */
  systemNumber: any;
  /** Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */
  clientId: any;
  /** Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */
  language?: any;
  /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */
  systemId?: any;
  /** Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP BW server where the open hub destination is located. */
  password?: SecretBaseUnion;
  /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */
  messageServer?: any;
  /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */
  messageServerService?: any;
  /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */
  logonGroup?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/
  encryptedCredential?: any;
}

/** SAP ODP Linked Service. */
export interface SapOdpLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapOdp";
  /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */
  server?: any;
  /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */
  systemNumber?: any;
  /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */
  clientId?: any;
  /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */
  language?: any;
  /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */
  systemId?: any;
  /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  userName?: any;
  /** Password to access the SAP server where the table is located. */
  password?: SecretBaseUnion;
  /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */
  messageServer?: any;
  /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */
  messageServerService?: any;
  /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */
  sncMode?: any;
  /** Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
*/
  sncMyName?: any;
  /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  sncPartnerName?: any;
  /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
  sncLibraryPath?: any;
  /** SNC Quality of Protection. Allowed values include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */
  sncQop?: any;
  /** SNC X509 certificate file path. Type: string (or Expression with resultType string). */
  x509CertificatePath?: any;
  /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */
  logonGroup?: any;
  /** The subscriber name. Type: string (or Expression with resultType string). */
  subscriberName?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Rest Service linked service. */
export interface RestServiceLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "RestService";
  /** The base URL of the REST service. */
  url: any;
  /** Whether to validate server side SSL certificate when connecting to the endpoint. The default value is true. Type: boolean (or Expression with resultType boolean). */
  enableServerCertificateValidation?: any;
  /** Type of authentication used to connect to the REST service. */
  authenticationType: RestServiceAuthenticationType;
  /** The user name used in Basic authentication type. */
  userName?: any;
  /** The password used in Basic authentication type. */
  password?: SecretBaseUnion;
  /** The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). 
*/
  authHeaders?: any;
  /** The application's client ID used in AadServicePrincipal authentication type. */
  servicePrincipalId?: any;
  /** The application's key used in AadServicePrincipal authentication type. */
  servicePrincipalKey?: SecretBaseUnion;
  /** The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. */
  tenant?: any;
  /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */
  azureCloudType?: any;
  /** The resource you are requesting authorization to use. */
  aadResourceId?: any;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
  /** The client ID associated with your application. Type: string (or Expression with resultType string). */
  clientId?: any;
  /** The client secret associated with your application. */
  clientSecret?: SecretBaseUnion;
  /** The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). */
  tokenEndpoint?: any;
  /** The target service or resource to which the access will be requested. Type: string (or Expression with resultType string). */
  resource?: any;
  /** The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). */
  scope?: any;
}

/** Linked service for TeamDesk. */
export interface TeamDeskLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "TeamDesk";
  /** The authentication type to use. 
*/
  authenticationType: TeamDeskAuthenticationType;
  /** The url to connect TeamDesk source. Type: string (or Expression with resultType string). */
  url: any;
  /** The username of the TeamDesk source. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The password of the TeamDesk source. */
  password?: SecretBaseUnion;
  /** The api token for the TeamDesk source. */
  apiToken?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Quickbase. */
export interface QuickbaseLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Quickbase";
  /** The url to connect Quickbase source. Type: string (or Expression with resultType string). */
  url: any;
  /** The user token for the Quickbase source. */
  userToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Smartsheet. */
export interface SmartsheetLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Smartsheet";
  /** The api token for the Smartsheet source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Zendesk. 
*/
export interface ZendeskLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Zendesk";
  /** The authentication type to use. */
  authenticationType: ZendeskAuthenticationType;
  /** The url to connect Zendesk source. Type: string (or Expression with resultType string). */
  url: any;
  /** The username of the Zendesk source. Type: string (or Expression with resultType string). */
  userName?: any;
  /** The password of the Zendesk source. */
  password?: SecretBaseUnion;
  /** The api token for the Zendesk source. */
  apiToken?: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Dataworld. */
export interface DataworldLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Dataworld";
  /** The api token for the Dataworld source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for AppFigures. */
export interface AppFiguresLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AppFigures";
  /** The username of the Appfigures source. */
  userName: any;
  /** The password of the AppFigures source. */
  password: SecretBaseUnion;
  /** The client key for the AppFigures source. */
  clientKey: SecretBaseUnion;
}

/** Linked service for Asana. 
*/
export interface AsanaLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Asana";
  /** The api token for the Asana source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Twilio. */
export interface TwilioLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Twilio";
  /** The Account SID of Twilio service. */
  userName: any;
  /** The auth token of Twilio service. */
  password: SecretBaseUnion;
}

/** Linked service for GoogleSheets. */
export interface GoogleSheetsLinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "GoogleSheets";
  /** The api token for the GoogleSheets source. */
  apiToken: SecretBaseUnion;
  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
  encryptedCredential?: any;
}

/** Linked service for Amazon S3. */
export interface AmazonS3LinkedService extends LinkedService {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonS3";
  /** The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */
  authenticationType?: any;
  /** The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). 
*/\n accessKeyId?: any;\n /** The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */\n secretAccessKey?: SecretBaseUnion;\n /** This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */\n serviceUrl?: any;\n /** The session token for the S3 temporary security credential. */\n sessionToken?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for Amazon Redshift. */\nexport interface AmazonRedshiftLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRedshift\";\n /** The name of the Amazon Redshift server. Type: string (or Expression with resultType string). */\n server: any;\n /** The username of the Amazon Redshift source. Type: string (or Expression with resultType string). */\n username?: any;\n /** The password of the Amazon Redshift source. */\n password?: SecretBaseUnion;\n /** The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). */\n database: any;\n /** The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). */\n port?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Custom linked service. 
*/\nexport interface CustomDataSourceLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomDataSource\";\n /** Custom linked service properties. */\n typeProperties: any;\n}\n\n/** Linked service for Windows Azure Search Service. */\nexport interface AzureSearchLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSearch\";\n /** URL for Azure Search service. Type: string (or Expression with resultType string). */\n url: any;\n /** Admin Key for Azure Search service */\n key?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Linked service for an HTTP source. */\nexport interface HttpLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpServer\";\n /** The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). */\n url: any;\n /** The authentication type to be used to connect to the HTTP server. */\n authenticationType?: HttpAuthenticationType;\n /** User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. */\n password?: SecretBaseUnion;\n /** Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). 
*/\n embeddedCertData?: any;\n /** Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */\n certThumbprint?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). */\n enableServerCertificateValidation?: any;\n}\n\n/** A FTP server Linked Service. */\nexport interface FtpServerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FtpServer\";\n /** Host name of the FTP server. Type: string (or Expression with resultType string). */\n host: any;\n /** The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** The authentication type to be used to connect to the FTP server. */\n authenticationType?: FtpAuthenticationType;\n /** Username to logon the FTP server. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to logon the FTP server. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). 
*/\n enableSsl?: any;\n /** If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */\n enableServerCertificateValidation?: any;\n}\n\n/** A linked service for an SSH File Transfer Protocol (SFTP) server. */\nexport interface SftpServerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Sftp\";\n /** The SFTP server host name. Type: string (or Expression with resultType string). */\n host: any;\n /** The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. */\n port?: any;\n /** The authentication type to be used to connect to the FTP server. */\n authenticationType?: SftpAuthenticationType;\n /** The username used to log on to the SFTP server. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to logon the SFTP server for Basic authentication. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). */\n privateKeyPath?: any;\n /** Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. 
*/\n privateKeyContent?: SecretBaseUnion;\n /** The password to decrypt the SSH private key if the SSH private key is encrypted. */\n passPhrase?: SecretBaseUnion;\n /** If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). */\n skipHostKeyValidation?: any;\n /** The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). */\n hostKeyFingerprint?: any;\n}\n\n/** SAP Business Warehouse Linked Service. */\nexport interface SapBWLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapBW\";\n /** Host name of the SAP BW instance. Type: string (or Expression with resultType string). */\n server: any;\n /** System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */\n systemNumber: any;\n /** Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */\n clientId: any;\n /** Username to access the SAP BW server. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to access the SAP BW server. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SAP HANA Linked Service. */\nexport interface SapHanaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapHana\";\n /** SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/\n connectionString?: any;\n /** Host name of the SAP HANA server. Type: string (or Expression with resultType string). */\n server: any;\n /** The authentication type to be used to connect to the SAP HANA server. */\n authenticationType?: SapHanaAuthenticationType;\n /** Username to access the SAP HANA server. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to access the SAP HANA server. */\n password?: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Amazon Marketplace Web Service linked service. */\nexport interface AmazonMWSLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonMWS\";\n /** The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) */\n endpoint: any;\n /** The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */\n marketplaceID: any;\n /** The Amazon seller ID. */\n sellerID: any;\n /** The Amazon MWS authentication token. */\n mwsAuthToken?: SecretBaseUnion;\n /** The access key id used to access data. */\n accessKeyId: any;\n /** The secret key used to access data. */\n secretKey?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
*/\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure PostgreSQL linked service. */\nexport interface AzurePostgreSqlLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzurePostgreSql\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Concur Service linked service. */\nexport interface ConcurLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Concur\";\n /** Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** Application client_id supplied by Concur App Management. */\n clientId: any;\n /** The user name that you use to access Concur Service. */\n username: any;\n /** The password corresponding to the user name that you provided in the username field. */\n password?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Couchbase server linked service. */\nexport interface CouchbaseLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Couchbase\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of credString in connection string. */\n credString?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Drill server linked service. */\nexport interface DrillLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Drill\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Eloqua server linked service. 
*/\nexport interface EloquaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Eloqua\";\n /** The endpoint of the Eloqua server. (i.e. eloqua.example.com) */\n endpoint: any;\n /** The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) */\n username: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Google BigQuery service linked service. */\nexport interface GoogleBigQueryLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleBigQuery\";\n /** The default BigQuery project to query against. */\n project: any;\n /** A comma-separated list of public BigQuery projects to access. */\n additionalProjects?: any;\n /** Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. */\n requestGoogleDriveScope?: any;\n /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
*/\n authenticationType: GoogleBigQueryAuthenticationType;\n /** The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. */\n refreshToken?: SecretBaseUnion;\n /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */\n clientId?: any;\n /** The client secret of the google application used to acquire the refresh token. */\n clientSecret?: SecretBaseUnion;\n /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */\n email?: any;\n /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */\n keyFilePath?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Greenplum Database linked service. */\nexport interface GreenplumLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Greenplum\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** HBase server linked service. */\nexport interface HBaseLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HBase\";\n /** The IP address or host name of the HBase server. (i.e. 192.168.222.160) */\n host: any;\n /** The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. */\n port?: any;\n /** The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) */\n httpPath?: any;\n /** The authentication mechanism to use to connect to the HBase server. */\n authenticationType: HBaseAuthenticationType;\n /** The user name used to connect to the HBase instance. */\n username?: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Hive Server linked service. 
*/\nexport interface HiveLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Hive\";\n /** IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). */\n host: any;\n /** The TCP port that the Hive server uses to listen for client connections. */\n port?: any;\n /** The type of Hive server. */\n serverType?: HiveServerType;\n /** The transport protocol to use in the Thrift layer. */\n thriftTransportProtocol?: HiveThriftTransportProtocol;\n /** The authentication method used to access the Hive server. */\n authenticationType: HiveAuthenticationType;\n /** true to indicate using the ZooKeeper service, false not. */\n serviceDiscoveryMode?: any;\n /** The namespace on ZooKeeper under which Hive Server 2 nodes are added. */\n zooKeeperNameSpace?: any;\n /** Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. */\n useNativeQuery?: any;\n /** The user name that you use to access Hive Server. */\n username?: any;\n /** The password corresponding to the user name that you provided in the Username field */\n password?: SecretBaseUnion;\n /** The partial URL corresponding to the Hive server. */\n httpPath?: any;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
*/\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Hubspot Service linked service. */\nexport interface HubspotLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Hubspot\";\n /** The client ID associated with your Hubspot application. */\n clientId: any;\n /** The client secret associated with your Hubspot application. */\n clientSecret?: SecretBaseUnion;\n /** The access token obtained when initially authenticating your OAuth integration. */\n accessToken?: SecretBaseUnion;\n /** The refresh token obtained when initially authenticating your OAuth integration. */\n refreshToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Impala server linked service. 
*/\nexport interface ImpalaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Impala\";\n /** The IP address or host name of the Impala server. (i.e. 192.168.222.160) */\n host: any;\n /** The TCP port that the Impala server uses to listen for client connections. The default value is 21050. */\n port?: any;\n /** The authentication type to use. */\n authenticationType: ImpalaAuthenticationType;\n /** The user name used to access the Impala server. The default value is anonymous when using SASLUsername. */\n username?: any;\n /** The password corresponding to the user name when using UsernameAndPassword. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Jira Service linked service. 
*/\nexport interface JiraLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Jira\";\n /** The IP address or host name of the Jira service. (e.g. jira.example.com) */\n host: any;\n /** The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. */\n port?: any;\n /** The user name that you use to access Jira Service. */\n username: any;\n /** The password corresponding to the user name that you provided in the username field. */\n password?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Magento server linked service. */\nexport interface MagentoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Magento\";\n /** The URL of the Magento instance. (i.e. 192.168.222.110/magento3) */\n host: any;\n /** The access token from Magento. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** MariaDB server linked service. */\nexport interface MariaDBLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MariaDB\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Database for MariaDB linked service. */\nexport interface AzureMariaDBLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMariaDB\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/\n encryptedCredential?: any;\n}\n\n/** Marketo server linked service. */\nexport interface MarketoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Marketo\";\n /** The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) */\n endpoint: any;\n /** The client Id of your Marketo service. */\n clientId: any;\n /** The client secret of your Marketo service. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Paypal Service linked service. */\nexport interface PaypalLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Paypal\";\n /** The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) */\n host: any;\n /** The client ID associated with your PayPal application. */\n clientId: any;\n /** The client secret associated with your PayPal application. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Phoenix server linked service. */\nexport interface PhoenixLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Phoenix\";\n /** The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) */\n host: any;\n /** The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. */\n port?: any;\n /** The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. */\n httpPath?: any;\n /** The authentication mechanism used to connect to the Phoenix server. */\n authenticationType: PhoenixAuthenticationType;\n /** The user name used to connect to the Phoenix server. */\n username?: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
*/\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Presto server linked service. */\nexport interface PrestoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Presto\";\n /** The IP address or host name of the Presto server. (i.e. 192.168.222.160) */\n host: any;\n /** The version of the Presto server. (i.e. 0.148-t) */\n serverVersion: any;\n /** The catalog context for all request against the server. */\n catalog: any;\n /** The TCP port that the Presto server uses to listen for client connections. The default value is 8080. */\n port?: any;\n /** The authentication mechanism used to connect to the Presto server. */\n authenticationType: PrestoAuthenticationType;\n /** The user name used to connect to the Presto server. */\n username?: any;\n /** The password corresponding to the user name. */\n password?: SecretBaseUnion;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. 
The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. */\n timeZoneID?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** QuickBooks server linked service. */\nexport interface QuickBooksLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"QuickBooks\";\n /** Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */\n endpoint: any;\n /** The company ID of the QuickBooks company to authorize. */\n companyId: any;\n /** The consumer key for OAuth 1.0 authentication. */\n consumerKey: any;\n /** The consumer secret for OAuth 1.0 authentication. */\n consumerSecret: SecretBaseUnion;\n /** The access token for OAuth 1.0 authentication. */\n accessToken: SecretBaseUnion;\n /** The access token secret for OAuth 1.0 authentication. */\n accessTokenSecret: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** ServiceNow server linked service. */\nexport interface ServiceNowLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ServiceNow\";\n /** The endpoint of the ServiceNow server. (i.e. <instance>.service-now.com) */\n endpoint: any;\n /** The authentication type to use. */\n authenticationType: ServiceNowAuthenticationType;\n /** The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. */\n username?: any;\n /** The password corresponding to the user name for Basic and OAuth2 authentication. */\n password?: SecretBaseUnion;\n /** The client id for OAuth2 authentication. */\n clientId?: any;\n /** The client secret for OAuth2 authentication. */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Shopify Service linked service. */\nexport interface ShopifyLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Shopify\";\n /** The endpoint of the Shopify server. (i.e. 
mystore.myshopify.com) */\n host: any;\n /** The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Spark Server linked service. */\nexport interface SparkLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Spark\";\n /** IP address or host name of the Spark server */\n host: any;\n /** The TCP port that the Spark server uses to listen for client connections. */\n port: any;\n /** The type of Spark server. */\n serverType?: SparkServerType;\n /** The transport protocol to use in the Thrift layer. */\n thriftTransportProtocol?: SparkThriftTransportProtocol;\n /** The authentication method used to access the Spark server. */\n authenticationType: SparkAuthenticationType;\n /** The user name that you use to access Spark Server. */\n username?: any;\n /** The password corresponding to the user name that you provided in the Username field */\n password?: SecretBaseUnion;\n /** The partial URL corresponding to the Spark server. */\n httpPath?: any;\n /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
*/\n enableSsl?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */\n allowHostNameCNMismatch?: any;\n /** Specifies whether to allow self-signed certificates from the server. The default value is false. */\n allowSelfSignedServerCert?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Square Service linked service. */\nexport interface SquareLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Square\";\n /** Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The URL of the Square instance. (i.e. mystore.mysquare.com) */\n host: any;\n /** The client ID associated with your Square application. */\n clientId: any;\n /** The client secret associated with your Square application. */\n clientSecret?: SecretBaseUnion;\n /** The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) */\n redirectUri: any;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Xero Service linked service. */\nexport interface XeroLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Xero\";\n /** Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the Xero server. (i.e. api.xero.com) */\n host: any;\n /** The consumer key associated with the Xero application. */\n consumerKey?: SecretBaseUnion;\n /**\n * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings(\n * ).\n */\n privateKey?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Zoho server linked service. */\nexport interface ZohoLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Zoho\";\n /** Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) */\n endpoint: any;\n /** The access token for Zoho authentication. */\n accessToken?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Vertica linked service. */\nexport interface VerticaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Vertica\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/\n encryptedCredential?: any;\n}\n\n/** Netezza linked service. */\nexport interface NetezzaLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Netezza\";\n /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */\n connectionString?: any;\n /** The Azure key vault secret reference of password in connection string. */\n pwd?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Salesforce Marketing Cloud linked service. */\nexport interface SalesforceMarketingCloudLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceMarketingCloud\";\n /** Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */\n clientId: any;\n /** The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** HDInsight ondemand linked service. */\nexport interface HDInsightOnDemandLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightOnDemand\";\n /** Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). */\n clusterSize: any;\n /** The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). */\n timeToLive: any;\n /** Version of the HDInsight cluster. Type: string (or Expression with resultType string). */\n version: any;\n /** Azure Storage linked service to be used by the on-demand cluster for storing and processing data. */\n linkedServiceName: LinkedServiceReference;\n /** The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). */\n hostSubscriptionId: any;\n /** The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key for the service principal id. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant: any;\n /** The resource group where the cluster belongs. 
Type: string (or Expression with resultType string). */\n clusterResourceGroup: any;\n /** The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). */\n clusterNamePrefix?: any;\n /** The username to access the cluster. Type: string (or Expression with resultType string). */\n clusterUserName?: any;\n /** The password to access the cluster. */\n clusterPassword?: SecretBaseUnion;\n /** The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). */\n clusterSshUserName?: any;\n /** The password to SSH remotely connect cluster’s node (for Linux). */\n clusterSshPassword?: SecretBaseUnion;\n /** Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. */\n additionalLinkedServiceNames?: LinkedServiceReference[];\n /** The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. */\n hcatalogLinkedServiceName?: LinkedServiceReference;\n /** The cluster type. Type: string (or Expression with resultType string). */\n clusterType?: any;\n /** The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). */\n sparkVersion?: any;\n /** Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. */\n coreConfiguration?: any;\n /** Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. */\n hBaseConfiguration?: any;\n /** Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. */\n hdfsConfiguration?: any;\n /** Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. 
*/\n hiveConfiguration?: any;\n /** Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. */\n mapReduceConfiguration?: any;\n /** Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. */\n oozieConfiguration?: any;\n /** Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. */\n stormConfiguration?: any;\n /** Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. */\n yarnConfiguration?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** Specifies the size of the head node for the HDInsight cluster. */\n headNodeSize?: any;\n /** Specifies the size of the data node for the HDInsight cluster. */\n dataNodeSize?: any;\n /** Specifies the size of the Zoo Keeper node for the HDInsight cluster. */\n zookeeperNodeSize?: any;\n /** Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */\n scriptActions?: ScriptAction[];\n /** The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). */\n virtualNetworkId?: any;\n /** The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). */\n subnetName?: any;\n}\n\n/** Azure Data Lake Analytics linked service. 
*/\nexport interface AzureDataLakeAnalyticsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeAnalytics\";\n /** The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). */\n accountName: any;\n /** The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The Key of the application used to authenticate against the Azure Data Lake Analytics account. */\n servicePrincipalKey?: SecretBaseUnion;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant: any;\n /** Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */\n subscriptionId?: any;\n /** Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */\n resourceGroupName?: any;\n /** Azure Data Lake Analytics URI Type: string (or Expression with resultType string). */\n dataLakeAnalyticsUri?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Databricks linked service. */\nexport interface AzureDatabricksLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricks\";\n /** <REGION>.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */\n domain: any;\n /** Access token for databricks REST API. 
Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */\n accessToken?: SecretBaseUnion;\n /** Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */\n authentication?: any;\n /** Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */\n workspaceResourceId?: any;\n /** The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). */\n existingClusterId?: any;\n /** The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). */\n instancePoolId?: any;\n /** If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). */\n newClusterVersion?: any;\n /** If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). */\n newClusterNumOfWorker?: any;\n /** The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). 
*/\n newClusterNodeType?: any;\n /** A set of optional, user-specified Spark configuration key-value pairs. */\n newClusterSparkConf?: { [propertyName: string]: any };\n /** A set of optional, user-specified Spark environment variables key-value pairs. */\n newClusterSparkEnvVars?: { [propertyName: string]: any };\n /** Additional tags for cluster resources. This property is ignored in instance pool configurations. */\n newClusterCustomTags?: { [propertyName: string]: any };\n /** Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */\n newClusterLogDestination?: any;\n /** The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */\n newClusterDriverNodeType?: any;\n /** User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */\n newClusterInitScripts?: any;\n /** Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */\n newClusterEnableElasticDisk?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n /** The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). */\n policyId?: any;\n}\n\n/** Azure Databricks Delta Lake linked service. 
*/\nexport interface AzureDatabricksDeltaLakeLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLake\";\n /** <REGION>.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */\n domain: any;\n /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. */\n accessToken: SecretBaseUnion;\n /** The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */\n clusterId?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Responsys linked service. */\nexport interface ResponsysLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Responsys\";\n /** The endpoint of the Responsys server. */\n endpoint: any;\n /** The client ID associated with the Responsys application. Type: string (or Expression with resultType string). */\n clientId: any;\n /** The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */\n clientSecret?: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Dynamics AX linked service. */\nexport interface DynamicsAXLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsAX\";\n /** The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. */\n url: any;\n /** Specify the application's client ID. Type: string (or Expression with resultType string). */\n servicePrincipalId: any;\n /** Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). */\n servicePrincipalKey: SecretBaseUnion;\n /** Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). */\n tenant: any;\n /** Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). */\n aadResourceId: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Oracle Service Cloud linked service. 
*/\nexport interface OracleServiceCloudLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleServiceCloud\";\n /** The URL of the Oracle Service Cloud instance. */\n host: any;\n /** The user name that you use to access Oracle Service Cloud server. */\n username: any;\n /** The password corresponding to the user name that you provided in the username key. */\n password: SecretBaseUnion;\n /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useEncryptedEndpoints?: any;\n /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n useHostVerification?: any;\n /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */\n usePeerVerification?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Google AdWords service linked service. */\nexport interface GoogleAdWordsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleAdWords\";\n /** Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: object. */\n connectionProperties?: any;\n /** The Client customer ID of the AdWords account that you want to fetch report data for. */\n clientCustomerID?: any;\n /** The developer token associated with the manager account that you use to grant access to the AdWords API. 
*/\n developerToken?: SecretBaseUnion;\n /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */\n authenticationType?: GoogleAdWordsAuthenticationType;\n /** The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. */\n refreshToken?: SecretBaseUnion;\n /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */\n clientId?: any;\n /** The client secret of the google application used to acquire the refresh token. */\n clientSecret?: SecretBaseUnion;\n /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */\n email?: any;\n /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */\n keyFilePath?: any;\n /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */\n trustedCertPath?: any;\n /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */\n useSystemTrustStore?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SAP Table Linked Service. */\nexport interface SapTableLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapTable\";\n /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). 
*/\n server?: any;\n /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */\n systemNumber?: any;\n /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */\n clientId?: any;\n /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */\n language?: any;\n /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */\n systemId?: any;\n /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n userName?: any;\n /** Password to access the SAP server where the table is located. */\n password?: SecretBaseUnion;\n /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */\n messageServer?: any;\n /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */\n messageServerService?: any;\n /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */\n sncMode?: any;\n /** Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncMyName?: any;\n /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncPartnerName?: any;\n /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */\n sncLibraryPath?: any;\n /** SNC Quality of Protection. 
Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */\n sncQop?: any;\n /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */\n logonGroup?: any;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Data Explorer (Kusto) linked service. */\nexport interface AzureDataExplorerLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorer\";\n /** The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType string) */\n endpoint: any;\n /** The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). */\n servicePrincipalId?: any;\n /** The key of the service principal used to authenticate against Kusto. */\n servicePrincipalKey?: SecretBaseUnion;\n /** Database name for connection. Type: string (or Expression with resultType string). */\n database: any;\n /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */\n tenant?: any;\n}\n\n/** Azure Function linked service. */\nexport interface AzureFunctionLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFunction\";\n /** The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. */\n functionAppUrl: any;\n /** Function or Host key for Azure Function App. */\n functionKey?: SecretBaseUnion;\n /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Snowflake linked service. */\nexport interface SnowflakeLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Snowflake\";\n /** The connection string of snowflake. Type: string, SecureString. */\n connectionString: any;\n /** The Azure key vault secret reference of password in connection string. */\n password?: AzureKeyVaultSecretReference;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** SharePoint Online List linked service. */\nexport interface SharePointOnlineListLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SharePointOnlineList\";\n /** The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */\n siteUrl: any;\n /** The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). */\n tenantId: any;\n /** The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). */\n servicePrincipalId: any;\n /** The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */\n servicePrincipalKey: SecretBaseUnion;\n /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */\n encryptedCredential?: any;\n}\n\n/** Azure Synapse Analytics (Artifacts) linked service. */\nexport interface AzureSynapseArtifactsLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSynapseArtifacts\";\n /** https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace URL. Type: string (or Expression with resultType string). */\n endpoint: any;\n /** Required to specify MSI, if using system assigned managed identity as authentication method. Type: string (or Expression with resultType string). */\n authentication?: any;\n /** The resource ID of the Synapse workspace. The format should be: /subscriptions/{subscriptionID}/resourceGroups/{resourceGroup}/providers/Microsoft.Synapse/workspaces/{workspaceName}. Type: string (or Expression with resultType string). */\n workspaceResourceId?: any;\n}\n\n/** Power BI Workspace linked service. */\nexport interface PowerBIWorkspaceLinkedService extends LinkedService {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PowerBIWorkspace\";\n /** The Power BI workspace id. */\n workspaceId: string;\n /** The tenant id to which the Power BI workspace belongs. */\n tenantId: string;\n}\n\n/** Base class for all control activities like IfCondition, ForEach , Until. */\nexport interface ControlActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"Container\"\n | \"ExecutePipeline\"\n | \"IfCondition\"\n | \"Switch\"\n | \"ForEach\"\n | \"Wait\"\n | \"Fail\"\n | \"Until\"\n | \"Validation\"\n | \"Filter\"\n | \"SetVariable\"\n | \"AppendVariable\"\n | \"WebHook\";\n}\n\n/** Base class for all execution activities. 
*/\nexport interface ExecutionActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"Execution\"\n | \"Copy\"\n | \"HDInsightHive\"\n | \"HDInsightPig\"\n | \"HDInsightMapReduce\"\n | \"HDInsightStreaming\"\n | \"HDInsightSpark\"\n | \"ExecuteSSISPackage\"\n | \"Custom\"\n | \"SqlServerStoredProcedure\"\n | \"Delete\"\n | \"AzureDataExplorerCommand\"\n | \"Lookup\"\n | \"WebActivity\"\n | \"GetMetadata\"\n | \"AzureMLBatchExecution\"\n | \"AzureMLUpdateResource\"\n | \"AzureMLExecutePipeline\"\n | \"DataLakeAnalyticsU-SQL\"\n | \"DatabricksNotebook\"\n | \"DatabricksSparkJar\"\n | \"DatabricksSparkPython\"\n | \"AzureFunctionActivity\"\n | \"ExecuteDataFlow\"\n | \"Script\"\n | \"SynapseNotebook\"\n | \"SparkJob\";\n /** Linked service reference. */\n linkedServiceName?: LinkedServiceReference;\n /** Activity policy. */\n policy?: ActivityPolicy;\n}\n\n/** Execute SQL pool stored procedure activity. */\nexport interface SqlPoolStoredProcedureActivity extends Activity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlPoolStoredProcedure\";\n /** SQL pool stored procedure reference. */\n sqlPool: SqlPoolReference;\n /** Stored procedure name. Type: string (or Expression with resultType string). */\n storedProcedureName: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n}\n\n/** Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. */\nexport interface RerunTumblingWindowTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"RerunTumblingWindowTrigger\";\n /** The parent trigger reference. 
*/\n parentTrigger: any;\n /** The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */\n requestedStartTime: Date;\n /** The end time for the time period for which restatement is initiated. Only UTC time is currently supported. */\n requestedEndTime: Date;\n /** The max number of parallel time windows (ready for execution) for which a rerun is triggered. */\n rerunConcurrency: number;\n}\n\n/** Base class for all triggers that support one to many model for trigger to pipeline. */\nexport interface MultiplePipelineTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"MultiplePipelineTrigger\"\n | \"ScheduleTrigger\"\n | \"BlobTrigger\"\n | \"BlobEventsTrigger\"\n | \"CustomEventsTrigger\";\n /** Pipelines that need to be started. */\n pipelines?: TriggerPipelineReference[];\n}\n\n/** Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). */\nexport interface TumblingWindowTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TumblingWindowTrigger\";\n /** Pipeline for which runs are created when an event is fired for trigger window that is ready. */\n pipeline: TriggerPipelineReference;\n /** The frequency of the time windows. */\n frequency: TumblingWindowFrequency;\n /** The interval of the time windows. The minimum interval allowed is 15 Minutes. */\n interval: number;\n /** The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. */\n startTime: Date;\n /** The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. 
*/\n endTime?: Date;\n /** Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n delay?: any;\n /** The max number of parallel time windows (ready for execution) for which a new run is triggered. */\n maxConcurrency: number;\n /** Retry policy that will be applied for failed pipeline runs. */\n retryPolicy?: RetryPolicy;\n /** Triggers that this trigger depends on. Only tumbling window triggers are supported. */\n dependsOn?: DependencyReferenceUnion[];\n}\n\n/** Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. */\nexport interface ChainingTrigger extends Trigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ChainingTrigger\";\n /** Pipeline for which runs are created when all upstream pipelines complete successfully. */\n pipeline: TriggerPipelineReference;\n /** Upstream Pipelines. */\n dependsOn: PipelineReference[];\n /** Run Dimension property that needs to be emitted by upstream pipelines. */\n runDimension: string;\n}\n\n/** Transformation for data flow source. */\nexport interface DataFlowSource extends Transformation {\n /** Schema linked service reference. */\n schemaLinkedService?: LinkedServiceReference;\n}\n\n/** Transformation for data flow sink. */\nexport interface DataFlowSink extends Transformation {\n /** Schema linked service reference. */\n schemaLinkedService?: LinkedServiceReference;\n /** Rejected data linked service reference. 
*/\n rejectedDataLinkedService?: LinkedServiceReference;\n}\n\n/** The location of azure blob dataset. */\nexport interface AzureBlobStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageLocation\";\n /** Specify the container of azure blob. Type: string (or Expression with resultType string). */\n container?: any;\n}\n\n/** The location of azure blobFS dataset. */\nexport interface AzureBlobFSLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSLocation\";\n /** Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). */\n fileSystem?: any;\n}\n\n/** The location of azure data lake store dataset. */\nexport interface AzureDataLakeStoreLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreLocation\";\n}\n\n/** The location of amazon S3 dataset. */\nexport interface AmazonS3Location extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3Location\";\n /** Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) */\n bucketName?: any;\n /** Specify the version of amazon S3. Type: string (or Expression with resultType string). */\n version?: any;\n}\n\n/** The location of file server dataset. */\nexport interface FileServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerLocation\";\n}\n\n/** The location of Azure File Storage dataset. 
*/\nexport interface AzureFileStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageLocation\";\n}\n\n/** The location of Google Cloud Storage dataset. */\nexport interface GoogleCloudStorageLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleCloudStorageLocation\";\n /** Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) */\n bucketName?: any;\n /** Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). */\n version?: any;\n}\n\n/** The location of ftp server dataset. */\nexport interface FtpServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FtpServerLocation\";\n}\n\n/** The location of SFTP dataset. */\nexport interface SftpLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpLocation\";\n}\n\n/** The location of http server. */\nexport interface HttpServerLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpServerLocation\";\n /** Specify the relativeUrl of http server. Type: string (or Expression with resultType string) */\n relativeUrl?: any;\n}\n\n/** The location of HDFS. */\nexport interface HdfsLocation extends DatasetLocation {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HdfsLocation\";\n}\n\n/** The data stored in text format. */\nexport interface TextFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TextFormat\";\n /** The column delimiter. 
Type: string (or Expression with resultType string). */\n columnDelimiter?: any;\n /** The row delimiter. Type: string (or Expression with resultType string). */\n rowDelimiter?: any;\n /** The escape character. Type: string (or Expression with resultType string). */\n escapeChar?: any;\n /** The quote character. Type: string (or Expression with resultType string). */\n quoteChar?: any;\n /** The null value string. Type: string (or Expression with resultType string). */\n nullValue?: any;\n /** The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless BOM denotes another Unicode encoding. Refer to the 'Name' column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). */\n treatEmptyAsNull?: any;\n /** The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). */\n skipLineCount?: any;\n /** When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */\n firstRowAsHeader?: any;\n}\n\n/** The data stored in JSON format. */\nexport interface JsonFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonFormat\";\n /** File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. */\n filePattern?: any;\n /** The character used to separate nesting levels. Default value is '.' (dot). 
Type: string (or Expression with resultType string). */\n nestingSeparator?: any;\n /** The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). */\n encodingName?: any;\n /** The JSONPath of the JSON array element to be flattened. Example: \"$.ArrayPath\". Type: string (or Expression with resultType string). */\n jsonNodeReference?: any;\n /** The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with \"$\"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {\"Column1\": \"$.Column1Path\", \"Column2\": \"Column2PathInArray\"}. Type: object (or Expression with resultType object). */\n jsonPathDefinition?: any;\n}\n\n/** The data stored in Avro format. */\nexport interface AvroFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroFormat\";\n}\n\n/** The data stored in Optimized Row Columnar (ORC) format. */\nexport interface OrcFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcFormat\";\n}\n\n/** The data stored in Parquet format. */\nexport interface ParquetFormat extends DatasetStorageFormat {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetFormat\";\n}\n\n/** A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. 
*/\nexport interface WebAnonymousAuthentication\n extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"Anonymous\";\n}\n\n/** A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. */\nexport interface WebBasicAuthentication extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"Basic\";\n /** User name for Basic authentication. Type: string (or Expression with resultType string). */\n username: any;\n /** The password for Basic authentication. */\n password: SecretBaseUnion;\n}\n\n/** A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. */\nexport interface WebClientCertificateAuthentication\n extends WebLinkedServiceTypeProperties {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authenticationType: \"ClientCertificate\";\n /** Base64-encoded contents of a PFX file. */\n pfx: SecretBaseUnion;\n /** Password for the PFX file. */\n password: SecretBaseUnion;\n}\n\n/** Azure blob read settings. */\nexport interface AzureBlobStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure blob wildcardFileName. Type: string (or Expression with resultType string). 
*/\n wildcardFileName?: any;\n /** The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Azure blobFS read settings. */\nexport interface AzureBlobFSReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. 
*/\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Azure data lake store read settings. */\nexport interface AzureDataLakeStoreReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** ADLS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** ADLS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */\n listAfter?: any;\n /** Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. 
Type: string (or Expression with resultType string). */\n listBefore?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Amazon S3 read settings. */\nexport interface AmazonS3ReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonS3ReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
*/\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** File server read settings. */\nexport interface FileServerReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** FileServer wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** FileServer wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/\n modifiedDatetimeEnd?: any;\n /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */\n fileFilter?: any;\n}\n\n/** Azure File Storage read settings. */\nexport interface AzureFileStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Google Cloud Storage read settings. 
*/\nexport interface GoogleCloudStorageReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleCloudStorageReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). */\n prefix?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n}\n\n/** Ftp read settings. */\nexport interface FtpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FtpReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Ftp wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Ftp wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Specify whether to use binary transfer mode for FTP stores. */\n useBinaryTransfer?: boolean;\n /** If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */\n disableChunking?: any;\n}\n\n/** Sftp read settings. */\nexport interface SftpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** Sftp wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** Sftp wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). */\n disableChunking?: any;\n}\n\n/** Http read settings. */\nexport interface HttpReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HttpReadSettings\";\n /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */\n requestMethod?: any;\n /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */\n requestBody?: any;\n /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */\n additionalHeaders?: any;\n /** Specifies the timeout for a HTTP client to get HTTP response from HTTP server. */\n requestTimeout?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n}\n\n/** HDFS read settings. 
*/\nexport interface HdfsReadSettings extends StoreReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HdfsReadSettings\";\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** HDFS wildcardFolderPath. Type: string (or Expression with resultType string). */\n wildcardFolderPath?: any;\n /** HDFS wildcardFileName. Type: string (or Expression with resultType string). */\n wildcardFileName?: any;\n /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */\n fileListPath?: any;\n /** Indicates whether to enable partition discovery. */\n enablePartitionDiscovery?: boolean;\n /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */\n partitionRootPath?: any;\n /** The start of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeStart?: any;\n /** The end of file's modified datetime. Type: string (or Expression with resultType string). */\n modifiedDatetimeEnd?: any;\n /** Specifies Distcp-related settings. */\n distcpSettings?: DistcpSettings;\n /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */\n deleteFilesAfterCompletion?: any;\n}\n\n/** Sftp write settings. */\nexport interface SftpWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SftpWriteSettings\";\n /** Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). 
*/\n operationTimeout?: any;\n /** Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). */\n useTempFileRename?: any;\n}\n\n/** Azure blob write settings. */\nexport interface AzureBlobStorageWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobStorageWriteSettings\";\n /** Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */\n blockSizeInMB?: any;\n}\n\n/** Azure blobFS write settings. */\nexport interface AzureBlobFSWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureBlobFSWriteSettings\";\n /** Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). */\n blockSizeInMB?: any;\n}\n\n/** Azure data lake store write settings. */\nexport interface AzureDataLakeStoreWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataLakeStoreWriteSettings\";\n /** Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of \"2018-12-01T05:00:00Z\". Default value is NULL. Type: integer (or Expression with resultType integer). */\n expiryDateTime?: any;\n}\n\n/** File server write settings. */\nexport interface FileServerWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"FileServerWriteSettings\";\n}\n\n/** Azure File Storage write settings. 
*/\nexport interface AzureFileStorageWriteSettings extends StoreWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFileStorageWriteSettings\";\n}\n\n/** Delimited text read settings. */\nexport interface DelimitedTextReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextReadSettings\";\n /** Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). */\n skipLineCount?: any;\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** Json read settings. */\nexport interface JsonReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** Xml read settings. */\nexport interface XmlReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XmlReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n /** Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). */\n validationMode?: any;\n /** Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */\n detectDataType?: any;\n /** Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). 
*/\n namespaces?: any;\n /** Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: \"{\"http://www.example.com/xml\":\"prefix\"}\" Type: object (or Expression with resultType object). */\n namespacePrefixes?: any;\n}\n\n/** Binary read settings. */\nexport interface BinaryReadSettings extends FormatReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BinaryReadSettings\";\n /** Compression settings. */\n compressionProperties?: CompressionReadSettingsUnion;\n}\n\n/** The ZipDeflate compression read settings. */\nexport interface ZipDeflateReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZipDeflateReadSettings\";\n /** Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveZipFileNameAsFolder?: any;\n}\n\n/** The Tar compression read settings. */\nexport interface TarReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TarReadSettings\";\n /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveCompressionFileNameAsFolder?: any;\n}\n\n/** The TarGZip compression read settings. */\nexport interface TarGZipReadSettings extends CompressionReadSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TarGZipReadSettings\";\n /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */\n preserveCompressionFileNameAsFolder?: any;\n}\n\n/** Avro write settings. 
*/\nexport interface AvroWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroWriteSettings\";\n /** Top level record name in write result, which is required in AVRO spec. */\n recordName?: string;\n /** Record namespace in the write result. */\n recordNamespace?: string;\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Orc write settings. */\nexport interface OrcWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcWriteSettings\";\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Parquet write settings. */\nexport interface ParquetWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetWriteSettings\";\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
*/\n fileNamePrefix?: any;\n}\n\n/** Delimited text write settings. */\nexport interface DelimitedTextWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextWriteSettings\";\n /** Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). */\n quoteAllText?: any;\n /** The file extension used to create the files. Type: string (or Expression with resultType string). */\n fileExtension: any;\n /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */\n maxRowsPerFile?: any;\n /** Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */\n fileNamePrefix?: any;\n}\n\n/** Json write settings. */\nexport interface JsonWriteSettings extends FormatWriteSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonWriteSettings\";\n /** File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. */\n filePattern?: any;\n}\n\n/** A copy activity Avro source. */\nexport interface AvroSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AvroSource\";\n /** Avro store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity excel source. 
*/\nexport interface ExcelSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExcelSource\";\n /** Excel store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Parquet source. */\nexport interface ParquetSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ParquetSource\";\n /** Parquet store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity DelimitedText source. */\nexport interface DelimitedTextSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DelimitedTextSource\";\n /** DelimitedText store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** DelimitedText format settings. */\n formatSettings?: DelimitedTextReadSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Json source. */\nexport interface JsonSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JsonSource\";\n /** Json store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Json format settings. */\n formatSettings?: JsonReadSettings;\n /** Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Xml source. */\nexport interface XmlSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XmlSource\";\n /** Xml store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Xml format settings. */\n formatSettings?: XmlReadSettings;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity ORC source. */\nexport interface OrcSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OrcSource\";\n /** ORC store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Binary source. */\nexport interface BinarySource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BinarySource\";\n /** Binary store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** Binary format settings. */\n formatSettings?: BinaryReadSettings;\n}\n\n/** Copy activity sources of tabular type. 
*/\nexport interface TabularSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TabularSource\"\n | \"AzureTableSource\"\n | \"InformixSource\"\n | \"Db2Source\"\n | \"OdbcSource\"\n | \"MySqlSource\"\n | \"PostgreSqlSource\"\n | \"SybaseSource\"\n | \"SapBwSource\"\n | \"SalesforceSource\"\n | \"SapCloudForCustomerSource\"\n | \"SapEccSource\"\n | \"SapHanaSource\"\n | \"SapOpenHubSource\"\n | \"SapOdpSource\"\n | \"SapTableSource\"\n | \"SqlSource\"\n | \"SqlServerSource\"\n | \"AmazonRdsForSqlServerSource\"\n | \"AzureSqlSource\"\n | \"SqlMISource\"\n | \"SqlDWSource\"\n | \"AzureMySqlSource\"\n | \"TeradataSource\"\n | \"CassandraSource\"\n | \"AmazonMWSSource\"\n | \"AzurePostgreSqlSource\"\n | \"ConcurSource\"\n | \"CouchbaseSource\"\n | \"DrillSource\"\n | \"EloquaSource\"\n | \"GoogleBigQuerySource\"\n | \"GreenplumSource\"\n | \"HBaseSource\"\n | \"HiveSource\"\n | \"HubspotSource\"\n | \"ImpalaSource\"\n | \"JiraSource\"\n | \"MagentoSource\"\n | \"MariaDBSource\"\n | \"AzureMariaDBSource\"\n | \"MarketoSource\"\n | \"PaypalSource\"\n | \"PhoenixSource\"\n | \"PrestoSource\"\n | \"QuickBooksSource\"\n | \"ServiceNowSource\"\n | \"ShopifySource\"\n | \"SparkSource\"\n | \"SquareSource\"\n | \"XeroSource\"\n | \"ZohoSource\"\n | \"NetezzaSource\"\n | \"VerticaSource\"\n | \"SalesforceMarketingCloudSource\"\n | \"ResponsysSource\"\n | \"DynamicsAXSource\"\n | \"OracleServiceCloudSource\"\n | \"GoogleAdWordsSource\"\n | \"AmazonRedshiftSource\";\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Azure Blob source. 
*/\nexport interface BlobSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BlobSource\";\n /** Treat empty as null. Type: boolean (or Expression with resultType boolean). */\n treatEmptyAsNull?: any;\n /** Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). */\n skipHeaderLineCount?: any;\n /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n}\n\n/** A copy activity Document Database Collection source. */\nexport interface DocumentDbCollectionSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DocumentDbCollectionSource\";\n /** Documents query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Nested properties separator. Type: string (or Expression with resultType string). */\n nestingSeparator?: any;\n /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n queryTimeout?: any;\n /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */\n additionalColumns?: any;\n}\n\n/** A copy activity Azure CosmosDB (SQL API) Collection source. */\nexport interface CosmosDbSqlApiSource extends CopySource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CosmosDbSqlApiSource\";\n /** SQL API query. Type: string (or Expression with resultType string). */\n query?: any;\n /** Page size of the result. Type: integer (or Expression with resultType integer). */\n pageSize?: any;\n /** Preferred regions. Type: array of strings (or Expression with resultType array of strings). 
*/
  preferredRegions?: any;
  /** Whether to detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). */
  detectDatetime?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Dynamics source. */
export interface DynamicsSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsSource";
  /** FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Dynamics CRM source. */
export interface DynamicsCrmSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsCrmSource";
  /** FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Common Data Service for Apps source. */
export interface CommonDataServiceForAppsSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CommonDataServiceForAppsSource";
  /** FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for various relational databases. */
export interface RelationalSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "RelationalSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for Microsoft Access. */
export interface MicrosoftAccessSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MicrosoftAccessSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for OData source. */
export interface ODataSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ODataSource";
  /** OData query. For example, "$top=1". Type: string (or Expression with resultType string). */
  query?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Salesforce Service Cloud source. */
export interface SalesforceServiceCloudSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceServiceCloudSource";
  /** Database query. Type: string (or Expression with resultType string). */
  query?: any;
  /** The read behavior for the operation. Default is Query. */
  readBehavior?: SalesforceSourceReadBehavior;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Rest service source. */
export interface RestSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "RestSource";
  /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */
  requestMethod?: any;
  /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */
  requestBody?: any;
  /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */
  additionalHeaders?: any;
  /** The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */
  paginationRules?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
  /** The time to wait before sending the next page request. */
  requestInterval?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity file system source. */
export interface FileSystemSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "FileSystemSource";
  /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */
  recursive?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity HDFS source. */
export interface HdfsSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HdfsSource";
  /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */
  recursive?: any;
  /** Specifies Distcp-related settings. */
  distcpSettings?: DistcpSettings;
}

/** A copy activity Azure Data Explorer (Kusto) source. */
export interface AzureDataExplorerSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataExplorerSource";
  /** Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). */
  query: any;
  /** The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. */
  noTruncation?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity Oracle source. */
export interface OracleSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OracleSource";
  /** Oracle reader query. Type: string (or Expression with resultType string). */
  oracleReaderQuery?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** The partition mechanism that will be used for Oracle read in parallel. */
  partitionOption?: OraclePartitionOption;
  /** The settings that will be leveraged for Oracle source partitioning. */
  partitionSettings?: OraclePartitionSettings;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity AmazonRdsForOracle source. */
export interface AmazonRdsForOracleSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AmazonRdsForOracleSource";
  /** AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). */
  oracleReaderQuery?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). */
  partitionOption?: any;
  /** The settings that will be leveraged for AmazonRdsForOracle source partitioning.
*/
  partitionSettings?: AmazonRdsForOraclePartitionSettings;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for web page table. */
export interface WebSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "WebSource";
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for a MongoDB database. */
export interface MongoDbSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDbSource";
  /** Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */
  query?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for a MongoDB Atlas database. */
export interface MongoDbAtlasSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDbAtlasSource";
  /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */
  filter?: any;
  /** Cursor methods for Mongodb query */
  cursorMethods?: MongoDbCursorMethodsProperties;
  /** Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). */
  batchSize?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for a MongoDB database. */
export interface MongoDbV2Source extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MongoDbV2Source";
  /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */
  filter?: any;
  /** Cursor methods for Mongodb query */
  cursorMethods?: MongoDbCursorMethodsProperties;
  /** Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). */
  batchSize?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for a CosmosDB (MongoDB API) database. */
export interface CosmosDbMongoDbApiSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CosmosDbMongoDbApiSource";
  /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */
  filter?: any;
  /** Cursor methods for Mongodb query. */
  cursorMethods?: MongoDbCursorMethodsProperties;
  /** Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). */
  batchSize?: any;
  /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  queryTimeout?: any;
  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
  additionalColumns?: any;
}

/** A copy activity source for an Office 365 service. */
export interface Office365Source extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "Office365Source";
  /** The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). */
  allowedGroups?: any;
  /** The user scope uri. Type: string (or Expression with resultType string). */
  userScopeFilterUri?: any;
  /** The Column to apply the <paramref name="StartTime"/> and <paramref name="EndTime"/>. Type: string (or Expression with resultType string). */
  dateFilterColumn?: any;
  /** Start time of the requested range for this dataset. Type: string (or Expression with resultType string). */
  startTime?: any;
  /** End time of the requested range for this dataset. Type: string (or Expression with resultType string). */
  endTime?: any;
  /** The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] */
  outputColumns?: any;
}

/** A copy activity Azure Data Lake source. */
export interface AzureDataLakeStoreSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataLakeStoreSource";
  /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */
  recursive?: any;
}

/** A copy activity Azure BlobFS source. */
export interface AzureBlobFSSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureBlobFSSource";
  /** Treat empty as null. Type: boolean (or Expression with resultType boolean). */
  treatEmptyAsNull?: any;
  /** Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). */
  skipHeaderLineCount?: any;
  /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */
  recursive?: any;
}

/** A copy activity source for an HTTP file. */
export interface HttpSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "HttpSource";
  /** Specifies the timeout for an HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
}

/** A copy activity snowflake source. */
export interface SnowflakeSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SnowflakeSource";
  /** Snowflake Sql query. Type: string (or Expression with resultType string). */
  query?: any;
  /** Snowflake export settings. */
  exportSettings: SnowflakeExportCopyCommand;
}

/** A copy activity Azure Databricks Delta Lake source. */
export interface AzureDatabricksDeltaLakeSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDatabricksDeltaLakeSource";
  /** Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). */
  query?: any;
  /** Azure Databricks Delta Lake export settings. */
  exportSettings?: AzureDatabricksDeltaLakeExportCommand;
}

/** A copy activity source for sharePoint online list source. */
export interface SharePointOnlineListSource extends CopySource {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SharePointOnlineListSource";
  /** The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). */
  query?: any;
  /** The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
}

/** A copy activity DelimitedText sink. */
export interface DelimitedTextSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DelimitedTextSink";
  /** DelimitedText store settings. */
  storeSettings?: StoreWriteSettingsUnion;
  /** DelimitedText format settings. */
  formatSettings?: DelimitedTextWriteSettings;
}

/** A copy activity Json sink.
*/
export interface JsonSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "JsonSink";
  /** Json store settings. */
  storeSettings?: StoreWriteSettingsUnion;
  /** Json format settings. */
  formatSettings?: JsonWriteSettings;
}

/** A copy activity ORC sink. */
export interface OrcSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OrcSink";
  /** ORC store settings. */
  storeSettings?: StoreWriteSettingsUnion;
  /** ORC format settings. */
  formatSettings?: OrcWriteSettings;
}

/** A copy activity Rest service Sink. */
export interface RestSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "RestSink";
  /** The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). */
  requestMethod?: any;
  /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */
  additionalHeaders?: any;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
  /** The time to wait before sending the next request, in milliseconds. */
  requestInterval?: any;
  /** HTTP compression type, used to send data in compressed format with optimal compression level. Default is None. The only supported option is Gzip. */
  httpCompressionType?: any;
}

/** A copy activity Azure PostgreSQL sink. */
export interface AzurePostgreSqlSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzurePostgreSqlSink";
  /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Azure MySql sink. */
export interface AzureMySqlSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureMySqlSink";
  /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Azure Databricks Delta Lake sink. */
export interface AzureDatabricksDeltaLakeSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDatabricksDeltaLakeSink";
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** Azure Databricks Delta Lake import settings. */
  importSettings?: AzureDatabricksDeltaLakeImportCommand;
}

/** A copy activity SAP Cloud for Customer sink. */
export interface SapCloudForCustomerSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SapCloudForCustomerSink";
  /** The write behavior for the operation. Default is 'Insert'. */
  writeBehavior?: SapCloudForCustomerSinkWriteBehavior;
  /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
  httpRequestTimeout?: any;
}

/** A copy activity Azure Queue sink. */
export interface AzureQueueSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureQueueSink";
}

/** A copy activity Azure Table sink. */
export interface AzureTableSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureTableSink";
  /** Azure Table default partition key value. Type: string (or Expression with resultType string). */
  azureTableDefaultPartitionKeyValue?: any;
  /** Azure Table partition key name. Type: string (or Expression with resultType string). */
  azureTablePartitionKeyName?: any;
  /** Azure Table row key name. Type: string (or Expression with resultType string). */
  azureTableRowKeyName?: any;
  /** Azure Table insert type. Type: string (or Expression with resultType string). */
  azureTableInsertType?: any;
}

/** A copy activity Avro sink. */
export interface AvroSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AvroSink";
  /** Avro store settings. */
  storeSettings?: StoreWriteSettingsUnion;
  /** Avro format settings. */
  formatSettings?: AvroWriteSettings;
}

/** A copy activity Parquet sink. */
export interface ParquetSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "ParquetSink";
  /** Parquet store settings. */
  storeSettings?: StoreWriteSettingsUnion;
  /** Parquet format settings. */
  formatSettings?: ParquetWriteSettings;
}

/** A copy activity Binary sink. */
export interface BinarySink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "BinarySink";
  /** Binary store settings. */
  storeSettings?: StoreWriteSettingsUnion;
}

/** A copy activity Azure Blob sink. */
export interface BlobSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "BlobSink";
  /** Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). */
  blobWriterOverwriteFiles?: any;
  /** Blob writer date time format. Type: string (or Expression with resultType string). */
  blobWriterDateTimeFormat?: any;
  /** Blob writer add header. Type: boolean (or Expression with resultType boolean). */
  blobWriterAddHeader?: any;
  /** The type of copy behavior for copy sink. */
  copyBehavior?: any;
}

/** A copy activity file system sink. */
export interface FileSystemSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "FileSystemSink";
  /** The type of copy behavior for copy sink. */
  copyBehavior?: any;
}

/** A copy activity Document Database Collection sink. */
export interface DocumentDbCollectionSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DocumentDbCollectionSink";
  /** Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). */
  nestingSeparator?: any;
  /** Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. */
  writeBehavior?: any;
}

/** A copy activity Azure CosmosDB (SQL API) Collection sink. */
export interface CosmosDbSqlApiSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CosmosDbSqlApiSink";
  /** Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. */
  writeBehavior?: any;
}

/** A copy activity SQL sink. */
export interface SqlSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlSink";
  /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */
  sqlWriterStoredProcedureName?: any;
  /** SQL writer table type. Type: string (or Expression with resultType string). */
  sqlWriterTableType?: any;
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** SQL stored procedure parameters. */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */
  storedProcedureTableTypeParameterName?: any;
  /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */
  tableOption?: any;
}

/** A copy activity SQL server sink. */
export interface SqlServerSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlServerSink";
  /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */
  sqlWriterStoredProcedureName?: any;
  /** SQL writer table type. Type: string (or Expression with resultType string). */
  sqlWriterTableType?: any;
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** SQL stored procedure parameters. */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */
  storedProcedureTableTypeParameterName?: any;
  /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported.
Type: string (or Expression with resultType string). */
  tableOption?: any;
}

/** A copy activity Azure SQL sink. */
export interface AzureSqlSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSqlSink";
  /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */
  sqlWriterStoredProcedureName?: any;
  /** SQL writer table type. Type: string (or Expression with resultType string). */
  sqlWriterTableType?: any;
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** SQL stored procedure parameters. */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */
  storedProcedureTableTypeParameterName?: any;
  /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */
  tableOption?: any;
}

/** A copy activity Azure SQL Managed Instance sink. */
export interface SqlMISink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlMISink";
  /** SQL writer stored procedure name. Type: string (or Expression with resultType string). */
  sqlWriterStoredProcedureName?: any;
  /** SQL writer table type. Type: string (or Expression with resultType string). */
  sqlWriterTableType?: any;
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** SQL stored procedure parameters. */
  storedProcedureParameters?: {
    [propertyName: string]: StoredProcedureParameter;
  };
  /** The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */
  storedProcedureTableTypeParameterName?: any;
  /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */
  tableOption?: any;
}

/** A copy activity SQL Data Warehouse sink. */
export interface SqlDWSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SqlDWSink";
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). */
  allowPolyBase?: any;
  /** Specifies PolyBase-related settings when allowPolyBase is true. */
  polyBaseSettings?: PolybaseSettings;
  /** Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). */
  allowCopyCommand?: any;
  /** Specifies Copy Command related settings when allowCopyCommand is true. */
  copyCommandSettings?: DWCopyCommandSettings;
  /** The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). */
  tableOption?: any;
}

/** A copy activity snowflake sink. */
export interface SnowflakeSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SnowflakeSink";
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
  /** Snowflake import settings. */
  importSettings?: SnowflakeImportCopyCommand;
}

/** A copy activity Oracle sink. */
export interface OracleSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OracleSink";
  /** SQL pre-copy script. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Azure Data Lake Store sink. */
export interface AzureDataLakeStoreSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataLakeStoreSink";
  /** The type of copy behavior for copy sink. */
  copyBehavior?: any;
  /** Single File Parallel. */
  enableAdlsSingleFileParallel?: any;
}

/** A copy activity Azure Data Lake Storage Gen2 sink. */
export interface AzureBlobFSSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureBlobFSSink";
  /** The type of copy behavior for copy sink. */
  copyBehavior?: any;
}

/** A copy activity Azure Search Index sink. */
export interface AzureSearchIndexSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureSearchIndexSink";
  /** Specify the write behavior when upserting documents into Azure Search Index. */
  writeBehavior?: AzureSearchIndexWriteBehaviorType;
}

/** A copy activity ODBC sink. */
export interface OdbcSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "OdbcSink";
  /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Informix sink. */
export interface InformixSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "InformixSink";
  /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Microsoft Access sink. */
export interface MicrosoftAccessSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "MicrosoftAccessSink";
  /** A query to execute before starting the copy. Type: string (or Expression with resultType string). */
  preCopyScript?: any;
}

/** A copy activity Dynamics sink. */
export interface DynamicsSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsSink";
  /** The write behavior for the operation. */
  writeBehavior: DynamicsSinkWriteBehavior;
  /** The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */
  ignoreNullValues?: any;
  /** The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */
  alternateKeyName?: any;
}

/** A copy activity Dynamics CRM sink. */
export interface DynamicsCrmSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "DynamicsCrmSink";
  /** The write behavior for the operation. */
  writeBehavior: DynamicsSinkWriteBehavior;
  /** The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */
  ignoreNullValues?: any;
  /** The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */
  alternateKeyName?: any;
}

/** A copy activity Common Data Service for Apps sink. */
export interface CommonDataServiceForAppsSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CommonDataServiceForAppsSink";
  /** The write behavior for the operation. */
  writeBehavior: DynamicsSinkWriteBehavior;
  /** The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). */
  ignoreNullValues?: any;
  /** The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). */
  alternateKeyName?: any;
}

/** A copy activity Azure Data Explorer sink. */
export interface AzureDataExplorerSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "AzureDataExplorerSink";
  /** A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. */
  ingestionMappingName?: any;
  /** An explicit column mapping description provided in a json format. Type: string. */
  ingestionMappingAsJson?: any;
  /** If set to true, any aggregation will be skipped. Default is false. Type: boolean. */
  flushImmediately?: any;
}

/** A copy activity Salesforce sink. */
export interface SalesforceSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceSink";
  /** The write behavior for the operation. Default is Insert. */
  writeBehavior?: SalesforceSinkWriteBehavior;
  /** The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */
  externalIdFieldName?: any;
  /** The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set to true, ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */
  ignoreNullValues?: any;
}

/** A copy activity Salesforce Service Cloud sink. */
export interface SalesforceServiceCloudSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "SalesforceServiceCloudSink";
  /** The write behavior for the operation. Default is Insert. */
  writeBehavior?: SalesforceSinkWriteBehavior;
  /** The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). */
  externalIdFieldName?: any;
  /** The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set to true, ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). */
  ignoreNullValues?: any;
}

/** A copy activity sink for a CosmosDB (MongoDB API) database. */
export interface CosmosDbMongoDbApiSink extends CopySink {
  /** Polymorphic discriminator, which specifies the different types this object can be */
  type: "CosmosDbMongoDbApiSink";
  /** Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string).
*/\n writeBehavior?: any;\n}\n\n/** Snowflake export command settings. */\nexport interface SnowflakeExportCopyCommand extends ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeExportCopyCommand\";\n /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalCopyOptions\": { \"DATE_FORMAT\": \"MM/DD/YYYY\", \"TIME_FORMAT\": \"'HH24:MI:SS.FF'\" } */\n additionalCopyOptions?: { [propertyName: string]: any };\n /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalFormatOptions\": { \"OVERWRITE\": \"TRUE\", \"MAX_FILE_SIZE\": \"'FALSE'\" } */\n additionalFormatOptions?: { [propertyName: string]: any };\n}\n\n/** Azure Databricks Delta Lake export command settings. */\nexport interface AzureDatabricksDeltaLakeExportCommand extends ExportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeExportCommand\";\n /** Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n dateFormat?: any;\n /** Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n timestampFormat?: any;\n}\n\n/** Azure Databricks Delta Lake import command settings. */\nexport interface AzureDatabricksDeltaLakeImportCommand extends ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDatabricksDeltaLakeImportCommand\";\n /** Specify the date format for csv in Azure Databricks Delta Lake Copy. 
Type: string (or Expression with resultType string). */\n dateFormat?: any;\n /** Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */\n timestampFormat?: any;\n}\n\n/** Snowflake import command settings. */\nexport interface SnowflakeImportCopyCommand extends ImportSettings {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SnowflakeImportCopyCommand\";\n /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalCopyOptions\": { \"DATE_FORMAT\": \"MM/DD/YYYY\", \"TIME_FORMAT\": \"'HH24:MI:SS.FF'\" } */\n additionalCopyOptions?: { [propertyName: string]: any };\n /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: \"additionalFormatOptions\": { \"FORCE\": \"TRUE\", \"LOAD_UNCERTAIN_FILES\": \"'FALSE'\" } */\n additionalFormatOptions?: { [propertyName: string]: any };\n}\n\n/** A copy activity tabular translator. */\nexport interface TabularTranslator extends CopyTranslator {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TabularTranslator\";\n /** Column mappings. Example: \"UserId: MyUserId, Group: MyGroup, Name: MyName\" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. */\n columnMappings?: any;\n /** The schema mapping to map between tabular data and hierarchical data. Example: {\"Column1\": \"$.Column1\", \"Column2\": \"$.Column2.Property1\", \"Column3\": \"$.Column2.Property2\"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. 
*/\n schemaMapping?: any;\n /** The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). */\n collectionReference?: any;\n /** Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). */\n mapComplexValuesToString?: any;\n /** Column mappings with logical types. Tabular->tabular example: [{\"source\":{\"name\":\"CustomerName\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},{\"source\":{\"name\":\"CustomerAddress\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Hierarchical->tabular example: [{\"source\":{\"path\":\"$.CustomerName\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientName\",\"type\":\"String\"}},{\"source\":{\"path\":\"$.CustomerAddress\",\"type\":\"String\"},\"sink\":{\"name\":\"ClientAddress\",\"type\":\"String\"}}]. Type: object (or Expression with resultType object). */\n mappings?: any;\n /** Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). */\n typeConversion?: any;\n /** Type conversion settings */\n typeConversionSettings?: TypeConversionSettings;\n}\n\n/** Trigger referenced dependency. */\nexport interface TriggerDependencyReference extends DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type:\n | \"TriggerDependencyReference\"\n | \"TumblingWindowTriggerDependencyReference\";\n /** Referenced trigger. */\n referenceTrigger: TriggerReference;\n}\n\n/** Self referenced tumbling window trigger dependency. 
*/\nexport interface SelfDependencyTumblingWindowTriggerReference\n extends DependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SelfDependencyTumblingWindowTriggerReference\";\n /** Timespan applied to the start time of a tumbling window when evaluating dependency. */\n offset: string;\n /** The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. */\n size?: string;\n}\n\n/** The key authorization type integration runtime. */\nexport interface LinkedIntegrationRuntimeKeyAuthorization\n extends LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"Key\";\n /** The key used for authorization. */\n key: SecureString;\n}\n\n/** The role based access control (RBAC) authorization type integration runtime. */\nexport interface LinkedIntegrationRuntimeRbacAuthorization\n extends LinkedIntegrationRuntimeType {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n authorizationType: \"RBAC\";\n /** The resource identifier of the integration runtime to be shared. */\n resourceId: string;\n}\n\n/** Azure Synapse nested resource, which belongs to a workspace. */\nexport interface SubResource extends AzureEntityResource {}\n\n/** A Big Data pool */\nexport interface BigDataPoolResourceInfo extends TrackedResource {\n /** The state of the Big Data pool. */\n provisioningState?: string;\n /** Auto-scaling properties */\n autoScale?: AutoScaleProperties;\n /** The time when the Big Data pool was created. */\n creationDate?: Date;\n /** Auto-pausing properties */\n autoPause?: AutoPauseProperties;\n /** Whether compute isolation is required or not. */\n isComputeIsolationEnabled?: boolean;\n /** Whether session level packages enabled. 
*/\n sessionLevelPackagesEnabled?: boolean;\n /** The cache size */\n cacheSize?: number;\n /** Dynamic Executor Allocation */\n dynamicExecutorAllocation?: DynamicExecutorAllocation;\n /** The Spark events folder */\n sparkEventsFolder?: string;\n /** The number of nodes in the Big Data pool. */\n nodeCount?: number;\n /** Library version requirements */\n libraryRequirements?: LibraryRequirements;\n /** List of custom libraries/packages associated with the spark pool. */\n customLibraries?: LibraryInfo[];\n /** Spark configuration file to specify additional properties */\n sparkConfigProperties?: LibraryRequirements;\n /** The Apache Spark version. */\n sparkVersion?: string;\n /** The default folder where Spark logs will be written. */\n defaultSparkLogFolder?: string;\n /** The level of compute power that each node in the Big Data pool has. */\n nodeSize?: NodeSize;\n /** The kind of nodes that the Big Data pool provides. */\n nodeSizeFamily?: NodeSizeFamily;\n /**\n * The time when the Big Data pool was updated successfully.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly lastSucceededTimestamp?: Date;\n}\n\n/** A SQL Analytics pool */\nexport interface SqlPool extends TrackedResource {\n /** SQL pool SKU */\n sku?: Sku;\n /** Maximum size in bytes */\n maxSizeBytes?: number;\n /** Collation mode */\n collation?: string;\n /** Source database to create from */\n sourceDatabaseId?: string;\n /** Backup database to restore from */\n recoverableDatabaseId?: string;\n /** Resource state */\n provisioningState?: string;\n /** Resource status */\n status?: string;\n /** Snapshot time to restore */\n restorePointInTime?: string;\n /**\n * Specifies the mode of sql pool creation.\n *\n * Default: regular sql pool creation.\n *\n * PointInTimeRestore: Creates a sql pool by restoring a point in time backup of an existing sql pool. 
sourceDatabaseId must be specified as the resource ID of the existing sql pool, and restorePointInTime must be specified.\n *\n * Recovery: Creates a sql pool by a geo-replicated backup. sourceDatabaseId must be specified as the recoverableDatabaseId to restore.\n *\n * Restore: Creates a sql pool by restoring a backup of a deleted sql pool. SourceDatabaseId should be the sql pool's original resource ID. SourceDatabaseId and sourceDatabaseDeletionDate must be specified.\n */\n createMode?: CreateMode;\n /** Date the SQL pool was created */\n creationDate?: Date;\n}\n\n/** A workspace */\nexport interface Workspace extends TrackedResource {\n /** Identity of the workspace */\n identity?: ManagedIdentity;\n /** Workspace default data lake storage account details */\n defaultDataLakeStorage?: DataLakeStorageAccountDetails;\n /** SQL administrator login password */\n sqlAdministratorLoginPassword?: string;\n /** Workspace managed resource group. The resource group name uniquely identifies the resource group within the user subscriptionId. The resource group name must be no longer than 90 characters long, and must be alphanumeric characters (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and'.'. Note that the name cannot end with '.' */\n managedResourceGroupName?: string;\n /**\n * Resource provisioning state\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningState?: string;\n /** Login for workspace SQL active directory administrator */\n sqlAdministratorLogin?: string;\n /** Virtual Network profile */\n virtualNetworkProfile?: VirtualNetworkProfile;\n /** Connectivity endpoints */\n connectivityEndpoints?: { [propertyName: string]: string };\n /** Setting this to 'default' will ensure that all compute for this workspace is in a virtual network managed on behalf of the user. 
*/\n managedVirtualNetwork?: string;\n /** Private endpoint connections to the workspace */\n privateEndpointConnections?: PrivateEndpointConnection[];\n /** The encryption details of the workspace */\n encryption?: EncryptionDetails;\n /**\n * The workspace unique identifier\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly workspaceUID?: string;\n /**\n * Workspace level configs and feature flags\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly extraProperties?: { [propertyName: string]: any };\n /** Managed Virtual Network Settings */\n managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings;\n /** Git integration settings */\n workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration;\n /** Purview Configuration */\n purviewConfiguration?: PurviewConfiguration;\n /**\n * The ADLA resource ID.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly adlaResourceId?: string;\n}\n\n/** A private endpoint connection */\nexport interface PrivateEndpointConnection extends ProxyResource {\n /** The private endpoint which the connection belongs to. */\n privateEndpoint?: PrivateEndpoint;\n /** Connection state of the private endpoint connection. */\n privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState;\n /**\n * Provisioning state of the private endpoint connection.\n * NOTE: This property will not be serialized. It can only be populated by the server.\n */\n readonly provisioningState?: string;\n}\n\n/** Execute pipeline activity. */\nexport interface ExecutePipelineActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecutePipeline\";\n /** Pipeline reference. */\n pipeline: PipelineReference;\n /** Pipeline parameters. 
*/\n parameters?: { [propertyName: string]: any };\n /** Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. */\n waitOnCompletion?: boolean;\n}\n\n/** This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. */\nexport interface IfConditionActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"IfCondition\";\n /** An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. */\n expression: Expression;\n /** List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. */\n ifTrueActivities?: ActivityUnion[];\n /** List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. */\n ifFalseActivities?: ActivityUnion[];\n}\n\n/** This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. */\nexport interface SwitchActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Switch\";\n /** An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. */\n on: Expression;\n /** List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. 
*/\n cases?: SwitchCase[];\n /** List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. */\n defaultActivities?: ActivityUnion[];\n}\n\n/** This activity is used for iterating over a collection and executing given activities. */\nexport interface ForEachActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ForEach\";\n /** Should the loop be executed in sequence or in parallel (max 50) */\n isSequential?: boolean;\n /** Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). */\n batchCount?: number;\n /** Collection to iterate. */\n items: Expression;\n /** List of activities to execute. */\n activities: ActivityUnion[];\n}\n\n/** This activity suspends pipeline execution for the specified interval. */\nexport interface WaitActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Wait\";\n /** Duration in seconds. */\n waitTimeInSeconds: any;\n}\n\n/** This activity will fail within its own scope and output a custom error message and error code. The error message and code can be provided either as a string literal or as an expression that can be evaluated to a string at runtime. The activity scope can be the whole pipeline or a control activity (e.g. foreach, switch, until), if the fail activity is contained in it. */\nexport interface FailActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Fail\";\n /** The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). 
*/\n message: any;\n /** The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). */\n errorCode: any;\n}\n\n/** This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. */\nexport interface UntilActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Until\";\n /** An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true */\n expression: Expression;\n /** Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: any;\n /** List of activities to execute. */\n activities: ActivityUnion[];\n}\n\n/** This activity verifies that an external resource exists. */\nexport interface ValidationActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Validation\";\n /** Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: any;\n /** A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). */\n sleep?: any;\n /** Can be used if dataset points to a file. 
The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). */\n minimumSize?: any;\n /** Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). */\n childItems?: any;\n /** Validation activity dataset reference. */\n dataset: DatasetReference;\n}\n\n/** Filter and return results from input array based on the conditions. */\nexport interface FilterActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Filter\";\n /** Input array on which filter should be applied. */\n items: Expression;\n /** Condition to be used for filtering the input. */\n condition: Expression;\n}\n\n/** Set value for a Variable. */\nexport interface SetVariableActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SetVariable\";\n /** Name of the variable whose value needs to be set. */\n variableName?: string;\n /** Value to be set. Could be a static value or Expression */\n value?: any;\n}\n\n/** Append value for a Variable of type Array. */\nexport interface AppendVariableActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AppendVariable\";\n /** Name of the variable whose value needs to be appended to. */\n variableName?: string;\n /** Value to be appended. Could be a static value or Expression */\n value?: any;\n}\n\n/** WebHook activity. */\nexport interface WebHookActivity extends ControlActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebHook\";\n /** Rest API method for target endpoint. */\n method: WebHookActivityMethod;\n /** WebHook activity target endpoint and path. 
Type: string (or Expression with resultType string). */\n url: any;\n /** The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n timeout?: string;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). */\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n /** Authentication method used for calling the endpoint. */\n authentication?: WebActivityAuthentication;\n /** When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). */\n reportStatusOnCallBack?: any;\n}\n\n/** Copy activity. */\nexport interface CopyActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Copy\";\n /** List of inputs for the activity. */\n inputs?: DatasetReference[];\n /** List of outputs for the activity. */\n outputs?: DatasetReference[];\n /** Copy activity source. */\n source: CopySourceUnion;\n /** Copy activity sink. */\n sink: CopySinkUnion;\n /** Copy activity translator. If not specified, tabular translator is used. */\n translator?: any;\n /** Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableStaging?: any;\n /** Specifies interim staging settings when EnableStaging is true. 
*/\n stagingSettings?: StagingSettings;\n /** Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. */\n parallelCopies?: any;\n /** Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. */\n dataIntegrationUnits?: any;\n /** Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableSkipIncompatibleRow?: any;\n /** Redirect incompatible row settings when EnableSkipIncompatibleRow is true. */\n redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings;\n /** (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. */\n logStorageSettings?: LogStorageSettings;\n /** Log settings customer needs provide when enabling log. */\n logSettings?: LogSettings;\n /** Preserve Rules. */\n preserveRules?: any[];\n /** Preserve rules. */\n preserve?: any[];\n /** Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). */\n validateDataConsistency?: any;\n /** Specify the fault tolerance for data consistency. */\n skipErrorFile?: SkipErrorFile;\n}\n\n/** HDInsight Hive activity type. */\nexport interface HDInsightHiveActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightHive\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Script path. Type: string (or Expression with resultType string). */\n scriptPath?: any;\n /** Script linked service reference. 
*/\n scriptLinkedService?: LinkedServiceReference;\n /** Allows user to specify defines for Hive job request. */\n defines?: { [propertyName: string]: any };\n /** User specified arguments under hivevar namespace. */\n variables?: any[];\n /** Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) */\n queryTimeout?: number;\n}\n\n/** HDInsight Pig activity type. */\nexport interface HDInsightPigActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightPig\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). */\n arguments?: any;\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Script path. Type: string (or Expression with resultType string). */\n scriptPath?: any;\n /** Script linked service reference. */\n scriptLinkedService?: LinkedServiceReference;\n /** Allows user to specify defines for Pig job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight MapReduce activity type. */\nexport interface HDInsightMapReduceActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightMapReduce\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Class name. Type: string (or Expression with resultType string). */\n className: any;\n /** Jar path. Type: string (or Expression with resultType string). */\n jarFilePath: any;\n /** Jar linked service reference. */\n jarLinkedService?: LinkedServiceReference;\n /** Jar libs. 
*/\n jarLibs?: any[];\n /** Allows user to specify defines for the MapReduce job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight streaming activity type. */\nexport interface HDInsightStreamingActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightStreaming\";\n /** Storage linked service references. */\n storageLinkedServices?: LinkedServiceReference[];\n /** User specified arguments to HDInsightActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** Mapper executable name. Type: string (or Expression with resultType string). */\n mapper: any;\n /** Reducer executable name. Type: string (or Expression with resultType string). */\n reducer: any;\n /** Input blob path. Type: string (or Expression with resultType string). */\n input: any;\n /** Output blob path. Type: string (or Expression with resultType string). */\n output: any;\n /** Paths to streaming job files. Can be directories. */\n filePaths: any[];\n /** Linked service reference where the files are located. */\n fileLinkedService?: LinkedServiceReference;\n /** Combiner executable name. Type: string (or Expression with resultType string). */\n combiner?: any;\n /** Command line environment values. */\n commandEnvironment?: any[];\n /** Allows user to specify defines for streaming job request. */\n defines?: { [propertyName: string]: any };\n}\n\n/** HDInsight Spark activity. */\nexport interface HDInsightSparkActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HDInsightSpark\";\n /** The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). */\n rootPath: any;\n /** The relative path to the root folder of the code/package to be executed. 
Type: string (or Expression with resultType string). */\n entryFilePath: any;\n /** The user-specified arguments to HDInsightSparkActivity. */\n arguments?: any[];\n /** Debug info option. */\n getDebugInfo?: HDInsightActivityDebugInfoOption;\n /** The storage linked service for uploading the entry file and dependencies, and for receiving logs. */\n sparkJobLinkedService?: LinkedServiceReference;\n /** The application's Java/Spark main class. */\n className?: string;\n /** The user to impersonate that will execute the job. Type: string (or Expression with resultType string). */\n proxyUser?: any;\n /** Spark configuration property. */\n sparkConfig?: { [propertyName: string]: any };\n}\n\n/** Execute SSIS package activity. */\nexport interface ExecuteSsisPackageActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecuteSSISPackage\";\n /** SSIS package location. */\n packageLocation: SsisPackageLocation;\n /** Specifies the runtime to execute SSIS package. The value should be \"x86\" or \"x64\". Type: string (or Expression with resultType string). */\n runtime?: any;\n /** The logging level of SSIS package execution. Type: string (or Expression with resultType string). */\n loggingLevel?: any;\n /** The environment path to execute the SSIS package. Type: string (or Expression with resultType string). */\n environmentPath?: any;\n /** The package execution credential. */\n executionCredential?: SsisExecutionCredential;\n /** The integration runtime reference. */\n connectVia: IntegrationRuntimeReference;\n /** The project level parameters to execute the SSIS package. */\n projectParameters?: { [propertyName: string]: SsisExecutionParameter };\n /** The package level parameters to execute the SSIS package. */\n packageParameters?: { [propertyName: string]: SsisExecutionParameter };\n /** The project level connection managers to execute the SSIS package. 
*/\n projectConnectionManagers?: {\n [propertyName: string]: { [propertyName: string]: SsisExecutionParameter };\n };\n /** The package level connection managers to execute the SSIS package. */\n packageConnectionManagers?: {\n [propertyName: string]: { [propertyName: string]: SsisExecutionParameter };\n };\n /** The property overrides to execute the SSIS package. */\n propertyOverrides?: { [propertyName: string]: SsisPropertyOverride };\n /** SSIS package execution log location. */\n logLocation?: SsisLogLocation;\n}\n\n/** Custom activity type. */\nexport interface CustomActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Custom\";\n /** Command for custom activity Type: string (or Expression with resultType string). */\n command: any;\n /** Resource linked service reference. */\n resourceLinkedService?: LinkedServiceReference;\n /** Folder path for resource files Type: string (or Expression with resultType string). */\n folderPath?: any;\n /** Reference objects */\n referenceObjects?: CustomActivityReferenceObject;\n /** User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. */\n extendedProperties?: { [propertyName: string]: any };\n /** The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). */\n retentionTimeInDays?: any;\n /** Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). */\n autoUserSpecification?: any;\n}\n\n/** SQL stored procedure activity type. */\nexport interface SqlServerStoredProcedureActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerStoredProcedure\";\n /** Stored procedure name. 
Type: string (or Expression with resultType string). */\n storedProcedureName: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: any;\n}\n\n/** Delete activity. */\nexport interface DeleteActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Delete\";\n /** If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). */\n recursive?: any;\n /** The max concurrent connections to connect data source at the same time. */\n maxConcurrentConnections?: number;\n /** Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). */\n enableLogging?: any;\n /** Log storage settings customer need to provide when enableLogging is true. */\n logStorageSettings?: LogStorageSettings;\n /** Delete activity dataset reference. */\n dataset: DatasetReference;\n /** Delete activity store settings. */\n storeSettings?: StoreReadSettingsUnion;\n}\n\n/** Azure Data Explorer command activity. */\nexport interface AzureDataExplorerCommandActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureDataExplorerCommand\";\n /** A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). */\n command: any;\n /** Control command timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) */\n commandTimeout?: any;\n}\n\n/** Lookup activity. 
*/\nexport interface LookupActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Lookup\";\n /** Dataset-specific source properties, same as copy activity source. */\n source: CopySourceUnion;\n /** Lookup activity dataset reference. */\n dataset: DatasetReference;\n /** Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). */\n firstRowOnly?: any;\n}\n\n/** Web activity. */\nexport interface WebActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"WebActivity\";\n /** Rest API method for target endpoint. */\n method: WebActivityMethod;\n /** Web activity target endpoint and path. Type: string (or Expression with resultType string). */\n url: any;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). */\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n /** Authentication method used for calling the endpoint. */\n authentication?: WebActivityAuthentication;\n /** List of datasets passed to web endpoint. */\n datasets?: DatasetReference[];\n /** List of linked services passed to web endpoint. */\n linkedServices?: LinkedServiceReference[];\n /** The integration runtime reference. 
*/\n connectVia?: IntegrationRuntimeReference;\n}\n\n/** Activity to get metadata of dataset */\nexport interface GetMetadataActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GetMetadata\";\n /** GetMetadata activity dataset reference. */\n dataset: DatasetReference;\n /** Fields of metadata to get from dataset. */\n fieldList?: any[];\n /** GetMetadata activity store settings. */\n storeSettings?: StoreReadSettingsUnion;\n /** GetMetadata activity format settings. */\n formatSettings?: FormatReadSettingsUnion;\n}\n\n/** Azure ML Batch Execution activity. */\nexport interface AzureMLBatchExecutionActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLBatchExecution\";\n /** Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. */\n globalParameters?: { [propertyName: string]: any };\n /** Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. */\n webServiceOutputs?: { [propertyName: string]: AzureMLWebServiceFile };\n /** Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. */\n webServiceInputs?: { [propertyName: string]: AzureMLWebServiceFile };\n}\n\n/** Azure ML Update Resource management activity. 
*/\nexport interface AzureMLUpdateResourceActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLUpdateResource\";\n /** Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). */\n trainedModelName: any;\n /** Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. */\n trainedModelLinkedServiceName: LinkedServiceReference;\n /** The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). */\n trainedModelFilePath: any;\n}\n\n/** Azure ML Execute Pipeline activity. */\nexport interface AzureMLExecutePipelineActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMLExecutePipeline\";\n /** ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). */\n mlPipelineId: any;\n /** Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). */\n experimentName?: any;\n /** Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). */\n mlPipelineParameters?: any;\n /** The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). 
*/\n mlParentRunId?: any;\n /** Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). */\n continueOnStepFailure?: any;\n}\n\n/** Data Lake Analytics U-SQL activity. */\nexport interface DataLakeAnalyticsUsqlActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DataLakeAnalyticsU-SQL\";\n /** Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). */\n scriptPath: any;\n /** Script linked service reference. */\n scriptLinkedService: LinkedServiceReference;\n /** The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. */\n degreeOfParallelism?: any;\n /** Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. */\n priority?: any;\n /** Parameters for U-SQL job request. */\n parameters?: { [propertyName: string]: any };\n /** Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). */\n runtimeVersion?: any;\n /** Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). */\n compilationMode?: any;\n}\n\n/** DatabricksNotebook activity. */\nexport interface DatabricksNotebookActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksNotebook\";\n /** The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. 
Type: string (or Expression with resultType string). */\n notebookPath: any;\n /** Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. */\n baseParameters?: { [propertyName: string]: any };\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** DatabricksSparkJar activity. */\nexport interface DatabricksSparkJarActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksSparkJar\";\n /** The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). */\n mainClassName: any;\n /** Parameters that will be passed to the main method. */\n parameters?: any[];\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** DatabricksSparkPython activity. */\nexport interface DatabricksSparkPythonActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DatabricksSparkPython\";\n /** The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). */\n pythonFile: any;\n /** Command line parameters that will be passed to the Python file. */\n parameters?: any[];\n /** A list of libraries to be installed on the cluster that will execute the job. */\n libraries?: { [propertyName: string]: any }[];\n}\n\n/** Azure Function activity. 
*/\nexport interface AzureFunctionActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureFunctionActivity\";\n /** Rest API method for target endpoint. */\n method: AzureFunctionActivityMethod;\n /** Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) */\n functionName: any;\n /** Represents the headers that will be sent to the request. For example, to set the language and type on a request: \"headers\" : { \"Accept-Language\": \"en-us\", \"Content-Type\": \"application/json\" }. Type: string (or Expression with resultType string). */\n headers?: any;\n /** Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). */\n body?: any;\n}\n\n/** Execute data flow activity. */\nexport interface ExecuteDataFlowActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ExecuteDataFlow\";\n /** Data flow reference. */\n dataflow: DataFlowReference;\n /** Staging info for execute data flow activity. */\n staging?: DataFlowStagingInfo;\n /** The integration runtime reference. */\n integrationRuntime?: IntegrationRuntimeReference;\n /** Compute properties for data flow activity. */\n compute?: ExecuteDataFlowActivityTypePropertiesCompute;\n /** Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) */\n traceLevel?: any;\n /** Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) */\n continueOnError?: any;\n /** Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. 
Type: boolean (or Expression with resultType boolean) */\n runConcurrently?: any;\n /** Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) */\n sourceStagingConcurrency?: any;\n}\n\n/** Script activity type. */\nexport interface ScriptActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Script\";\n /** Array of script blocks. Type: array. */\n scripts?: ScriptActivityScriptBlock[];\n /** Log settings of script activity. */\n logSettings?: ScriptActivityTypePropertiesLogSettings;\n}\n\n/** Execute Synapse notebook activity. */\nexport interface SynapseNotebookActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SynapseNotebook\";\n /** Synapse notebook reference. */\n notebook: SynapseNotebookReference;\n /** The name of the big data pool which will be used to execute the notebook. */\n sparkPool?: BigDataPoolParametrizationReference;\n /** Notebook parameters. */\n parameters?: { [propertyName: string]: NotebookParameter };\n /** Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string). */\n executorSize?: any;\n /** Spark configuration properties, which will override the 'conf' of the notebook you provide. */\n conf?: any;\n /** Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string). */\n driverSize?: any;\n /** Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. 
*/\n numExecutors?: number;\n}\n\n/** Execute spark job activity. */\nexport interface SynapseSparkJobDefinitionActivity extends ExecutionActivity {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SparkJob\";\n /** Synapse spark job reference. */\n sparkJob: SynapseSparkJobReference;\n /** User specified arguments to SynapseSparkJobDefinitionActivity. */\n arguments?: any[];\n /** The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string). */\n file?: any;\n /** Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). */\n scanFolder?: any;\n /** The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). */\n className?: any;\n /** (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide. */\n files?: any[];\n /** Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide. */\n pythonCodeReference?: any[];\n /** Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide. */\n filesV2?: any[];\n /** The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide. 
*/\n targetBigDataPool?: BigDataPoolParametrizationReference;\n /** Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */\n executorSize?: any;\n /** Spark configuration properties, which will override the 'conf' of the spark job definition you provide. */\n conf?: any;\n /** Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). */\n driverSize?: any;\n /** Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). */\n numExecutors?: any;\n /** The type of the spark config. */\n configurationType?: ConfigurationType;\n /** The spark configuration of the spark job. */\n targetSparkConfiguration?: SparkConfigurationParametrizationReference;\n /** Spark configuration property. */\n sparkConfig?: { [propertyName: string]: any };\n}\n\n/** Trigger that creates pipeline runs periodically, on schedule. */\nexport interface ScheduleTrigger extends MultiplePipelineTrigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ScheduleTrigger\";\n /** Recurrence schedule configuration. */\n recurrence: ScheduleTriggerRecurrence;\n}\n\n/** Trigger that runs every time the selected Blob container changes. */\nexport interface BlobTrigger extends MultiplePipelineTrigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BlobTrigger\";\n /** The path of the container/folder that will trigger the pipeline. 
*/\n folderPath: string;\n /** The max number of parallel files to handle when it is triggered. */\n maxConcurrency: number;\n /** The Azure Storage linked service reference. */\n linkedService: LinkedServiceReference;\n}\n\n/** Trigger that runs every time a Blob event occurs. */\nexport interface BlobEventsTrigger extends MultiplePipelineTrigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"BlobEventsTrigger\";\n /** The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */\n blobPathBeginsWith?: string;\n /** The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. */\n blobPathEndsWith?: string;\n /** If set to true, blobs with zero bytes will be ignored. */\n ignoreEmptyBlobs?: boolean;\n /** The type of events that cause this trigger to fire. */\n events: BlobEventType[];\n /** The ARM resource ID of the Storage Account. */\n scope: string;\n}\n\n/** Trigger that runs every time a custom event is received. */\nexport interface CustomEventsTrigger extends MultiplePipelineTrigger {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CustomEventsTrigger\";\n /** The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */\n subjectBeginsWith?: string;\n /** The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. 
*/\n subjectEndsWith?: string;\n /** The list of event types that cause this trigger to fire. */\n events: any[];\n /** The ARM resource ID of the Azure Event Grid Topic. */\n scope: string;\n}\n\n/** A copy activity Azure Table source. */\nexport interface AzureTableSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureTableSource\";\n /** Azure Table source query. Type: string (or Expression with resultType string). */\n azureTableSourceQuery?: any;\n /** Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). */\n azureTableSourceIgnoreTableNotFound?: any;\n}\n\n/** A copy activity source for Informix. */\nexport interface InformixSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"InformixSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for Db2 databases. */\nexport interface Db2Source extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"Db2Source\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for ODBC databases. */\nexport interface OdbcSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OdbcSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for MySQL databases. */\nexport interface MySqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MySqlSource\";\n /** Database query. Type: string (or Expression with resultType string). 
*/\n query?: any;\n}\n\n/** A copy activity source for PostgreSQL databases. */\nexport interface PostgreSqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PostgreSqlSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for Sybase databases. */\nexport interface SybaseSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SybaseSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for SapBW server via MDX. */\nexport interface SapBwSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapBwSource\";\n /** MDX query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Salesforce source. */\nexport interface SalesforceSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n /** The read behavior for the operation. Default is Query. */\n readBehavior?: SalesforceSourceReadBehavior;\n}\n\n/** A copy activity source for SAP Cloud for Customer source. */\nexport interface SapCloudForCustomerSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapCloudForCustomerSource\";\n /** SAP Cloud for Customer OData query. For example, \"$top=1\". Type: string (or Expression with resultType string). */\n query?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. 
Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity source for SAP ECC source. */\nexport interface SapEccSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapEccSource\";\n /** SAP ECC OData query. For example, \"$top=1\". Type: string (or Expression with resultType string). */\n query?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity source for SAP HANA source. */\nexport interface SapHanaSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapHanaSource\";\n /** SAP HANA Sql query. Type: string (or Expression with resultType string). */\n query?: any;\n /** The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). */\n packetSize?: any;\n /** The partition mechanism that will be used for SAP HANA read in parallel. */\n partitionOption?: SapHanaPartitionOption;\n /** The settings that will be leveraged for SAP HANA source partitioning. */\n partitionSettings?: SapHanaPartitionSettings;\n}\n\n/** A copy activity source for SAP Business Warehouse Open Hub Destination source. */\nexport interface SapOpenHubSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOpenHubSource\";\n /** Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/\n excludeLastRequest?: any;\n /** The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */\n baseRequestId?: any;\n /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */\n customRfcReadTableFunctionModule?: any;\n /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */\n sapDataColumnDelimiter?: any;\n}\n\n/** A copy activity source for SAP ODP source. */\nexport interface SapOdpSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapOdpSource\";\n /** The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). */\n extractionMode?: any;\n /** The subscriber process to manage the delta process. Type: string (or Expression with resultType string). */\n subscriberProcess?: any;\n /** Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). */\n selection?: any;\n /** Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). */\n projection?: any;\n}\n\n/** A copy activity source for SAP Table source. */\nexport interface SapTableSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SapTableSource\";\n /** The number of rows to be retrieved. Type: integer(or Expression with resultType integer). */\n rowCount?: any;\n /** The number of rows that will be skipped. 
Type: integer (or Expression with resultType integer). */\n rowSkips?: any;\n /** The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). */\n rfcTableFields?: any;\n /** The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). */\n rfcTableOptions?: any;\n /** Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). */\n batchSize?: any;\n /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */\n customRfcReadTableFunctionModule?: any;\n /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */\n sapDataColumnDelimiter?: any;\n /** The partition mechanism that will be used for SAP table read in parallel. */\n partitionOption?: SapTablePartitionOption;\n /** The settings that will be leveraged for SAP table source partitioning. */\n partitionSettings?: SapTablePartitionSettings;\n}\n\n/** A copy activity SQL source. */\nexport interface SqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlSource\";\n /** SQL reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". 
*/\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */\n isolationLevel?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity SQL server source. */\nexport interface SqlServerSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlServerSource\";\n /** SQL reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** Which additional types to produce. */\n produceAdditionalTypes?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity Amazon RDS for SQL Server source. 
*/\nexport interface AmazonRdsForSqlServerSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRdsForSqlServerSource\";\n /** SQL reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** Which additional types to produce. */\n produceAdditionalTypes?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity Azure SQL source. */\nexport interface AzureSqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureSqlSource\";\n /** SQL reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** Which additional types to produce. 
*/\n produceAdditionalTypes?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity Azure SQL Managed Instance source. */\nexport interface SqlMISource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlMISource\";\n /** SQL reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". */\n storedProcedureParameters?: {\n [propertyName: string]: StoredProcedureParameter;\n };\n /** Which additional types to produce. */\n produceAdditionalTypes?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity SQL Data Warehouse source. */\nexport interface SqlDWSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SqlDWSource\";\n /** SQL Data Warehouse reader query. Type: string (or Expression with resultType string). */\n sqlReaderQuery?: any;\n /** Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. 
Type: string (or Expression with resultType string). */\n sqlReaderStoredProcedureName?: any;\n /** Value and type setting for stored procedure parameters. Example: \"{Parameter1: {value: \"1\", type: \"int\"}}\". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. */\n storedProcedureParameters?: any;\n /** The partition mechanism that will be used for Sql read in parallel. Possible values include: \"None\", \"PhysicalPartitionsOfTable\", \"DynamicRange\". */\n partitionOption?: any;\n /** The settings that will be leveraged for Sql source partitioning. */\n partitionSettings?: SqlPartitionSettings;\n}\n\n/** A copy activity Azure MySQL source. */\nexport interface AzureMySqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMySqlSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Teradata source. */\nexport interface TeradataSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TeradataSource\";\n /** Teradata query. Type: string (or Expression with resultType string). */\n query?: any;\n /** The partition mechanism that will be used for teradata read in parallel. */\n partitionOption?: TeradataPartitionOption;\n /** The settings that will be leveraged for teradata source partitioning. */\n partitionSettings?: TeradataPartitionSettings;\n}\n\n/** A copy activity source for a Cassandra database. */\nexport interface CassandraSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CassandraSource\";\n /** Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). 
*/\n query?: any;\n /** The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. */\n consistencyLevel?: CassandraSourceReadConsistencyLevels;\n}\n\n/** A copy activity Amazon Marketplace Web Service source. */\nexport interface AmazonMWSSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonMWSSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Azure PostgreSQL source. */\nexport interface AzurePostgreSqlSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzurePostgreSqlSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Concur Service source. */\nexport interface ConcurSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ConcurSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Couchbase server source. */\nexport interface CouchbaseSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"CouchbaseSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Drill server source. 
*/\nexport interface DrillSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DrillSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Eloqua server source. */\nexport interface EloquaSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"EloquaSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Google BigQuery service source. */\nexport interface GoogleBigQuerySource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleBigQuerySource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Greenplum Database source. */\nexport interface GreenplumSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GreenplumSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity HBase server source. */\nexport interface HBaseSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HBaseSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Hive Server source. */\nexport interface HiveSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HiveSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/\n query?: any;\n}\n\n/** A copy activity Hubspot Service source. */\nexport interface HubspotSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"HubspotSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Impala server source. */\nexport interface ImpalaSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ImpalaSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Jira Service source. */\nexport interface JiraSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"JiraSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Magento server source. */\nexport interface MagentoSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MagentoSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity MariaDB server source. */\nexport interface MariaDBSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MariaDBSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Azure MariaDB source. */\nexport interface AzureMariaDBSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AzureMariaDBSource\";\n /** A query to retrieve data from source. 
Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Marketo server source. */\nexport interface MarketoSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"MarketoSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Paypal Service source. */\nexport interface PaypalSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PaypalSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Phoenix server source. */\nexport interface PhoenixSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PhoenixSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Presto server source. */\nexport interface PrestoSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"PrestoSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity QuickBooks server source. */\nexport interface QuickBooksSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"QuickBooksSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity ServiceNow server source. 
*/\nexport interface ServiceNowSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ServiceNowSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Shopify Service source. */\nexport interface ShopifySource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ShopifySource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Spark Server source. */\nexport interface SparkSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SparkSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Square Service source. */\nexport interface SquareSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SquareSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Xero Service source. */\nexport interface XeroSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"XeroSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Zoho server source. */\nexport interface ZohoSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ZohoSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/\n query?: any;\n}\n\n/** A copy activity Netezza source. */\nexport interface NetezzaSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"NetezzaSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n /** The partition mechanism that will be used for Netezza read in parallel. */\n partitionOption?: NetezzaPartitionOption;\n /** The settings that will be leveraged for Netezza source partitioning. */\n partitionSettings?: NetezzaPartitionSettings;\n}\n\n/** A copy activity Vertica source. */\nexport interface VerticaSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"VerticaSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Salesforce Marketing Cloud source. */\nexport interface SalesforceMarketingCloudSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"SalesforceMarketingCloudSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Responsys source. */\nexport interface ResponsysSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"ResponsysSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Dynamics AX source. */\nexport interface DynamicsAXSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"DynamicsAXSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/\n query?: any;\n /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */\n httpRequestTimeout?: any;\n}\n\n/** A copy activity Oracle Service Cloud source. */\nexport interface OracleServiceCloudSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"OracleServiceCloudSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity Google AdWords service source. */\nexport interface GoogleAdWordsSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"GoogleAdWordsSource\";\n /** A query to retrieve data from source. Type: string (or Expression with resultType string). */\n query?: any;\n}\n\n/** A copy activity source for Amazon Redshift Source. */\nexport interface AmazonRedshiftSource extends TabularSource {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"AmazonRedshiftSource\";\n /** Database query. Type: string (or Expression with resultType string). */\n query?: any;\n /** The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. */\n redshiftUnloadSettings?: RedshiftUnloadSettings;\n}\n\n/** Referenced tumbling window trigger dependency. 
*/\nexport interface TumblingWindowTriggerDependencyReference\n extends TriggerDependencyReference {\n /** Polymorphic discriminator, which specifies the different types this object can be */\n type: \"TumblingWindowTriggerDependencyReference\";\n /** Timespan applied to the start time of a tumbling window when evaluating dependency. */\n offset?: string;\n /** The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. */\n size?: string;\n}\n\n/** Spark Configuration resource type. */\nexport interface SparkConfigurationResource extends SubResource {\n /** Properties of Spark Configuration. */\n properties: SparkConfiguration;\n}\n\n/** Data flow resource type. */\nexport interface DataFlowResource extends SubResource {\n /** Data flow properties. */\n properties: DataFlowUnion;\n}\n\n/** Dataset resource type. */\nexport interface DatasetResource extends SubResource {\n /** Dataset properties. */\n properties: DatasetUnion;\n}\n\n/** Integration runtime resource type. */\nexport interface IntegrationRuntimeResource extends SubResource {\n /** Integration runtime properties. */\n properties: IntegrationRuntimeUnion;\n}\n\n/** Library response details */\nexport interface LibraryResource extends SubResource {\n /** Library/package properties. */\n properties: LibraryResourceProperties;\n}\n\n/** Linked service resource type. */\nexport interface LinkedServiceResource extends SubResource {\n /** Properties of linked service. */\n properties: LinkedServiceUnion;\n}\n\n/** Pipeline resource type. */\nexport interface PipelineResource extends SubResource {\n /** Describes unknown properties. The value of an unknown property can be of \"any\" type. */\n [property: string]: any;\n /** The description of the pipeline. */\n description?: string;\n /** List of activities in pipeline. */\n activities?: ActivityUnion[];\n /** List of parameters for pipeline. 
*/\n parameters?: { [propertyName: string]: ParameterSpecification };\n /** List of variables for pipeline. */\n variables?: { [propertyName: string]: VariableSpecification };\n /** The max number of concurrent runs for the pipeline. */\n concurrency?: number;\n /** List of tags that can be used for describing the Pipeline. */\n annotations?: any[];\n /** Dimensions emitted by Pipeline. */\n runDimensions?: { [propertyName: string]: any };\n /** The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */\n folder?: PipelineFolder;\n}\n\n/** Spark job definition resource type. */\nexport interface SparkJobDefinitionResource extends SubResource {\n /** Properties of spark job definition. */\n properties: SparkJobDefinition;\n}\n\n/** Trigger resource type. */\nexport interface TriggerResource extends SubResource {\n /** Properties of the trigger. */\n properties: TriggerUnion;\n}\n\n/** RerunTrigger resource type. */\nexport interface RerunTriggerResource extends SubResource {\n /** Properties of the rerun trigger. */\n properties: RerunTumblingWindowTrigger;\n}\n\n/** Defines headers for DataFlowDebugSession_createDataFlowDebugSession operation. */\nexport interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders {\n /** URI to poll for asynchronous operation status. */\n location?: string;\n}\n\n/** Defines headers for DataFlowDebugSession_executeCommand operation. */\nexport interface DataFlowDebugSessionExecuteCommandHeaders {\n /** URI to poll for asynchronous operation status. */\n location?: string;\n}\n\n/** Known values of {@link Type} that the service accepts. */\nexport enum KnownType {\n /** LinkedServiceReference */\n LinkedServiceReference = \"LinkedServiceReference\"\n}\n\n/**\n * Defines values for Type. 
\\\n * {@link KnownType} can be used interchangeably with Type,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **LinkedServiceReference**\n */\nexport type Type = string;\n\n/** Known values of {@link RequestStatus} that the service accepts. */\nexport enum KnownRequestStatus {\n /** Running */\n Running = \"Running\",\n /** Completed */\n Completed = \"Completed\",\n /** Failed */\n Failed = \"Failed\"\n}\n\n/**\n * Defines values for RequestStatus. \\\n * {@link KnownRequestStatus} can be used interchangeably with RequestStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Running** \\\n * **Completed** \\\n * **Failed**\n */\nexport type RequestStatus = string;\n\n/** Known values of {@link ResourceStatus} that the service accepts. */\nexport enum KnownResourceStatus {\n /** Creating */\n Creating = \"Creating\",\n /** Created */\n Created = \"Created\",\n /** Failed */\n Failed = \"Failed\"\n}\n\n/**\n * Defines values for ResourceStatus. \\\n * {@link KnownResourceStatus} can be used interchangeably with ResourceStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Creating** \\\n * **Created** \\\n * **Failed**\n */\nexport type ResourceStatus = string;\n\n/** Known values of {@link NodeSize} that the service accepts. */\nexport enum KnownNodeSize {\n /** None */\n None = \"None\",\n /** Small */\n Small = \"Small\",\n /** Medium */\n Medium = \"Medium\",\n /** Large */\n Large = \"Large\",\n /** XLarge */\n XLarge = \"XLarge\",\n /** XXLarge */\n XXLarge = \"XXLarge\",\n /** XXXLarge */\n XXXLarge = \"XXXLarge\"\n}\n\n/**\n * Defines values for NodeSize. 
\\\n * {@link KnownNodeSize} can be used interchangeably with NodeSize,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Small** \\\n * **Medium** \\\n * **Large** \\\n * **XLarge** \\\n * **XXLarge** \\\n * **XXXLarge**\n */\nexport type NodeSize = string;\n\n/** Known values of {@link NodeSizeFamily} that the service accepts. */\nexport enum KnownNodeSizeFamily {\n /** None */\n None = \"None\",\n /** MemoryOptimized */\n MemoryOptimized = \"MemoryOptimized\"\n}\n\n/**\n * Defines values for NodeSizeFamily. \\\n * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **MemoryOptimized**\n */\nexport type NodeSizeFamily = string;\n\n/** Known values of {@link IntegrationRuntimeType} that the service accepts. */\nexport enum KnownIntegrationRuntimeType {\n /** Managed */\n Managed = \"Managed\",\n /** SelfHosted */\n SelfHosted = \"SelfHosted\"\n}\n\n/**\n * Defines values for IntegrationRuntimeType. \\\n * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Managed** \\\n * **SelfHosted**\n */\nexport type IntegrationRuntimeType = string;\n\n/** Known values of {@link ParameterType} that the service accepts. */\nexport enum KnownParameterType {\n /** Object */\n Object = \"Object\",\n /** String */\n String = \"String\",\n /** Int */\n Int = \"Int\",\n /** Float */\n Float = \"Float\",\n /** Bool */\n Bool = \"Bool\",\n /** Array */\n Array = \"Array\",\n /** SecureString */\n SecureString = \"SecureString\"\n}\n\n/**\n * Defines values for ParameterType. 
\\\n * {@link KnownParameterType} can be used interchangeably with ParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Object** \\\n * **String** \\\n * **Int** \\\n * **Float** \\\n * **Bool** \\\n * **Array** \\\n * **SecureString**\n */\nexport type ParameterType = string;\n\n/** Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. */\nexport enum KnownIntegrationRuntimeReferenceType {\n /** IntegrationRuntimeReference */\n IntegrationRuntimeReference = \"IntegrationRuntimeReference\"\n}\n\n/**\n * Defines values for IntegrationRuntimeReferenceType. \\\n * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **IntegrationRuntimeReference**\n */\nexport type IntegrationRuntimeReferenceType = string;\n\n/** Known values of {@link DataFlowDebugCommandType} that the service accepts. */\nexport enum KnownDataFlowDebugCommandType {\n /** ExecutePreviewQuery */\n ExecutePreviewQuery = \"executePreviewQuery\",\n /** ExecuteStatisticsQuery */\n ExecuteStatisticsQuery = \"executeStatisticsQuery\",\n /** ExecuteExpressionQuery */\n ExecuteExpressionQuery = \"executeExpressionQuery\"\n}\n\n/**\n * Defines values for DataFlowDebugCommandType. \\\n * {@link KnownDataFlowDebugCommandType} can be used interchangeably with DataFlowDebugCommandType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **executePreviewQuery** \\\n * **executeStatisticsQuery** \\\n * **executeExpressionQuery**\n */\nexport type DataFlowDebugCommandType = string;\n\n/** Known values of {@link BigDataPoolReferenceType} that the service accepts. 
*/\nexport enum KnownBigDataPoolReferenceType {\n /** BigDataPoolReference */\n BigDataPoolReference = \"BigDataPoolReference\"\n}\n\n/**\n * Defines values for BigDataPoolReferenceType. \\\n * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **BigDataPoolReference**\n */\nexport type BigDataPoolReferenceType = string;\n\n/** Known values of {@link SparkConfigurationReferenceType} that the service accepts. */\nexport enum KnownSparkConfigurationReferenceType {\n /** SparkConfigurationReference */\n SparkConfigurationReference = \"SparkConfigurationReference\"\n}\n\n/**\n * Defines values for SparkConfigurationReferenceType. \\\n * {@link KnownSparkConfigurationReferenceType} can be used interchangeably with SparkConfigurationReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkConfigurationReference**\n */\nexport type SparkConfigurationReferenceType = string;\n\n/** Known values of {@link CellOutputType} that the service accepts. */\nexport enum KnownCellOutputType {\n /** ExecuteResult */\n ExecuteResult = \"execute_result\",\n /** DisplayData */\n DisplayData = \"display_data\",\n /** Stream */\n Stream = \"stream\",\n /** Error */\n Error = \"error\"\n}\n\n/**\n * Defines values for CellOutputType. \\\n * {@link KnownCellOutputType} can be used interchangeably with CellOutputType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **execute_result** \\\n * **display_data** \\\n * **stream** \\\n * **error**\n */\nexport type CellOutputType = string;\n\n/** Known values of {@link DependencyCondition} that the service accepts. 
*/\nexport enum KnownDependencyCondition {\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Skipped */\n Skipped = \"Skipped\",\n /** Completed */\n Completed = \"Completed\"\n}\n\n/**\n * Defines values for DependencyCondition. \\\n * {@link KnownDependencyCondition} can be used interchangeably with DependencyCondition,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Succeeded** \\\n * **Failed** \\\n * **Skipped** \\\n * **Completed**\n */\nexport type DependencyCondition = string;\n\n/** Known values of {@link VariableType} that the service accepts. */\nexport enum KnownVariableType {\n /** String */\n String = \"String\",\n /** Bool */\n Bool = \"Bool\",\n /** Boolean */\n Boolean = \"Boolean\",\n /** Array */\n Array = \"Array\"\n}\n\n/**\n * Defines values for VariableType. \\\n * {@link KnownVariableType} can be used interchangeably with VariableType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **String** \\\n * **Bool** \\\n * **Boolean** \\\n * **Array**\n */\nexport type VariableType = string;\n\n/** Known values of {@link RunQueryFilterOperand} that the service accepts. 
*/\nexport enum KnownRunQueryFilterOperand {\n /** PipelineName */\n PipelineName = \"PipelineName\",\n /** Status */\n Status = \"Status\",\n /** RunStart */\n RunStart = \"RunStart\",\n /** RunEnd */\n RunEnd = \"RunEnd\",\n /** ActivityName */\n ActivityName = \"ActivityName\",\n /** ActivityRunStart */\n ActivityRunStart = \"ActivityRunStart\",\n /** ActivityRunEnd */\n ActivityRunEnd = \"ActivityRunEnd\",\n /** ActivityType */\n ActivityType = \"ActivityType\",\n /** TriggerName */\n TriggerName = \"TriggerName\",\n /** TriggerRunTimestamp */\n TriggerRunTimestamp = \"TriggerRunTimestamp\",\n /** RunGroupId */\n RunGroupId = \"RunGroupId\",\n /** LatestOnly */\n LatestOnly = \"LatestOnly\"\n}\n\n/**\n * Defines values for RunQueryFilterOperand. \\\n * {@link KnownRunQueryFilterOperand} can be used interchangeably with RunQueryFilterOperand,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **PipelineName** \\\n * **Status** \\\n * **RunStart** \\\n * **RunEnd** \\\n * **ActivityName** \\\n * **ActivityRunStart** \\\n * **ActivityRunEnd** \\\n * **ActivityType** \\\n * **TriggerName** \\\n * **TriggerRunTimestamp** \\\n * **RunGroupId** \\\n * **LatestOnly**\n */\nexport type RunQueryFilterOperand = string;\n\n/** Known values of {@link RunQueryFilterOperator} that the service accepts. */\nexport enum KnownRunQueryFilterOperator {\n /** Equals */\n Equals = \"Equals\",\n /** NotEquals */\n NotEquals = \"NotEquals\",\n /** In */\n In = \"In\",\n /** NotIn */\n NotIn = \"NotIn\"\n}\n\n/**\n * Defines values for RunQueryFilterOperator. 
\\\n * {@link KnownRunQueryFilterOperator} can be used interchangeably with RunQueryFilterOperator,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Equals** \\\n * **NotEquals** \\\n * **In** \\\n * **NotIn**\n */\nexport type RunQueryFilterOperator = string;\n\n/** Known values of {@link RunQueryOrderByField} that the service accepts. */\nexport enum KnownRunQueryOrderByField {\n /** RunStart */\n RunStart = \"RunStart\",\n /** RunEnd */\n RunEnd = \"RunEnd\",\n /** PipelineName */\n PipelineName = \"PipelineName\",\n /** Status */\n Status = \"Status\",\n /** ActivityName */\n ActivityName = \"ActivityName\",\n /** ActivityRunStart */\n ActivityRunStart = \"ActivityRunStart\",\n /** ActivityRunEnd */\n ActivityRunEnd = \"ActivityRunEnd\",\n /** TriggerName */\n TriggerName = \"TriggerName\",\n /** TriggerRunTimestamp */\n TriggerRunTimestamp = \"TriggerRunTimestamp\"\n}\n\n/**\n * Defines values for RunQueryOrderByField. \\\n * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **RunStart** \\\n * **RunEnd** \\\n * **PipelineName** \\\n * **Status** \\\n * **ActivityName** \\\n * **ActivityRunStart** \\\n * **ActivityRunEnd** \\\n * **TriggerName** \\\n * **TriggerRunTimestamp**\n */\nexport type RunQueryOrderByField = string;\n\n/** Known values of {@link RunQueryOrder} that the service accepts. */\nexport enum KnownRunQueryOrder {\n /** ASC */\n ASC = \"ASC\",\n /** Desc */\n Desc = \"DESC\"\n}\n\n/**\n * Defines values for RunQueryOrder. 
\\\n * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ASC** \\\n * **DESC**\n */\nexport type RunQueryOrder = string;\n\n/** Known values of {@link SparkJobType} that the service accepts. */\nexport enum KnownSparkJobType {\n /** SparkBatch */\n SparkBatch = \"SparkBatch\",\n /** SparkSession */\n SparkSession = \"SparkSession\"\n}\n\n/**\n * Defines values for SparkJobType. \\\n * {@link KnownSparkJobType} can be used interchangeably with SparkJobType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkBatch** \\\n * **SparkSession**\n */\nexport type SparkJobType = string;\n\n/** Known values of {@link SparkBatchJobResultType} that the service accepts. */\nexport enum KnownSparkBatchJobResultType {\n /** Uncertain */\n Uncertain = \"Uncertain\",\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Cancelled */\n Cancelled = \"Cancelled\"\n}\n\n/**\n * Defines values for SparkBatchJobResultType. \\\n * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Uncertain** \\\n * **Succeeded** \\\n * **Failed** \\\n * **Cancelled**\n */\nexport type SparkBatchJobResultType = string;\n\n/** Known values of {@link SchedulerCurrentState} that the service accepts. */\nexport enum KnownSchedulerCurrentState {\n /** Queued */\n Queued = \"Queued\",\n /** Scheduled */\n Scheduled = \"Scheduled\",\n /** Ended */\n Ended = \"Ended\"\n}\n\n/**\n * Defines values for SchedulerCurrentState. 
\\\n * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Queued** \\\n * **Scheduled** \\\n * **Ended**\n */\nexport type SchedulerCurrentState = string;\n\n/** Known values of {@link PluginCurrentState} that the service accepts. */\nexport enum KnownPluginCurrentState {\n /** Preparation */\n Preparation = \"Preparation\",\n /** ResourceAcquisition */\n ResourceAcquisition = \"ResourceAcquisition\",\n /** Queued */\n Queued = \"Queued\",\n /** Submission */\n Submission = \"Submission\",\n /** Monitoring */\n Monitoring = \"Monitoring\",\n /** Cleanup */\n Cleanup = \"Cleanup\",\n /** Ended */\n Ended = \"Ended\"\n}\n\n/**\n * Defines values for PluginCurrentState. \\\n * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Preparation** \\\n * **ResourceAcquisition** \\\n * **Queued** \\\n * **Submission** \\\n * **Monitoring** \\\n * **Cleanup** \\\n * **Ended**\n */\nexport type PluginCurrentState = string;\n\n/** Known values of {@link SparkErrorSource} that the service accepts. */\nexport enum KnownSparkErrorSource {\n /** System */\n System = \"System\",\n /** User */\n User = \"User\",\n /** Unknown */\n Unknown = \"Unknown\",\n /** Dependency */\n Dependency = \"Dependency\"\n}\n\n/**\n * Defines values for SparkErrorSource. \\\n * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **System** \\\n * **User** \\\n * **Unknown** \\\n * **Dependency**\n */\nexport type SparkErrorSource = string;\n\n/** Known values of {@link LivyStates} that the service accepts. 
*/\nexport enum KnownLivyStates {\n /** NotStarted */\n NotStarted = \"not_started\",\n /** Starting */\n Starting = \"starting\",\n /** Idle */\n Idle = \"idle\",\n /** Busy */\n Busy = \"busy\",\n /** ShuttingDown */\n ShuttingDown = \"shutting_down\",\n /** Error */\n Error = \"error\",\n /** Dead */\n Dead = \"dead\",\n /** Killed */\n Killed = \"killed\",\n /** Success */\n Success = \"success\",\n /** Running */\n Running = \"running\",\n /** Recovering */\n Recovering = \"recovering\"\n}\n\n/**\n * Defines values for LivyStates. \\\n * {@link KnownLivyStates} can be used interchangeably with LivyStates,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **not_started** \\\n * **starting** \\\n * **idle** \\\n * **busy** \\\n * **shutting_down** \\\n * **error** \\\n * **dead** \\\n * **killed** \\\n * **success** \\\n * **running** \\\n * **recovering**\n */\nexport type LivyStates = string;\n\n/** Known values of {@link CreateMode} that the service accepts. */\nexport enum KnownCreateMode {\n /** Default */\n Default = \"Default\",\n /** PointInTimeRestore */\n PointInTimeRestore = \"PointInTimeRestore\",\n /** Recovery */\n Recovery = \"Recovery\",\n /** Restore */\n Restore = \"Restore\"\n}\n\n/**\n * Defines values for CreateMode. \\\n * {@link KnownCreateMode} can be used interchangeably with CreateMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Default** \\\n * **PointInTimeRestore** \\\n * **Recovery** \\\n * **Restore**\n */\nexport type CreateMode = string;\n\n/** Known values of {@link SqlScriptType} that the service accepts. */\nexport enum KnownSqlScriptType {\n /** SqlQuery */\n SqlQuery = \"SqlQuery\"\n}\n\n/**\n * Defines values for SqlScriptType. 
\\\n * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlQuery**\n */\nexport type SqlScriptType = string;\n\n/** Known values of {@link SqlConnectionType} that the service accepts. */\nexport enum KnownSqlConnectionType {\n /** SqlOnDemand */\n SqlOnDemand = \"SqlOnDemand\",\n /** SqlPool */\n SqlPool = \"SqlPool\"\n}\n\n/**\n * Defines values for SqlConnectionType. \\\n * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlOnDemand** \\\n * **SqlPool**\n */\nexport type SqlConnectionType = string;\n\n/** Known values of {@link TriggerRuntimeState} that the service accepts. */\nexport enum KnownTriggerRuntimeState {\n /** Started */\n Started = \"Started\",\n /** Stopped */\n Stopped = \"Stopped\",\n /** Disabled */\n Disabled = \"Disabled\"\n}\n\n/**\n * Defines values for TriggerRuntimeState. \\\n * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Started** \\\n * **Stopped** \\\n * **Disabled**\n */\nexport type TriggerRuntimeState = string;\n\n/** Known values of {@link EventSubscriptionStatus} that the service accepts. */\nexport enum KnownEventSubscriptionStatus {\n /** Enabled */\n Enabled = \"Enabled\",\n /** Provisioning */\n Provisioning = \"Provisioning\",\n /** Deprovisioning */\n Deprovisioning = \"Deprovisioning\",\n /** Disabled */\n Disabled = \"Disabled\",\n /** Unknown */\n Unknown = \"Unknown\"\n}\n\n/**\n * Defines values for EventSubscriptionStatus. 
\\\n * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Enabled** \\\n * **Provisioning** \\\n * **Deprovisioning** \\\n * **Disabled** \\\n * **Unknown**\n */\nexport type EventSubscriptionStatus = string;\n\n/** Known values of {@link TriggerRunStatus} that the service accepts. */\nexport enum KnownTriggerRunStatus {\n /** Succeeded */\n Succeeded = \"Succeeded\",\n /** Failed */\n Failed = \"Failed\",\n /** Inprogress */\n Inprogress = \"Inprogress\"\n}\n\n/**\n * Defines values for TriggerRunStatus. \\\n * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Succeeded** \\\n * **Failed** \\\n * **Inprogress**\n */\nexport type TriggerRunStatus = string;\n\n/** Known values of {@link ExpressionType} that the service accepts. */\nexport enum KnownExpressionType {\n /** Expression */\n Expression = \"Expression\"\n}\n\n/**\n * Defines values for ExpressionType. \\\n * {@link KnownExpressionType} can be used interchangeably with ExpressionType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Expression**\n */\nexport type ExpressionType = string;\n\n/** Known values of {@link PipelineReferenceType} that the service accepts. */\nexport enum KnownPipelineReferenceType {\n /** PipelineReference */\n PipelineReference = \"PipelineReference\"\n}\n\n/**\n * Defines values for PipelineReferenceType. 
\\\n * {@link KnownPipelineReferenceType} can be used interchangeably with PipelineReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **PipelineReference**\n */\nexport type PipelineReferenceType = string;\n\n/** Known values of {@link DatasetReferenceType} that the service accepts. */\nexport enum KnownDatasetReferenceType {\n /** DatasetReference */\n DatasetReference = \"DatasetReference\"\n}\n\n/**\n * Defines values for DatasetReferenceType. \\\n * {@link KnownDatasetReferenceType} can be used interchangeably with DatasetReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **DatasetReference**\n */\nexport type DatasetReferenceType = string;\n\n/** Known values of {@link DataFlowReferenceType} that the service accepts. */\nexport enum KnownDataFlowReferenceType {\n /** DataFlowReference */\n DataFlowReference = \"DataFlowReference\"\n}\n\n/**\n * Defines values for DataFlowReferenceType. \\\n * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **DataFlowReference**\n */\nexport type DataFlowReferenceType = string;\n\n/** Known values of {@link NotebookReferenceType} that the service accepts. */\nexport enum KnownNotebookReferenceType {\n /** NotebookReference */\n NotebookReference = \"NotebookReference\"\n}\n\n/**\n * Defines values for NotebookReferenceType. \\\n * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **NotebookReference**\n */\nexport type NotebookReferenceType = string;\n\n/** Known values of {@link SparkJobReferenceType} that the service accepts. 
*/\nexport enum KnownSparkJobReferenceType {\n /** SparkJobDefinitionReference */\n SparkJobDefinitionReference = \"SparkJobDefinitionReference\"\n}\n\n/**\n * Defines values for SparkJobReferenceType. \\\n * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SparkJobDefinitionReference**\n */\nexport type SparkJobReferenceType = string;\n\n/** Known values of {@link SqlPoolReferenceType} that the service accepts. */\nexport enum KnownSqlPoolReferenceType {\n /** SqlPoolReference */\n SqlPoolReference = \"SqlPoolReference\"\n}\n\n/**\n * Defines values for SqlPoolReferenceType. \\\n * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SqlPoolReference**\n */\nexport type SqlPoolReferenceType = string;\n\n/** Known values of {@link SybaseAuthenticationType} that the service accepts. */\nexport enum KnownSybaseAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for SybaseAuthenticationType. \\\n * {@link KnownSybaseAuthenticationType} can be used interchangeably with SybaseAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type SybaseAuthenticationType = string;\n\n/** Known values of {@link Db2AuthenticationType} that the service accepts. */\nexport enum KnownDb2AuthenticationType {\n /** Basic */\n Basic = \"Basic\"\n}\n\n/**\n * Defines values for Db2AuthenticationType. 
\\\n * {@link KnownDb2AuthenticationType} can be used interchangeably with Db2AuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic**\n */\nexport type Db2AuthenticationType = string;\n\n/** Known values of {@link TeradataAuthenticationType} that the service accepts. */\nexport enum KnownTeradataAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for TeradataAuthenticationType. \\\n * {@link KnownTeradataAuthenticationType} can be used interchangeably with TeradataAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type TeradataAuthenticationType = string;\n\n/** Known values of {@link ODataAuthenticationType} that the service accepts. */\nexport enum KnownODataAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Windows */\n Windows = \"Windows\",\n /** AadServicePrincipal */\n AadServicePrincipal = \"AadServicePrincipal\",\n /** ManagedServiceIdentity */\n ManagedServiceIdentity = \"ManagedServiceIdentity\"\n}\n\n/**\n * Defines values for ODataAuthenticationType. \\\n * {@link KnownODataAuthenticationType} can be used interchangeably with ODataAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **Windows** \\\n * **AadServicePrincipal** \\\n * **ManagedServiceIdentity**\n */\nexport type ODataAuthenticationType = string;\n\n/** Known values of {@link ODataAadServicePrincipalCredentialType} that the service accepts. 
*/\nexport enum KnownODataAadServicePrincipalCredentialType {\n /** ServicePrincipalKey */\n ServicePrincipalKey = \"ServicePrincipalKey\",\n /** ServicePrincipalCert */\n ServicePrincipalCert = \"ServicePrincipalCert\"\n}\n\n/**\n * Defines values for ODataAadServicePrincipalCredentialType. \\\n * {@link KnownODataAadServicePrincipalCredentialType} can be used interchangeably with ODataAadServicePrincipalCredentialType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServicePrincipalKey** \\\n * **ServicePrincipalCert**\n */\nexport type ODataAadServicePrincipalCredentialType = string;\n\n/** Known values of {@link WebAuthenticationType} that the service accepts. */\nexport enum KnownWebAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** ClientCertificate */\n ClientCertificate = \"ClientCertificate\"\n}\n\n/**\n * Defines values for WebAuthenticationType. \\\n * {@link KnownWebAuthenticationType} can be used interchangeably with WebAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **ClientCertificate**\n */\nexport type WebAuthenticationType = string;\n\n/** Known values of {@link MongoDbAuthenticationType} that the service accepts. */\nexport enum KnownMongoDbAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\"\n}\n\n/**\n * Defines values for MongoDbAuthenticationType. \\\n * {@link KnownMongoDbAuthenticationType} can be used interchangeably with MongoDbAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous**\n */\nexport type MongoDbAuthenticationType = string;\n\n/** Known values of {@link RestServiceAuthenticationType} that the service accepts. 
*/\nexport enum KnownRestServiceAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Basic */\n Basic = \"Basic\",\n /** AadServicePrincipal */\n AadServicePrincipal = \"AadServicePrincipal\",\n /** ManagedServiceIdentity */\n ManagedServiceIdentity = \"ManagedServiceIdentity\",\n /** OAuth2ClientCredential */\n OAuth2ClientCredential = \"OAuth2ClientCredential\"\n}\n\n/**\n * Defines values for RestServiceAuthenticationType. \\\n * {@link KnownRestServiceAuthenticationType} can be used interchangeably with RestServiceAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Basic** \\\n * **AadServicePrincipal** \\\n * **ManagedServiceIdentity** \\\n * **OAuth2ClientCredential**\n */\nexport type RestServiceAuthenticationType = string;\n\n/** Known values of {@link TeamDeskAuthenticationType} that the service accepts. */\nexport enum KnownTeamDeskAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Token */\n Token = \"Token\"\n}\n\n/**\n * Defines values for TeamDeskAuthenticationType. \\\n * {@link KnownTeamDeskAuthenticationType} can be used interchangeably with TeamDeskAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Token**\n */\nexport type TeamDeskAuthenticationType = string;\n\n/** Known values of {@link ZendeskAuthenticationType} that the service accepts. */\nexport enum KnownZendeskAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Token */\n Token = \"Token\"\n}\n\n/**\n * Defines values for ZendeskAuthenticationType. 
\\\n * {@link KnownZendeskAuthenticationType} can be used interchangeably with ZendeskAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Token**\n */\nexport type ZendeskAuthenticationType = string;\n\n/** Known values of {@link HttpAuthenticationType} that the service accepts. */\nexport enum KnownHttpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Digest */\n Digest = \"Digest\",\n /** Windows */\n Windows = \"Windows\",\n /** ClientCertificate */\n ClientCertificate = \"ClientCertificate\"\n}\n\n/**\n * Defines values for HttpAuthenticationType. \\\n * {@link KnownHttpAuthenticationType} can be used interchangeably with HttpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous** \\\n * **Digest** \\\n * **Windows** \\\n * **ClientCertificate**\n */\nexport type HttpAuthenticationType = string;\n\n/** Known values of {@link FtpAuthenticationType} that the service accepts. */\nexport enum KnownFtpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Anonymous */\n Anonymous = \"Anonymous\"\n}\n\n/**\n * Defines values for FtpAuthenticationType. \\\n * {@link KnownFtpAuthenticationType} can be used interchangeably with FtpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Anonymous**\n */\nexport type FtpAuthenticationType = string;\n\n/** Known values of {@link SftpAuthenticationType} that the service accepts. */\nexport enum KnownSftpAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** SshPublicKey */\n SshPublicKey = \"SshPublicKey\"\n}\n\n/**\n * Defines values for SftpAuthenticationType. 
\\\n * {@link KnownSftpAuthenticationType} can be used interchangeably with SftpAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **SshPublicKey**\n */\nexport type SftpAuthenticationType = string;\n\n/** Known values of {@link SapHanaAuthenticationType} that the service accepts. */\nexport enum KnownSapHanaAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** Windows */\n Windows = \"Windows\"\n}\n\n/**\n * Defines values for SapHanaAuthenticationType. \\\n * {@link KnownSapHanaAuthenticationType} can be used interchangeably with SapHanaAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Windows**\n */\nexport type SapHanaAuthenticationType = string;\n\n/** Known values of {@link GoogleBigQueryAuthenticationType} that the service accepts. */\nexport enum KnownGoogleBigQueryAuthenticationType {\n /** ServiceAuthentication */\n ServiceAuthentication = \"ServiceAuthentication\",\n /** UserAuthentication */\n UserAuthentication = \"UserAuthentication\"\n}\n\n/**\n * Defines values for GoogleBigQueryAuthenticationType. \\\n * {@link KnownGoogleBigQueryAuthenticationType} can be used interchangeably with GoogleBigQueryAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServiceAuthentication** \\\n * **UserAuthentication**\n */\nexport type GoogleBigQueryAuthenticationType = string;\n\n/** Known values of {@link HBaseAuthenticationType} that the service accepts. */\nexport enum KnownHBaseAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Basic */\n Basic = \"Basic\"\n}\n\n/**\n * Defines values for HBaseAuthenticationType. 
\\\n * {@link KnownHBaseAuthenticationType} can be used interchangeably with HBaseAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Basic**\n */\nexport type HBaseAuthenticationType = string;\n\n/** Known values of {@link HiveServerType} that the service accepts. */\nexport enum KnownHiveServerType {\n /** HiveServer1 */\n HiveServer1 = \"HiveServer1\",\n /** HiveServer2 */\n HiveServer2 = \"HiveServer2\",\n /** HiveThriftServer */\n HiveThriftServer = \"HiveThriftServer\"\n}\n\n/**\n * Defines values for HiveServerType. \\\n * {@link KnownHiveServerType} can be used interchangeably with HiveServerType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **HiveServer1** \\\n * **HiveServer2** \\\n * **HiveThriftServer**\n */\nexport type HiveServerType = string;\n\n/** Known values of {@link HiveThriftTransportProtocol} that the service accepts. */\nexport enum KnownHiveThriftTransportProtocol {\n /** Binary */\n Binary = \"Binary\",\n /** Sasl */\n Sasl = \"SASL\",\n /** Http */\n Http = \"HTTP \"\n}\n\n/**\n * Defines values for HiveThriftTransportProtocol. \\\n * {@link KnownHiveThriftTransportProtocol} can be used interchangeably with HiveThriftTransportProtocol,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Binary** \\\n * **SASL** \\\n * **HTTP **\n */\nexport type HiveThriftTransportProtocol = string;\n\n/** Known values of {@link HiveAuthenticationType} that the service accepts. 
*/\nexport enum KnownHiveAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Username */\n Username = \"Username\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for HiveAuthenticationType. \\\n * {@link KnownHiveAuthenticationType} can be used interchangeably with HiveAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Username** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type HiveAuthenticationType = string;\n\n/** Known values of {@link ImpalaAuthenticationType} that the service accepts. */\nexport enum KnownImpalaAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** SaslUsername */\n SaslUsername = \"SASLUsername\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\"\n}\n\n/**\n * Defines values for ImpalaAuthenticationType. \\\n * {@link KnownImpalaAuthenticationType} can be used interchangeably with ImpalaAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **SASLUsername** \\\n * **UsernameAndPassword**\n */\nexport type ImpalaAuthenticationType = string;\n\n/** Known values of {@link PhoenixAuthenticationType} that the service accepts. */\nexport enum KnownPhoenixAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for PhoenixAuthenticationType. 
\\\n * {@link KnownPhoenixAuthenticationType} can be used interchangeably with PhoenixAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type PhoenixAuthenticationType = string;\n\n/** Known values of {@link PrestoAuthenticationType} that the service accepts. */\nexport enum KnownPrestoAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Ldap */\n Ldap = \"LDAP\"\n}\n\n/**\n * Defines values for PrestoAuthenticationType. \\\n * {@link KnownPrestoAuthenticationType} can be used interchangeably with PrestoAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **LDAP**\n */\nexport type PrestoAuthenticationType = string;\n\n/** Known values of {@link ServiceNowAuthenticationType} that the service accepts. */\nexport enum KnownServiceNowAuthenticationType {\n /** Basic */\n Basic = \"Basic\",\n /** OAuth2 */\n OAuth2 = \"OAuth2\"\n}\n\n/**\n * Defines values for ServiceNowAuthenticationType. \\\n * {@link KnownServiceNowAuthenticationType} can be used interchangeably with ServiceNowAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **OAuth2**\n */\nexport type ServiceNowAuthenticationType = string;\n\n/** Known values of {@link SparkServerType} that the service accepts. */\nexport enum KnownSparkServerType {\n /** SharkServer */\n SharkServer = \"SharkServer\",\n /** SharkServer2 */\n SharkServer2 = \"SharkServer2\",\n /** SparkThriftServer */\n SparkThriftServer = \"SparkThriftServer\"\n}\n\n/**\n * Defines values for SparkServerType. 
\\\n * {@link KnownSparkServerType} can be used interchangeably with SparkServerType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SharkServer** \\\n * **SharkServer2** \\\n * **SparkThriftServer**\n */\nexport type SparkServerType = string;\n\n/** Known values of {@link SparkThriftTransportProtocol} that the service accepts. */\nexport enum KnownSparkThriftTransportProtocol {\n /** Binary */\n Binary = \"Binary\",\n /** Sasl */\n Sasl = \"SASL\",\n /** Http */\n Http = \"HTTP \"\n}\n\n/**\n * Defines values for SparkThriftTransportProtocol. \\\n * {@link KnownSparkThriftTransportProtocol} can be used interchangeably with SparkThriftTransportProtocol,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Binary** \\\n * **SASL** \\\n * **HTTP **\n */\nexport type SparkThriftTransportProtocol = string;\n\n/** Known values of {@link SparkAuthenticationType} that the service accepts. */\nexport enum KnownSparkAuthenticationType {\n /** Anonymous */\n Anonymous = \"Anonymous\",\n /** Username */\n Username = \"Username\",\n /** UsernameAndPassword */\n UsernameAndPassword = \"UsernameAndPassword\",\n /** WindowsAzureHDInsightService */\n WindowsAzureHDInsightService = \"WindowsAzureHDInsightService\"\n}\n\n/**\n * Defines values for SparkAuthenticationType. \\\n * {@link KnownSparkAuthenticationType} can be used interchangeably with SparkAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Anonymous** \\\n * **Username** \\\n * **UsernameAndPassword** \\\n * **WindowsAzureHDInsightService**\n */\nexport type SparkAuthenticationType = string;\n\n/** Known values of {@link GoogleAdWordsAuthenticationType} that the service accepts. 
*/\nexport enum KnownGoogleAdWordsAuthenticationType {\n /** ServiceAuthentication */\n ServiceAuthentication = \"ServiceAuthentication\",\n /** UserAuthentication */\n UserAuthentication = \"UserAuthentication\"\n}\n\n/**\n * Defines values for GoogleAdWordsAuthenticationType. \\\n * {@link KnownGoogleAdWordsAuthenticationType} can be used interchangeably with GoogleAdWordsAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServiceAuthentication** \\\n * **UserAuthentication**\n */\nexport type GoogleAdWordsAuthenticationType = string;\n\n/** Known values of {@link SalesforceSourceReadBehavior} that the service accepts. */\nexport enum KnownSalesforceSourceReadBehavior {\n /** Query */\n Query = \"Query\",\n /** QueryAll */\n QueryAll = \"QueryAll\"\n}\n\n/**\n * Defines values for SalesforceSourceReadBehavior. \\\n * {@link KnownSalesforceSourceReadBehavior} can be used interchangeably with SalesforceSourceReadBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Query** \\\n * **QueryAll**\n */\nexport type SalesforceSourceReadBehavior = string;\n\n/** Known values of {@link SapHanaPartitionOption} that the service accepts. */\nexport enum KnownSapHanaPartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** SapHanaDynamicRange */\n SapHanaDynamicRange = \"SapHanaDynamicRange\"\n}\n\n/**\n * Defines values for SapHanaPartitionOption. 
\\\n * {@link KnownSapHanaPartitionOption} can be used interchangeably with SapHanaPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **SapHanaDynamicRange**\n */\nexport type SapHanaPartitionOption = string;\n\n/** Known values of {@link SapTablePartitionOption} that the service accepts. */\nexport enum KnownSapTablePartitionOption {\n /** None */\n None = \"None\",\n /** PartitionOnInt */\n PartitionOnInt = \"PartitionOnInt\",\n /** PartitionOnCalendarYear */\n PartitionOnCalendarYear = \"PartitionOnCalendarYear\",\n /** PartitionOnCalendarMonth */\n PartitionOnCalendarMonth = \"PartitionOnCalendarMonth\",\n /** PartitionOnCalendarDate */\n PartitionOnCalendarDate = \"PartitionOnCalendarDate\",\n /** PartitionOnTime */\n PartitionOnTime = \"PartitionOnTime\"\n}\n\n/**\n * Defines values for SapTablePartitionOption. \\\n * {@link KnownSapTablePartitionOption} can be used interchangeably with SapTablePartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PartitionOnInt** \\\n * **PartitionOnCalendarYear** \\\n * **PartitionOnCalendarMonth** \\\n * **PartitionOnCalendarDate** \\\n * **PartitionOnTime**\n */\nexport type SapTablePartitionOption = string;\n\n/** Known values of {@link StoredProcedureParameterType} that the service accepts. */\nexport enum KnownStoredProcedureParameterType {\n /** String */\n String = \"String\",\n /** Int */\n Int = \"Int\",\n /** Int64 */\n Int64 = \"Int64\",\n /** Decimal */\n Decimal = \"Decimal\",\n /** Guid */\n Guid = \"Guid\",\n /** Boolean */\n Boolean = \"Boolean\",\n /** Date */\n Date = \"Date\"\n}\n\n/**\n * Defines values for StoredProcedureParameterType. 
\\\n * {@link KnownStoredProcedureParameterType} can be used interchangeably with StoredProcedureParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **String** \\\n * **Int** \\\n * **Int64** \\\n * **Decimal** \\\n * **Guid** \\\n * **Boolean** \\\n * **Date**\n */\nexport type StoredProcedureParameterType = string;\n\n/** Known values of {@link OraclePartitionOption} that the service accepts. */\nexport enum KnownOraclePartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for OraclePartitionOption. \\\n * {@link KnownOraclePartitionOption} can be used interchangeably with OraclePartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **DynamicRange**\n */\nexport type OraclePartitionOption = string;\n\n/** Known values of {@link TeradataPartitionOption} that the service accepts. */\nexport enum KnownTeradataPartitionOption {\n /** None */\n None = \"None\",\n /** Hash */\n Hash = \"Hash\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for TeradataPartitionOption. \\\n * {@link KnownTeradataPartitionOption} can be used interchangeably with TeradataPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Hash** \\\n * **DynamicRange**\n */\nexport type TeradataPartitionOption = string;\n\n/** Known values of {@link CassandraSourceReadConsistencyLevels} that the service accepts. 
*/\nexport enum KnownCassandraSourceReadConsistencyLevels {\n /** ALL */\n ALL = \"ALL\",\n /** EachQuorum */\n EachQuorum = \"EACH_QUORUM\",\n /** Quorum */\n Quorum = \"QUORUM\",\n /** LocalQuorum */\n LocalQuorum = \"LOCAL_QUORUM\",\n /** ONE */\n ONE = \"ONE\",\n /** TWO */\n TWO = \"TWO\",\n /** Three */\n Three = \"THREE\",\n /** LocalONE */\n LocalONE = \"LOCAL_ONE\",\n /** Serial */\n Serial = \"SERIAL\",\n /** LocalSerial */\n LocalSerial = \"LOCAL_SERIAL\"\n}\n\n/**\n * Defines values for CassandraSourceReadConsistencyLevels. \\\n * {@link KnownCassandraSourceReadConsistencyLevels} can be used interchangeably with CassandraSourceReadConsistencyLevels,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ALL** \\\n * **EACH_QUORUM** \\\n * **QUORUM** \\\n * **LOCAL_QUORUM** \\\n * **ONE** \\\n * **TWO** \\\n * **THREE** \\\n * **LOCAL_ONE** \\\n * **SERIAL** \\\n * **LOCAL_SERIAL**\n */\nexport type CassandraSourceReadConsistencyLevels = string;\n\n/** Known values of {@link NetezzaPartitionOption} that the service accepts. */\nexport enum KnownNetezzaPartitionOption {\n /** None */\n None = \"None\",\n /** DataSlice */\n DataSlice = \"DataSlice\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for NetezzaPartitionOption. \\\n * {@link KnownNetezzaPartitionOption} can be used interchangeably with NetezzaPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **DataSlice** \\\n * **DynamicRange**\n */\nexport type NetezzaPartitionOption = string;\n\n/** Known values of {@link NotebookParameterType} that the service accepts. */\nexport enum KnownNotebookParameterType {\n /** String */\n String = \"string\",\n /** Int */\n Int = \"int\",\n /** Float */\n Float = \"float\",\n /** Bool */\n Bool = \"bool\"\n}\n\n/**\n * Defines values for NotebookParameterType. 
\\\n * {@link KnownNotebookParameterType} can be used interchangeably with NotebookParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **string** \\\n * **int** \\\n * **float** \\\n * **bool**\n */\nexport type NotebookParameterType = string;\n\n/** Known values of {@link SapCloudForCustomerSinkWriteBehavior} that the service accepts. */\nexport enum KnownSapCloudForCustomerSinkWriteBehavior {\n /** Insert */\n Insert = \"Insert\",\n /** Update */\n Update = \"Update\"\n}\n\n/**\n * Defines values for SapCloudForCustomerSinkWriteBehavior. \\\n * {@link KnownSapCloudForCustomerSinkWriteBehavior} can be used interchangeably with SapCloudForCustomerSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Insert** \\\n * **Update**\n */\nexport type SapCloudForCustomerSinkWriteBehavior = string;\n\n/** Known values of {@link PolybaseSettingsRejectType} that the service accepts. */\nexport enum KnownPolybaseSettingsRejectType {\n /** Value */\n Value = \"value\",\n /** Percentage */\n Percentage = \"percentage\"\n}\n\n/**\n * Defines values for PolybaseSettingsRejectType. \\\n * {@link KnownPolybaseSettingsRejectType} can be used interchangeably with PolybaseSettingsRejectType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **value** \\\n * **percentage**\n */\nexport type PolybaseSettingsRejectType = string;\n\n/** Known values of {@link AzureSearchIndexWriteBehaviorType} that the service accepts. */\nexport enum KnownAzureSearchIndexWriteBehaviorType {\n /** Merge */\n Merge = \"Merge\",\n /** Upload */\n Upload = \"Upload\"\n}\n\n/**\n * Defines values for AzureSearchIndexWriteBehaviorType. 
\\\n * {@link KnownAzureSearchIndexWriteBehaviorType} can be used interchangeably with AzureSearchIndexWriteBehaviorType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Merge** \\\n * **Upload**\n */\nexport type AzureSearchIndexWriteBehaviorType = string;\n\n/** Known values of {@link DynamicsSinkWriteBehavior} that the service accepts. */\nexport enum KnownDynamicsSinkWriteBehavior {\n /** Upsert */\n Upsert = \"Upsert\"\n}\n\n/**\n * Defines values for DynamicsSinkWriteBehavior. \\\n * {@link KnownDynamicsSinkWriteBehavior} can be used interchangeably with DynamicsSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Upsert**\n */\nexport type DynamicsSinkWriteBehavior = string;\n\n/** Known values of {@link SalesforceSinkWriteBehavior} that the service accepts. */\nexport enum KnownSalesforceSinkWriteBehavior {\n /** Insert */\n Insert = \"Insert\",\n /** Upsert */\n Upsert = \"Upsert\"\n}\n\n/**\n * Defines values for SalesforceSinkWriteBehavior. \\\n * {@link KnownSalesforceSinkWriteBehavior} can be used interchangeably with SalesforceSinkWriteBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Insert** \\\n * **Upsert**\n */\nexport type SalesforceSinkWriteBehavior = string;\n\n/** Known values of {@link HDInsightActivityDebugInfoOption} that the service accepts. */\nexport enum KnownHDInsightActivityDebugInfoOption {\n /** None */\n None = \"None\",\n /** Always */\n Always = \"Always\",\n /** Failure */\n Failure = \"Failure\"\n}\n\n/**\n * Defines values for HDInsightActivityDebugInfoOption. 
\\\n * {@link KnownHDInsightActivityDebugInfoOption} can be used interchangeably with HDInsightActivityDebugInfoOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **Always** \\\n * **Failure**\n */\nexport type HDInsightActivityDebugInfoOption = string;\n\n/** Known values of {@link SsisPackageLocationType} that the service accepts. */\nexport enum KnownSsisPackageLocationType {\n /** Ssisdb */\n Ssisdb = \"SSISDB\",\n /** File */\n File = \"File\",\n /** InlinePackage */\n InlinePackage = \"InlinePackage\",\n /** PackageStore */\n PackageStore = \"PackageStore\"\n}\n\n/**\n * Defines values for SsisPackageLocationType. \\\n * {@link KnownSsisPackageLocationType} can be used interchangeably with SsisPackageLocationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **SSISDB** \\\n * **File** \\\n * **InlinePackage** \\\n * **PackageStore**\n */\nexport type SsisPackageLocationType = string;\n\n/** Known values of {@link SsisLogLocationType} that the service accepts. */\nexport enum KnownSsisLogLocationType {\n /** File */\n File = \"File\"\n}\n\n/**\n * Defines values for SsisLogLocationType. \\\n * {@link KnownSsisLogLocationType} can be used interchangeably with SsisLogLocationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **File**\n */\nexport type SsisLogLocationType = string;\n\n/** Known values of {@link WebActivityMethod} that the service accepts. */\nexport enum KnownWebActivityMethod {\n /** GET */\n GET = \"GET\",\n /** Post */\n Post = \"POST\",\n /** PUT */\n PUT = \"PUT\",\n /** Delete */\n Delete = \"DELETE\"\n}\n\n/**\n * Defines values for WebActivityMethod. 
\\\n * {@link KnownWebActivityMethod} can be used interchangeably with WebActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **GET** \\\n * **POST** \\\n * **PUT** \\\n * **DELETE**\n */\nexport type WebActivityMethod = string;\n\n/** Known values of {@link AzureFunctionActivityMethod} that the service accepts. */\nexport enum KnownAzureFunctionActivityMethod {\n /** GET */\n GET = \"GET\",\n /** Post */\n Post = \"POST\",\n /** PUT */\n PUT = \"PUT\",\n /** Delete */\n Delete = \"DELETE\",\n /** Options */\n Options = \"OPTIONS\",\n /** Head */\n Head = \"HEAD\",\n /** Trace */\n Trace = \"TRACE\"\n}\n\n/**\n * Defines values for AzureFunctionActivityMethod. \\\n * {@link KnownAzureFunctionActivityMethod} can be used interchangeably with AzureFunctionActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **GET** \\\n * **POST** \\\n * **PUT** \\\n * **DELETE** \\\n * **OPTIONS** \\\n * **HEAD** \\\n * **TRACE**\n */\nexport type AzureFunctionActivityMethod = string;\n\n/** Known values of {@link WebHookActivityMethod} that the service accepts. */\nexport enum KnownWebHookActivityMethod {\n /** Post */\n Post = \"POST\"\n}\n\n/**\n * Defines values for WebHookActivityMethod. \\\n * {@link KnownWebHookActivityMethod} can be used interchangeably with WebHookActivityMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **POST**\n */\nexport type WebHookActivityMethod = string;\n\n/** Known values of {@link DataFlowComputeType} that the service accepts. */\nexport enum KnownDataFlowComputeType {\n /** General */\n General = \"General\",\n /** MemoryOptimized */\n MemoryOptimized = \"MemoryOptimized\",\n /** ComputeOptimized */\n ComputeOptimized = \"ComputeOptimized\"\n}\n\n/**\n * Defines values for DataFlowComputeType. 
\\\n * {@link KnownDataFlowComputeType} can be used interchangeably with DataFlowComputeType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **General** \\\n * **MemoryOptimized** \\\n * **ComputeOptimized**\n */\nexport type DataFlowComputeType = string;\n\n/** Known values of {@link ScriptType} that the service accepts. */\nexport enum KnownScriptType {\n /** Query */\n Query = \"Query\",\n /** NonQuery */\n NonQuery = \"NonQuery\"\n}\n\n/**\n * Defines values for ScriptType. \\\n * {@link KnownScriptType} can be used interchangeably with ScriptType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Query** \\\n * **NonQuery**\n */\nexport type ScriptType = string;\n\n/** Known values of {@link ScriptActivityParameterType} that the service accepts. */\nexport enum KnownScriptActivityParameterType {\n /** Boolean */\n Boolean = \"Boolean\",\n /** DateTime */\n DateTime = \"DateTime\",\n /** DateTimeOffset */\n DateTimeOffset = \"DateTimeOffset\",\n /** Decimal */\n Decimal = \"Decimal\",\n /** Double */\n Double = \"Double\",\n /** Guid */\n Guid = \"Guid\",\n /** Int16 */\n Int16 = \"Int16\",\n /** Int32 */\n Int32 = \"Int32\",\n /** Int64 */\n Int64 = \"Int64\",\n /** Single */\n Single = \"Single\",\n /** String */\n String = \"String\",\n /** Timespan */\n Timespan = \"Timespan\"\n}\n\n/**\n * Defines values for ScriptActivityParameterType. 
\\\n * {@link KnownScriptActivityParameterType} can be used interchangeably with ScriptActivityParameterType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Boolean** \\\n * **DateTime** \\\n * **DateTimeOffset** \\\n * **Decimal** \\\n * **Double** \\\n * **Guid** \\\n * **Int16** \\\n * **Int32** \\\n * **Int64** \\\n * **Single** \\\n * **String** \\\n * **Timespan**\n */\nexport type ScriptActivityParameterType = string;\n\n/** Known values of {@link ScriptActivityParameterDirection} that the service accepts. */\nexport enum KnownScriptActivityParameterDirection {\n /** Input */\n Input = \"Input\",\n /** Output */\n Output = \"Output\",\n /** InputOutput */\n InputOutput = \"InputOutput\"\n}\n\n/**\n * Defines values for ScriptActivityParameterDirection. \\\n * {@link KnownScriptActivityParameterDirection} can be used interchangeably with ScriptActivityParameterDirection,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Input** \\\n * **Output** \\\n * **InputOutput**\n */\nexport type ScriptActivityParameterDirection = string;\n\n/** Known values of {@link ScriptActivityLogDestination} that the service accepts. */\nexport enum KnownScriptActivityLogDestination {\n /** ActivityOutput */\n ActivityOutput = \"ActivityOutput\",\n /** ExternalStore */\n ExternalStore = \"ExternalStore\"\n}\n\n/**\n * Defines values for ScriptActivityLogDestination. \\\n * {@link KnownScriptActivityLogDestination} can be used interchangeably with ScriptActivityLogDestination,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ActivityOutput** \\\n * **ExternalStore**\n */\nexport type ScriptActivityLogDestination = string;\n\n/** Known values of {@link ConfigurationType} that the service accepts. 
*/\nexport enum KnownConfigurationType {\n /** Default */\n Default = \"Default\",\n /** Customized */\n Customized = \"Customized\",\n /** Artifact */\n Artifact = \"Artifact\"\n}\n\n/**\n * Defines values for ConfigurationType. \\\n * {@link KnownConfigurationType} can be used interchangeably with ConfigurationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Default** \\\n * **Customized** \\\n * **Artifact**\n */\nexport type ConfigurationType = string;\n\n/** Known values of {@link RecurrenceFrequency} that the service accepts. */\nexport enum KnownRecurrenceFrequency {\n /** NotSpecified */\n NotSpecified = \"NotSpecified\",\n /** Minute */\n Minute = \"Minute\",\n /** Hour */\n Hour = \"Hour\",\n /** Day */\n Day = \"Day\",\n /** Week */\n Week = \"Week\",\n /** Month */\n Month = \"Month\",\n /** Year */\n Year = \"Year\"\n}\n\n/**\n * Defines values for RecurrenceFrequency. \\\n * {@link KnownRecurrenceFrequency} can be used interchangeably with RecurrenceFrequency,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **NotSpecified** \\\n * **Minute** \\\n * **Hour** \\\n * **Day** \\\n * **Week** \\\n * **Month** \\\n * **Year**\n */\nexport type RecurrenceFrequency = string;\n\n/** Known values of {@link BlobEventType} that the service accepts. */\nexport enum KnownBlobEventType {\n /** MicrosoftStorageBlobCreated */\n MicrosoftStorageBlobCreated = \"Microsoft.Storage.BlobCreated\",\n /** MicrosoftStorageBlobDeleted */\n MicrosoftStorageBlobDeleted = \"Microsoft.Storage.BlobDeleted\"\n}\n\n/**\n * Defines values for BlobEventType. 
\\\n * {@link KnownBlobEventType} can be used interchangeably with BlobEventType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Microsoft.Storage.BlobCreated** \\\n * **Microsoft.Storage.BlobDeleted**\n */\nexport type BlobEventType = string;\n\n/** Known values of {@link TumblingWindowFrequency} that the service accepts. */\nexport enum KnownTumblingWindowFrequency {\n /** Minute */\n Minute = \"Minute\",\n /** Hour */\n Hour = \"Hour\",\n /** Month */\n Month = \"Month\"\n}\n\n/**\n * Defines values for TumblingWindowFrequency. \\\n * {@link KnownTumblingWindowFrequency} can be used interchangeably with TumblingWindowFrequency,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Minute** \\\n * **Hour** \\\n * **Month**\n */\nexport type TumblingWindowFrequency = string;\n\n/** Known values of {@link TriggerReferenceType} that the service accepts. */\nexport enum KnownTriggerReferenceType {\n /** TriggerReference */\n TriggerReference = \"TriggerReference\"\n}\n\n/**\n * Defines values for TriggerReferenceType. \\\n * {@link KnownTriggerReferenceType} can be used interchangeably with TriggerReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **TriggerReference**\n */\nexport type TriggerReferenceType = string;\n\n/** Known values of {@link IntegrationRuntimeState} that the service accepts. 
*/\nexport enum KnownIntegrationRuntimeState {\n /** Initial */\n Initial = \"Initial\",\n /** Stopped */\n Stopped = \"Stopped\",\n /** Started */\n Started = \"Started\",\n /** Starting */\n Starting = \"Starting\",\n /** Stopping */\n Stopping = \"Stopping\",\n /** NeedRegistration */\n NeedRegistration = \"NeedRegistration\",\n /** Online */\n Online = \"Online\",\n /** Limited */\n Limited = \"Limited\",\n /** Offline */\n Offline = \"Offline\",\n /** AccessDenied */\n AccessDenied = \"AccessDenied\"\n}\n\n/**\n * Defines values for IntegrationRuntimeState. \\\n * {@link KnownIntegrationRuntimeState} can be used interchangeably with IntegrationRuntimeState,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Initial** \\\n * **Stopped** \\\n * **Started** \\\n * **Starting** \\\n * **Stopping** \\\n * **NeedRegistration** \\\n * **Online** \\\n * **Limited** \\\n * **Offline** \\\n * **AccessDenied**\n */\nexport type IntegrationRuntimeState = string;\n\n/** Known values of {@link IntegrationRuntimeSsisCatalogPricingTier} that the service accepts. */\nexport enum KnownIntegrationRuntimeSsisCatalogPricingTier {\n /** Basic */\n Basic = \"Basic\",\n /** Standard */\n Standard = \"Standard\",\n /** Premium */\n Premium = \"Premium\",\n /** PremiumRS */\n PremiumRS = \"PremiumRS\"\n}\n\n/**\n * Defines values for IntegrationRuntimeSsisCatalogPricingTier. \\\n * {@link KnownIntegrationRuntimeSsisCatalogPricingTier} can be used interchangeably with IntegrationRuntimeSsisCatalogPricingTier,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Basic** \\\n * **Standard** \\\n * **Premium** \\\n * **PremiumRS**\n */\nexport type IntegrationRuntimeSsisCatalogPricingTier = string;\n\n/** Known values of {@link IntegrationRuntimeLicenseType} that the service accepts. 
*/\nexport enum KnownIntegrationRuntimeLicenseType {\n /** BasePrice */\n BasePrice = \"BasePrice\",\n /** LicenseIncluded */\n LicenseIncluded = \"LicenseIncluded\"\n}\n\n/**\n * Defines values for IntegrationRuntimeLicenseType. \\\n * {@link KnownIntegrationRuntimeLicenseType} can be used interchangeably with IntegrationRuntimeLicenseType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **BasePrice** \\\n * **LicenseIncluded**\n */\nexport type IntegrationRuntimeLicenseType = string;\n\n/** Known values of {@link IntegrationRuntimeEntityReferenceType} that the service accepts. */\nexport enum KnownIntegrationRuntimeEntityReferenceType {\n /** IntegrationRuntimeReference */\n IntegrationRuntimeReference = \"IntegrationRuntimeReference\",\n /** LinkedServiceReference */\n LinkedServiceReference = \"LinkedServiceReference\"\n}\n\n/**\n * Defines values for IntegrationRuntimeEntityReferenceType. \\\n * {@link KnownIntegrationRuntimeEntityReferenceType} can be used interchangeably with IntegrationRuntimeEntityReferenceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **IntegrationRuntimeReference** \\\n * **LinkedServiceReference**\n */\nexport type IntegrationRuntimeEntityReferenceType = string;\n\n/** Known values of {@link IntegrationRuntimeEdition} that the service accepts. */\nexport enum KnownIntegrationRuntimeEdition {\n /** Standard */\n Standard = \"Standard\",\n /** Enterprise */\n Enterprise = \"Enterprise\"\n}\n\n/**\n * Defines values for IntegrationRuntimeEdition. 
\\\n * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Standard** \\\n * **Enterprise**\n */\nexport type IntegrationRuntimeEdition = string;\n\n/** Known values of {@link JsonFormatFilePattern} that the service accepts. */\nexport enum KnownJsonFormatFilePattern {\n /** SetOfObjects */\n SetOfObjects = \"setOfObjects\",\n /** ArrayOfObjects */\n ArrayOfObjects = \"arrayOfObjects\"\n}\n\n/**\n * Defines values for JsonFormatFilePattern. \\\n * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **setOfObjects** \\\n * **arrayOfObjects**\n */\nexport type JsonFormatFilePattern = string;\n\n/** Known values of {@link DatasetCompressionLevel} that the service accepts. */\nexport enum KnownDatasetCompressionLevel {\n /** Optimal */\n Optimal = \"Optimal\",\n /** Fastest */\n Fastest = \"Fastest\"\n}\n\n/**\n * Defines values for DatasetCompressionLevel. \\\n * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Optimal** \\\n * **Fastest**\n */\nexport type DatasetCompressionLevel = string;\n\n/** Known values of {@link AvroCompressionCodec} that the service accepts. */\nexport enum KnownAvroCompressionCodec {\n /** None */\n None = \"none\",\n /** Deflate */\n Deflate = \"deflate\",\n /** Snappy */\n Snappy = \"snappy\",\n /** Xz */\n Xz = \"xz\",\n /** Bzip2 */\n Bzip2 = \"bzip2\"\n}\n\n/**\n * Defines values for AvroCompressionCodec. 
\\\n * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **none** \\\n * **deflate** \\\n * **snappy** \\\n * **xz** \\\n * **bzip2**\n */\nexport type AvroCompressionCodec = string;\n\n/** Known values of {@link ParquetCompressionCodecEnum} that the service accepts. */\nexport enum KnownParquetCompressionCodecEnum {\n /** None */\n None = \"none\",\n /** Gzip */\n Gzip = \"gzip\",\n /** Snappy */\n Snappy = \"snappy\",\n /** Lzo */\n Lzo = \"lzo\"\n}\n\n/**\n * Defines values for ParquetCompressionCodecEnum. \\\n * {@link KnownParquetCompressionCodecEnum} can be used interchangeably with ParquetCompressionCodecEnum,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **none** \\\n * **gzip** \\\n * **snappy** \\\n * **lzo**\n */\nexport type ParquetCompressionCodecEnum = string;\n\n/** Known values of {@link CompressionCodec} that the service accepts. */\nexport enum KnownCompressionCodec {\n /** None */\n None = \"none\",\n /** Lzo */\n Lzo = \"lzo\",\n /** Bzip2 */\n Bzip2 = \"bzip2\",\n /** Gzip */\n Gzip = \"gzip\",\n /** Deflate */\n Deflate = \"deflate\",\n /** ZipDeflate */\n ZipDeflate = \"zipDeflate\",\n /** Snappy */\n Snappy = \"snappy\",\n /** Lz4 */\n Lz4 = \"lz4\",\n /** Tar */\n Tar = \"tar\",\n /** TarGZip */\n TarGZip = \"tarGZip\"\n}\n\n/**\n * Defines values for CompressionCodec. 
\\\n * {@link KnownCompressionCodec} can be used interchangeably with CompressionCodec,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **none** \\\n * **lzo** \\\n * **bzip2** \\\n * **gzip** \\\n * **deflate** \\\n * **zipDeflate** \\\n * **snappy** \\\n * **lz4** \\\n * **tar** \\\n * **tarGZip**\n */\nexport type CompressionCodec = string;\n\n/** Known values of {@link OrcCompressionCodec} that the service accepts. */\nexport enum KnownOrcCompressionCodec {\n /** None */\n None = \"none\",\n /** Zlib */\n Zlib = \"zlib\",\n /** Snappy */\n Snappy = \"snappy\",\n /** Lzo */\n Lzo = \"lzo\"\n}\n\n/**\n * Defines values for OrcCompressionCodec. \\\n * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **none** \\\n * **zlib** \\\n * **snappy** \\\n * **lzo**\n */\nexport type OrcCompressionCodec = string;\n\n/** Known values of {@link DynamicsDeploymentType} that the service accepts. */\nexport enum KnownDynamicsDeploymentType {\n /** Online */\n Online = \"Online\",\n /** OnPremisesWithIfd */\n OnPremisesWithIfd = \"OnPremisesWithIfd\"\n}\n\n/**\n * Defines values for DynamicsDeploymentType. \\\n * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Online** \\\n * **OnPremisesWithIfd**\n */\nexport type DynamicsDeploymentType = string;\n\n/** Known values of {@link DynamicsAuthenticationType} that the service accepts. */\nexport enum KnownDynamicsAuthenticationType {\n /** Office365 */\n Office365 = \"Office365\",\n /** Ifd */\n Ifd = \"Ifd\",\n /** AADServicePrincipal */\n AADServicePrincipal = \"AADServicePrincipal\"\n}\n\n/**\n * Defines values for DynamicsAuthenticationType. 
\\\n * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Office365** \\\n * **Ifd** \\\n * **AADServicePrincipal**\n */\nexport type DynamicsAuthenticationType = string;\n\n/** Known values of {@link DynamicsServicePrincipalCredentialType} that the service accepts. */\nexport enum KnownDynamicsServicePrincipalCredentialType {\n /** ServicePrincipalKey */\n ServicePrincipalKey = \"ServicePrincipalKey\",\n /** ServicePrincipalCert */\n ServicePrincipalCert = \"ServicePrincipalCert\"\n}\n\n/**\n * Defines values for DynamicsServicePrincipalCredentialType. \\\n * {@link KnownDynamicsServicePrincipalCredentialType} can be used interchangeably with DynamicsServicePrincipalCredentialType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ServicePrincipalKey** \\\n * **ServicePrincipalCert**\n */\nexport type DynamicsServicePrincipalCredentialType = string;\n\n/** Known values of {@link HdiNodeTypes} that the service accepts. */\nexport enum KnownHdiNodeTypes {\n /** Headnode */\n Headnode = \"Headnode\",\n /** Workernode */\n Workernode = \"Workernode\",\n /** Zookeeper */\n Zookeeper = \"Zookeeper\"\n}\n\n/**\n * Defines values for HdiNodeTypes. \\\n * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Headnode** \\\n * **Workernode** \\\n * **Zookeeper**\n */\nexport type HdiNodeTypes = string;\n\n/** Known values of {@link JsonWriteFilePattern} that the service accepts. */\nexport enum KnownJsonWriteFilePattern {\n /** SetOfObjects */\n SetOfObjects = \"setOfObjects\",\n /** ArrayOfObjects */\n ArrayOfObjects = \"arrayOfObjects\"\n}\n\n/**\n * Defines values for JsonWriteFilePattern. 
\\\n * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **setOfObjects** \\\n * **arrayOfObjects**\n */\nexport type JsonWriteFilePattern = string;\n\n/** Known values of {@link AmazonRdsForOraclePartitionOption} that the service accepts. */\nexport enum KnownAmazonRdsForOraclePartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for AmazonRdsForOraclePartitionOption. \\\n * {@link KnownAmazonRdsForOraclePartitionOption} can be used interchangeably with AmazonRdsForOraclePartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **DynamicRange**\n */\nexport type AmazonRdsForOraclePartitionOption = string;\n\n/** Known values of {@link CopyBehaviorType} that the service accepts. */\nexport enum KnownCopyBehaviorType {\n /** PreserveHierarchy */\n PreserveHierarchy = \"PreserveHierarchy\",\n /** FlattenHierarchy */\n FlattenHierarchy = \"FlattenHierarchy\",\n /** MergeFiles */\n MergeFiles = \"MergeFiles\"\n}\n\n/**\n * Defines values for CopyBehaviorType. \\\n * {@link KnownCopyBehaviorType} can be used interchangeably with CopyBehaviorType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **PreserveHierarchy** \\\n * **FlattenHierarchy** \\\n * **MergeFiles**\n */\nexport type CopyBehaviorType = string;\n\n/** Known values of {@link SqlPartitionOption} that the service accepts. 
*/\nexport enum KnownSqlPartitionOption {\n /** None */\n None = \"None\",\n /** PhysicalPartitionsOfTable */\n PhysicalPartitionsOfTable = \"PhysicalPartitionsOfTable\",\n /** DynamicRange */\n DynamicRange = \"DynamicRange\"\n}\n\n/**\n * Defines values for SqlPartitionOption. \\\n * {@link KnownSqlPartitionOption} can be used interchangeably with SqlPartitionOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **None** \\\n * **PhysicalPartitionsOfTable** \\\n * **DynamicRange**\n */\nexport type SqlPartitionOption = string;\n/** Defines values for ResourceIdentityType. */\nexport type ResourceIdentityType = \"None\" | \"SystemAssigned\";\n/** Defines values for DayOfWeek. */\nexport type DayOfWeek =\n | \"Sunday\"\n | \"Monday\"\n | \"Tuesday\"\n | \"Wednesday\"\n | \"Thursday\"\n | \"Friday\"\n | \"Saturday\";\n\n/** Optional parameters. */\nexport interface LinkConnectionListByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the listByWorkspace operation. */\nexport type LinkConnectionListByWorkspaceResponse = LinkConnectionListResponse;\n\n/** Optional parameters. */\nexport interface LinkConnectionCreateOrUpdateOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the createOrUpdate operation. */\nexport type LinkConnectionCreateOrUpdateResponse = LinkConnectionResource;\n\n/** Optional parameters. */\nexport interface LinkConnectionGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type LinkConnectionGetResponse = LinkConnectionResource;\n\n/** Optional parameters. */\nexport interface LinkConnectionDeleteOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionEditTablesOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. 
*/\nexport interface LinkConnectionStartOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionStopOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionGetDetailedStatusOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDetailedStatus operation. */\nexport type LinkConnectionGetDetailedStatusResponse = LinkConnectionDetailedStatus;\n\n/** Optional parameters. */\nexport interface LinkConnectionListLinkTablesOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the listLinkTables operation. */\nexport type LinkConnectionListLinkTablesResponse = LinkTableListResponse;\n\n/** Optional parameters. */\nexport interface LinkConnectionQueryTableStatusOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryTableStatus operation. */\nexport type LinkConnectionQueryTableStatusResponse = LinkConnectionQueryTableStatus;\n\n/** Optional parameters. */\nexport interface LinkConnectionUpdateLandingZoneCredentialOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionPauseOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionResumeOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface LinkConnectionListByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the listByWorkspaceNext operation. */\nexport type LinkConnectionListByWorkspaceNextResponse = LinkConnectionListResponse;\n\n/** Optional parameters. */\nexport interface KqlScriptsGetAllOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getAll operation. 
*/\nexport type KqlScriptsGetAllResponse = KqlScriptsResourceCollectionResponse;\n\n/** Optional parameters. */\nexport interface KqlScriptsGetAllNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getAllNext operation. */\nexport type KqlScriptsGetAllNextResponse = KqlScriptsResourceCollectionResponse;\n\n/** Optional parameters. */\nexport interface KqlScriptCreateOrUpdateOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdate operation. */\nexport type KqlScriptCreateOrUpdateResponse = KqlScriptResource;\n\n/** Optional parameters. */\nexport interface KqlScriptGetByNameOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getByName operation. */\nexport type KqlScriptGetByNameResponse = KqlScriptResource;\n\n/** Optional parameters. */\nexport interface KqlScriptDeleteByNameOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface KqlScriptRenameOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface MetastoreRegisterOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the register operation. 
*/\nexport type MetastoreRegisterResponse = MetastoreRegistrationResponse;\n\n/** Optional parameters. */\nexport interface MetastoreGetDatabaseOperationsOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDatabaseOperations operation. */\nexport type MetastoreGetDatabaseOperationsResponse = MetastoreRequestSuccessResponse;\n\n/** Optional parameters. */\nexport interface MetastoreUpdateOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the update operation. */\nexport type MetastoreUpdateResponse = MetastoreUpdationResponse;\n\n/** Optional parameters. */\nexport interface MetastoreDeleteOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkConfigurationsByWorkspace operation. */\nexport type SparkConfigurationGetSparkConfigurationsByWorkspaceResponse = SparkConfigurationListResponse;\n\n/** Optional parameters. */\nexport interface SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the sparkConfiguration entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateSparkConfiguration operation. */\nexport type SparkConfigurationCreateOrUpdateSparkConfigurationResponse = SparkConfigurationResource;\n\n/** Optional parameters. 
*/\nexport interface SparkConfigurationGetSparkConfigurationOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the sparkConfiguration entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getSparkConfiguration operation. */\nexport type SparkConfigurationGetSparkConfigurationResponse = SparkConfigurationResource;\n\n/** Optional parameters. */\nexport interface SparkConfigurationDeleteSparkConfigurationOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkConfigurationRenameSparkConfigurationOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkConfigurationsByWorkspaceNext operation. */\nexport type SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse = SparkConfigurationListResponse;\n\n/** Optional parameters. */\nexport interface BigDataPoolsListOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the list operation. */\nexport type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult;\n\n/** Optional parameters. */\nexport interface BigDataPoolsGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. 
*/\nexport type BigDataPoolsGetResponse = BigDataPoolResourceInfo;\n\n/** Optional parameters. */\nexport interface DataFlowCreateOrUpdateDataFlowOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateDataFlow operation. */\nexport type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource;\n\n/** Optional parameters. */\nexport interface DataFlowGetDataFlowOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getDataFlow operation. */\nexport type DataFlowGetDataFlowResponse = DataFlowResource;\n\n/** Optional parameters. */\nexport interface DataFlowDeleteDataFlowOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface DataFlowRenameDataFlowOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. 
*/\nexport interface DataFlowGetDataFlowsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDataFlowsByWorkspace operation. */\nexport type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse;\n\n/** Optional parameters. */\nexport interface DataFlowGetDataFlowsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDataFlowsByWorkspaceNext operation. */\nexport type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse;\n\n/** Optional parameters. */\nexport interface DataFlowDebugSessionCreateDataFlowDebugSessionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createDataFlowDebugSession operation. */\nexport type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse;\n\n/** Optional parameters. */\nexport interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryDataFlowDebugSessionsByWorkspace operation. */\nexport type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse;\n\n/** Optional parameters. */\nexport interface DataFlowDebugSessionAddDataFlowOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the addDataFlow operation. */\nexport type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse;\n\n/** Optional parameters. */\nexport interface DataFlowDebugSessionDeleteDataFlowDebugSessionOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. 
*/\nexport interface DataFlowDebugSessionExecuteCommandOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the executeCommand operation. */\nexport type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse;\n\n/** Optional parameters. */\nexport interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation. */\nexport type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse;\n\n/** Optional parameters. */\nexport interface DatasetGetDatasetsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDatasetsByWorkspace operation. */\nexport type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse;\n\n/** Optional parameters. */\nexport interface DatasetCreateOrUpdateDatasetOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateDataset operation. */\nexport type DatasetCreateOrUpdateDatasetResponse = DatasetResource;\n\n/** Optional parameters. */\nexport interface DatasetGetDatasetOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the dataset entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getDataset operation. */\nexport type DatasetGetDatasetResponse = DatasetResource;\n\n/** Optional parameters. */\nexport interface DatasetDeleteDatasetOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface DatasetRenameDatasetOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface DatasetGetDatasetsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getDatasetsByWorkspaceNext operation. */\nexport type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse;\n\n/** Optional parameters. */\nexport interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams\n extends coreClient.OperationOptions {\n /** Can provide a guid, which is helpful for debugging and to provide better customer support */\n clientRequestId?: string;\n}\n\n/** Contains response data for the getGitHubAccessToken operation. */\nexport type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse;\n\n/** Optional parameters. */\nexport interface IntegrationRuntimesListOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the list operation. */\nexport type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse;\n\n/** Optional parameters. 
*/\nexport interface IntegrationRuntimesGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type IntegrationRuntimesGetResponse = IntegrationRuntimeResource;\n\n/** Optional parameters. */\nexport interface LibraryListOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the list operation. */\nexport type LibraryListOperationResponse = LibraryListResponse;\n\n/** Optional parameters. */\nexport interface LibraryFlushOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface LibraryGetOperationResultOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getOperationResult operation. */\nexport type LibraryGetOperationResultResponse = LibraryResource;\n\n/** Optional parameters. */\nexport interface LibraryDeleteOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface LibraryGetOptionalParams extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type LibraryGetResponse = LibraryResource;\n\n/** Optional parameters. */\nexport interface LibraryCreateOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. 
*/\nexport interface LibraryAppendOptionalParams\n extends coreClient.OperationOptions {\n /** Set this header to a byte offset at which the block is expected to be appended. The request succeeds only if the current offset matches this value. Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 – Precondition Failed) */\n blobConditionAppendPosition?: number;\n}\n\n/** Optional parameters. */\nexport interface LibraryListNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the listNext operation. */\nexport type LibraryListNextResponse = LibraryListResponse;\n\n/** Optional parameters. */\nexport interface LinkedServiceGetLinkedServicesByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getLinkedServicesByWorkspace operation. */\nexport type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse;\n\n/** Optional parameters. */\nexport interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateLinkedService operation. */\nexport type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource;\n\n/** Optional parameters. */\nexport interface LinkedServiceGetLinkedServiceOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getLinkedService operation. */\nexport type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource;\n\n/** Optional parameters. */\nexport interface LinkedServiceDeleteLinkedServiceOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface LinkedServiceRenameLinkedServiceOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getLinkedServicesByWorkspaceNext operation. */\nexport type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse;\n\n/** Optional parameters. */\nexport interface NotebookGetNotebooksByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebooksByWorkspace operation. */\nexport type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse;\n\n/** Optional parameters. */\nexport interface NotebookGetNotebookSummaryByWorkSpaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebookSummaryByWorkSpace operation. */\nexport type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse;\n\n/** Optional parameters. */\nexport interface NotebookCreateOrUpdateNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Note book entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateNotebook operation. */\nexport type NotebookCreateOrUpdateNotebookResponse = NotebookResource;\n\n/** Optional parameters. */\nexport interface NotebookGetNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getNotebook operation. */\nexport type NotebookGetNotebookResponse = NotebookResource;\n\n/** Optional parameters. */\nexport interface NotebookDeleteNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface NotebookRenameNotebookOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface NotebookGetNotebooksByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebooksByWorkspaceNext operation. */\nexport type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse;\n\n/** Optional parameters. 
*/\nexport interface NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getNotebookSummaryByWorkSpaceNext operation. */\nexport type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse;\n\n/** Optional parameters. */\nexport interface NotebookOperationResultGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface PipelineGetPipelinesByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelinesByWorkspace operation. */\nexport type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse;\n\n/** Optional parameters. */\nexport interface PipelineCreateOrUpdatePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdatePipeline operation. */\nexport type PipelineCreateOrUpdatePipelineResponse = PipelineResource;\n\n/** Optional parameters. */\nexport interface PipelineGetPipelineOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getPipeline operation. */\nexport type PipelineGetPipelineResponse = PipelineResource;\n\n/** Optional parameters. 
*/\nexport interface PipelineDeletePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface PipelineRenamePipelineOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface PipelineCreatePipelineRunOptionalParams\n extends coreClient.OperationOptions {\n /** Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */\n parameters?: { [propertyName: string]: any };\n /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */\n referencePipelineRunId?: string;\n /** Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId. */\n isRecovery?: boolean;\n /** In recovery mode, the rerun will start from this activity. If not specified, all activities will run. */\n startActivityName?: string;\n}\n\n/** Contains response data for the createPipelineRun operation. */\nexport type PipelineCreatePipelineRunResponse = CreateRunResponse;\n\n/** Optional parameters. */\nexport interface PipelineGetPipelinesByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelinesByWorkspaceNext operation. */\nexport type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse;\n\n/** Optional parameters. 
*/\nexport interface PipelineRunQueryPipelineRunsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryPipelineRunsByWorkspace operation. */\nexport type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface PipelineRunGetPipelineRunOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getPipelineRun operation. */\nexport type PipelineRunGetPipelineRunResponse = PipelineRun;\n\n/** Optional parameters. */\nexport interface PipelineRunQueryActivityRunsOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryActivityRuns operation. */\nexport type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface PipelineRunCancelPipelineRunOptionalParams\n extends coreClient.OperationOptions {\n /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */\n isRecursive?: boolean;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkJobDefinitionsByWorkspace operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Spark Job Definition entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. 
*/\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateSparkJobDefinition operation. */\nexport type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the Spark Job Definition entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getSparkJobDefinition operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the executeSparkJobDefinition operation. */\nexport type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionRenameSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. 
*/\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionDebugSparkJobDefinitionOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the debugSparkJobDefinition operation. */\nexport type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob;\n\n/** Optional parameters. */\nexport interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSparkJobDefinitionsByWorkspaceNext operation. */\nexport type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse;\n\n/** Optional parameters. */\nexport interface SqlPoolsListOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the list operation. */\nexport type SqlPoolsListResponse = SqlPoolInfoListResult;\n\n/** Optional parameters. */\nexport interface SqlPoolsGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type SqlPoolsGetResponse = SqlPool;\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSqlScriptsByWorkspace operation. */\nexport type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse;\n\n/** Optional parameters. */\nexport interface SqlScriptCreateOrUpdateSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
*/\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateSqlScript operation. */\nexport type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource;\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getSqlScript operation. */\nexport type SqlScriptGetSqlScriptResponse = SqlScriptResource;\n\n/** Optional parameters. */\nexport interface SqlScriptDeleteSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SqlScriptRenameSqlScriptOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getSqlScriptsByWorkspaceNext operation. */\nexport type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse;\n\n/** Optional parameters. 
*/\nexport interface TriggerGetTriggersByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getTriggersByWorkspace operation. */\nexport type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse;\n\n/** Optional parameters. */\nexport interface TriggerCreateOrUpdateTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */\n ifMatch?: string;\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the createOrUpdateTrigger operation. */\nexport type TriggerCreateOrUpdateTriggerResponse = TriggerResource;\n\n/** Optional parameters. */\nexport interface TriggerGetTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */\n ifNoneMatch?: string;\n}\n\n/** Contains response data for the getTrigger operation. */\nexport type TriggerGetTriggerResponse = TriggerResource;\n\n/** Optional parameters. */\nexport interface TriggerDeleteTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface TriggerSubscribeTriggerToEventsOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. 
*/\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the subscribeTriggerToEvents operation. */\nexport type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. */\nexport interface TriggerGetEventSubscriptionStatusOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getEventSubscriptionStatus operation. */\nexport type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. */\nexport interface TriggerUnsubscribeTriggerFromEventsOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Contains response data for the unsubscribeTriggerFromEvents operation. */\nexport type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus;\n\n/** Optional parameters. */\nexport interface TriggerStartTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. */\nexport interface TriggerStopTriggerOptionalParams\n extends coreClient.OperationOptions {\n /** Delay to wait until next poll, in milliseconds. */\n updateIntervalInMs?: number;\n /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */\n resumeFrom?: string;\n}\n\n/** Optional parameters. 
*/\nexport interface TriggerGetTriggersByWorkspaceNextOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the getTriggersByWorkspaceNext operation. */\nexport type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse;\n\n/** Optional parameters. */\nexport interface TriggerRunRerunTriggerInstanceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface TriggerRunCancelTriggerInstanceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Optional parameters. */\nexport interface TriggerRunQueryTriggerRunsByWorkspaceOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the queryTriggerRunsByWorkspace operation. */\nexport type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse;\n\n/** Optional parameters. */\nexport interface WorkspaceGetOptionalParams\n extends coreClient.OperationOptions {}\n\n/** Contains response data for the get operation. */\nexport type WorkspaceGetResponse = Workspace;\n\n/** Optional parameters. */\nexport interface ArtifactsClientOptionalParams\n extends coreClient.ServiceClientOptions {\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]}
|