@azure/storage-blob 12.9.0-beta.2 → 12.9.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/CHANGELOG.md +16 -5
  2. package/README.md +11 -10
  3. package/dist/index.js +841 -461
  4. package/dist/index.js.map +1 -1
  5. package/dist-esm/storage-blob/src/BatchResponseParser.js +2 -2
  6. package/dist-esm/storage-blob/src/BatchResponseParser.js.map +1 -1
  7. package/dist-esm/storage-blob/src/BlobBatch.js +8 -8
  8. package/dist-esm/storage-blob/src/BlobBatch.js.map +1 -1
  9. package/dist-esm/storage-blob/src/BlobBatchClient.js +2 -2
  10. package/dist-esm/storage-blob/src/BlobBatchClient.js.map +1 -1
  11. package/dist-esm/storage-blob/src/BlobDownloadResponse.js +1 -1
  12. package/dist-esm/storage-blob/src/BlobDownloadResponse.js.map +1 -1
  13. package/dist-esm/storage-blob/src/BlobLeaseClient.js +5 -5
  14. package/dist-esm/storage-blob/src/BlobLeaseClient.js.map +1 -1
  15. package/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map +1 -1
  16. package/dist-esm/storage-blob/src/BlobQueryResponse.js.map +1 -1
  17. package/dist-esm/storage-blob/src/BlobServiceClient.js +22 -20
  18. package/dist-esm/storage-blob/src/BlobServiceClient.js.map +1 -1
  19. package/dist-esm/storage-blob/src/Clients.js +100 -97
  20. package/dist-esm/storage-blob/src/Clients.js.map +1 -1
  21. package/dist-esm/storage-blob/src/ContainerClient.js +241 -38
  22. package/dist-esm/storage-blob/src/ContainerClient.js.map +1 -1
  23. package/dist-esm/storage-blob/src/PageBlobRangeResponse.js +3 -3
  24. package/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map +1 -1
  25. package/dist-esm/storage-blob/src/Pipeline.js +6 -6
  26. package/dist-esm/storage-blob/src/Pipeline.js.map +1 -1
  27. package/dist-esm/storage-blob/src/TelemetryPolicyFactory.js +1 -1
  28. package/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map +1 -1
  29. package/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js +1 -3
  30. package/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map +1 -1
  31. package/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js +1 -3
  32. package/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map +1 -1
  33. package/dist-esm/storage-blob/src/generated/src/models/index.js.map +1 -1
  34. package/dist-esm/storage-blob/src/generated/src/models/mappers.js +53 -0
  35. package/dist-esm/storage-blob/src/generated/src/models/mappers.js.map +1 -1
  36. package/dist-esm/storage-blob/src/generated/src/models/parameters.js +1 -1
  37. package/dist-esm/storage-blob/src/generated/src/models/parameters.js.map +1 -1
  38. package/dist-esm/storage-blob/src/generated/src/operations/container.js +41 -0
  39. package/dist-esm/storage-blob/src/generated/src/operations/container.js.map +1 -1
  40. package/dist-esm/storage-blob/src/generated/src/storageClientContext.js +2 -2
  41. package/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map +1 -1
  42. package/dist-esm/storage-blob/src/generatedModels.js.map +1 -1
  43. package/dist-esm/storage-blob/src/index.browser.js +1 -1
  44. package/dist-esm/storage-blob/src/index.browser.js.map +1 -1
  45. package/dist-esm/storage-blob/src/index.js +1 -1
  46. package/dist-esm/storage-blob/src/index.js.map +1 -1
  47. package/dist-esm/storage-blob/src/models.js.map +1 -1
  48. package/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js +1 -1
  49. package/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map +1 -1
  50. package/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js +5 -5
  51. package/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map +1 -1
  52. package/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js +1 -1
  53. package/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map +1 -1
  54. package/dist-esm/storage-blob/src/policies/TelemetryPolicy.js +1 -1
  55. package/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map +1 -1
  56. package/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js +3 -3
  57. package/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map +1 -1
  58. package/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js +2 -2
  59. package/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map +1 -1
  60. package/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js +11 -6
  61. package/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map +1 -1
  62. package/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js +13 -0
  63. package/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map +1 -1
  64. package/dist-esm/storage-blob/src/sas/SASQueryParameters.js +2 -2
  65. package/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map +1 -1
  66. package/dist-esm/storage-blob/src/utils/Batch.js.map +1 -1
  67. package/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js +1 -1
  68. package/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map +1 -1
  69. package/dist-esm/storage-blob/src/utils/Mutex.js.map +1 -1
  70. package/dist-esm/storage-blob/src/utils/RetriableReadableStream.js +1 -2
  71. package/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map +1 -1
  72. package/dist-esm/storage-blob/src/utils/constants.js +8 -8
  73. package/dist-esm/storage-blob/src/utils/constants.js.map +1 -1
  74. package/dist-esm/storage-blob/src/utils/tracing.js +2 -2
  75. package/dist-esm/storage-blob/src/utils/tracing.js.map +1 -1
  76. package/dist-esm/storage-blob/src/utils/utils.common.js +25 -25
  77. package/dist-esm/storage-blob/src/utils/utils.common.js.map +1 -1
  78. package/dist-esm/storage-internal-avro/src/AvroParser.js.map +1 -1
  79. package/dist-esm/storage-internal-avro/src/AvroReader.js +8 -8
  80. package/dist-esm/storage-internal-avro/src/AvroReader.js.map +1 -1
  81. package/package.json +40 -49
  82. package/{typings → types}/3.1/storage-blob.d.ts +205 -5
  83. package/{typings → types}/latest/storage-blob.d.ts +184 -5
package/dist-esm/storage-internal-avro/src/AvroReader.js CHANGED
@@ -1,7 +1,7 @@
  // Copyright (c) Microsoft Corporation.
  // Licensed under the MIT license.
  import { __asyncGenerator, __await } from "tslib";
- import { AVRO_SYNC_MARKER_SIZE, AVRO_INIT_BYTES, AVRO_CODEC_KEY, AVRO_SCHEMA_KEY } from "./AvroConstants";
+ import { AVRO_SYNC_MARKER_SIZE, AVRO_INIT_BYTES, AVRO_CODEC_KEY, AVRO_SCHEMA_KEY, } from "./AvroConstants";
  import { arraysEqual } from "./utils/utils.common";
  import { AvroType, AvroParser } from "./AvroParser";
  import "@azure/core-paging";
@@ -22,7 +22,7 @@ export class AvroReader {
  }
  async initialize(options = {}) {
  const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  });
  if (!arraysEqual(header, AVRO_INIT_BYTES)) {
  throw new Error("Stream is not an Avro file.");
@@ -30,7 +30,7 @@ export class AvroReader {
  // File metadata is written as if defined by the following map schema:
  // { "type": "map", "values": "bytes"}
  this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  });
  // Validate codec
  const codec = this._metadata[AVRO_CODEC_KEY];
@@ -39,7 +39,7 @@ export class AvroReader {
  }
  // The 16-byte, randomly-generated sync marker for this file.
  this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  });
  // Parse the schema
  const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);
@@ -48,7 +48,7 @@ export class AvroReader {
  this._blockOffset = this._initialBlockOffset + this._dataStream.position;
  }
  this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  });
  // skip block length
  await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });
@@ -70,13 +70,13 @@ export class AvroReader {
  }
  while (this.hasNext()) {
  const result = yield __await(this._itemType.read(this._dataStream, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  }));
  this._itemsRemainingInBlock--;
  this._objectIndex++;
  if (this._itemsRemainingInBlock == 0) {
  const marker = yield __await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  }));
  this._blockOffset = this._initialBlockOffset + this._dataStream.position;
  this._objectIndex = 0;
@@ -85,7 +85,7 @@ export class AvroReader {
  }
  try {
  this._itemsRemainingInBlock = yield __await(AvroParser.readLong(this._dataStream, {
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
  }));
  }
  catch (err) {
package/dist-esm/storage-internal-avro/src/AvroReader.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"AvroReader.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAGlC,OAAO,EACL,qBAAqB,EACrB,eAAe,EACf,cAAc,EACd,eAAe,EAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACnD,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AACpD,OAAO,oBAAoB,CAAC;AAc5B,MAAM,OAAO,UAAU;IAuCrB,YACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC;QAEhC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;QAChD,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;IACrD,CAAC;IAhCD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAGD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IA2BO,KAAK,CAAC,UAAU,CAAC,UAA4B,EAAE;QACrD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;SAChD;QAED,sEAAsE;QACtE,sCAAsC;QACtC,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,iBAAiB;QACjB,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAI,CAAC,CAAC,KAAK,IAAI,SAAS,IAAI,KAAK,IAAI,MAAM,CAAC,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,6DAA6D;QAC7D,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,mBAAmB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAE7C,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,EAAE;YAC1B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;SAC1E;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,oBAAoB;QACpB,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAElF,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;gBAC1C,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;aAChC;SACF;IACH,CAAC;IAEM,OAAO;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;IAChE,CAAC;IAEa,YAAY,CACxB,UAA4B,EAAE;;YAE9B,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACtB,cAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;aAChC;YAED,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;gBACrB,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;iBACjC,CAAC,CAAA,CAAC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;gBAErB,IAAI,IAAI,CAAC,sBAAsB,IAAI,CAAC,EAAE;oBACpC,MAAM,MAAM,GAAG,cAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;qBACjC,CAAC,CAAA,CAAC;oBAEH,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;oBACzE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;wBAC3C,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;qBACrD;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;yBACjC,CAAC,CAAA,CAAC;qBACJ;oBAAC,OAAO,GAAG,EAAE;wBACZ,gCAAgC;wBAChC,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;qBACjC;oBAED,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;wBACpC,oB
AAoB;wBACpB,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;qBACnF;iBACF;gBACD,oBAAM,MAAM,CAAA,CAAC;aACd;QACH,CAAC;KAAA;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable } from \"./AvroReadable\";\nimport {\n AVRO_SYNC_MARKER_SIZE,\n AVRO_INIT_BYTES,\n AVRO_CODEC_KEY,\n AVRO_SCHEMA_KEY\n} from \"./AvroConstants\";\nimport { arraysEqual } from \"./utils/utils.common\";\nimport { AvroType, AvroParser } from \"./AvroParser\";\nimport \"@azure/core-paging\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the &commat;azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record<string, string>;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}) {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec == undefined || codec == \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset == 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n }\n\n 
this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator<Record<string, any> | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock == 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal\n });\n } catch (err) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n"]}
+ {"version":3,"file":"AvroReader.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAGlC,OAAO,EACL,qBAAqB,EACrB,eAAe,EACf,cAAc,EACd,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACnD,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AACpD,OAAO,oBAAoB,CAAC;AAc5B,MAAM,OAAO,UAAU;IAuCrB,YACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC;QAEhC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;QAChD,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;IACrD,CAAC;IAhCD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAGD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IA2BO,KAAK,CAAC,UAAU,CAAC,UAA4B,EAAE;QACrD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;SAChD;QAED,sEAAsE;QACtE,sCAAsC;QACtC,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,iBAAiB;QACjB,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAI,CAAC,CAAC,KAAK,IAAI,SAAS,IAAI,KAAK,IAAI,MAAM,CAAC,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,6DAA6D;QAC7D,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,mBAAmB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAE7C,IAAI,IAAI,CAAC,YAAY,IAAI,CAAC,EAAE;YAC1B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;SAC1E;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,oBAAoB;QACpB,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAElF,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;gBAC1C,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;aAChC;SACF;IACH,CAAC;IAEM,OAAO;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;IAChE,CAAC;IAEa,YAAY,CACxB,UAA4B,EAAE;;YAE9B,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACtB,cAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;aAChC;YAED,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;gBACrB,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;iBACjC,CAAC,CAAA,CAAC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;gBAErB,IAAI,IAAI,CAAC,sBAAsB,IAAI,CAAC,EAAE;oBACpC,MAAM,MAAM,GAAG,cAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;qBACjC,CAAC,CAAA,CAAC;oBAEH,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;oBACzE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;wBAC3C,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;qBACrD;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;yBACjC,CAAC,CAAA,CAAC;qBACJ;oBAAC,OAAO,GAAG,EAAE;wBACZ,gCAAgC;wBAChC,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;qBACjC;oBAED,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;wBACpC,oB
AAoB;wBACpB,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;qBACnF;iBACF;gBACD,oBAAM,MAAM,CAAA,CAAC;aACd;QACH,CAAC;KAAA;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable } from \"./AvroReadable\";\nimport {\n AVRO_SYNC_MARKER_SIZE,\n AVRO_INIT_BYTES,\n AVRO_CODEC_KEY,\n AVRO_SCHEMA_KEY,\n} from \"./AvroConstants\";\nimport { arraysEqual } from \"./utils/utils.common\";\nimport { AvroType, AvroParser } from \"./AvroParser\";\nimport \"@azure/core-paging\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the &commat;azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record<string, string>;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}) {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec == undefined || codec == \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset == 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n 
}\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator<Record<string, any> | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock == 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n"]}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@azure/storage-blob",
  "sdk-type": "client",
- "version": "12.9.0-beta.2",
+ "version": "12.9.0-beta.3",
  "description": "Microsoft Azure Storage SDK for JavaScript - Blob",
  "main": "./dist/index.js",
  "module": "./dist-esm/storage-blob/src/index.js",
@@ -20,11 +20,11 @@
  "os": false,
  "process": false
  },
- "types": "./typings/latest/storage-blob.d.ts",
+ "types": "./types/latest/storage-blob.d.ts",
  "typesVersions": {
  "<3.6": {
  "*": [
- "./typings/3.1/storage-blob.d.ts"
+ "./types/3.1/storage-blob.d.ts"
  ]
  }
  },
@@ -33,20 +33,18 @@
  },
  "scripts": {
  "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit",
- "build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1",
- "build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1",
- "build:nodebrowser": "rollup -c 2>&1",
- "build:samples": "cross-env ONLY_NODE=true npm run build && npm run build:prep-samples",
- "build:prep-samples": "dev-tool samples prep && cd dist-samples && tsc",
- "build:test": "tsc -p . && rollup -c rollup.test.config.js 2>&1",
- "build:types": "downlevel-dts typings/latest typings/3.1",
- "build": "npm run clean && tsc -p . && npm run build:nodebrowser && api-extractor run --local && npm run build:types",
- "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"",
- "clean": "rimraf dist dist-* typings temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml",
- "clean:samples": "rimraf samples/javascript/node_modules samples/typescript/node_modules samples/typescript/dist samples/typescript/package-lock.json samples/javascript/package-lock.json",
+ "build:browser": "tsc -p . && dev-tool run bundle",
+ "build:node": "tsc -p . && dev-tool run bundle",
+ "build:test": "tsc -p . && dev-tool run bundle",
+ "build:types": "downlevel-dts types/latest types/3.1",
+ "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types",
+ "build:samples": "echo Obsolete;",
+ "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"",
+ "clean": "rimraf dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml",
+ "clean:samples": "rimraf samples/v12/javascript/node_modules samples/v12/typescript/node_modules samples/v12/typescript/dist samples/v12/typescript/package-lock.json samples/v12/javascript/package-lock.json",
  "extract-api": "tsc -p . && api-extractor run --local",
- "execute:samples": "npm run build:samples && dev-tool samples run dist-samples/javascript dist-samples/typescript/src/",
- "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"",
+ "execute:samples": "dev-tool samples run samples-dev",
+ "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"",
  "integration-test:browser": "karma start --single-run",
  "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 300000 \"dist-esm/storage-blob/test/*.spec.js\" \"dist-esm/storage-blob/test/node/*.spec.js\"",
  "integration-test": "npm run integration-test:node && npm run integration-test:browser",
@@ -58,10 +56,9 @@
  "test:node": "npm run clean && npm run build:test && npm run unit-test:node",
  "test": "npm run clean && npm run build:test && npm run unit-test",
  "unit-test:browser": "karma start --single-run",
- "unit-test:node": "mocha --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 120000 \"dist-test/index.node.js\"",
+ "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"",
  "unit-test": "npm run unit-test:node && npm run unit-test:browser",
- "emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node",
- "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src"
+ "emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node"
  },
  "files": [
  "BreakingChanges.md",
@@ -69,8 +66,8 @@
  "dist-esm/storage-blob/src/",
  "dist-esm/storage-internal-avro/src/",
  "dist-esm/storage-common/src/",
- "typings/latest/storage-blob.d.ts",
- "typings/3.1/storage-blob.d.ts",
+ "types/latest/storage-blob.d.ts",
+ "types/3.1/storage-blob.d.ts",
  "README.md",
  "LICENSE"
  ],
@@ -110,19 +107,25 @@
  },
  "//sampleConfiguration": {
  "skip": [
- "advanced.js",
- "anonymousCred.js",
+ "advancedRequestOptions.js",
+ "anonymousAuth.js",
  "azureAdAuth.js",
- "basic.js",
  "customPipeline.js",
  "customizedClientHeaders.js",
- "iterators-blobs-hierarchy.js",
- "iterators-blobs.js",
- "iterators-containers.js",
- "iterators-without-await.js",
- "readingSnapshot.js",
- "sharedKeyCred.js"
- ]
+ "listBlobsByHierarchy.js",
+ "listBlobs.js",
+ "listContainers.js",
+ "snapshots.js",
+ "sharedKeyAuth.js"
+ ],
+ "productName": "Azure Storage Blob",
+ "productSlugs": [
+ "azure",
+ "azure-storage"
+ ],
+ "requiredResources": {
+ "Azure Storage Account": "https://docs.microsoft.com/azure/storage/common/storage-account-overview"
+ }
  },
  "dependencies": {
  "@azure/abort-controller": "^1.0.0",
@@ -135,7 +138,6 @@
  "tslib": "^2.2.0"
  },
  "devDependencies": {
- "@azure/core-rest-pipeline": "^1.1.0",
  "@azure/dev-tool": "^1.0.0",
  "@azure/eslint-plugin-azure-sdk": "^3.0.0",
  "@azure/identity": "^2.0.1",
@@ -143,16 +145,14 @@
  "@azure-tools/test-recorder": "^1.0.0",
  "@azure/test-utils-perf": "^1.0.0",
  "@microsoft/api-extractor": "^7.18.11",
- "@rollup/plugin-multi-entry": "^3.0.0",
- "@rollup/plugin-replace": "^2.2.0",
- "@rollup/plugin-json": "^4.0.0",
+ "@types/chai": "^4.1.6",
  "@types/mocha": "^7.0.2",
  "@types/node": "^12.0.0",
  "@types/node-fetch": "^2.5.0",
- "assert": "^1.4.1",
+ "chai": "^4.2.0",
  "cross-env": "^7.0.2",
  "dotenv": "^8.2.0",
- "downlevel-dts": "~0.4.0",
+ "downlevel-dts": "^0.8.0",
  "es6-promise": "^4.2.5",
  "eslint": "^7.15.0",
  "esm": "^3.2.18",
@@ -171,23 +171,14 @@
  "karma-mocha-reporter": "^2.2.5",
  "karma-sourcemap-loader": "^0.3.8",
  "mocha": "^7.1.1",
- "mocha-junit-reporter": "^1.18.0",
- "node-fetch": "^2.6.0",
- "nyc": "^14.0.0",
- "prettier": "^1.16.4",
+ "mocha-junit-reporter": "^2.0.0",
+ "nyc": "^15.0.0",
+ "prettier": "^2.5.1",
  "puppeteer": "^10.2.0",
  "rimraf": "^3.0.0",
- "rollup": "^1.16.3",
- "@rollup/plugin-commonjs": "11.0.2",
- "@rollup/plugin-node-resolve": "^8.0.0",
- "rollup-plugin-shim": "^1.0.0",
- "rollup-plugin-sourcemaps": "^0.4.2",
- "rollup-plugin-terser": "^5.1.1",
- "rollup-plugin-visualizer": "^4.0.4",
  "source-map-support": "^0.5.9",
  "ts-node": "^10.0.0",
  "typescript": "~4.2.0",
- "typedoc": "0.15.2",
  "util": "^0.12.1"
  }
  }
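For context on the typings → types rename above: the "types" and "typesVersions" fields decide which declaration file a consumer's compiler loads. A minimal sketch of what that means on the consumer side, assuming nothing beyond a normal package import; the version routing described in the comments is standard TypeScript behavior applied to the paths shown in this diff, and the environment variable is a placeholder:

```ts
// Importing the package resolves its declarations through package.json:
//   TypeScript >= 3.6  ->  "types": "./types/latest/storage-blob.d.ts"
//   TypeScript <  3.6  ->  "typesVersions"["<3.6"]["*"] -> "./types/3.1/storage-blob.d.ts"
// (both paths previously lived under ./typings/)
import { BlobServiceClient } from "@azure/storage-blob";

// Placeholder connection string, only so the snippet compiles and runs.
const client: BlobServiceClient = BlobServiceClient.fromConnectionString(
  process.env.AZURE_STORAGE_CONNECTION_STRING || ""
);
console.log(client.url);
```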
@@ -1252,7 +1252,13 @@ export declare class BlobClient extends StorageClient {
  private _containerName;
  private _versionId?;
  private _snapshot?;
+ /*
+ * The name of the blob.
+ */
  readonly name: string;
+ /*
+ * The name of the storage container the blob is associated with.
+ */
  readonly containerName: string;
  /**
  *
@@ -2449,7 +2455,17 @@ export declare class BlobLeaseClient {
  private _url;
  private _containerOrBlobOperation;
  private _isContainer;
+ /*
+ * Gets the lease Id.
+ *
+ * @readonly
+ */
  readonly leaseId: string;
+ /*
+ * Gets the url.
+ *
+ * @readonly
+ */
  readonly url: string;
  /**
  * Creates an instance of BlobLeaseClient.
@@ -5003,6 +5019,9 @@ export declare class ContainerClient extends StorageClient {
  */
  private containerContext;
  private _containerName;
+ /*
+ * The name of the container.
+ */
  readonly containerName: string;
  /**
  *
@@ -5380,7 +5399,7 @@ export declare class ContainerClient extends StorageClient {
  * if (item.kind === "prefix") {
  * console.log(`\tBlobPrefix: ${item.name}`);
  * } else {
- * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
+ * console.log(`\tBlobItem: name - ${item.name}`);
  * }
  * }
  * ```
@@ -5395,7 +5414,7 @@ export declare class ContainerClient extends StorageClient {
  * if (item.kind === "prefix") {
  * console.log(`\tBlobPrefix: ${item.name}`);
  * } else {
- * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
+ * console.log(`\tBlobItem: name - ${item.name}`);
  * }
  * entity = await iter.next();
  * }
@@ -5413,7 +5432,7 @@ export declare class ContainerClient extends StorageClient {
  * }
  * }
  * for (const blob of response.segment.blobItems) {
- * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
+ * console.log(`\tBlobItem: name - ${blob.name}`);
  * }
  * }
  * ```
@@ -5424,7 +5443,9 @@ export declare class ContainerClient extends StorageClient {
  * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size");
  *
  * let i = 1;
- * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) {
+ * for await (const response of containerClient
+ * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" })
+ * .byPage({ maxPageSize: 2 })) {
  * console.log(`Page ${i++}`);
  * const segment = response.segment;
  *
@@ -5435,7 +5456,7 @@ export declare class ContainerClient extends StorageClient {
  * }
  *
  * for (const blob of response.segment.blobItems) {
- * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
+ * console.log(`\tBlobItem: name - ${blob.name}`);
  * }
  * }
  * ```
@@ -5448,6 +5469,129 @@ export declare class ContainerClient extends StorageClient {
  } & BlobPrefix) | ({
  kind: "blob";
  } & BlobItem), ContainerListBlobHierarchySegmentResponse>;
+ /**
+ * The Filter Blobs operation enables callers to list blobs in the container whose tags
+ * match a given search expression.
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to find blobs by tags.
+ */
+ private findBlobsByTagsSegment;
+ /**
+ * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to find blobs by tags.
+ */
+ private findBlobsByTagsSegments;
+ /**
+ * Returns an AsyncIterableIterator for blobs.
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to findBlobsByTagsItems.
+ */
+ private findBlobsByTagsItems;
+ /**
+ * Returns an async iterable iterator to find all blobs with specified tag
+ * under the specified container.
+ *
+ * .byPage() returns an async iterable iterator to list the blobs in pages.
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'");
+ * let blobItem = await iter.next();
+ * while (!blobItem.done) {
+ * console.log(`Blob ${i++}: ${blobItem.value.name}`);
+ * blobItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = containerClient
+ * .findBlobsByTags("tagkey='tagvalue'")
+ * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * ```
+ *
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param options - Options to find blobs by tags.
+ */
+ findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator<FilterBlobItem, ContainerFindBlobsByTagsSegmentResponse>;
  private getContainerNameFromUrl;
  /**
  * Only available for ContainerClient constructed with a shared key credential.
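The hunk above is the headline API addition in this typings diff: ContainerClient gains a findBlobsByTags method, mirroring the service-level BlobServiceClient.findBlobsByTags referenced in its options type. A minimal usage sketch, assuming a connection string in AZURE_STORAGE_CONNECTION_STRING and a container named "my-container" (both placeholders); only the findBlobsByTags call itself comes from this diff, the client setup is ordinary @azure/storage-blob usage:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function main(): Promise<void> {
  // Placeholder environment variable and container name.
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING || ""
  );
  const containerClient = serviceClient.getContainerClient("my-container");

  // New in 12.9.0-beta.3: filter blobs by tag within a single container.
  // The expression uses the (subset of OData) filter syntax documented above.
  for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) {
    console.log(`Found blob: ${blob.name}`);
  }
}

main().catch(console.error);
```

As the doc comment shows, the returned iterator also supports .byPage({ maxPageSize, continuationToken }) for explicit paging.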
@@ -5599,6 +5743,49 @@ export declare interface ContainerExistsOptions extends CommonOptions {
  */
  abortSignal?: AbortSignalLike;
  }
+ /** Defines headers for Container_filterBlobs operation. */
+ export declare interface ContainerFilterBlobsHeaders {
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ }
+ /**
+ * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.
+ */
+ export declare interface ContainerFindBlobByTagsOptions extends CommonOptions {
+ /**
+ * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.
+ * For example, use the &commat;azure/abort-controller to create an `AbortSignal`.
+ */
+ abortSignal?: AbortSignalLike;
+ }
+ /**
+ * The response of {@link BlobServiceClient.findBlobsByTags} operation.
+ */
+ export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & {
+ /**
+ * The underlying HTTP response.
+ */
+ _response: HttpResponse & {
+ /**
+ * The parsed HTTP response headers.
+ */
+ parsedHeaders: ContainerFilterBlobsHeaders;
+ /**
+ * The response body as text (string format)
+ */
+ bodyAsText: string;
+ /**
+ * The response body as parsed JSON or XML
+ */
+ parsedBody: FilterBlobSegmentModel;
+ };
+ };
  /**
  * Options to configure {@link ContainerClient.generateSasUrl} operation.
  */
@@ -6013,6 +6200,10 @@ export declare class ContainerSASPermissions {
  * Specifies that Permanent Delete is permitted.
  */
  permanentDelete: boolean;
+ /**
+ * Specifies that Filter Blobs by Tags is permitted.
+ */
+ filterByTags: boolean;
  /**
  * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
  * order accepted by the service.
@@ -6076,6 +6267,10 @@ export declare interface ContainerSASPermissionsLike {
  * Specifies that Permanent Delete is permitted.
  */
  permanentDelete?: boolean;
+ /**
+ * Specifies that Filter Blobs by Tags is permitted.
+ */
+ filterByTags?: boolean;
  }
  /** Defines headers for Container_setAccessPolicy operation. */
  export declare interface ContainerSetAccessPolicyHeaders {
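The two hunks above add a filterByTags flag to ContainerSASPermissions and ContainerSASPermissionsLike, so a container SAS can grant the Filter Blobs by Tags operation. A minimal sketch of setting it when generating a SAS URL; ContainerSASPermissions.parse, generateSasUrl, and its permissions/expiresOn options are pre-existing 12.x API surface assumed here rather than shown in this excerpt, and the one-hour expiry is arbitrary:

```ts
import { ContainerClient, ContainerSASPermissions } from "@azure/storage-blob";

// Requires a ContainerClient constructed with a shared key credential
// (see "Only available for ContainerClient constructed with a shared key credential" above).
async function containerSasWithTagFilter(containerClient: ContainerClient): Promise<string> {
  const permissions = ContainerSASPermissions.parse("r"); // read
  permissions.filterByTags = true; // new flag in 12.9.0-beta.3

  return containerClient.generateSasUrl({
    permissions,
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // arbitrary one-hour expiry
  });
}
```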
@@ -7865,6 +8060,11 @@ export declare class SASQueryParameters {
  * This is only used for User Delegation SAS.
  */
  readonly correlationId?: string;
+ /*
+ * Optional. IP range allowed for this SAS.
+ *
+ * @readonly
+ */
  readonly ipRange: SasIPRange | undefined;
  /**
  * Creates an instance of SASQueryParameters.