@remotion/media-parser 4.0.279 → 4.0.280

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1182,7 +1182,7 @@ var bufferManager = ({
1182
1182
  uintArray = new Uint8Array(buf);
1183
1183
  uintArray.set(newData);
1184
1184
  view = new DataView(uintArray.buffer);
1185
- counter.setDiscardedOffset(0);
1185
+ counter.setDiscardedOffset(seekTo);
1186
1186
  counter.decrement(counter.getOffset());
1187
1187
  counter.increment(seekTo);
1188
1188
  };
@@ -9013,16 +9013,14 @@ var slowDurationAndFpsState = () => {
9013
9013
  let largestVideoSample;
9014
9014
  let smallestAudioSample;
9015
9015
  let largestAudioSample;
9016
- let audioSizesInBytes = 0;
9017
- let videoSizeInBytes = 0;
9018
- let videoSamples = 0;
9019
- let audioSamples = 0;
9016
+ const videoSamples = new Map;
9017
+ const audioSamples = new Map;
9020
9018
  const getSlowVideoDurationInSeconds = () => {
9021
9019
  let videoDuration = null;
9022
9020
  if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
9023
9021
  const startingTimestampDifference = largestVideoSample - smallestVideoSample;
9024
- const timeBetweenSamples = startingTimestampDifference / (videoSamples - 1);
9025
- videoDuration = timeBetweenSamples * videoSamples;
9022
+ const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
9023
+ videoDuration = timeBetweenSamples * videoSamples.size;
9026
9024
  }
9027
9025
  return videoDuration;
9028
9026
  };
@@ -9031,8 +9029,8 @@ var slowDurationAndFpsState = () => {
9031
9029
  let audioDuration = null;
9032
9030
  if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
9033
9031
  const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
9034
- const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples - 1);
9035
- audioDuration = timeBetweenSamplesAudio * audioSamples;
9032
+ const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
9033
+ audioDuration = timeBetweenSamplesAudio * audioSamples.size;
9036
9034
  }
9037
9035
  if (videoDuration === null && audioDuration === null) {
9038
9036
  throw new Error("No samples");
@@ -9041,7 +9039,7 @@ var slowDurationAndFpsState = () => {
9041
9039
  };
9042
9040
  return {
9043
9041
  addVideoSample: (videoSample) => {
9044
- videoSamples++;
9042
+ videoSamples.set(videoSample.cts, videoSample.data.byteLength);
9045
9043
  const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
9046
9044
  if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
9047
9045
  largestVideoSample = presentationTimeInSeconds;
@@ -9049,10 +9047,9 @@ var slowDurationAndFpsState = () => {
9049
9047
  if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
9050
9048
  smallestVideoSample = presentationTimeInSeconds;
9051
9049
  }
9052
- videoSizeInBytes += videoSample.data.byteLength;
9053
9050
  },
9054
9051
  addAudioSample: (audioSample) => {
9055
- audioSamples++;
9052
+ audioSamples.set(audioSample.cts, audioSample.data.byteLength);
9056
9053
  const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
9057
9054
  if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
9058
9055
  largestAudioSample = presentationTimeInSeconds;
@@ -9060,7 +9057,6 @@ var slowDurationAndFpsState = () => {
9060
9057
  if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
9061
9058
  smallestAudioSample = presentationTimeInSeconds;
9062
9059
  }
9063
- audioSizesInBytes += audioSample.data.byteLength;
9064
9060
  },
9065
9061
  getSlowDurationInSeconds,
9066
9062
  getFps: () => {
@@ -9068,22 +9064,24 @@ var slowDurationAndFpsState = () => {
9068
9064
  if (videoDuration === 0) {
9069
9065
  return 0;
9070
9066
  }
9071
- return videoSamples / videoDuration;
9067
+ return videoSamples.size / videoDuration;
9072
9068
  },
9073
- getSlowNumberOfFrames: () => videoSamples,
9069
+ getSlowNumberOfFrames: () => videoSamples.size,
9074
9070
  getAudioBitrate: () => {
9075
9071
  const audioDuration = getSlowDurationInSeconds();
9076
- if (audioDuration === 0 || audioSizesInBytes === 0) {
9072
+ if (audioDuration === 0 || audioSamples.size === 0) {
9077
9073
  return null;
9078
9074
  }
9075
+ const audioSizesInBytes = Array.from(audioSamples.values()).reduce((acc, size) => acc + size, 0);
9079
9076
  return audioSizesInBytes * 8 / audioDuration;
9080
9077
  },
9081
9078
  getVideoBitrate: () => {
9082
9079
  const videoDuration = getSlowDurationInSeconds();
9083
- if (videoDuration === 0 || videoSizeInBytes === 0) {
9080
+ if (videoDuration === 0 || videoSamples.size === 0) {
9084
9081
  return null;
9085
9082
  }
9086
- return videoSizeInBytes * 8 / videoDuration;
9083
+ const videoSizesInBytes = Array.from(videoSamples.values()).reduce((acc, size) => acc + size, 0);
9084
+ return videoSizesInBytes * 8 / videoDuration;
9087
9085
  }
9088
9086
  };
9089
9087
  };
@@ -12964,7 +12962,7 @@ var downloadAndParseMedia = async (options) => {
12964
12962
  return returnValue;
12965
12963
  };
12966
12964
  // src/version.ts
12967
- var VERSION = "4.0.279";
12965
+ var VERSION = "4.0.280";
12968
12966
 
12969
12967
  // src/index.ts
12970
12968
  var MediaParserInternals = {
@@ -2034,7 +2034,7 @@ var bufferManager = ({
2034
2034
  uintArray = new Uint8Array(buf);
2035
2035
  uintArray.set(newData);
2036
2036
  view = new DataView(uintArray.buffer);
2037
- counter.setDiscardedOffset(0);
2037
+ counter.setDiscardedOffset(seekTo);
2038
2038
  counter.decrement(counter.getOffset());
2039
2039
  counter.increment(seekTo);
2040
2040
  };
@@ -7195,16 +7195,14 @@ var slowDurationAndFpsState = () => {
7195
7195
  let largestVideoSample;
7196
7196
  let smallestAudioSample;
7197
7197
  let largestAudioSample;
7198
- let audioSizesInBytes = 0;
7199
- let videoSizeInBytes = 0;
7200
- let videoSamples = 0;
7201
- let audioSamples = 0;
7198
+ const videoSamples = new Map;
7199
+ const audioSamples = new Map;
7202
7200
  const getSlowVideoDurationInSeconds = () => {
7203
7201
  let videoDuration = null;
7204
7202
  if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
7205
7203
  const startingTimestampDifference = largestVideoSample - smallestVideoSample;
7206
- const timeBetweenSamples = startingTimestampDifference / (videoSamples - 1);
7207
- videoDuration = timeBetweenSamples * videoSamples;
7204
+ const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
7205
+ videoDuration = timeBetweenSamples * videoSamples.size;
7208
7206
  }
7209
7207
  return videoDuration;
7210
7208
  };
@@ -7213,8 +7211,8 @@ var slowDurationAndFpsState = () => {
7213
7211
  let audioDuration = null;
7214
7212
  if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
7215
7213
  const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
7216
- const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples - 1);
7217
- audioDuration = timeBetweenSamplesAudio * audioSamples;
7214
+ const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
7215
+ audioDuration = timeBetweenSamplesAudio * audioSamples.size;
7218
7216
  }
7219
7217
  if (videoDuration === null && audioDuration === null) {
7220
7218
  throw new Error("No samples");
@@ -7223,7 +7221,7 @@ var slowDurationAndFpsState = () => {
7223
7221
  };
7224
7222
  return {
7225
7223
  addVideoSample: (videoSample) => {
7226
- videoSamples++;
7224
+ videoSamples.set(videoSample.cts, videoSample.data.byteLength);
7227
7225
  const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
7228
7226
  if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
7229
7227
  largestVideoSample = presentationTimeInSeconds;
@@ -7231,10 +7229,9 @@ var slowDurationAndFpsState = () => {
7231
7229
  if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
7232
7230
  smallestVideoSample = presentationTimeInSeconds;
7233
7231
  }
7234
- videoSizeInBytes += videoSample.data.byteLength;
7235
7232
  },
7236
7233
  addAudioSample: (audioSample) => {
7237
- audioSamples++;
7234
+ audioSamples.set(audioSample.cts, audioSample.data.byteLength);
7238
7235
  const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
7239
7236
  if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
7240
7237
  largestAudioSample = presentationTimeInSeconds;
@@ -7242,7 +7239,6 @@ var slowDurationAndFpsState = () => {
7242
7239
  if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
7243
7240
  smallestAudioSample = presentationTimeInSeconds;
7244
7241
  }
7245
- audioSizesInBytes += audioSample.data.byteLength;
7246
7242
  },
7247
7243
  getSlowDurationInSeconds,
7248
7244
  getFps: () => {
@@ -7250,22 +7246,24 @@ var slowDurationAndFpsState = () => {
7250
7246
  if (videoDuration === 0) {
7251
7247
  return 0;
7252
7248
  }
7253
- return videoSamples / videoDuration;
7249
+ return videoSamples.size / videoDuration;
7254
7250
  },
7255
- getSlowNumberOfFrames: () => videoSamples,
7251
+ getSlowNumberOfFrames: () => videoSamples.size,
7256
7252
  getAudioBitrate: () => {
7257
7253
  const audioDuration = getSlowDurationInSeconds();
7258
- if (audioDuration === 0 || audioSizesInBytes === 0) {
7254
+ if (audioDuration === 0 || audioSamples.size === 0) {
7259
7255
  return null;
7260
7256
  }
7257
+ const audioSizesInBytes = Array.from(audioSamples.values()).reduce((acc, size) => acc + size, 0);
7261
7258
  return audioSizesInBytes * 8 / audioDuration;
7262
7259
  },
7263
7260
  getVideoBitrate: () => {
7264
7261
  const videoDuration = getSlowDurationInSeconds();
7265
- if (videoDuration === 0 || videoSizeInBytes === 0) {
7262
+ if (videoDuration === 0 || videoSamples.size === 0) {
7266
7263
  return null;
7267
7264
  }
7268
- return videoSizeInBytes * 8 / videoDuration;
7265
+ const videoSizesInBytes = Array.from(videoSamples.values()).reduce((acc, size) => acc + size, 0);
7266
+ return videoSizesInBytes * 8 / videoDuration;
7269
7267
  }
7270
7268
  };
7271
7269
  };
@@ -1969,7 +1969,7 @@ var bufferManager = ({
1969
1969
  uintArray = new Uint8Array(buf);
1970
1970
  uintArray.set(newData);
1971
1971
  view = new DataView(uintArray.buffer);
1972
- counter.setDiscardedOffset(0);
1972
+ counter.setDiscardedOffset(seekTo);
1973
1973
  counter.decrement(counter.getOffset());
1974
1974
  counter.increment(seekTo);
1975
1975
  };
@@ -7130,16 +7130,14 @@ var slowDurationAndFpsState = () => {
7130
7130
  let largestVideoSample;
7131
7131
  let smallestAudioSample;
7132
7132
  let largestAudioSample;
7133
- let audioSizesInBytes = 0;
7134
- let videoSizeInBytes = 0;
7135
- let videoSamples = 0;
7136
- let audioSamples = 0;
7133
+ const videoSamples = new Map;
7134
+ const audioSamples = new Map;
7137
7135
  const getSlowVideoDurationInSeconds = () => {
7138
7136
  let videoDuration = null;
7139
7137
  if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
7140
7138
  const startingTimestampDifference = largestVideoSample - smallestVideoSample;
7141
- const timeBetweenSamples = startingTimestampDifference / (videoSamples - 1);
7142
- videoDuration = timeBetweenSamples * videoSamples;
7139
+ const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
7140
+ videoDuration = timeBetweenSamples * videoSamples.size;
7143
7141
  }
7144
7142
  return videoDuration;
7145
7143
  };
@@ -7148,8 +7146,8 @@ var slowDurationAndFpsState = () => {
7148
7146
  let audioDuration = null;
7149
7147
  if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
7150
7148
  const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
7151
- const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples - 1);
7152
- audioDuration = timeBetweenSamplesAudio * audioSamples;
7149
+ const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
7150
+ audioDuration = timeBetweenSamplesAudio * audioSamples.size;
7153
7151
  }
7154
7152
  if (videoDuration === null && audioDuration === null) {
7155
7153
  throw new Error("No samples");
@@ -7158,7 +7156,7 @@ var slowDurationAndFpsState = () => {
7158
7156
  };
7159
7157
  return {
7160
7158
  addVideoSample: (videoSample) => {
7161
- videoSamples++;
7159
+ videoSamples.set(videoSample.cts, videoSample.data.byteLength);
7162
7160
  const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
7163
7161
  if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
7164
7162
  largestVideoSample = presentationTimeInSeconds;
@@ -7166,10 +7164,9 @@ var slowDurationAndFpsState = () => {
7166
7164
  if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
7167
7165
  smallestVideoSample = presentationTimeInSeconds;
7168
7166
  }
7169
- videoSizeInBytes += videoSample.data.byteLength;
7170
7167
  },
7171
7168
  addAudioSample: (audioSample) => {
7172
- audioSamples++;
7169
+ audioSamples.set(audioSample.cts, audioSample.data.byteLength);
7173
7170
  const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
7174
7171
  if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
7175
7172
  largestAudioSample = presentationTimeInSeconds;
@@ -7177,7 +7174,6 @@ var slowDurationAndFpsState = () => {
7177
7174
  if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
7178
7175
  smallestAudioSample = presentationTimeInSeconds;
7179
7176
  }
7180
- audioSizesInBytes += audioSample.data.byteLength;
7181
7177
  },
7182
7178
  getSlowDurationInSeconds,
7183
7179
  getFps: () => {
@@ -7185,22 +7181,24 @@ var slowDurationAndFpsState = () => {
7185
7181
  if (videoDuration === 0) {
7186
7182
  return 0;
7187
7183
  }
7188
- return videoSamples / videoDuration;
7184
+ return videoSamples.size / videoDuration;
7189
7185
  },
7190
- getSlowNumberOfFrames: () => videoSamples,
7186
+ getSlowNumberOfFrames: () => videoSamples.size,
7191
7187
  getAudioBitrate: () => {
7192
7188
  const audioDuration = getSlowDurationInSeconds();
7193
- if (audioDuration === 0 || audioSizesInBytes === 0) {
7189
+ if (audioDuration === 0 || audioSamples.size === 0) {
7194
7190
  return null;
7195
7191
  }
7192
+ const audioSizesInBytes = Array.from(audioSamples.values()).reduce((acc, size) => acc + size, 0);
7196
7193
  return audioSizesInBytes * 8 / audioDuration;
7197
7194
  },
7198
7195
  getVideoBitrate: () => {
7199
7196
  const videoDuration = getSlowDurationInSeconds();
7200
- if (videoDuration === 0 || videoSizeInBytes === 0) {
7197
+ if (videoDuration === 0 || videoSamples.size === 0) {
7201
7198
  return null;
7202
7199
  }
7203
- return videoSizeInBytes * 8 / videoDuration;
7200
+ const videoSizesInBytes = Array.from(videoSamples.values()).reduce((acc, size) => acc + size, 0);
7201
+ return videoSizesInBytes * 8 / videoDuration;
7204
7202
  }
7205
7203
  };
7206
7204
  };
@@ -67,8 +67,7 @@ const bufferManager = ({ initialData, maxBytes, counter, }) => {
67
67
  uintArray = new Uint8Array(buf);
68
68
  uintArray.set(newData);
69
69
  view = new DataView(uintArray.buffer);
70
- // no more discarded bytes
71
- counter.setDiscardedOffset(0);
70
+ counter.setDiscardedOffset(seekTo);
72
71
  // reset counter to 0
73
72
  counter.decrement(counter.getOffset());
74
73
  // seek to the new position
@@ -6,16 +6,14 @@ const slowDurationAndFpsState = () => {
6
6
  let largestVideoSample;
7
7
  let smallestAudioSample;
8
8
  let largestAudioSample;
9
- let audioSizesInBytes = 0;
10
- let videoSizeInBytes = 0;
11
- let videoSamples = 0;
12
- let audioSamples = 0;
9
+ const videoSamples = new Map();
10
+ const audioSamples = new Map();
13
11
  const getSlowVideoDurationInSeconds = () => {
14
12
  let videoDuration = null;
15
13
  if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
16
14
  const startingTimestampDifference = largestVideoSample - smallestVideoSample;
17
- const timeBetweenSamples = startingTimestampDifference / (videoSamples - 1);
18
- videoDuration = timeBetweenSamples * videoSamples;
15
+ const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
16
+ videoDuration = timeBetweenSamples * videoSamples.size;
19
17
  }
20
18
  return videoDuration;
21
19
  };
@@ -24,8 +22,8 @@ const slowDurationAndFpsState = () => {
24
22
  let audioDuration = null;
25
23
  if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
26
24
  const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
27
- const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples - 1);
28
- audioDuration = timeBetweenSamplesAudio * audioSamples;
25
+ const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
26
+ audioDuration = timeBetweenSamplesAudio * audioSamples.size;
29
27
  }
30
28
  if (videoDuration === null && audioDuration === null) {
31
29
  throw new Error('No samples');
@@ -34,7 +32,7 @@ const slowDurationAndFpsState = () => {
34
32
  };
35
33
  return {
36
34
  addVideoSample: (videoSample) => {
37
- videoSamples++;
35
+ videoSamples.set(videoSample.cts, videoSample.data.byteLength);
38
36
  const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
39
37
  if (largestVideoSample === undefined ||
40
38
  presentationTimeInSeconds > largestVideoSample) {
@@ -44,10 +42,9 @@ const slowDurationAndFpsState = () => {
44
42
  presentationTimeInSeconds < smallestVideoSample) {
45
43
  smallestVideoSample = presentationTimeInSeconds;
46
44
  }
47
- videoSizeInBytes += videoSample.data.byteLength;
48
45
  },
49
46
  addAudioSample: (audioSample) => {
50
- audioSamples++;
47
+ audioSamples.set(audioSample.cts, audioSample.data.byteLength);
51
48
  const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
52
49
  if (largestAudioSample === undefined ||
53
50
  presentationTimeInSeconds > largestAudioSample) {
@@ -57,7 +54,6 @@ const slowDurationAndFpsState = () => {
57
54
  presentationTimeInSeconds < smallestAudioSample) {
58
55
  smallestAudioSample = presentationTimeInSeconds;
59
56
  }
60
- audioSizesInBytes += audioSample.data.byteLength;
61
57
  },
62
58
  getSlowDurationInSeconds,
63
59
  getFps: () => {
@@ -66,22 +62,24 @@ const slowDurationAndFpsState = () => {
66
62
  if (videoDuration === 0) {
67
63
  return 0;
68
64
  }
69
- return videoSamples / videoDuration;
65
+ return videoSamples.size / videoDuration;
70
66
  },
71
- getSlowNumberOfFrames: () => videoSamples,
67
+ getSlowNumberOfFrames: () => videoSamples.size,
72
68
  getAudioBitrate: () => {
73
69
  const audioDuration = getSlowDurationInSeconds();
74
- if (audioDuration === 0 || audioSizesInBytes === 0) {
70
+ if (audioDuration === 0 || audioSamples.size === 0) {
75
71
  return null;
76
72
  }
73
+ const audioSizesInBytes = Array.from(audioSamples.values()).reduce((acc, size) => acc + size, 0);
77
74
  return (audioSizesInBytes * 8) / audioDuration;
78
75
  },
79
76
  getVideoBitrate: () => {
80
77
  const videoDuration = getSlowDurationInSeconds();
81
- if (videoDuration === 0 || videoSizeInBytes === 0) {
78
+ if (videoDuration === 0 || videoSamples.size === 0) {
82
79
  return null;
83
80
  }
84
- return (videoSizeInBytes * 8) / videoDuration;
81
+ const videoSizesInBytes = Array.from(videoSamples.values()).reduce((acc, size) => acc + size, 0);
82
+ return (videoSizesInBytes * 8) / videoDuration;
85
83
  },
86
84
  };
87
85
  };
package/dist/version.d.ts CHANGED
@@ -1 +1 @@
1
- export declare const VERSION = "4.0.279";
1
+ export declare const VERSION = "4.0.280";
package/dist/version.js CHANGED
@@ -2,4 +2,4 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.VERSION = void 0;
4
4
  // Automatically generated on publish
5
- exports.VERSION = '4.0.279';
5
+ exports.VERSION = '4.0.280';
package/package.json CHANGED
@@ -3,15 +3,15 @@
3
3
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-parser"
4
4
  },
5
5
  "name": "@remotion/media-parser",
6
- "version": "4.0.279",
6
+ "version": "4.0.280",
7
7
  "main": "dist/index.js",
8
8
  "sideEffects": false,
9
9
  "devDependencies": {
10
10
  "@types/wicg-file-system-access": "2023.10.5",
11
11
  "eslint": "9.19.0",
12
12
  "@types/bun": "1.2.5",
13
- "@remotion/example-videos": "4.0.279",
14
- "@remotion/eslint-config-internal": "4.0.279"
13
+ "@remotion/example-videos": "4.0.280",
14
+ "@remotion/eslint-config-internal": "4.0.280"
15
15
  },
16
16
  "publishConfig": {
17
17
  "access": "public"