@microsoft/teams-js 2.9.1 → 2.10.0-beta.1
This diff shows the contents of two publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
package/dist/MicrosoftTeams.d.ts
CHANGED
@@ -2075,7 +2075,7 @@ export namespace videoEx {
      * @internal
      * Limited to Microsoft-internal use
      */
-    interface VideoFrame extends video.
+    interface VideoFrame extends video.VideoFrameData {
     /**
      * @hidden
      * The model output if you passed in an {@linkcode VideoFrameConfig.audioInferenceModel}
@@ -6683,7 +6683,7 @@ export namespace video {
      * Represents a video frame
      * @beta
      */
-    interface
+    interface VideoFrameData {
     /**
      * Video frame width
      */
@@ -6695,7 +6695,7 @@ export namespace video {
     /**
      * Video frame buffer
      */
-
+    videoFrameBuffer: Uint8ClampedArray;
     /**
      * NV12 luma stride, valid only when video frame format is NV12
      */
@@ -6718,7 +6718,7 @@ export namespace video {
      * @beta
      */
     enum VideoFrameFormat {
-        NV12 =
+        NV12 = "NV12"
     }
     /**
      * Video frame configuration supplied to the host to customize the generated video frame parameters, like format
@@ -6748,7 +6748,7 @@ export namespace video {
      * Video frame call back function definition
      * @beta
      */
-    type VideoFrameCallback = (frame:
+    type VideoFrameCallback = (frame: VideoFrameData, notifyVideoFrameProcessed: () => void, notifyError: (errorMessage: string) => void) => void;
     /**
      * Predefined failure reasons for preparing the selected video effect
      * @beta
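The renamed field lines up with the existing buffer-based callback flow roughly as follows. This is a minimal sketch rather than library documentation: the `registerForVideoFrame(frameCallback, config)` call and `VideoFrameFormat.NV12` are taken from the surrounding declarations, while `applyNv12Effect` is a hypothetical app-side helper.

```typescript
import { video } from "@microsoft/teams-js";

// Hypothetical in-place effect on the NV12 buffer, for illustration only.
function applyNv12Effect(buffer: Uint8ClampedArray, width: number, height: number): void {
  // ... mutate the buffer ...
}

video.registerForVideoFrame(
  (frame, notifyVideoFrameProcessed, notifyError) => {
    try {
      // 2.10.0-beta.1 exposes the pixel data as `videoFrameBuffer` on VideoFrameData.
      applyNv12Effect(frame.videoFrameBuffer, frame.width, frame.height);
      notifyVideoFrameProcessed();
    } catch (error) {
      notifyError(`${error}`);
    }
  },
  { format: video.VideoFrameFormat.NV12 },
);
```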
@@ -6801,6 +6801,56 @@ export namespace video {
      *
      */
     function isSupported(): boolean;
+    /**
+     * @beta
+     * Namespace to get video frames from a media stream.
+     * When the host supports this capability, developers should call {@link mediaStream.registerForVideoFrame} instead of {@link registerForVideoFrame} to get the video frames; the callback passed to {@link registerForVideoFrame} is ignored when the host supports this capability.
+     */
+    namespace mediaStream {
+        /**
+         * @beta
+         * Checks if video.mediaStream capability is supported by the host
+         * @returns boolean to represent whether the video.mediaStream capability is supported
+         *
+         * @throws Error if {@linkcode app.initialize} has not successfully completed
+         *
+         */
+        function isSupported(): boolean;
+        /**
+         * @beta
+         * Video frame data extracted from the media stream. More properties may be added in the future.
+         */
+        type MediaStreamFrameData = {
+            /**
+             * The video frame from the media stream.
+             */
+            videoFrame: VideoFrame;
+        };
+        /**
+         * @beta
+         * Video frame callback function definition.
+         * The video app should resolve the promise to notify a successfully processed video frame.
+         * The video app should reject the promise to notify a failure.
+         */
+        type VideoFrameCallback = (receivedVideoFrame: MediaStreamFrameData) => Promise<VideoFrame>;
+        /**
+         * @beta
+         * Register to read the video frames from the media stream provided by the host.
+         * @param frameCallback - The callback to invoke when a video frame is received from the media stream.
+         * @example
+         * ```typescript
+         * video.mediaStream.registerForVideoFrame(async (receivedVideoFrame) => {
+         *   const { videoFrame } = receivedVideoFrame;
+         *   try {
+         *     return await processVideoFrame(videoFrame);
+         *   } catch (error) {
+         *     throw error;
+         *   }
+         * });
+         * ```
+         */
+        function registerForVideoFrame(frameCallback: VideoFrameCallback): void;
+    }
 }

 /**
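Taken together, the new declarations support a flow like the one below. This is a sketch assuming a hypothetical `processVideoFrame` helper (the same name used in the JSDoc example above) that returns a processed `VideoFrame`.

```typescript
import { app, video } from "@microsoft/teams-js";

// Hypothetical app-side processing function, as in the JSDoc example above.
declare function processVideoFrame(frame: VideoFrame): Promise<VideoFrame>;

async function setUpVideoEffects(): Promise<void> {
  await app.initialize();
  if (video.mediaStream.isSupported()) {
    // Hosts with media-stream support deliver whole VideoFrame objects.
    video.mediaStream.registerForVideoFrame(async ({ videoFrame }) => processVideoFrame(videoFrame));
  } else {
    // Otherwise keep using the buffer-based video.registerForVideoFrame flow.
  }
}
```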
@@ -7623,19 +7673,7 @@ export interface IFluidTenantInfo {
     /**
      * The Fluid service endpoint Live Share should use.
      */
-    serviceEndpoint
-    /**
-     * @deprecated
-     * As of Fluid 1.0 this configuration information has been deprecated in favor of
-     * `serviceEndpoint`.
-     */
-    ordererEndpoint: string;
-    /**
-     * @deprecated
-     * As of Fluid 1.0 this configuration information has been deprecated in favor of
-     * `serviceEndpoint`.
-     */
-    storageEndpoint: string;
+    serviceEndpoint: string;
 }
 /**
  * Live Share host implementation for O365 and Teams clients.
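For Live Share consumers, the deprecated `ordererEndpoint` and `storageEndpoint` fields are removed and `serviceEndpoint` is now the single required endpoint. A migration sketch, assuming the `LiveShareHost.create()` and `getFluidTenantInfo()` APIs exported by this package and a hypothetical `connectToFluidService` helper:

```typescript
import { LiveShareHost } from "@microsoft/teams-js";

// Hypothetical app-side helper that only needs the single remaining endpoint.
declare function connectToFluidService(serviceEndpoint: string): Promise<void>;

async function joinLiveShareSession(): Promise<void> {
  const host = LiveShareHost.create();
  const tenantInfo = await host.getFluidTenantInfo();
  // Read serviceEndpoint instead of the removed ordererEndpoint / storageEndpoint.
  await connectToFluidService(tenantInfo.serviceEndpoint);
}
```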
package/dist/MicrosoftTeams.js
CHANGED
@@ -1969,7 +1969,6 @@ var teamsRuntimeConfig = {
            fullTrust: {},
        },
        remoteCamera: {},
-        sharing: {},
        stageView: {},
        teams: {
            fullTrust: {},
@@ -2057,6 +2056,10 @@ var versionConstants = {
            capability: { people: {} },
            hostClientTypes: v1HostClientTypes,
        },
+        {
+            capability: { sharing: {} },
+            hostClientTypes: [HostClientType.desktop, HostClientType.web],
+        },
    ],
    '2.0.1': [
        {
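The `sharing` capability moves out of the default Teams runtime config (removed above) and into the version-gated `versionConstants` table, where it is mapped only to desktop and web host clients. App code should therefore keep guarding on the capability before using it; a sketch assuming the public `sharing.shareWebContent` API, with an illustrative request payload:

```typescript
import { sharing } from "@microsoft/teams-js";

async function shareCurrentPage(url: string): Promise<void> {
  // With this change, mobile hosts and hosts below the required version
  // report the capability as unsupported.
  if (!sharing.isSupported()) {
    return;
  }
  await sharing.shareWebContent({
    content: [{ type: "URL", url, preview: true }],
  });
}
```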
@@ -2155,7 +2158,7 @@ var _minRuntimeConfigToUninitialize = {
 };

 ;// CONCATENATED MODULE: ./src/public/version.ts
-var version = "2.
+var version = "2.10.0-beta.1";

 ;// CONCATENATED MODULE: ./src/internal/internalAPIs.ts

@@ -7790,6 +7793,53 @@ var profile;
 })(profile || (profile = {}));

 ;// CONCATENATED MODULE: ./src/public/video.ts
+var video_assign = (undefined && undefined.__assign) || function () {
+    video_assign = Object.assign || function(t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+                t[p] = s[p];
+        }
+        return t;
+    };
+    return video_assign.apply(this, arguments);
+};
+var video_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var video_generator = (undefined && undefined.__generator) || function (thisArg, body) {
+    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+    function verb(n) { return function (v) { return step([n, v]); }; }
+    function step(op) {
+        if (f) throw new TypeError("Generator is already executing.");
+        while (_) try {
+            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+            if (y = 0, t) op = [op[0] & 2, t.value];
+            switch (op[0]) {
+                case 0: case 1: t = op; break;
+                case 4: _.label++; return { value: op[1], done: false };
+                case 5: _.label++; y = op[1]; op = [0]; continue;
+                case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                default:
+                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                    if (t[2]) _.ops.pop();
+                    _.trys.pop(); continue;
+            }
+            op = body.call(thisArg, _);
+        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+    }
+};



@@ -7807,7 +7857,7 @@ var video;
     */
    var VideoFrameFormat;
    (function (VideoFrameFormat) {
-        VideoFrameFormat[
+        VideoFrameFormat["NV12"] = "NV12";
    })(VideoFrameFormat = video.VideoFrameFormat || (video.VideoFrameFormat = {}));
    /**
     * Video effect change type enum
@@ -7851,10 +7901,14 @@ var video;
        if (!isSupported()) {
            throw errorNotSupportedOnPlatform;
        }
-        registerHandler('video.newVideoFrame',
+        registerHandler('video.newVideoFrame',
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        function (videoFrame) {
            if (videoFrame) {
-
-
+                // The host may pass the VideoFrame with the old definition which has `data` instead of `videoFrameBuffer`
+                var videoFrameData = video_assign(video_assign({}, videoFrame), { videoFrameBuffer: videoFrame.videoFrameBuffer || videoFrame.data });
+                var timestamp_1 = videoFrameData.timestamp;
+                frameCallback(videoFrameData, function () {
                notifyVideoFrameProcessed(timestamp_1);
            }, notifyError);
        }
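Before transpilation, the `video_assign` calls in this handler correspond to a plain object spread. The following is a sketch of the source-level shape (not the exact library source), using the module-internal names visible in the compiled code above, to show how hosts that still send the old `data` field are normalized to the new `videoFrameBuffer` field:

```typescript
// Sketch only: approximate pre-compilation form of the handler shown above.
registerHandler('video.newVideoFrame', (videoFrame: any) => {
  if (videoFrame) {
    // Old hosts send `data`; newer hosts send `videoFrameBuffer`.
    const videoFrameData = {
      ...videoFrame,
      videoFrameBuffer: videoFrame.videoFrameBuffer || videoFrame.data,
    };
    const timestamp = videoFrameData.timestamp;
    frameCallback(videoFrameData, () => notifyVideoFrameProcessed(timestamp), notifyError);
  }
});
```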
@@ -7930,6 +7984,165 @@ var video;
         return ensureInitialized(runtime) && runtime.supports.video ? true : false;
     }
     video.isSupported = isSupported;
+    /**
+     * @beta
+     * Namespace to get video frames from a media stream.
+     * When the host supports this capability, developers should call {@link mediaStream.registerForVideoFrame} instead of {@link registerForVideoFrame} to get the video frames; the callback passed to {@link registerForVideoFrame} is ignored when the host supports this capability.
+     */
+    var mediaStream;
+    (function (mediaStream_1) {
+        /**
+         * @beta
+         * Checks if video.mediaStream capability is supported by the host
+         * @returns boolean to represent whether the video.mediaStream capability is supported
+         *
+         * @throws Error if {@linkcode app.initialize} has not successfully completed
+         *
+         */
+        function isSupported() {
+            var _a;
+            return ensureInitialized(runtime) && isTextureStreamAvailable() && !!((_a = runtime.supports.video) === null || _a === void 0 ? void 0 : _a.mediaStream);
+        }
+        mediaStream_1.isSupported = isSupported;
+        function isTextureStreamAvailable() {
+            var _a, _b, _c, _d;
+            return (typeof window !== 'undefined' &&
+                !!(((_b = (_a = window['chrome']) === null || _a === void 0 ? void 0 : _a.webview) === null || _b === void 0 ? void 0 : _b.getTextureStream) && ((_d = (_c = window['chrome']) === null || _c === void 0 ? void 0 : _c.webview) === null || _d === void 0 ? void 0 : _d.registerTextureStream)));
+        }
+        /**
+         * @beta
+         * Register to read the video frames from the media stream provided by the host.
+         * @param frameCallback - The callback to invoke when a video frame is received from the media stream.
+         * @example
+         * ```typescript
+         * video.mediaStream.registerForVideoFrame(async (receivedVideoFrame) => {
+         *   const { videoFrame } = receivedVideoFrame;
+         *   try {
+         *     return await processVideoFrame(videoFrame);
+         *   } catch (error) {
+         *     throw error;
+         *   }
+         * });
+         * ```
+         */
+        function registerForVideoFrame(frameCallback) {
+            var _this = this;
+            ensureInitialized(runtime, FrameContexts.sidePanel);
+            if (!isSupported()) {
+                throw errorNotSupportedOnPlatform;
+            }
+            registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return video_awaiter(_this, void 0, void 0, function () {
+                var streamId, videoTrack, generator;
+                var _a, _b;
+                return video_generator(this, function (_c) {
+                    switch (_c.label) {
+                        case 0:
+                            streamId = mediaStreamInfo.streamId;
+                            return [4 /*yield*/, getInputVideoTrack(streamId)];
+                        case 1:
+                            videoTrack = _c.sent();
+                            generator = createProcessedStreamGenerator(videoTrack, frameCallback);
+                            // register the video track with processed frames back to the stream:
+                            typeof window !== 'undefined' && ((_b = (_a = window['chrome']) === null || _a === void 0 ? void 0 : _a.webview) === null || _b === void 0 ? void 0 : _b.registerTextureStream(streamId, generator));
+                            return [2 /*return*/];
+                    }
+                });
+            }); });
+            sendMessageToParent('video.mediaStream.registerForVideoFrame', [
+                {
+                    format: VideoFrameFormat.NV12,
+                },
+            ]);
+        }
+        mediaStream_1.registerForVideoFrame = registerForVideoFrame;
+        /**
+         * Get the video track from the media stream gotten from chrome.webview.getTextureStream(streamId).
+         */
+        function getInputVideoTrack(streamId) {
+            return video_awaiter(this, void 0, void 0, function () {
+                var chrome, mediaStream_2, tracks, error_1, errorMsg;
+                return video_generator(this, function (_a) {
+                    switch (_a.label) {
+                        case 0:
+                            if (typeof window === 'undefined') {
+                                throw errorNotSupportedOnPlatform;
+                            }
+                            chrome = window['chrome'];
+                            _a.label = 1;
+                        case 1:
+                            _a.trys.push([1, 3, , 4]);
+                            return [4 /*yield*/, chrome.webview.getTextureStream(streamId)];
+                        case 2:
+                            mediaStream_2 = _a.sent();
+                            tracks = mediaStream_2.getVideoTracks();
+                            if (tracks.length === 0) {
+                                throw new Error("No video track in stream ".concat(streamId));
+                            }
+                            return [2 /*return*/, tracks[0]];
+                        case 3:
+                            error_1 = _a.sent();
+                            errorMsg = "Failed to get video track from stream ".concat(streamId, ", error: ").concat(error_1);
+                            notifyError(errorMsg);
+                            throw new Error(errorMsg);
+                        case 4: return [2 /*return*/];
+                    }
+                });
+            });
+        }
+        /**
+         * The function to create a processed video track from the original video track.
+         * It reads frames from the video track and pipes them to the video frame callback to process the frames.
+         * The processed frames are then enqueued to the generator.
+         * The generator can be registered back to the media stream so that the host can get the processed frames.
+         */
+        function createProcessedStreamGenerator(videoTrack, videoFrameCallback) {
+            var processor = new MediaStreamTrackProcessor({ track: videoTrack });
+            var source = processor.readable;
+            var generator = new MediaStreamTrackGenerator({ kind: 'video' });
+            var sink = generator.writable;
+            source
+                .pipeThrough(new TransformStream({
+                transform: function (originalFrame, controller) {
+                    return video_awaiter(this, void 0, void 0, function () {
+                        var timestamp, frameProcessedByApp, processedFrame, error_2;
+                        return video_generator(this, function (_a) {
+                            switch (_a.label) {
+                                case 0:
+                                    timestamp = originalFrame.timestamp;
+                                    if (!(timestamp !== null)) return [3 /*break*/, 5];
+                                    _a.label = 1;
+                                case 1:
+                                    _a.trys.push([1, 3, , 4]);
+                                    return [4 /*yield*/, videoFrameCallback({ videoFrame: originalFrame })];
+                                case 2:
+                                    frameProcessedByApp = _a.sent();
+                                    processedFrame = new VideoFrame(frameProcessedByApp, {
+                                        // we need the timestamp to be unchanged from the original frame, so we explicitly set it here.
+                                        timestamp: timestamp,
+                                    });
+                                    controller.enqueue(processedFrame);
+                                    originalFrame.close();
+                                    frameProcessedByApp.close();
+                                    return [3 /*break*/, 4];
+                                case 3:
+                                    error_2 = _a.sent();
+                                    originalFrame.close();
+                                    notifyError(error_2);
+                                    return [3 /*break*/, 4];
+                                case 4: return [3 /*break*/, 6];
+                                case 5:
+                                    notifyError('timestamp of the original video frame is null');
+                                    _a.label = 6;
+                                case 6: return [2 /*return*/];
+                            }
+                        });
+                    });
+                },
+            }))
+                .pipeTo(sink);
+            return generator;
+        }
+    })(mediaStream = video.mediaStream || (video.mediaStream = {}));
 })(video || (video = {})); //end of video namespace

 ;// CONCATENATED MODULE: ./src/public/search.ts
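The transpiled `createProcessedStreamGenerator` above is built on the Chromium insertable-streams APIs. Below is a rough source-level sketch of the same pipeline, not the exact library source: `MediaStreamTrackProcessor` and `MediaStreamTrackGenerator` are accessed loosely typed because they are not in the standard TypeScript DOM typings, and `reportError` stands in for the library's internal `notifyError` helper.

```typescript
type StreamFrameCallback = (data: { videoFrame: VideoFrame }) => Promise<VideoFrame>;

function createProcessedStreamGenerator(videoTrack: MediaStreamTrack, frameCallback: StreamFrameCallback): MediaStreamTrack {
  // Chromium-only insertable-streams constructors, accessed loosely typed.
  const processor = new (window as any).MediaStreamTrackProcessor({ track: videoTrack });
  const generator = new (window as any).MediaStreamTrackGenerator({ kind: 'video' });
  const reportError = (message: string): void => console.error(message); // stands in for notifyError

  processor.readable
    .pipeThrough(
      new TransformStream({
        async transform(originalFrame: VideoFrame, controller) {
          const timestamp = originalFrame.timestamp;
          if (timestamp == null) {
            reportError('timestamp of the original video frame is null');
            return;
          }
          try {
            const frameProcessedByApp = await frameCallback({ videoFrame: originalFrame });
            // Re-stamp the processed frame with the original timestamp so frame order is preserved.
            controller.enqueue(new VideoFrame(frameProcessedByApp as unknown as CanvasImageSource, { timestamp }));
            frameProcessedByApp.close();
          } catch (error) {
            reportError(`${error}`);
          } finally {
            originalFrame.close();
          }
        },
      }),
    )
    .pipeTo(generator.writable);

  // The returned track is handed back to the host via chrome.webview.registerTextureStream.
  return generator;
}
```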