@microsoft/teams-js 2.11.0-beta.2 → 2.12.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/MicrosoftTeams.d.ts +170 -59
- package/dist/MicrosoftTeams.js +450 -87
- package/dist/MicrosoftTeams.js.map +1 -1
- package/dist/MicrosoftTeams.min.js +1 -1
- package/dist/MicrosoftTeams.min.js.map +1 -1
- package/package.json +1 -1
package/dist/MicrosoftTeams.js
CHANGED
@@ -876,6 +876,7 @@ __webpack_require__.d(__webpack_exports__, {
   "remoteCamera": () => (/* reexport */ remoteCamera),
   "returnFocus": () => (/* reexport */ returnFocus),
   "search": () => (/* reexport */ search),
+  "secondaryBrowser": () => (/* reexport */ secondaryBrowser),
   "sendCustomEvent": () => (/* reexport */ sendCustomEvent),
   "sendCustomMessage": () => (/* reexport */ sendCustomMessage),
   "setFrameContext": () => (/* reexport */ setFrameContext),
@@ -1447,6 +1448,14 @@ var errorNotSupportedOnPlatform = { errorCode: ErrorCode.NOT_SUPPORTED_ON_PLATFO
  * Minimum Adaptive Card version supported by the host.
  */
 var minAdaptiveCardVersion = { majorVersion: 1, minorVersion: 5 };
+/**
+ * @hidden
+ *
+ * Adaptive Card version supported by the Teams v1 client.
+ */
+var teamsMinAdaptiveCardVersion = {
+    adaptiveCardSchemaVersion: { majorVersion: 1, minorVersion: 5 },
+};
 
 ;// CONCATENATED MODULE: ./src/internal/utils.ts
 /* eslint-disable @typescript-eslint/ban-types */
@@ -1487,7 +1496,7 @@ function validateHostAgainstPattern(pattern, host) {
  */
 function validateOrigin(messageOrigin) {
     // Check whether the url is in the pre-known allowlist or supplied by user
-    if (messageOrigin
+    if (!isValidHttpsURL(messageOrigin)) {
         return false;
     }
     var messageOriginHost = messageOrigin.host;
@@ -1789,6 +1798,16 @@ function isHostAdaptiveCardSchemaVersionUnsupported(hostAdaptiveCardSchemaVersio
         return true;
     }
 }
+/**
+ * @hidden
+ * Checks if a URL is a HTTPS protocol based URL.
+ * @param url URL to be validated.
+ *
+ * @returns true if the URL is an https URL.
+ */
+function isValidHttpsURL(url) {
+    return url.protocol === 'https:';
+}
 
 ;// CONCATENATED MODULE: ./src/public/runtime.ts
 /* eslint-disable @typescript-eslint/ban-types */
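
Taken together, the two hunks above tighten origin validation: `validateOrigin` now rejects any message origin that is not an https: URL before comparing its host against the allowlist, via the new `isValidHttpsURL` helper. A minimal TypeScript sketch of that order of checks (the allowlist entries and the `isOriginAllowed` name are illustrative, not the SDK's internals):

```typescript
// Sketch only: mirrors the https-first validation added above.
// The allowlist contents and helper names are illustrative, not the SDK's internals.
const validOriginHosts = ['teams.microsoft.com', '*.teams.microsoft.com'];

function isValidHttpsURL(url: URL): boolean {
  return url.protocol === 'https:';
}

function isOriginAllowed(messageOrigin: URL): boolean {
  // Reject anything that is not https before consulting the host allowlist.
  if (!isValidHttpsURL(messageOrigin)) {
    return false;
  }
  return validOriginHosts.some((pattern) =>
    pattern.startsWith('*.')
      ? messageOrigin.host.endsWith(pattern.slice(1))
      : messageOrigin.host === pattern,
  );
}
```
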
@@ -1842,7 +1861,7 @@ function isRuntimeInitialized(runtime) {
 var runtime = _uninitializedRuntime;
 var teamsRuntimeConfig = {
     apiVersion: 2,
-    hostVersionsInfo:
+    hostVersionsInfo: teamsMinAdaptiveCardVersion,
     isLegacyTeams: true,
     supports: {
         appInstallDialog: {},
@@ -1851,6 +1870,9 @@ var teamsRuntimeConfig = {
         chat: {},
         conversations: {},
         dialog: {
+            card: {
+                bot: {},
+            },
             url: {
                 bot: {},
             },
@@ -1875,7 +1897,9 @@ var teamsRuntimeConfig = {
             fullTrust: {},
         },
         teamsCore: {},
-        video: {
+        video: {
+            sharedFrame: {},
+        },
     },
 };
 var v1HostClientTypes = [
@@ -2014,6 +2038,7 @@ function generateBackCompatRuntimeConfig(highestSupportedVersion) {
     });
     var backCompatRuntimeConfig = {
         apiVersion: 2,
+        hostVersionsInfo: teamsMinAdaptiveCardVersion,
         isLegacyTeams: true,
         supports: newSupports,
     };
@@ -2063,7 +2088,7 @@ var _minRuntimeConfigToUninitialize = {
  * @hidden
  * Package version.
  */
-var version = "2.
+var version = "2.12.0-beta.0";
 
 ;// CONCATENATED MODULE: ./src/internal/internalAPIs.ts
 
@@ -2200,6 +2225,11 @@ function processAdditionalValidOrigins(validMessageOrigins) {
     GlobalVars.additionalValidOrigins = combinedOriginUrls;
 }
 
+;// CONCATENATED MODULE: ./src/private/inServerSideRenderingEnvironment.ts
+function inServerSideRenderingEnvironment() {
+    return typeof window === 'undefined';
+}
+
 ;// CONCATENATED MODULE: ./src/public/authentication.ts
 
 
@@ -3043,7 +3073,6 @@ var dialog;
             };
             return dialogInfo;
         }
-        adaptiveCard.getDialogInfoFromAdaptiveCardDialogInfo = getDialogInfoFromAdaptiveCardDialogInfo;
         /**
          * @hidden
          * Hide from docs
@@ -3057,46 +3086,6 @@ var dialog;
             dialogInfo.completionBotId = botAdaptiveCardDialogInfo.completionBotId;
             return dialogInfo;
         }
-        adaptiveCard.getDialogInfoFromBotAdaptiveCardDialogInfo = getDialogInfoFromBotAdaptiveCardDialogInfo;
-        /**
-         * @hidden
-         * Converts {@link TaskInfo} to {@link AdaptiveCardDialogInfo}
-         * @param taskInfo - TaskInfo object to convert
-         * @returns - converted AdaptiveCardDialogInfo
-         */
-        function getAdaptiveCardDialogInfoFromTaskInfo(taskInfo) {
-            // eslint-disable-next-line strict-null-checks/all
-            var adaptiveCardDialogInfo = {
-                card: taskInfo.card,
-                size: {
-                    height: taskInfo.height ? taskInfo.height : DialogDimension.Small,
-                    width: taskInfo.width ? taskInfo.width : DialogDimension.Small,
-                },
-                title: taskInfo.title,
-            };
-            return adaptiveCardDialogInfo;
-        }
-        adaptiveCard.getAdaptiveCardDialogInfoFromTaskInfo = getAdaptiveCardDialogInfoFromTaskInfo;
-        /**
-         * @hidden
-         * Converts {@link TaskInfo} to {@link BotAdaptiveCardDialogInfo}
-         * @param taskInfo - TaskInfo object to convert
-         * @returns - converted BotAdaptiveCardDialogInfo
-         */
-        function getBotAdaptiveCardDialogInfoFromTaskInfo(taskInfo) {
-            /* eslint-disable-next-line strict-null-checks/all */ /* Fix tracked by 5730662 */
-            var botAdaptiveCardDialogInfo = {
-                card: taskInfo.card,
-                size: {
-                    height: taskInfo.height ? taskInfo.height : DialogDimension.Small,
-                    width: taskInfo.width ? taskInfo.width : DialogDimension.Small,
-                },
-                title: taskInfo.title,
-                completionBotId: taskInfo.completionBotId,
-            };
-            return botAdaptiveCardDialogInfo;
-        }
-        adaptiveCard.getBotAdaptiveCardDialogInfoFromTaskInfo = getBotAdaptiveCardDialogInfoFromTaskInfo;
     })(adaptiveCard = dialog.adaptiveCard || (dialog.adaptiveCard = {}));
 })(dialog || (dialog = {}));
 
@@ -3409,6 +3398,7 @@ var teamsCore;
 
 
 
+
 /**
  * Namespace to interact with app initialization and lifecycle.
  */
@@ -3825,9 +3815,6 @@ function transformLegacyContextToAppContext(legacyContext) {
     };
     return context;
 }
-function inServerSideRenderingEnvironment() {
-    return typeof window === 'undefined';
-}
 
 ;// CONCATENATED MODULE: ./src/public/pages.ts
 
@@ -4615,7 +4602,7 @@ function callHandler(name, args) {
         return [true, result];
     }
     else if (Communication.childWindow) {
-        sendMessageEventToChild(name,
+        sendMessageEventToChild(name, args);
         return [false, undefined];
     }
     else {
@@ -6845,6 +6832,56 @@ var ParentAppWindow = /** @class */ (function () {
 }());
 
 
+;// CONCATENATED MODULE: ./src/public/secondaryBrowser.ts
+
+
+
+
+
+
+/**
+ * Namespace to power up the in-app browser experiences in the Host App.
+ * For e.g., opening a URL in the Host App inside a browser
+ *
+ * @beta
+ */
+var secondaryBrowser;
+(function (secondaryBrowser) {
+    /**
+     * Open a URL in the secondary browser aka in-app browser
+     *
+     * @param url Url to open in the browser
+     * @returns Promise that successfully resolves if the URL opens in the secondaryBrowser
+     * or throws an error {@link SdkError} incase of failure before starting navigation
+     *
+     * @remarks Any error that happens after navigation begins is handled by the platform browser component and not returned from this function.
+     * @beta
+     */
+    function open(url) {
+        ensureInitialized(runtime, FrameContexts.content);
+        if (!isSupported()) {
+            throw errorNotSupportedOnPlatform;
+        }
+        if (!url || !isValidHttpsURL(url)) {
+            throw { errorCode: ErrorCode.INVALID_ARGUMENTS, message: 'Invalid Url: Only https URL is allowed' };
+        }
+        return sendAndHandleSdkError('secondaryBrowser.open', url.toString());
+    }
+    secondaryBrowser.open = open;
+    /**
+     * Checks if secondaryBrowser capability is supported by the host
+     * @returns boolean to represent whether secondaryBrowser is supported
+     *
+     * @throws Error if {@linkcode app.initialize} has not successfully completed
+     *
+     * @beta
+     */
+    function isSupported() {
+        return ensureInitialized(runtime) && runtime.supports.secondaryBrowser ? true : false;
+    }
+    secondaryBrowser.isSupported = isSupported;
+})(secondaryBrowser || (secondaryBrowser = {}));
+
 ;// CONCATENATED MODULE: ./src/public/location.ts
 
 
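
The hunk above introduces the beta `secondaryBrowser` capability: `isSupported()` gates on the runtime, and `open(url)` accepts only https URLs, resolving once the host's in-app browser starts navigating. A hedged usage sketch from the app side (the page URL and logging are illustrative):

```typescript
import { app, secondaryBrowser } from '@microsoft/teams-js';

async function openHelpPage(): Promise<void> {
  await app.initialize();

  // Beta, host-dependent capability: always gate on isSupported().
  if (!secondaryBrowser.isSupported()) {
    console.log('secondaryBrowser is not supported on this host');
    return;
  }

  try {
    // Only https URLs are accepted; anything else throws INVALID_ARGUMENTS.
    await secondaryBrowser.open(new URL('https://example.com/help'));
  } catch (error) {
    // Failures after navigation starts are handled by the host, not surfaced here.
    console.error('secondaryBrowser.open failed', error);
  }
}
```
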
@@ -7806,7 +7843,234 @@ var profile;
     profile.isSupported = isSupported;
 })(profile || (profile = {}));
 
+;// CONCATENATED MODULE: ./src/internal/videoUtils.ts
+var videoUtils_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var videoUtils_generator = (undefined && undefined.__generator) || function (thisArg, body) {
+    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+    function verb(n) { return function (v) { return step([n, v]); }; }
+    function step(op) {
+        if (f) throw new TypeError("Generator is already executing.");
+        while (g && (g = 0, op[0] && (_ = 0)), _) try {
+            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+            if (y = 0, t) op = [op[0] & 2, t.value];
+            switch (op[0]) {
+                case 0: case 1: t = op; break;
+                case 4: _.label++; return { value: op[1], done: false };
+                case 5: _.label++; y = op[1]; op = [0]; continue;
+                case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                default:
+                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                    if (t[2]) _.ops.pop();
+                    _.trys.pop(); continue;
+            }
+            op = body.call(thisArg, _);
+        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+    }
+};
+
+
+
+
+/**
+ * @hidden
+ * Create a MediaStreamTrack from the media stream with the given streamId and processed by videoFrameHandler.
+ */
+function processMediaStream(streamId, videoFrameHandler, notifyError) {
+    return videoUtils_awaiter(this, void 0, void 0, function () {
+        var _a;
+        return videoUtils_generator(this, function (_b) {
+            switch (_b.label) {
+                case 0:
+                    _a = createProcessedStreamGenerator;
+                    return [4 /*yield*/, getInputVideoTrack(streamId, notifyError)];
+                case 1: return [2 /*return*/, _a.apply(void 0, [_b.sent(), videoFrameHandler,
+                        notifyError])];
+            }
+        });
+    });
+}
+/**
+ * Get the video track from the media stream gotten from chrome.webview.getTextureStream(streamId).
+ */
+function getInputVideoTrack(streamId, notifyError) {
+    return videoUtils_awaiter(this, void 0, void 0, function () {
+        var chrome, mediaStream, tracks, error_1, errorMsg;
+        return videoUtils_generator(this, function (_a) {
+            switch (_a.label) {
+                case 0:
+                    if (inServerSideRenderingEnvironment()) {
+                        throw errorNotSupportedOnPlatform;
+                    }
+                    chrome = window['chrome'];
+                    _a.label = 1;
+                case 1:
+                    _a.trys.push([1, 3, , 4]);
+                    return [4 /*yield*/, chrome.webview.getTextureStream(streamId)];
+                case 2:
+                    mediaStream = _a.sent();
+                    tracks = mediaStream.getVideoTracks();
+                    if (tracks.length === 0) {
+                        throw new Error("No video track in stream ".concat(streamId));
+                    }
+                    return [2 /*return*/, tracks[0]];
+                case 3:
+                    error_1 = _a.sent();
+                    errorMsg = "Failed to get video track from stream ".concat(streamId, ", error: ").concat(error_1);
+                    notifyError(errorMsg);
+                    throw new Error("Internal error: can't get video track from stream ".concat(streamId));
+                case 4: return [2 /*return*/];
+            }
+        });
+    });
+}
+/**
+ * The function to create a processed video track from the original video track.
+ * It reads frames from the video track and pipes them to the video frame callback to process the frames.
+ * The processed frames are then enqueued to the generator.
+ * The generator can be registered back to the media stream so that the host can get the processed frames.
+ */
+function createProcessedStreamGenerator(videoTrack, videoFrameHandler, notifyError) {
+    if (inServerSideRenderingEnvironment()) {
+        throw errorNotSupportedOnPlatform;
+    }
+    var MediaStreamTrackProcessor = window['MediaStreamTrackProcessor'];
+    var processor = new MediaStreamTrackProcessor({ track: videoTrack });
+    var source = processor.readable;
+    var MediaStreamTrackGenerator = window['MediaStreamTrackGenerator'];
+    var generator = new MediaStreamTrackGenerator({ kind: 'video' });
+    var sink = generator.writable;
+    source
+        .pipeThrough(new TransformStream({
+        transform: function (originalFrame, controller) {
+            return videoUtils_awaiter(this, void 0, void 0, function () {
+                var timestamp, frameProcessedByApp, processedFrame, error_2;
+                return videoUtils_generator(this, function (_a) {
+                    switch (_a.label) {
+                        case 0:
+                            timestamp = originalFrame.timestamp;
+                            if (!(timestamp !== null)) return [3 /*break*/, 5];
+                            _a.label = 1;
+                        case 1:
+                            _a.trys.push([1, 3, , 4]);
+                            return [4 /*yield*/, videoFrameHandler({ videoFrame: originalFrame })];
+                        case 2:
+                            frameProcessedByApp = _a.sent();
+                            processedFrame = new VideoFrame(frameProcessedByApp, {
+                                // we need the timestamp to be unchanged from the oirginal frame, so we explicitly set it here.
+                                timestamp: timestamp,
+                            });
+                            controller.enqueue(processedFrame);
+                            originalFrame.close();
+                            frameProcessedByApp.close();
+                            return [3 /*break*/, 4];
+                        case 3:
+                            error_2 = _a.sent();
+                            originalFrame.close();
+                            notifyError(error_2);
+                            return [3 /*break*/, 4];
+                        case 4: return [3 /*break*/, 6];
+                        case 5:
+                            notifyError('timestamp of the original video frame is null');
+                            _a.label = 6;
+                        case 6: return [2 /*return*/];
+                    }
+                });
+            });
+        },
+    }))
+        .pipeTo(sink);
+    return generator;
+}
+/**
+ * @hidden
+ */
+function createEffectParameterChangeCallback(callback) {
+    return function (effectId, effectParam) {
+        callback(effectId, effectParam)
+            .then(function () {
+            sendMessageToParent('video.videoEffectReadiness', [true, effectId]);
+        })
+            .catch(function (reason) {
+            var validReason = reason in video.EffectFailureReason ? reason : video.EffectFailureReason.InitializationFailure;
+            sendMessageToParent('video.videoEffectReadiness', [false, effectId, validReason]);
+        });
+    };
+}
+
 ;// CONCATENATED MODULE: ./src/public/video.ts
+var video_assign = (undefined && undefined.__assign) || function () {
+    video_assign = Object.assign || function(t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+                t[p] = s[p];
+        }
+        return t;
+    };
+    return video_assign.apply(this, arguments);
+};
+var video_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var video_generator = (undefined && undefined.__generator) || function (thisArg, body) {
+    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+    function verb(n) { return function (v) { return step([n, v]); }; }
+    function step(op) {
+        if (f) throw new TypeError("Generator is already executing.");
+        while (g && (g = 0, op[0] && (_ = 0)), _) try {
+            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+            if (y = 0, t) op = [op[0] & 2, t.value];
+            switch (op[0]) {
+                case 0: case 1: t = op; break;
+                case 4: _.label++; return { value: op[1], done: false };
+                case 5: _.label++; y = op[1]; op = [0]; continue;
+                case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                default:
+                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                    if (t[2]) _.ops.pop();
+                    _.trys.pop(); continue;
+            }
+            op = body.call(thisArg, _);
+        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+    }
+};
+var __rest = (undefined && undefined.__rest) || function (s, e) {
+    var t = {};
+    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+        t[p] = s[p];
+    if (s != null && typeof Object.getOwnPropertySymbols === "function")
+        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
+            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
+                t[p[i]] = s[p[i]];
+        }
+    return t;
+};
+
+
 
 
 
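
The new `videoUtils` module above implements frame processing with the insertable-streams API: a `MediaStreamTrackProcessor` reads `VideoFrame`s from the host's texture stream, a `TransformStream` passes each frame to the app's `videoFrameHandler`, and a `MediaStreamTrackGenerator` carries the processed frames back to the host. A stripped-down sketch of that pipeline, assuming a Chromium host where these constructors exist (they are not yet in the standard TypeScript DOM lib, hence the `any` casts):

```typescript
// Sketch of the pipeline built by createProcessedStreamGenerator, under the assumptions above.
type VideoFrameHandler = (data: { videoFrame: VideoFrame }) => Promise<VideoFrame>;

function buildProcessedTrack(sourceTrack: MediaStreamTrack, handler: VideoFrameHandler): MediaStreamTrack {
  // MediaStreamTrackProcessor/Generator are Chromium-only globals.
  const processor = new (window as any).MediaStreamTrackProcessor({ track: sourceTrack });
  const generator = new (window as any).MediaStreamTrackGenerator({ kind: 'video' });

  processor.readable
    .pipeThrough(
      new TransformStream<VideoFrame, VideoFrame>({
        async transform(originalFrame, controller) {
          const processed = await handler({ videoFrame: originalFrame });
          // Preserve the original timestamp so the host can correlate frames.
          controller.enqueue(new VideoFrame(processed, { timestamp: originalFrame.timestamp ?? 0 }));
          originalFrame.close();
          processed.close();
        },
      }),
    )
    .pipeTo(generator.writable);

  // The generator is itself a MediaStreamTrack the host can consume.
  return generator;
}
```
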
@@ -7825,7 +8089,7 @@ var video;
     var VideoFrameFormat;
     (function (VideoFrameFormat) {
         /** Video format used for encoding and decoding YUV color data in video streaming and storage applications. */
-        VideoFrameFormat[
+        VideoFrameFormat["NV12"] = "NV12";
     })(VideoFrameFormat = video.VideoFrameFormat || (video.VideoFrameFormat = {}));
     /**
      * Video effect change type enum
@@ -7836,11 +8100,11 @@ var video;
         /**
         * Current video effect changed
        */
-        EffectChangeType[
+        EffectChangeType["EffectChanged"] = "EffectChanged";
         /**
        * Disable the video effect
        */
-        EffectChangeType[
+        EffectChangeType["EffectDisabled"] = "EffectDisabled";
     })(EffectChangeType = video.EffectChangeType || (video.EffectChangeType = {}));
     /**
      * Predefined failure reasons for preparing the selected video effect
@@ -7859,25 +8123,60 @@ var video;
         EffectFailureReason["InitializationFailure"] = "InitializationFailure";
     })(EffectFailureReason = video.EffectFailureReason || (video.EffectFailureReason = {}));
     /**
-     * Register to
+     * Register callbacks to process the video frames if the host supports it.
      * @beta
-     * @param
-     *
+     * @param parameters - Callbacks and configuration to process the video frames. A host may support either {@link VideoFrameHandler} or {@link VideoBufferHandler}, but not both.
+     * To ensure the video effect works on all supported hosts, the video app must provide both {@link VideoFrameHandler} and {@link VideoBufferHandler}.
+     * The host will choose the appropriate callback based on the host's capability.
+     *
+     * @example
+     * ```typescript
+     * video.registerForVideoFrame({
+     *   videoFrameHandler: async (videoFrameData) => {
+     *     const originalFrame = videoFrameData.videoFrame as VideoFrame;
+     *     try {
+     *       const processedFrame = await processFrame(originalFrame);
+     *       return processedFrame;
+     *     } catch (e) {
+     *       throw e;
+     *     }
+     *   },
+     *   videoBufferHandler: (
+     *     bufferData: VideoBufferData,
+     *     notifyVideoFrameProcessed: notifyVideoFrameProcessedFunctionType,
+     *     notifyError: notifyErrorFunctionType
+     *   ) => {
+     *     try {
+     *       processFrameInplace(bufferData);
+     *       notifyVideoFrameProcessed();
+     *     } catch (e) {
+     *       notifyError(e);
+     *     }
+     *   },
+     *   config: {
+     *     format: video.VideoPixelFormat.NV12,
+     *   }
+     * });
+     * ```
      */
-    function registerForVideoFrame(
+    function registerForVideoFrame(parameters) {
         ensureInitialized(runtime, FrameContexts.sidePanel);
         if (!isSupported()) {
             throw errorNotSupportedOnPlatform;
         }
-
-
-
-
-
-
-
-
-
+        if (!parameters.videoFrameHandler || !parameters.videoBufferHandler) {
+            throw new Error('Both videoFrameHandler and videoBufferHandler must be provided');
+        }
+        if (doesSupportMediaStream()) {
+            registerForMediaStream(parameters.videoFrameHandler, parameters.config);
+        }
+        else if (doesSupportSharedFrame()) {
+            registerForVideoBuffer(parameters.videoBufferHandler, parameters.config);
+        }
+        else {
+            // should not happen if isSupported() is true
+            throw errorNotSupportedOnPlatform;
+        }
     }
     video.registerForVideoFrame = registerForVideoFrame;
     /**
@@ -7906,17 +8205,7 @@ var video;
         if (!isSupported()) {
             throw errorNotSupportedOnPlatform;
         }
-
-            callback(effectId)
-                .then(function () {
-                sendMessageToParent('video.videoEffectReadiness', [true, effectId]);
-            })
-                .catch(function (reason) {
-                var validReason = reason in EffectFailureReason ? reason : EffectFailureReason.InitializationFailure;
-                sendMessageToParent('video.videoEffectReadiness', [false, effectId, validReason]);
-            });
-        };
-        registerHandler('video.effectParameterChange', effectParameterChangeHandler, false);
+        registerHandler('video.effectParameterChange', createEffectParameterChangeCallback(callback), false);
         sendMessageToParent('video.registerForVideoEffect');
     }
     video.registerForVideoEffect = registerForVideoEffect;
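
Both this hunk and the matching `videoEx` hunk near the end of the file now funnel effect changes through the shared `createEffectParameterChangeCallback` helper, which reports `video.videoEffectReadiness` to the host depending on whether the app's callback resolves or rejects. A hedged sketch of an app-side callback (`loadEffectAssets` is an invented placeholder; assumes `app.initialize()` has completed in a sidePanel frame):

```typescript
import { video } from '@microsoft/teams-js';

// Invented placeholder: fetch/compile whatever the selected effect needs.
declare function loadEffectAssets(effectId: string): Promise<void>;

video.registerForVideoEffect(async (effectId) => {
  if (effectId === undefined) {
    return; // effect cleared; resolving reports readiness to the host
  }
  try {
    await loadEffectAssets(effectId);
    // Resolving sends 'video.videoEffectReadiness' with [true, effectId] to the host.
  } catch {
    // Rejection reasons outside EffectFailureReason are mapped to InitializationFailure.
    throw video.EffectFailureReason.InitializationFailure;
  }
});
```
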
@@ -7937,7 +8226,7 @@ var video;
         sendMessageToParent('video.notifyError', [errorMessage]);
     }
     /**
-     * Checks if video capability is supported by the host
+     * Checks if video capability is supported by the host.
      * @beta
      * @returns boolean to represent whether the video capability is supported
      *
@@ -7945,9 +8234,78 @@ var video;
      *
      */
    function isSupported() {
-        return ensureInitialized(runtime) &&
+        return (ensureInitialized(runtime) &&
+            !!runtime.supports.video &&
+            /** A host should support either mediaStream or sharedFrame sub-capability to support the video capability */
+            (!!runtime.supports.video.mediaStream || !!runtime.supports.video.sharedFrame));
     }
     video.isSupported = isSupported;
+    function registerForMediaStream(videoFrameHandler, config) {
+        var _this = this;
+        ensureInitialized(runtime, FrameContexts.sidePanel);
+        if (!isSupported() || !doesSupportMediaStream()) {
+            throw errorNotSupportedOnPlatform;
+        }
+        registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return video_awaiter(_this, void 0, void 0, function () {
+            var streamId, generator;
+            var _a, _b;
+            return video_generator(this, function (_c) {
+                switch (_c.label) {
+                    case 0:
+                        streamId = mediaStreamInfo.streamId;
+                        return [4 /*yield*/, processMediaStream(streamId, videoFrameHandler, notifyError)];
+                    case 1:
+                        generator = _c.sent();
+                        // register the video track with processed frames back to the stream:
+                        !inServerSideRenderingEnvironment() && ((_b = (_a = window['chrome']) === null || _a === void 0 ? void 0 : _a.webview) === null || _b === void 0 ? void 0 : _b.registerTextureStream(streamId, generator));
+                        return [2 /*return*/];
+                }
+            });
+        }); }, false);
+        sendMessageToParent('video.mediaStream.registerForVideoFrame', [config]);
+    }
+    function registerForVideoBuffer(videoBufferHandler, config) {
+        ensureInitialized(runtime, FrameContexts.sidePanel);
+        if (!isSupported() || !doesSupportSharedFrame()) {
+            throw errorNotSupportedOnPlatform;
+        }
+        registerHandler('video.newVideoFrame', 
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        function (videoBufferData) {
+            if (videoBufferData) {
+                var timestamp_1 = videoBufferData.timestamp;
+                videoBufferHandler(normalizeVideoBufferData(videoBufferData), function () {
+                    notifyVideoFrameProcessed(timestamp_1);
+                }, notifyError);
+            }
+        }, false);
+        sendMessageToParent('video.registerForVideoFrame', [config]);
+    }
+    function normalizeVideoBufferData(videoBufferData) {
+        if ('videoFrameBuffer' in videoBufferData) {
+            return videoBufferData;
+        }
+        else {
+            // The host may pass the VideoFrame with the old definition which has `data` instead of `videoFrameBuffer`
+            var data = videoBufferData.data, newVideoBufferData = __rest(videoBufferData, ["data"]);
+            return video_assign(video_assign({}, newVideoBufferData), { videoFrameBuffer: data });
+        }
+    }
+    function doesSupportMediaStream() {
+        var _a;
+        return (ensureInitialized(runtime, FrameContexts.sidePanel) &&
+            isTextureStreamAvailable() &&
+            !!((_a = runtime.supports.video) === null || _a === void 0 ? void 0 : _a.mediaStream));
+    }
+    function isTextureStreamAvailable() {
+        var _a, _b, _c, _d;
+        return (!inServerSideRenderingEnvironment() &&
+            !!(((_b = (_a = window['chrome']) === null || _a === void 0 ? void 0 : _a.webview) === null || _b === void 0 ? void 0 : _b.getTextureStream) && ((_d = (_c = window['chrome']) === null || _c === void 0 ? void 0 : _c.webview) === null || _d === void 0 ? void 0 : _d.registerTextureStream)));
+    }
+    function doesSupportSharedFrame() {
+        var _a;
+        return ensureInitialized(runtime, FrameContexts.sidePanel) && !!((_a = runtime.supports.video) === null || _a === void 0 ? void 0 : _a.sharedFrame);
+    }
 })(video || (video = {})); //end of video namespace
 
 ;// CONCATENATED MODULE: ./src/public/search.ts
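
With the routing above, a video app supplies both handlers and the SDK picks the media-stream or shared-frame path based on host support. A sketch of the app-side registration under those assumptions (`applyEffect` and `applyEffectInPlace` are invented placeholders; property names follow the compiled code and the JSDoc example earlier in this diff):

```typescript
import { app, video } from '@microsoft/teams-js';

// Invented placeholders for the app's own frame processing.
declare function applyEffect(frame: VideoFrame): Promise<VideoFrame>;
declare function applyEffectInPlace(buffer: Uint8ClampedArray): void;

async function setUpVideoProcessing(): Promise<void> {
  await app.initialize();
  if (!video.isSupported()) {
    return; // the host exposes neither the mediaStream nor the sharedFrame path
  }

  video.registerForVideoFrame({
    // Chosen by the SDK on hosts that stream VideoFrames (mediaStream path).
    videoFrameHandler: async (data) => applyEffect(data.videoFrame as VideoFrame),
    // Chosen on hosts that share raw buffers (sharedFrame path).
    videoBufferHandler: (bufferData, notifyVideoFrameProcessed, notifyError) => {
      try {
        applyEffectInPlace(bufferData.videoFrameBuffer);
        notifyVideoFrameProcessed();
      } catch (e) {
        notifyError(`${e}`);
      }
    },
    config: { format: video.VideoFrameFormat.NV12 },
  });
}
```
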
@@ -8832,7 +9190,7 @@ var settings;
 
 ;// CONCATENATED MODULE: ./src/public/tasks.ts
 /* eslint-disable @typescript-eslint/ban-types */
-var
+var tasks_rest = (undefined && undefined.__rest) || function (s, e) {
     var t = {};
     for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
         t[p] = s[p];
@@ -8907,7 +9265,7 @@ var tasks;
     function updateTask(taskInfo) {
         taskInfo = getDefaultSizeIfNotProvided(taskInfo);
         // eslint-disable-next-line @typescript-eslint/no-unused-vars
-        var width = taskInfo.width, height = taskInfo.height, extra =
+        var width = taskInfo.width, height = taskInfo.height, extra = tasks_rest(taskInfo, ["width", "height"]);
         if (Object.keys(extra).length) {
             throw new Error('resize requires a TaskInfo argument containing only width and height');
         }
@@ -9219,6 +9577,7 @@ function ensureSupported() {
 
 
 
+
 
 
 
@@ -10460,6 +10819,7 @@ var teams;
 
 
 
+
 /**
  * @hidden
  * Extended video API
@@ -10494,20 +10854,23 @@ var videoEx;
      * @internal
      * Limited to Microsoft-internal use
      */
-    function registerForVideoFrame(
+    function registerForVideoFrame(parameters) {
         ensureInitialized(runtime, FrameContexts.sidePanel);
         if (!isSupported()) {
             throw errorNotSupportedOnPlatform;
         }
-
-
-
-
+        if (!parameters.videoBufferHandler) {
+            throw new Error('parameters.videoBufferHandler must be provided');
+        }
+        registerHandler('video.newVideoFrame', function (videoBufferData) {
+            if (videoBufferData) {
+                var timestamp_1 = videoBufferData.timestamp;
+                parameters.videoBufferHandler(videoBufferData, function () {
                     notifyVideoFrameProcessed(timestamp_1);
                 }, notifyError);
             }
         }, false);
-        sendMessageToParent('video.registerForVideoFrame', [config]);
+        sendMessageToParent('video.registerForVideoFrame', [parameters.config]);
     }
     videoEx.registerForVideoFrame = registerForVideoFrame;
     /**
@@ -10545,7 +10908,7 @@ var videoEx;
         if (!isSupported()) {
             throw errorNotSupportedOnPlatform;
         }
-        registerHandler('video.effectParameterChange', callback, false);
+        registerHandler('video.effectParameterChange', createEffectParameterChangeCallback(callback), false);
         sendMessageToParent('video.registerForVideoEffect');
     }
     videoEx.registerForVideoEffect = registerForVideoEffect;