@microsoft/teams-js 2.16.0-beta.0 → 2.16.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/MicrosoftTeams.d.ts +286 -76
- package/dist/MicrosoftTeams.js +800 -526
- package/dist/MicrosoftTeams.js.map +1 -1
- package/dist/MicrosoftTeams.min.js +1 -1
- package/dist/MicrosoftTeams.min.js.map +1 -1
- package/package.json +1 -1
package/dist/MicrosoftTeams.js
CHANGED
@@ -802,96 +802,96 @@ __webpack_require__.r(__webpack_exports__);
 
 // EXPORTS
 __webpack_require__.d(__webpack_exports__, {
-  … (the 90 replaced export-map lines from 2.16.0-beta.0 were not captured in this view)
+  ActionObjectType: () => (/* reexport */ ActionObjectType),
+  ChannelType: () => (/* reexport */ ChannelType),
+  ChildAppWindow: () => (/* reexport */ ChildAppWindow),
+  DialogDimension: () => (/* reexport */ DialogDimension),
+  ErrorCode: () => (/* reexport */ ErrorCode),
+  FileOpenPreference: () => (/* reexport */ FileOpenPreference),
+  FrameContexts: () => (/* reexport */ FrameContexts),
+  HostClientType: () => (/* reexport */ HostClientType),
+  HostName: () => (/* reexport */ HostName),
+  LiveShareHost: () => (/* reexport */ LiveShareHost),
+  NotificationTypes: () => (/* reexport */ NotificationTypes),
+  ParentAppWindow: () => (/* reexport */ ParentAppWindow),
+  SecondaryM365ContentIdName: () => (/* reexport */ SecondaryM365ContentIdName),
+  TaskModuleDimension: () => (/* reexport */ TaskModuleDimension),
+  TeamType: () => (/* reexport */ TeamType),
+  UserSettingTypes: () => (/* reexport */ UserSettingTypes),
+  UserTeamRole: () => (/* reexport */ UserTeamRole),
+  ViewerActionTypes: () => (/* reexport */ ViewerActionTypes),
+  app: () => (/* reexport */ app),
+  appEntity: () => (/* reexport */ appEntity),
+  appInitialization: () => (/* reexport */ appInitialization),
+  appInstallDialog: () => (/* reexport */ appInstallDialog),
+  authentication: () => (/* reexport */ authentication),
+  barCode: () => (/* reexport */ barCode),
+  calendar: () => (/* reexport */ calendar),
+  call: () => (/* reexport */ call),
+  chat: () => (/* reexport */ chat),
+  clipboard: () => (/* reexport */ clipboard),
+  conversations: () => (/* reexport */ conversations),
+  dialog: () => (/* reexport */ dialog),
+  enablePrintCapability: () => (/* reexport */ enablePrintCapability),
+  executeDeepLink: () => (/* reexport */ executeDeepLink),
+  files: () => (/* reexport */ files),
+  geoLocation: () => (/* reexport */ geoLocation),
+  getAdaptiveCardSchemaVersion: () => (/* reexport */ getAdaptiveCardSchemaVersion),
+  getContext: () => (/* reexport */ getContext),
+  getMruTabInstances: () => (/* reexport */ getMruTabInstances),
+  getTabInstances: () => (/* reexport */ getTabInstances),
+  initialize: () => (/* reexport */ initialize),
+  initializeWithFrameContext: () => (/* reexport */ initializeWithFrameContext),
+  liveShare: () => (/* reexport */ liveShare),
+  location: () => (/* reexport */ location_location),
+  logs: () => (/* reexport */ logs),
+  mail: () => (/* reexport */ mail),
+  marketplace: () => (/* reexport */ marketplace),
+  media: () => (/* reexport */ media),
+  meeting: () => (/* reexport */ meeting),
+  meetingRoom: () => (/* reexport */ meetingRoom),
+  menus: () => (/* reexport */ menus),
+  monetization: () => (/* reexport */ monetization),
+  navigateBack: () => (/* reexport */ navigateBack),
+  navigateCrossDomain: () => (/* reexport */ navigateCrossDomain),
+  navigateToTab: () => (/* reexport */ navigateToTab),
+  notifications: () => (/* reexport */ notifications),
+  openFilePreview: () => (/* reexport */ openFilePreview),
+  pages: () => (/* reexport */ pages),
+  people: () => (/* reexport */ people),
+  print: () => (/* reexport */ print),
+  profile: () => (/* reexport */ profile),
+  registerAppButtonClickHandler: () => (/* reexport */ registerAppButtonClickHandler),
+  registerAppButtonHoverEnterHandler: () => (/* reexport */ registerAppButtonHoverEnterHandler),
+  registerAppButtonHoverLeaveHandler: () => (/* reexport */ registerAppButtonHoverLeaveHandler),
+  registerBackButtonHandler: () => (/* reexport */ registerBackButtonHandler),
+  registerBeforeUnloadHandler: () => (/* reexport */ registerBeforeUnloadHandler),
+  registerChangeSettingsHandler: () => (/* reexport */ registerChangeSettingsHandler),
+  registerCustomHandler: () => (/* reexport */ registerCustomHandler),
+  registerFocusEnterHandler: () => (/* reexport */ registerFocusEnterHandler),
+  registerFullScreenHandler: () => (/* reexport */ registerFullScreenHandler),
+  registerOnLoadHandler: () => (/* reexport */ registerOnLoadHandler),
+  registerOnThemeChangeHandler: () => (/* reexport */ registerOnThemeChangeHandler),
+  registerUserSettingsChangeHandler: () => (/* reexport */ registerUserSettingsChangeHandler),
+  remoteCamera: () => (/* reexport */ remoteCamera),
+  returnFocus: () => (/* reexport */ returnFocus),
+  search: () => (/* reexport */ search),
+  secondaryBrowser: () => (/* reexport */ secondaryBrowser),
+  sendCustomEvent: () => (/* reexport */ sendCustomEvent),
+  sendCustomMessage: () => (/* reexport */ sendCustomMessage),
+  setFrameContext: () => (/* reexport */ setFrameContext),
+  settings: () => (/* reexport */ settings),
+  shareDeepLink: () => (/* reexport */ shareDeepLink),
+  sharing: () => (/* reexport */ sharing),
+  stageView: () => (/* reexport */ stageView),
+  tasks: () => (/* reexport */ tasks),
+  teams: () => (/* reexport */ teams),
+  teamsCore: () => (/* reexport */ teamsCore),
+  uploadCustomApp: () => (/* reexport */ uploadCustomApp),
+  version: () => (/* reexport */ version),
+  videoEffects: () => (/* reexport */ videoEffects),
+  videoEffectsEx: () => (/* reexport */ videoEffectsEx),
+  webStorage: () => (/* reexport */ webStorage)
 });
 
 ;// CONCATENATED MODULE: ./src/internal/constants.ts
@@ -1028,6 +1028,8 @@ var validOrigins = [
     'www.officeppe.com',
     '*.www.microsoft365.com',
     'www.microsoft365.com',
+    'bing.com',
+    'edgeservices.bing.com',
 ];
 /**
  * @hidden
@@ -1971,11 +1973,16 @@ function isRuntimeInitialized(runtime) {
     }
 }
 var runtime = _uninitializedRuntime;
-var
+var versionAndPlatformAgnosticTeamsRuntimeConfig = {
     apiVersion: 3,
     hostVersionsInfo: teamsMinAdaptiveCardVersion,
     isLegacyTeams: true,
     supports: {
+        app: {
+            lifecycle: {
+                caching: {},
+            },
+        },
         appInstallDialog: {},
         appEntity: {},
         call: {},
@@ -2088,7 +2095,7 @@ var upgradeChain = [
         },
     },
 ];
-var
+var mapTeamsVersionToSupportedCapabilities = {
     '1.9.0': [
         {
            capability: { location: {} },
|
|
2136
2143
|
* Limited to Microsoft-internal use
|
2137
2144
|
*
|
2138
2145
|
* Generates and returns a runtime configuration for host clients which are not on the latest host SDK version
|
2139
|
-
* and do not provide their own runtime config
|
2140
|
-
* client SDK version that they can support.
|
2146
|
+
* and do not provide their own runtime config (this is just older versions of Teams on some platforms).
|
2147
|
+
* Their supported capabilities are based on the highest client SDK version that they can support.
|
2141
2148
|
*
|
2142
2149
|
* @param highestSupportedVersion - The highest client SDK version that the host client can support.
|
2143
2150
|
* @returns runtime which describes the APIs supported by the legacy host client.
|
2144
2151
|
*/
|
2145
|
-
function
|
2152
|
+
function generateVersionBasedTeamsRuntimeConfig(highestSupportedVersion) {
|
2146
2153
|
generateBackCompatRuntimeConfigLogger('generating back compat runtime config for %s', highestSupportedVersion);
|
2147
|
-
var newSupports = __assign({},
|
2154
|
+
var newSupports = __assign({}, versionAndPlatformAgnosticTeamsRuntimeConfig.supports);
|
2148
2155
|
generateBackCompatRuntimeConfigLogger('Supported capabilities in config before updating based on highestSupportedVersion: %o', newSupports);
|
2149
|
-
Object.keys(
|
2156
|
+
Object.keys(mapTeamsVersionToSupportedCapabilities).forEach(function (versionNumber) {
|
2150
2157
|
if (compareSDKVersions(highestSupportedVersion, versionNumber) >= 0) {
|
2151
|
-
|
2158
|
+
mapTeamsVersionToSupportedCapabilities[versionNumber].forEach(function (capabilityReqs) {
|
2152
2159
|
if (capabilityReqs.hostClientTypes.includes(GlobalVars.hostClientType)) {
|
2153
2160
|
newSupports = __assign(__assign({}, newSupports), capabilityReqs.capability);
|
2154
2161
|
}
|
2155
2162
|
});
|
2156
2163
|
}
|
2157
2164
|
});
|
2158
|
-
var
|
2165
|
+
var teamsBackCompatRuntimeConfig = {
|
2159
2166
|
apiVersion: latestRuntimeApiVersion,
|
2160
2167
|
hostVersionsInfo: teamsMinAdaptiveCardVersion,
|
2161
2168
|
isLegacyTeams: true,
|
2162
2169
|
supports: newSupports,
|
2163
2170
|
};
|
2164
|
-
generateBackCompatRuntimeConfigLogger('Runtime config after updating based on highestSupportedVersion: %o',
|
2165
|
-
return
|
2171
|
+
generateBackCompatRuntimeConfigLogger('Runtime config after updating based on highestSupportedVersion: %o', teamsBackCompatRuntimeConfig);
|
2172
|
+
return teamsBackCompatRuntimeConfig;
|
2166
2173
|
}
|
2167
2174
|
var applyRuntimeConfigLogger = runtimeLogger.extend('applyRuntimeConfig');
|
2168
2175
|
function applyRuntimeConfig(runtimeConfig) {
|
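For readers following the back-compat change above: the renamed generateVersionBasedTeamsRuntimeConfig folds every capability entry whose version is at or below the host's highest supported SDK version into the base supports object. The following is a self-contained TypeScript sketch of that merge idea only; the map contents are made up, and the hostClientTypes filter used by the real code is omitted.

```ts
// Illustrative sketch of the version-based capability merge; not the library's actual data.
type Capability = Record<string, object>;

const capabilityMap: Record<string, Capability> = {
  '1.9.0': { location: {} }, // hypothetical entries
  '2.0.0': { people: {} },
};

// Numeric, segment-by-segment version comparison (mirrors what compareSDKVersions does conceptually).
function compareVersions(a: string, b: string): number {
  const pa = a.split('.').map(Number);
  const pb = b.split('.').map(Number);
  for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
    const d = (pa[i] ?? 0) - (pb[i] ?? 0);
    if (d !== 0) return Math.sign(d);
  }
  return 0;
}

// Merge every capability whose version threshold the host already meets.
function mergeSupports(base: Capability, highestSupportedVersion: string): Capability {
  let supports = { ...base };
  for (const version of Object.keys(capabilityMap)) {
    if (compareVersions(highestSupportedVersion, version) >= 0) {
      supports = { ...supports, ...capabilityMap[version] };
    }
  }
  return supports;
}
```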
@@ -2207,7 +2214,7 @@ var _minRuntimeConfigToUninitialize = {
  * @hidden
  * Package version.
  */
-var version = "2.16.0-beta.0";
+var version = "2.16.0-beta.2";
 
 ;// CONCATENATED MODULE: ./src/internal/internalAPIs.ts
 
@@ -3536,7 +3543,7 @@ var app;
         }
         catch (e) {
             if (e instanceof SyntaxError) {
-                applyRuntimeConfig(
+                applyRuntimeConfig(generateVersionBasedTeamsRuntimeConfig(GlobalVars.clientSupportedSDKVersion));
             }
             else {
                 throw e;
@@ -3674,6 +3681,66 @@ var app;
         });
     }
     app.openLink = openLink;
+    /**
+     * A namespace for enabling the suspension or delayed termination of an app when the user navigates away.
+     * When an app registers for the registerBeforeSuspendOrTerminateHandler, it chooses to delay termination.
+     * When an app registers for both registerBeforeSuspendOrTerminateHandler and registerOnResumeHandler, it chooses the suspension of the app.
+     * Please note that selecting suspension doesn't guarantee prevention of background termination.
+     * The outcome is influenced by factors such as available memory and the number of suspended apps.
+     *
+     * @beta
+     */
+    var lifecycle;
+    (function (lifecycle) {
+        /**
+         * Registers a handler to be called before the page is suspended or terminated. Once a user navigates away from an app,
+         * the handler will be invoked. App developers can use this handler to save unsaved data, pause sync calls etc.
+         *
+         * @param handler - The handler to invoke before the page is suspended or terminated. When invoked, app can perform tasks like cleanups, logging etc.
+         * Upon returning, the app will be suspended or terminated.
+         *
+         */
+        function registerBeforeSuspendOrTerminateHandler(handler) {
+            if (!handler) {
+                throw new Error('[app.lifecycle.registerBeforeSuspendOrTerminateHandler] Handler cannot be null');
+            }
+            if (!isSupported()) {
+                throw errorNotSupportedOnPlatform;
+            }
+            handlers_registerBeforeSuspendOrTerminateHandler(handler);
+        }
+        lifecycle.registerBeforeSuspendOrTerminateHandler = registerBeforeSuspendOrTerminateHandler;
+        /**
+         * Registers a handler to be called when the page has been requested to resume from being suspended.
+         *
+         * @param handler - The handler to invoke when the page is requested to be resumed. The app is supposed to navigate to
+         * the appropriate page using the ResumeContext. Once done, the app should then call {@link notifySuccess}.
+         *
+         * @beta
+         */
+        function registerOnResumeHandler(handler) {
+            if (!handler) {
+                throw new Error('[app.lifecycle.registerOnResumeHandler] Handler cannot be null');
+            }
+            if (!isSupported()) {
+                throw errorNotSupportedOnPlatform;
+            }
+            handlers_registerOnResumeHandler(handler);
+        }
+        lifecycle.registerOnResumeHandler = registerOnResumeHandler;
+        /**
+         * Checks if app.lifecycle is supported by the host.
+         * @returns boolean to represent whether the lifecycle capability is supported
+         * @throws Error if {@linkcode app.initialize} has not successfully completed
+         *
+         * @beta
+         */
+        function isSupported() {
+            var _a;
+            return ensureInitialized(runtime) && !!((_a = runtime.supports.app) === null || _a === void 0 ? void 0 : _a.lifecycle);
+        }
+        lifecycle.isSupported = isSupported;
+    })(lifecycle = app.lifecycle || (app.lifecycle = {}));
 })(app || (app = {}));
 /**
  * @hidden
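As a usage illustration of the new app.lifecycle capability added above: the sketch below is inferred from the handler shapes visible in this diff (a no-argument suspend/terminate callback and a resume callback that receives the load/resume context) and is not code shipped in the package; the console.log calls stand in for real app logic.

```ts
import { app } from '@microsoft/teams-js';

export async function setUpLifecycleHandlers(): Promise<void> {
  await app.initialize();
  if (!app.lifecycle.isSupported()) {
    return; // host does not advertise app.lifecycle support
  }
  // Invoked before the host suspends or terminates the app; flush unsaved work here.
  app.lifecycle.registerBeforeSuspendOrTerminateHandler(() => {
    console.log('Saving state before suspend/terminate');
  });
  // Invoked when the host resumes a suspended app; restore UI, then signal readiness.
  app.lifecycle.registerOnResumeHandler((context) => {
    console.log('Resuming at', context.contentUrl); // context shape assumed from the load/resume context in this diff
    app.notifySuccess();
  });
}
```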
@@ -4545,6 +4612,8 @@ var HandlersPrivate = /** @class */ (function () {
         HandlersPrivate.handlers['themeChange'] = handleThemeChange;
         HandlersPrivate.handlers['load'] = handleLoad;
         HandlersPrivate.handlers['beforeUnload'] = handleBeforeUnload;
+        HandlersPrivate.handlers['beforeSuspendOrTerminate'] = handleBeforeSuspendOrTerminate;
+        HandlersPrivate.handlers['resume'] = handleResume;
         pages.backStack._initialize();
     };
     /**
@@ -4557,6 +4626,8 @@ var HandlersPrivate = /** @class */ (function () {
         HandlersPrivate.themeChangeHandler = null;
         HandlersPrivate.loadHandler = null;
         HandlersPrivate.beforeUnloadHandler = null;
+        HandlersPrivate.beforeSuspendOrTerminateHandler = null;
+        HandlersPrivate.resumeHandler = null;
     };
     HandlersPrivate.handlers = {};
     return HandlersPrivate;
@@ -4668,6 +4739,8 @@ function handleThemeChange(theme) {
 /**
  * @internal
  * Limited to Microsoft-internal use
+ *
+ * @deprecated
  */
 function handlers_registerOnLoadHandler(handler) {
     HandlersPrivate.loadHandler = handler;
@@ -4676,6 +4749,8 @@ function handlers_registerOnLoadHandler(handler) {
 /**
  * @internal
  * Limited to Microsoft-internal use
+ *
+ * @deprecated
  */
 function handleLoad(context) {
     if (HandlersPrivate.loadHandler) {
@@ -4688,6 +4763,8 @@ function handleLoad(context) {
 /**
  * @internal
  * Limited to Microsoft-internal use
+ *
+ * @deprecated
  */
 function handlers_registerBeforeUnloadHandler(handler) {
     HandlersPrivate.beforeUnloadHandler = handler;
@@ -4696,6 +4773,8 @@ function handlers_registerBeforeUnloadHandler(handler) {
 /**
  * @internal
  * Limited to Microsoft-internal use
+ *
+ * @deprecated
  */
 function handleBeforeUnload() {
     var readyToUnload = function () {
@@ -4710,6 +4789,52 @@ function handleBeforeUnload() {
         }
     }
 }
+/**
+ * @internal
+ * Limited to Microsoft-internal use
+ */
+function handlers_registerBeforeSuspendOrTerminateHandler(handler) {
+    HandlersPrivate.beforeSuspendOrTerminateHandler = handler;
+    handler && sendMessageToParent('registerHandler', ['beforeSuspendOrTerminate']);
+}
+/**
+ * @internal
+ * Limited to Microsoft-internal use
+ */
+function handleBeforeSuspendOrTerminate() {
+    var readyToSuspendOrTerminate = function () {
+        sendMessageToParent('readyToSuspendOrTerminate', []);
+    };
+    if (HandlersPrivate.beforeSuspendOrTerminateHandler) {
+        HandlersPrivate.beforeSuspendOrTerminateHandler();
+    }
+    if (Communication.childWindow) {
+        sendMessageEventToChild('beforeSuspendOrTerminate');
+    }
+    else {
+        readyToSuspendOrTerminate();
+    }
+}
+/**
+ * @internal
+ * Limited to Microsoft-internal use
+ */
+function handlers_registerOnResumeHandler(handler) {
+    HandlersPrivate.loadHandler = handler;
+    handler && sendMessageToParent('registerHandler', ['resume']);
+}
+/**
+ * @internal
+ * Limited to Microsoft-internal use
+ */
+function handleResume(context) {
+    if (HandlersPrivate.loadHandler) {
+        HandlersPrivate.loadHandler(context);
+    }
+    if (Communication.childWindow) {
+        sendMessageEventToChild('resume', [context]);
+    }
+}
 
 ;// CONCATENATED MODULE: ./src/internal/communication.ts
 /* eslint-disable @typescript-eslint/ban-types */
@@ -7186,6 +7311,8 @@ var meeting_generator = (undefined && undefined.__generator) || function (thisAr
  * Interact with meetings, including retrieving meeting details, getting mic status, and sharing app content.
  * This namespace is used to handle meeting related functionality like
  * get meeting details, get/update state of mic, sharing app content and more.
+ *
+ * To learn more, visit https://aka.ms/teamsmeetingapps
  */
 var meeting;
 (function (meeting) {
@@ -7218,38 +7345,101 @@ var meeting;
         MeetingReactionType["surprised"] = "surprised";
         MeetingReactionType["applause"] = "applause";
     })(MeetingReactionType = meeting.MeetingReactionType || (meeting.MeetingReactionType = {}));
-    /**
+    /**
+     * Represents the type of a meeting
+     *
+     * @hidden
+     * Hide from docs.
+     *
+     * @remarks
+     * Teams has several types of meetings to account for different user scenarios and requirements.
+     */
     var MeetingType;
     (function (MeetingType) {
-        /**
+        /**
+         * Used when the meeting type is not known.
+         *
+         * @remarks
+         * This response is not an expected case.
+         */
         MeetingType["Unknown"] = "Unknown";
-        /**
+        /**
+         * Used for group call meeting types.
+         *
+         * @remarks
+         * To test this meeting type in Teams, start a chat with two or more users and click the "Call" button.
+         * Note that a group call may return as this or {@link CallType.GroupCall}. These two different response types should be considered as equal.
+         */
        MeetingType["Adhoc"] = "Adhoc";
-        /**
+        /**
+         * Used for single-occurrence meetings that have been scheduled in advance.
+         *
+         * @remarks
+         * To create a meeting of this type in Teams, press the "New meeting" button from the calendar and enter a meeting title.
+         * Before saving, ensure that the "Online Meeting" field is checked.
+         */
        MeetingType["Scheduled"] = "Scheduled";
-        /**
+        /**
+         * Used for meetings that occur on a recurring basis.
+         *
+         * @remarks
+         * To create a meeting of this type in Teams, press the "New meeting" button from the calendar, enter a meeting title, and then change the field labeled "Does not repeat" to some other value.
+         * Before saving, ensure that the "Online Meeting" field is checked.
+         */
        MeetingType["Recurring"] = "Recurring";
-        /**
+        /**
+         * Used for webinars.
+         *
+         * @remarks
+         * Meeting apps are only supported for those in the "event group" of a webinar, which are those who'll be presenting and producing the webinar.
+         * To learn how to create a meeting of this type, visit https://aka.ms/teams/howto/webinars.
+         */
        MeetingType["Broadcast"] = "Broadcast";
-        /**
+        /**
+         * Used for meet now meetings, which are meetings users create on the fly.
+         *
+         * @remarks
+         * To create a meeting of this type, click the "Meet now" button from the calendar in Teams or the "Teams call" button in Outlook.
+         */
        MeetingType["MeetNow"] = "MeetNow";
     })(MeetingType = meeting.MeetingType || (meeting.MeetingType = {}));
-    /**
+    /**
+     * Represents the type of a call.
+     *
+     * @hidden
+     * Hide from docs.
+     */
     var CallType;
     (function (CallType) {
-        /**
+        /**
+         * Represents a call between two people.
+         *
+         * @remarks
+         * To test this feature, start a chat with one other user and click the "Call" button.
+         */
        CallType["OneOnOneCall"] = "oneOnOneCall";
-        /**
+        /**
+         * Represents a call between more than two people.
+         *
+         * @remarks
+         * To test this meeting type in Teams, start a chat with two or more users and click the "Call" button.
+         * Note that a group call may return as this or {@link MeetingType.Adhoc}. These two different response types should be considered as equal.
+         */
        CallType["GroupCall"] = "groupCall";
     })(CallType = meeting.CallType || (meeting.CallType = {}));
     /**
-     * Allows an app to get the incoming audio speaker setting for the meeting user
+     * Allows an app to get the incoming audio speaker setting for the meeting user.
+     * To learn more, visit https://aka.ms/teamsjs/getIncomingClientAudioState
      *
-     * @
+     * @remarks
+     * Use {@link toggleIncomingClientAudio} to toggle the current audio state.
+     * For private scheduled meetings, meet now, or calls, include the `OnlineMeetingParticipant.ToggleIncomingAudio.Chat` RSC permission in your app manifest.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
+     * This API can only be used in the `sidePanel` and `meetingStage` frame contexts.
      *
-     *
-     *
-     * result
+     * @param callback - Callback contains 2 parameters, `error` and `result`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when fetch is successful.
+     * `result` will be true when incoming audio is muted and false when incoming audio is unmuted, or null when the request fails.
      */
     function getIncomingClientAudioState(callback) {
         if (!callback) {
@@ -7260,12 +7450,18 @@ var meeting;
     }
     meeting.getIncomingClientAudioState = getIncomingClientAudioState;
     /**
-     * Allows an app to toggle the incoming audio speaker setting for the meeting user from mute to unmute or vice-versa
+     * Allows an app to toggle the incoming audio speaker setting for the meeting user from mute to unmute or vice-versa.
+     * To learn more, visit https://aka.ms/teamsjs/toggleIncomingClientAudio
      *
-     * @
-     *
-     *
-     *
+     * @remarks
+     * Use {@link getIncomingClientAudioState} to get the current audio state.
+     * For private scheduled meetings, meet now, or calls, include the `OnlineMeetingParticipant.ToggleIncomingAudio.Chat` RSC permission in your app manifest.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
+     * This API can only be used in the `sidePanel` and `meetingStage` frame contexts.
+     *
+     * @param callback - Callback contains 2 parameters, `error` and `result`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when toggle is successful.
+     * `result` will be true when incoming audio is muted and false when incoming audio is unmuted, or null when the toggling fails.
      */
     function toggleIncomingClientAudio(callback) {
         if (!callback) {
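A hedged usage sketch for the two incoming-audio APIs documented above (not code from the package); it assumes the app is running in a supported frame context (sidePanel or meetingStage) with the RSC permission named in the remarks.

```ts
import { meeting } from '@microsoft/teams-js';

// Read the current incoming-audio state, then flip it.
meeting.getIncomingClientAudioState((error, isMuted) => {
  if (error) {
    console.error('getIncomingClientAudioState failed', error);
    return;
  }
  console.log('Incoming audio is currently', isMuted ? 'muted' : 'unmuted');
  meeting.toggleIncomingClientAudio((toggleError, nowMuted) => {
    if (toggleError) {
      console.error('toggleIncomingClientAudio failed', toggleError);
      return;
    }
    console.log('Incoming audio is now', nowMuted ? 'muted' : 'unmuted');
  });
});
```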
@@ -7279,9 +7475,9 @@ var meeting;
     * @hidden
     * Allows an app to get the meeting details for the meeting
     *
-     * @param callback - Callback contains 2 parameters, error and meetingDetailsResponse
-     * error can either contain an error of type SdkError
-     * result can either contain a IMeetingDetailsResponse value, in case of a successful get or null when the get fails
+     * @param callback - Callback contains 2 parameters, `error` and `meetingDetailsResponse`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when get is successful
+     * `result` can either contain a {@link IMeetingDetailsResponse} value, in case of a successful get or null when the get fails
     *
     * @internal
     * Limited to Microsoft-internal use
@@ -7298,9 +7494,9 @@ var meeting;
     * @hidden
     * Allows an app to get the authentication token for the anonymous or guest user in the meeting
     *
-     * @param callback - Callback contains 2 parameters, error and authenticationTokenOfAnonymousUser
-     * error can either contain an error of type SdkError
-     * authenticationTokenOfAnonymousUser can either contain a string value,
+     * @param callback - Callback contains 2 parameters, `error` and `authenticationTokenOfAnonymousUser`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when get is successful
+     * `authenticationTokenOfAnonymousUser` can either contain a string value, in case of a successful get or null when the get fails
     *
     * @internal
     * Limited to Microsoft-internal use
@@ -7314,11 +7510,17 @@ var meeting;
     }
     meeting.getAuthenticationTokenForAnonymousUser = getAuthenticationTokenForAnonymousUser;
     /**
-     * Allows an app to get the state of the live stream in the current meeting
+     * Allows an app to get the state of the outgoing live stream in the current meeting.
+     *
+     * @remarks
+     * Use {@link requestStartLiveStreaming} or {@link requestStopLiveStreaming} to start/stop a live stream.
+     * This API can only be used in the `sidePanel` frame context.
+     * The `meetingExtensionDefinition.supportsStreaming` field in your app manifest must be `true` to use this API.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/meetingExtensionDefinition.
      *
-     * @param callback - Callback contains 2 parameters: error and liveStreamState
-     * error can either contain an error of type SdkError
-     * liveStreamState can either contain a LiveStreamState value, or null when operation fails
+     * @param callback - Callback contains 2 parameters: `error` and `liveStreamState`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when the request is successful
+     * `liveStreamState` can either contain a `LiveStreamState` value, or null when operation fails
      */
     function getLiveStreamState(callback) {
         if (!callback) {
@@ -7329,14 +7531,20 @@ var meeting;
     }
     meeting.getLiveStreamState = getLiveStreamState;
     /**
-     * Allows an app to
+     * Allows an app to ask the local user to begin live streaming the current meeting to the given Real-Time Messaging Protocol (RTMP) stream url.
+     * A confirmation dialog will be shown to the local user with options to "Allow" or "Cancel" this request.
      *
      * @remarks
-     *
+     * Meeting content (e.g., user video, screenshare, audio, etc.) can be externally streamed to any platform that supports the popular RTMP standard.
+     * Content broadcasted through RTMP is automatically formatted and cannot be customized.
+     * Use {@link getLiveStreamState} or {@link registerLiveStreamChangedHandler} to get updates on the live stream state.
+     * This API can only be used in the `sidePanel` frame context.
+     * The `meetingExtensionDefinition.supportsStreaming` field in your app manifest must be `true` to use this API.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/meetingExtensionDefinition.
      *
-     * @param
-     * @param
-     * @param
+     * @param callback - completion callback that contains an `error` parameter, which can be of type `SdkError` in case of an error, or null when operation is successful
+     * @param streamUrl - the url to the RTMP stream resource
+     * @param streamKey - the key to the RTMP stream resource
      */
     function requestStartLiveStreaming(callback, streamUrl, streamKey) {
         if (!callback) {
@@ -7347,12 +7555,15 @@ var meeting;
     }
     meeting.requestStartLiveStreaming = requestStartLiveStreaming;
     /**
-     * Allows an app to request
+     * Allows an app to request that live streaming be stopped.
      *
      * @remarks
-     * Use getLiveStreamState or registerLiveStreamChangedHandler to get updates on the live stream state
+     * Use {@link getLiveStreamState} or {@link registerLiveStreamChangedHandler} to get updates on the live stream state.
+     * This API can only be used in the `sidePanel` frame context.
+     * The `meetingExtensionDefinition.supportsStreaming` field in your app manifest must be `true` to use this API.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/meetingExtensionDefinition.
      *
-     * @param callback -
+     * @param callback - completion callback that contains an error parameter, which can be of type `SdkError` in case of an error, or null when operation is successful
      */
     function requestStopLiveStreaming(callback) {
         if (!callback) {
@@ -7363,10 +7574,14 @@ var meeting;
     }
     meeting.requestStopLiveStreaming = requestStopLiveStreaming;
     /**
-     * Registers
+     * Registers an event handler for state changes to the live stream.
      *
      * @remarks
      * Only one handler can be registered at a time. A subsequent registration replaces an existing registration.
+     * Use {@link requestStartLiveStreaming} or {@link requestStopLiveStreaming} to start/stop a live stream.
+     * This API can only be used in the `sidePanel` frame context.
+     * The `meetingExtensionDefinition.supportsStreaming` field in your app manifest must be `true` to use this API.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/meetingExtensionDefinition.
      *
      * @param handler - The handler to invoke when the live stream state changes
      */
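A hedged usage sketch for the live-streaming APIs above (not code from the package); the RTMP URL and stream key are placeholders, and the app manifest is assumed to set meetingExtensionDefinition.supportsStreaming to true.

```ts
import { meeting } from '@microsoft/teams-js';

// React to live stream state changes (only one handler is kept at a time).
meeting.registerLiveStreamChangedHandler((liveStreamState) => {
  console.log('isStreaming:', liveStreamState.isStreaming);
});

// Ask the local user to start streaming to an RTMP endpoint.
meeting.requestStartLiveStreaming(
  (error) => {
    if (error) {
      console.error('requestStartLiveStreaming failed', error);
    }
  },
  'rtmp://example.com/live', // hypothetical stream URL
  'example-stream-key',      // hypothetical stream key
);
```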
@@ -7379,12 +7594,22 @@ var meeting;
     }
     meeting.registerLiveStreamChangedHandler = registerLiveStreamChangedHandler;
     /**
-     * Allows an app to share
+     * Allows an app to share a given URL to the meeting stage for all users in the meeting.
+     * To learn more, visit https://aka.ms/teamsjs/shareAppContentToStage
      *
-     * @
-     *
-     *
-     *
+     * @remarks
+     * This API can only be used in the `sidePanel` and `meetingStage` frame contexts.
+     * For private scheduled meetings, meet now, or calls, include the `MeetingStage.Write.Chat` RSC permission in your app manifest.
+     * For channel meetings, include the `ChannelMeetingStage.Write.Group` RSC permission in your app manifest.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
+     * Use {@link getAppContentStageSharingCapabilities} to determine if the local user is eligible to use this API.
+     * Use {@link getAppContentStageSharingState} to determine whether app content is already being shared to the meeting stage.
+     *
+     * @param callback - Callback contains 2 parameters, `error` and `result`.
+     * `error` can either contain an error of type `SdkError`, in case of an error, or null when share is successful
+     * `result` can either contain a true value, in case of a successful share or null when the share fails
+     * @param appContentUrl - is the input URL to be shared to the meeting stage.
+     * the URL origin must be included in your app manifest's `validDomains` field.
      */
     function shareAppContentToStage(callback, appContentUrl) {
         if (!callback) {
@@ -7395,12 +7620,21 @@ var meeting;
     }
     meeting.shareAppContentToStage = shareAppContentToStage;
     /**
-     *
+     * Allows an app to request whether the local user's app version has the required app manifest permissions to share content to meeting stage.
+     * To learn more, visit https://aka.ms/teamsjs/getAppContentStageSharingCapabilities
      *
-     * @
-     *
-     *
-     *
+     * @remarks
+     * If you are updating your published app to include the share to stage feature, you can use this API to prompt users to update their app if they are using an older version.
+     * Your app's `configurableTabs` or `staticTabs` entry's `context` array must include `meetingStage` for `doesAppHaveSharePermission` to be `true` in the `callback` response.
+     *
+     * @throws error if API is being used outside of `sidePanel` or `meetingStage` frame contexts.
+     * @throws error if your app manifest does not include the `MeetingStage.Write.Chat` RSC permission in your app manifest in a private scheduled meeting, meet now, or call --
+     * or if it does not include the `ChannelMeetingStage.Write.Group` RSC permission in your app manifest in a channel meeting.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
+     *
+     * @param callback - Completion callback contains 2 parameters: `error` and `appContentStageSharingCapabilities`.
+     * `error` can either contain an error of type `SdkError` (error indication), or null (non-error indication).
+     * `appContentStageSharingCapabilities` will contain an {@link IAppContentStageSharingCapabilities} object if the request succeeds, or null if it failed.
      */
     function getAppContentStageSharingCapabilities(callback) {
         if (!callback) {
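A hedged usage sketch combining the share-to-stage APIs above (not code from the package); the content URL is a placeholder and must come from an origin listed in the app manifest's validDomains.

```ts
import { meeting } from '@microsoft/teams-js';

// Check eligibility first, then share app content to the meeting stage.
meeting.getAppContentStageSharingCapabilities((capError, capabilities) => {
  if (capError || !capabilities?.doesAppHaveSharePermission) {
    console.warn('Sharing to stage is not available', capError);
    return;
  }
  meeting.shareAppContentToStage((shareError, result) => {
    if (shareError) {
      console.error('shareAppContentToStage failed', shareError);
      return;
    }
    console.log('Share started:', result);
  }, 'https://contoso.example/meeting-stage-view'); // placeholder URL
});
```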
@@ -7428,12 +7662,18 @@ var meeting;
     }
     meeting.stopSharingAppContentToStage = stopSharingAppContentToStage;
     /**
-     * Provides information related to current stage sharing state for app
+     * Provides information related to current stage sharing state for your app.
+     * To learn more, visit https://aka.ms/teamsjs/getAppContentStageSharingState
      *
-     * @
+     * @remarks
+     * This API can only be used in the `sidePanel` and `meetingStage` frame contexts.
+     * For private scheduled meetings, meet now, or calls, include the `MeetingStage.Write.Chat` RSC permission in your app manifest.
+     * For channel meetings, include the `ChannelMeetingStage.Write.Group` RSC permission in your app manifest.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
+     *
+     * @param callback - Callback contains 2 parameters, `error` and `appContentStageSharingState`.
      * error can either contain an error of type SdkError (error indication), or null (non-error indication)
-     * appContentStageSharingState can either contain an IAppContentStageSharingState object
-     * (indication of successful retrieval), or null (indication of failed retrieval)
+     * `appContentStageSharingState` can either contain an `IAppContentStageSharingState` object if the request succeeds, or null if it failed
      */
     function getAppContentStageSharingState(callback) {
         if (!callback) {
@@ -7444,9 +7684,17 @@ var meeting;
     }
     meeting.getAppContentStageSharingState = getAppContentStageSharingState;
     /**
-     * Registers a handler for changes to
-     *
-     *
+     * Registers a handler for changes to participant speaking states.
+     * To learn more, visit https://aka.ms/teamsjs/registerSpeakingStateChangeHandler
+     *
+     * @remarks
+     * This API returns {@link ISpeakingState}, which will have `isSpeakingDetected` and/or an error object.
+     * If any participant is speaking, `isSpeakingDetected` will be true, or false if no participants are speaking.
+     * Only one handler can be registered at a time. Subsequent registrations replace existing registrations.
+     * This API can only be used in the `sidePanel` and `meetingStage` frame contexts.
+     * For private scheduled meetings, meet now, or calls, include the `OnlineMeetingIncomingAudio.Detect.Chat` RSC permission in your app manifest.
+     * For channel meetings, include the `OnlineMeetingIncomingAudio.Detect.Group` RSC permission in your app manifest.
+     * Find the app manifest reference at https://aka.ms/teamsAppManifest/authorization.
      *
     * @param handler The handler to invoke when the speaking state of any participant changes (start/stop speaking).
     */
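A hedged usage sketch for the speaking-state handler documented above (not code from the package); the ISpeakingState fields used here (isSpeakingDetected, error) are the ones named in the remarks.

```ts
import { meeting } from '@microsoft/teams-js';

// Log whenever any participant starts or stops speaking.
meeting.registerSpeakingStateChangeHandler((speakingState) => {
  if (speakingState.error) {
    console.error('Speaking state error', speakingState.error);
    return;
  }
  console.log('Someone is speaking:', speakingState.isSpeakingDetected);
});
```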
@@ -7507,6 +7755,12 @@ var meeting;
     /**
      * Nested namespace for functions to control behavior of the app share button
      *
+     * @hidden
+     * Hide from docs.
+     *
+     * @internal
+     * Limited to Microsoft-internal use
+     *
      * @beta
      */
     var appShareButton;
@@ -7519,6 +7773,13 @@ var meeting;
         * @throws standard Invalid Url error
         * @param shareInformation has two elements, one isVisible boolean flag and another
         * optional string contentUrl, which will override contentUrl coming from Manifest
+        *
+        * @hidden
+        * Hide from docs.
+        *
+        * @internal
+        * Limited to Microsoft-internal use
+        *
         * @beta
         */
         function setOptions(shareInformation) {
@@ -7881,6 +8142,9 @@ var teamsCore;
     *
     * @param handler - The handler to invoke when the page is loaded.
     *
+    * @deprecated
+    * As of 2.14.1, please use {@link app.lifecycle.registerOnResumeHandler} instead.
+    *
     * @beta
     */
    function registerOnLoadHandler(handler) {
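To illustrate the deprecation note above, a migration sketch from the old teamsCore handler to the new app.lifecycle capability; this is not code from the package, and the context fields used are the ones carried by the existing load context.

```ts
import { app, teamsCore } from '@microsoft/teams-js';

// Before (deprecated as of 2.14.1): load handler registered via teamsCore.
teamsCore.registerOnLoadHandler((context) => {
  console.log('load', context.contentUrl);
});

// After: the app.lifecycle capability takes over the resume flow.
app.lifecycle.registerOnResumeHandler((context) => {
  console.log('resume', context.contentUrl);
  app.notifySuccess(); // signal that the app is ready again
});
```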
@@ -7900,6 +8164,8 @@ var teamsCore;
     *
     * @param handler - The handler to invoke when the page is loaded.
     * @param versionSpecificHelper - The helper function containing logic pertaining to a specific version of the API.
+    *
+    * @deprecated
     */
    function registerOnLoadHandlerHelper(handler, versionSpecificHelper) {
        // allow for registration cleanup even when not finished initializing
@@ -7919,6 +8185,7 @@ var teamsCore;
     * @param handler - The handler to invoke before the page is unloaded. If this handler returns true the page should
     * invoke the readyToUnload function provided to it once it's ready to be unloaded.
     *
+    * @deprecated
     * @beta
     */
    function registerBeforeUnloadHandler(handler) {
@@ -7939,6 +8206,8 @@ var teamsCore;
     * @param handler - - The handler to invoke before the page is unloaded. If this handler returns true the page should
     * invoke the readyToUnload function provided to it once it's ready to be unloaded.
     * @param versionSpecificHelper - The helper function containing logic pertaining to a specific version of the API.
+    *
+    * @deprecated
     */
    function registerBeforeUnloadHandlerHelper(handler, versionSpecificHelper) {
        // allow for registration cleanup even when not finished initializing
@@ -8148,311 +8417,43 @@ var profile;
     profile.isSupported = isSupported;
 })(profile || (profile = {}));
 
-;// CONCATENATED MODULE: ./src/internal/
-… (removed lines not captured in this view)
-function
-… (removed lines not captured in this view)
-        // not to invoke them in the loop to avoid modifying the collection while iterating
-        for (var key in VideoFrameTick.setTimeoutCallbacks) {
-            var callback = VideoFrameTick.setTimeoutCallbacks[key];
-            var start = callback.startedAtInMs;
-            if (now - start >= callback.timeoutInMs) {
-                timeoutIds.push(key);
+;// CONCATENATED MODULE: ./src/internal/videoEffectsUtils.ts
+var videoEffectsUtils_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var videoEffectsUtils_generator = (undefined && undefined.__generator) || function (thisArg, body) {
+    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+    function verb(n) { return function (v) { return step([n, v]); }; }
+    function step(op) {
+        if (f) throw new TypeError("Generator is already executing.");
+        while (g && (g = 0, op[0] && (_ = 0)), _) try {
+            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+            if (y = 0, t) op = [op[0] & 2, t.value];
+            switch (op[0]) {
+                case 0: case 1: t = op; break;
+                case 4: _.label++; return { value: op[1], done: false };
+                case 5: _.label++; y = op[1]; op = [0]; continue;
+                case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                default:
+                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                    if (t[2]) _.ops.pop();
+                    _.trys.pop(); continue;
             }
-… (removed lines not captured in this view)
-                callback.callback();
-                delete VideoFrameTick.setTimeoutCallbacks[id];
-            }
-    };
-    VideoFrameTick.setTimeoutCallbacks = {};
-    return VideoFrameTick;
-}());
-
-
-;// CONCATENATED MODULE: ./src/internal/videoPerformanceStatistics.ts
-
-var VideoPerformanceStatistics = /** @class */ (function () {
-    function VideoPerformanceStatistics(distributionBinSize,
-    /**
-     * Function to report the statistics result
-     */
-    reportStatisticsResult) {
-        this.reportStatisticsResult = reportStatisticsResult;
-        this.sampleCount = 0;
-        this.distributionBins = new Uint32Array(distributionBinSize);
-    }
-    /**
-     * Call this function before processing every frame
-     */
-    VideoPerformanceStatistics.prototype.processStarts = function (effectId, frameWidth, frameHeight, effectParam) {
-        VideoFrameTick.tick();
-        if (!this.suitableForThisSession(effectId, frameWidth, frameHeight, effectParam)) {
-            this.reportAndResetSession(this.getStatistics(), effectId, effectParam, frameWidth, frameHeight);
-        }
-        this.start();
-    };
-    VideoPerformanceStatistics.prototype.processEnds = function () {
-        // calculate duration of the process and record it
-        var durationInMillisecond = performance.now() - this.frameProcessingStartedAt;
-        var binIndex = Math.floor(Math.max(0, Math.min(this.distributionBins.length - 1, durationInMillisecond)));
-        this.distributionBins[binIndex] += 1;
-        this.sampleCount += 1;
-    };
-    VideoPerformanceStatistics.prototype.getStatistics = function () {
-        if (!this.currentSession) {
-            return null;
-        }
-        return {
-            effectId: this.currentSession.effectId,
-            effectParam: this.currentSession.effectParam,
-            frameHeight: this.currentSession.frameHeight,
-            frameWidth: this.currentSession.frameWidth,
-            duration: performance.now() - this.currentSession.startedAtInMs,
-            sampleCount: this.sampleCount,
-            distributionBins: this.distributionBins.slice(),
-        };
-    };
-    VideoPerformanceStatistics.prototype.start = function () {
-        this.frameProcessingStartedAt = performance.now();
-    };
-    VideoPerformanceStatistics.prototype.suitableForThisSession = function (effectId, frameWidth, frameHeight, effectParam) {
-        return (this.currentSession &&
-            this.currentSession.effectId === effectId &&
-            this.currentSession.effectParam === effectParam &&
-            this.currentSession.frameWidth === frameWidth &&
-            this.currentSession.frameHeight === frameHeight);
-    };
-    VideoPerformanceStatistics.prototype.reportAndResetSession = function (result, effectId, effectParam, frameWidth, frameHeight) {
-        var _this = this;
-        result && this.reportStatisticsResult(result);
-        this.resetCurrentSession(this.getNextTimeout(effectId, this.currentSession), effectId, effectParam, frameWidth, frameHeight);
-        if (this.timeoutId) {
-            VideoFrameTick.clearTimeout(this.timeoutId);
-        }
-        this.timeoutId = VideoFrameTick.setTimeout((function () { return _this.reportAndResetSession(_this.getStatistics(), effectId, effectParam, frameWidth, frameHeight); }).bind(this), this.currentSession.timeoutInMs);
-    };
-    VideoPerformanceStatistics.prototype.resetCurrentSession = function (timeoutInMs, effectId, effectParam, frameWidth, frameHeight) {
-        this.currentSession = {
-            startedAtInMs: performance.now(),
-            timeoutInMs: timeoutInMs,
-            effectId: effectId,
-            effectParam: effectParam,
-            frameWidth: frameWidth,
-            frameHeight: frameHeight,
-        };
-        this.sampleCount = 0;
-        this.distributionBins.fill(0);
-    };
-    // send the statistics result every n second, where n starts from 1, 2, 4...and finally stays at every 30 seconds.
-    VideoPerformanceStatistics.prototype.getNextTimeout = function (effectId, currentSession) {
-        // only reset timeout when new session or effect changed
-        if (!currentSession || currentSession.effectId !== effectId) {
-            return VideoPerformanceStatistics.initialSessionTimeoutInMs;
-        }
-        return Math.min(VideoPerformanceStatistics.maxSessionTimeoutInMs, currentSession.timeoutInMs * 2);
-    };
-    VideoPerformanceStatistics.initialSessionTimeoutInMs = 1000;
-    VideoPerformanceStatistics.maxSessionTimeoutInMs = 1000 * 30;
-    return VideoPerformanceStatistics;
-}());
-
-
-;// CONCATENATED MODULE: ./src/internal/videoPerformanceMonitor.ts
-
-
-/**
- * This class is used to monitor the performance of video processing, and report performance events.
- */
-var VideoPerformanceMonitor = /** @class */ (function () {
-    function VideoPerformanceMonitor(reportPerformanceEvent) {
-        var _this = this;
-        this.reportPerformanceEvent = reportPerformanceEvent;
-        this.isFirstFrameProcessed = false;
-        this.frameProcessTimeLimit = 100;
-        this.frameProcessingStartedAt = 0;
-        this.frameProcessingTimeCost = 0;
-        this.processedFrameCount = 0;
-        this.performanceStatistics = new VideoPerformanceStatistics(VideoPerformanceMonitor.distributionBinSize, function (result) {
-            return _this.reportPerformanceEvent('video.performance.performanceDataGenerated', [result]);
-        });
-    }
-    /**
-     * Start to check frame processing time intervally
-     * and report performance event if the average frame processing time is too long.
-     */
-    VideoPerformanceMonitor.prototype.startMonitorSlowFrameProcessing = function () {
-        var _this = this;
-        VideoFrameTick.setInterval(function () {
-            if (_this.processedFrameCount === 0) {
-                return;
-            }
-            var averageFrameProcessingTime = _this.frameProcessingTimeCost / _this.processedFrameCount;
-            if (averageFrameProcessingTime > _this.frameProcessTimeLimit) {
-                _this.reportPerformanceEvent('video.performance.frameProcessingSlow', [averageFrameProcessingTime]);
-            }
-            _this.frameProcessingTimeCost = 0;
-            _this.processedFrameCount = 0;
-        }, VideoPerformanceMonitor.calculateFPSInterval);
-    };
-    /**
-     * Define the time limit of frame processing.
-     * When the average frame processing time is longer than the time limit, a "video.performance.frameProcessingSlow" event will be reported.
-     * @param timeLimit
-     */
-    VideoPerformanceMonitor.prototype.setFrameProcessTimeLimit = function (timeLimit) {
-        this.frameProcessTimeLimit = timeLimit;
-    };
-    /**
-     * Call this function when the app starts to switch to the new video effect
-     */
-    VideoPerformanceMonitor.prototype.reportApplyingVideoEffect = function (effectId, effectParam) {
-        var _a, _b;
-        if (((_a = this.applyingEffect) === null || _a === void 0 ? void 0 : _a.effectId) === effectId && ((_b = this.applyingEffect) === null || _b === void 0 ? void 0 : _b.effectParam) === effectParam) {
-            return;
-        }
-        this.applyingEffect = {
-            effectId: effectId,
-            effectParam: effectParam,
-        };
-        this.appliedEffect = undefined;
-    };
-    /**
-     * Call this function when the new video effect is ready
-     */
-    VideoPerformanceMonitor.prototype.reportVideoEffectChanged = function (effectId, effectParam) {
-        if (this.applyingEffect === undefined ||
-            (this.applyingEffect.effectId !== effectId && this.applyingEffect.effectParam !== effectParam)) {
-            // don't handle obsoleted event
-            return;
-        }
-        this.appliedEffect = {
-            effectId: effectId,
-            effectParam: effectParam,
-        };
-        this.applyingEffect = undefined;
-        this.isFirstFrameProcessed = false;
-    };
-    /**
-     * Call this function when the app starts to process a video frame
-     */
-    VideoPerformanceMonitor.prototype.reportStartFrameProcessing = function (frameWidth, frameHeight) {
-        VideoFrameTick.tick();
-        if (!this.appliedEffect) {
-            return;
-        }
-        this.frameProcessingStartedAt = performance.now();
-        this.performanceStatistics.processStarts(this.appliedEffect.effectId, frameWidth, frameHeight, this.appliedEffect.effectParam);
-    };
-    /**
-     * Call this function when the app finishes successfully processing a video frame
-     */
-    VideoPerformanceMonitor.prototype.reportFrameProcessed = function () {
-        var _a;
-        if (!this.appliedEffect) {
-            return;
-        }
-        this.processedFrameCount++;
-        this.frameProcessingTimeCost += performance.now() - this.frameProcessingStartedAt;
-        this.performanceStatistics.processEnds();
-        if (!this.isFirstFrameProcessed) {
-            this.isFirstFrameProcessed = true;
-            this.reportPerformanceEvent('video.performance.firstFrameProcessed', [
-                Date.now(),
-                this.appliedEffect.effectId,
-                (_a = this.appliedEffect) === null || _a === void 0 ? void 0 : _a.effectParam,
-            ]);
-        }
-    };
-    /**
-     * Call this function when the app starts to get the texture stream
-     */
-    VideoPerformanceMonitor.prototype.reportGettingTextureStream = function (streamId) {
-        this.gettingTextureStreamStartedAt = performance.now();
-        this.currentStreamId = streamId;
-    };
-    /**
-     * Call this function when the app finishes successfully getting the texture stream
-     */
-    VideoPerformanceMonitor.prototype.reportTextureStreamAcquired = function () {
-        if (this.gettingTextureStreamStartedAt !== undefined) {
-            var timeTaken = performance.now() - this.gettingTextureStreamStartedAt;
-            this.reportPerformanceEvent('video.performance.textureStreamAcquired', [this.currentStreamId, timeTaken]);
-        }
-    };
-    VideoPerformanceMonitor.distributionBinSize = 1000;
-    VideoPerformanceMonitor.calculateFPSInterval = 1000;
-    return VideoPerformanceMonitor;
-}());
-
-
-;// CONCATENATED MODULE: ./src/internal/videoUtils.ts
-var videoUtils_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var videoUtils_generator = (undefined && undefined.__generator) || function (thisArg, body) {
-    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
-    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
-    function verb(n) { return function (v) { return step([n, v]); }; }
-    function step(op) {
-        if (f) throw new TypeError("Generator is already executing.");
-        while (g && (g = 0, op[0] && (_ = 0)), _) try {
-            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
-            if (y = 0, t) op = [op[0] & 2, t.value];
-            switch (op[0]) {
-                case 0: case 1: t = op; break;
-                case 4: _.label++; return { value: op[1], done: false };
-                case 5: _.label++; y = op[1]; op = [0]; continue;
-                case 7: op = _.ops.pop(); _.trys.pop(); continue;
-                default:
-                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
-                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
8446
|
-
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
8447
|
-
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
8448
|
-
if (t[2]) _.ops.pop();
|
8449
|
-
_.trys.pop(); continue;
|
8450
|
-
}
|
8451
|
-
op = body.call(thisArg, _);
|
8452
|
-
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
8453
|
-
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
8454
|
-
}
|
8455
|
-
};
|
8452
|
+
op = body.call(thisArg, _);
|
8453
|
+
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
8454
|
+
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
8455
|
+
}
|
8456
|
+
};
|
8456
8457
|
|
8457
8458
|
|
8458
8459
|
|
@@ -8464,9 +8465,9 @@ var videoUtils_generator = (undefined && undefined.__generator) || function (thi
|
|
8464
8465
|
*/
|
8465
8466
|
function processMediaStream(streamId, videoFrameHandler, notifyError, videoPerformanceMonitor) {
|
8466
8467
|
var _a, _b;
|
8467
|
-
return
|
8468
|
+
return videoEffectsUtils_awaiter(this, void 0, void 0, function () {
|
8468
8469
|
var generator, _c;
|
8469
|
-
return
|
8470
|
+
return videoEffectsUtils_generator(this, function (_d) {
|
8470
8471
|
switch (_d.label) {
|
8471
8472
|
case 0:
|
8472
8473
|
generator = createProcessedStreamGeneratorWithoutSource();
|
@@ -8491,9 +8492,9 @@ function processMediaStream(streamId, videoFrameHandler, notifyError, videoPerfo
|
|
8491
8492
|
*/
|
8492
8493
|
function processMediaStreamWithMetadata(streamId, videoFrameHandler, notifyError, videoPerformanceMonitor) {
|
8493
8494
|
var _a, _b;
|
8494
|
-
return
|
8495
|
+
return videoEffectsUtils_awaiter(this, void 0, void 0, function () {
|
8495
8496
|
var generator, _c;
|
8496
|
-
return
|
8497
|
+
return videoEffectsUtils_generator(this, function (_d) {
|
8497
8498
|
switch (_d.label) {
|
8498
8499
|
case 0:
|
8499
8500
|
generator = createProcessedStreamGeneratorWithoutSource();
|
@@ -8512,9 +8513,9 @@ function processMediaStreamWithMetadata(streamId, videoFrameHandler, notifyError
|
|
8512
8513
|
* Get the video track from the media stream gotten from chrome.webview.getTextureStream(streamId).
|
8513
8514
|
*/
|
8514
8515
|
function getInputVideoTrack(streamId, notifyError, videoPerformanceMonitor) {
|
8515
|
-
return
|
8516
|
+
return videoEffectsUtils_awaiter(this, void 0, void 0, function () {
|
8516
8517
|
var chrome, mediaStream, tracks, error_1, errorMsg;
|
8517
|
-
return
|
8518
|
+
return videoEffectsUtils_generator(this, function (_a) {
|
8518
8519
|
switch (_a.label) {
|
8519
8520
|
case 0:
|
8520
8521
|
if (inServerSideRenderingEnvironment()) {
|
@@ -8584,9 +8585,9 @@ var DefaultTransformer = /** @class */ (function () {
|
|
8584
8585
|
var _this = this;
|
8585
8586
|
this.notifyError = notifyError;
|
8586
8587
|
this.videoFrameHandler = videoFrameHandler;
|
8587
|
-
this.transform = function (originalFrame, controller) { return
|
8588
|
+
this.transform = function (originalFrame, controller) { return videoEffectsUtils_awaiter(_this, void 0, void 0, function () {
|
8588
8589
|
var timestamp, frameProcessedByApp, processedFrame, error_2;
|
8589
|
-
return
|
8590
|
+
return videoEffectsUtils_generator(this, function (_a) {
|
8590
8591
|
switch (_a.label) {
|
8591
8592
|
case 0:
|
8592
8593
|
timestamp = originalFrame.timestamp;
|
@@ -8746,9 +8747,9 @@ var TransformerWithMetadata = /** @class */ (function () {
|
|
8746
8747
|
this.notifyError = notifyError;
|
8747
8748
|
this.videoFrameHandler = videoFrameHandler;
|
8748
8749
|
this.shouldDiscardAudioInferenceResult = false;
|
8749
|
-
this.transform = function (originalFrame, controller) { return
|
8750
|
+
this.transform = function (originalFrame, controller) { return videoEffectsUtils_awaiter(_this, void 0, void 0, function () {
|
8750
8751
|
var timestamp, _a, videoFrame, _b, _c, audioInferenceResult, frameProcessedByApp, processedFrame, error_3;
|
8751
|
-
return
|
8752
|
+
return videoEffectsUtils_generator(this, function (_d) {
|
8752
8753
|
switch (_d.label) {
|
8753
8754
|
case 0:
|
8754
8755
|
timestamp = originalFrame.timestamp;
|
@@ -8809,9 +8810,9 @@ var TransformerWithMetadata = /** @class */ (function () {
|
|
8809
8810
|
* @internal
|
8810
8811
|
* Limited to Microsoft-internal use
|
8811
8812
|
*/
|
8812
|
-
this.extractVideoFrameAndMetadata = function (texture) { return
|
8813
|
+
this.extractVideoFrameAndMetadata = function (texture) { return videoEffectsUtils_awaiter(_this, void 0, void 0, function () {
|
8813
8814
|
var headerRect, headerBuffer, header, metadataRect, metadataBuffer, metadata;
|
8814
|
-
return
|
8815
|
+
return videoEffectsUtils_generator(this, function (_a) {
|
8815
8816
|
switch (_a.label) {
|
8816
8817
|
case 0:
|
8817
8818
|
if (inServerSideRenderingEnvironment()) {
|
@@ -8874,15 +8875,283 @@ function createEffectParameterChangeCallback(callback, videoPerformanceMonitor)
|
|
8874
8875
|
sendMessageToParent('video.videoEffectReadiness', [true, effectId, undefined, effectParam]);
|
8875
8876
|
})
|
8876
8877
|
.catch(function (reason) {
|
8877
|
-
var validReason = reason in
|
8878
|
+
var validReason = reason in videoEffects.EffectFailureReason ? reason : videoEffects.EffectFailureReason.InitializationFailure;
|
8878
8879
|
sendMessageToParent('video.videoEffectReadiness', [false, effectId, validReason, effectParam]);
|
8879
8880
|
});
|
8880
8881
|
};
|
8881
8882
|
}
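The `validReason` normalization in the added line above means an app's video-effect callback may reject with either a predefined `videoEffects.EffectFailureReason` member or any other value; anything unrecognized is reported to the host as `InitializationFailure`. A minimal consumer-side sketch of that contract, using only names that appear in this diff (the `loadEffectModel` helper is hypothetical):

```typescript
import { videoEffects } from '@microsoft/teams-js';

// Hypothetical app-specific loader; stands in for whatever asset or model setup an effect needs.
async function loadEffectModel(effectId: string): Promise<void> {
  /* fetch and initialize effect assets here */
}

videoEffects.registerForVideoEffect(async (effectId) => {
  if (effectId === undefined) {
    return; // effect disabled, nothing to prepare
  }
  try {
    await loadEffectModel(effectId);
  } catch {
    // A predefined reason is forwarded to the host as-is; any other rejection
    // value is normalized to InitializationFailure by the callback wrapper above.
    throw videoEffects.EffectFailureReason.InitializationFailure;
  }
});
```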
|
8882
8883
|
|
8883
|
-
;// CONCATENATED MODULE: ./src/
|
8884
|
-
|
8885
|
-
|
8884
|
+
;// CONCATENATED MODULE: ./src/internal/videoFrameTick.ts
|
8885
|
+
|
8886
|
+
var VideoFrameTick = /** @class */ (function () {
|
8887
|
+
function VideoFrameTick() {
|
8888
|
+
}
|
8889
|
+
VideoFrameTick.setTimeout = function (callback, timeoutInMs) {
|
8890
|
+
var startedAtInMs = performance.now();
|
8891
|
+
var id = generateGUID();
|
8892
|
+
VideoFrameTick.setTimeoutCallbacks[id] = {
|
8893
|
+
callback: callback,
|
8894
|
+
timeoutInMs: timeoutInMs,
|
8895
|
+
startedAtInMs: startedAtInMs,
|
8896
|
+
};
|
8897
|
+
return id;
|
8898
|
+
};
|
8899
|
+
VideoFrameTick.clearTimeout = function (id) {
|
8900
|
+
delete VideoFrameTick.setTimeoutCallbacks[id];
|
8901
|
+
};
|
8902
|
+
VideoFrameTick.setInterval = function (callback, intervalInMs) {
|
8903
|
+
VideoFrameTick.setTimeout(function next() {
|
8904
|
+
callback();
|
8905
|
+
VideoFrameTick.setTimeout(next, intervalInMs);
|
8906
|
+
}, intervalInMs);
|
8907
|
+
};
|
8908
|
+
/**
|
8909
|
+
* Call this function whenever a frame comes in, it will check if any timeout is due and call the callback
|
8910
|
+
*/
|
8911
|
+
VideoFrameTick.tick = function () {
|
8912
|
+
var now = performance.now();
|
8913
|
+
var timeoutIds = [];
|
8914
|
+
// find all the timeouts that are due,
|
8915
|
+
// not to invoke them in the loop to avoid modifying the collection while iterating
|
8916
|
+
for (var key in VideoFrameTick.setTimeoutCallbacks) {
|
8917
|
+
var callback = VideoFrameTick.setTimeoutCallbacks[key];
|
8918
|
+
var start = callback.startedAtInMs;
|
8919
|
+
if (now - start >= callback.timeoutInMs) {
|
8920
|
+
timeoutIds.push(key);
|
8921
|
+
}
|
8922
|
+
}
|
8923
|
+
// invoke the callbacks
|
8924
|
+
for (var _i = 0, timeoutIds_1 = timeoutIds; _i < timeoutIds_1.length; _i++) {
|
8925
|
+
var id = timeoutIds_1[_i];
|
8926
|
+
var callback = VideoFrameTick.setTimeoutCallbacks[id];
|
8927
|
+
callback.callback();
|
8928
|
+
delete VideoFrameTick.setTimeoutCallbacks[id];
|
8929
|
+
}
|
8930
|
+
};
|
8931
|
+
VideoFrameTick.setTimeoutCallbacks = {};
|
8932
|
+
return VideoFrameTick;
|
8933
|
+
}());
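`VideoFrameTick`, added above, replaces wall-clock timers with timeouts that are only evaluated when `tick()` runs, i.e. when a video frame actually arrives, so no callback can fire while the pipeline is idle. A self-contained sketch of the same mechanism (not the library's code; `performance.now()` is the only environment assumption):

```typescript
// Timeouts are recorded with a start timestamp and only fired from tick().
type TickCallback = { callback: () => void; timeoutInMs: number; startedAtInMs: number };
const pending = new Map<string, TickCallback>();

function setTickTimeout(callback: () => void, timeoutInMs: number): string {
  const id = Math.random().toString(36).slice(2); // stand-in for generateGUID()
  pending.set(id, { callback, timeoutInMs, startedAtInMs: performance.now() });
  return id;
}

function setTickInterval(callback: () => void, intervalInMs: number): void {
  setTickTimeout(function next() {
    callback();
    setTickTimeout(next, intervalInMs); // re-arm, exactly as VideoFrameTick.setInterval does
  }, intervalInMs);
}

function tick(): void {
  const now = performance.now();
  for (const [id, entry] of [...pending]) {
    if (now - entry.startedAtInMs >= entry.timeoutInMs) {
      pending.delete(id);
      entry.callback(); // fires only when a frame-driven tick happens
    }
  }
}
```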
|
8934
|
+
|
8935
|
+
|
8936
|
+
;// CONCATENATED MODULE: ./src/internal/videoPerformanceStatistics.ts
|
8937
|
+
|
8938
|
+
var VideoPerformanceStatistics = /** @class */ (function () {
|
8939
|
+
function VideoPerformanceStatistics(distributionBinSize,
|
8940
|
+
/**
|
8941
|
+
* Function to report the statistics result
|
8942
|
+
*/
|
8943
|
+
reportStatisticsResult) {
|
8944
|
+
this.reportStatisticsResult = reportStatisticsResult;
|
8945
|
+
this.sampleCount = 0;
|
8946
|
+
this.distributionBins = new Uint32Array(distributionBinSize);
|
8947
|
+
}
|
8948
|
+
/**
|
8949
|
+
* Call this function before processing every frame
|
8950
|
+
*/
|
8951
|
+
VideoPerformanceStatistics.prototype.processStarts = function (effectId, frameWidth, frameHeight, effectParam) {
|
8952
|
+
VideoFrameTick.tick();
|
8953
|
+
if (!this.suitableForThisSession(effectId, frameWidth, frameHeight, effectParam)) {
|
8954
|
+
this.reportAndResetSession(this.getStatistics(), effectId, effectParam, frameWidth, frameHeight);
|
8955
|
+
}
|
8956
|
+
this.start();
|
8957
|
+
};
|
8958
|
+
VideoPerformanceStatistics.prototype.processEnds = function () {
|
8959
|
+
// calculate duration of the process and record it
|
8960
|
+
var durationInMillisecond = performance.now() - this.frameProcessingStartedAt;
|
8961
|
+
var binIndex = Math.floor(Math.max(0, Math.min(this.distributionBins.length - 1, durationInMillisecond)));
|
8962
|
+
this.distributionBins[binIndex] += 1;
|
8963
|
+
this.sampleCount += 1;
|
8964
|
+
};
|
8965
|
+
VideoPerformanceStatistics.prototype.getStatistics = function () {
|
8966
|
+
if (!this.currentSession) {
|
8967
|
+
return null;
|
8968
|
+
}
|
8969
|
+
return {
|
8970
|
+
effectId: this.currentSession.effectId,
|
8971
|
+
effectParam: this.currentSession.effectParam,
|
8972
|
+
frameHeight: this.currentSession.frameHeight,
|
8973
|
+
frameWidth: this.currentSession.frameWidth,
|
8974
|
+
duration: performance.now() - this.currentSession.startedAtInMs,
|
8975
|
+
sampleCount: this.sampleCount,
|
8976
|
+
distributionBins: this.distributionBins.slice(),
|
8977
|
+
};
|
8978
|
+
};
|
8979
|
+
VideoPerformanceStatistics.prototype.start = function () {
|
8980
|
+
this.frameProcessingStartedAt = performance.now();
|
8981
|
+
};
|
8982
|
+
VideoPerformanceStatistics.prototype.suitableForThisSession = function (effectId, frameWidth, frameHeight, effectParam) {
|
8983
|
+
return (this.currentSession &&
|
8984
|
+
this.currentSession.effectId === effectId &&
|
8985
|
+
this.currentSession.effectParam === effectParam &&
|
8986
|
+
this.currentSession.frameWidth === frameWidth &&
|
8987
|
+
this.currentSession.frameHeight === frameHeight);
|
8988
|
+
};
|
8989
|
+
VideoPerformanceStatistics.prototype.reportAndResetSession = function (result, effectId, effectParam, frameWidth, frameHeight) {
|
8990
|
+
var _this = this;
|
8991
|
+
result && this.reportStatisticsResult(result);
|
8992
|
+
this.resetCurrentSession(this.getNextTimeout(effectId, this.currentSession), effectId, effectParam, frameWidth, frameHeight);
|
8993
|
+
if (this.timeoutId) {
|
8994
|
+
VideoFrameTick.clearTimeout(this.timeoutId);
|
8995
|
+
}
|
8996
|
+
this.timeoutId = VideoFrameTick.setTimeout((function () { return _this.reportAndResetSession(_this.getStatistics(), effectId, effectParam, frameWidth, frameHeight); }).bind(this), this.currentSession.timeoutInMs);
|
8997
|
+
};
|
8998
|
+
VideoPerformanceStatistics.prototype.resetCurrentSession = function (timeoutInMs, effectId, effectParam, frameWidth, frameHeight) {
|
8999
|
+
this.currentSession = {
|
9000
|
+
startedAtInMs: performance.now(),
|
9001
|
+
timeoutInMs: timeoutInMs,
|
9002
|
+
effectId: effectId,
|
9003
|
+
effectParam: effectParam,
|
9004
|
+
frameWidth: frameWidth,
|
9005
|
+
frameHeight: frameHeight,
|
9006
|
+
};
|
9007
|
+
this.sampleCount = 0;
|
9008
|
+
this.distributionBins.fill(0);
|
9009
|
+
};
|
9010
|
+
// send the statistics result every n second, where n starts from 1, 2, 4...and finally stays at every 30 seconds.
|
9011
|
+
VideoPerformanceStatistics.prototype.getNextTimeout = function (effectId, currentSession) {
|
9012
|
+
// only reset timeout when new session or effect changed
|
9013
|
+
if (!currentSession || currentSession.effectId !== effectId) {
|
9014
|
+
return VideoPerformanceStatistics.initialSessionTimeoutInMs;
|
9015
|
+
}
|
9016
|
+
return Math.min(VideoPerformanceStatistics.maxSessionTimeoutInMs, currentSession.timeoutInMs * 2);
|
9017
|
+
};
|
9018
|
+
VideoPerformanceStatistics.initialSessionTimeoutInMs = 1000;
|
9019
|
+
VideoPerformanceStatistics.maxSessionTimeoutInMs = 1000 * 30;
|
9020
|
+
return VideoPerformanceStatistics;
|
9021
|
+
}());
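Two details of `VideoPerformanceStatistics` worth calling out: per-frame durations are clamped into 1 ms-wide bins (the bin index is the floor of the duration, capped at the last bin), and a session's report timeout doubles from 1 s up to a 30 s ceiling while the same effect stays applied. A small sketch of both calculations, assuming the constants shown above:

```typescript
// Bin selection as in processEnds(): 1 ms per bin, clamped to the last bin.
const distributionBinSize = 1000; // VideoPerformanceMonitor.distributionBinSize
function binIndexFor(durationInMs: number): number {
  return Math.floor(Math.max(0, Math.min(distributionBinSize - 1, durationInMs)));
}

// Report-interval growth as in getNextTimeout(): 1 s, 2 s, 4 s, ... capped at 30 s.
const initialSessionTimeoutInMs = 1000;
const maxSessionTimeoutInMs = 30 * 1000;
function nextTimeout(previousTimeoutInMs?: number): number {
  return previousTimeoutInMs === undefined
    ? initialSessionTimeoutInMs // new session or effect changed
    : Math.min(maxSessionTimeoutInMs, previousTimeoutInMs * 2);
}

console.log(binIndexFor(3.7)); // 3
console.log(nextTimeout(4000)); // 8000
```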
|
9022
|
+
|
9023
|
+
|
9024
|
+
;// CONCATENATED MODULE: ./src/internal/videoPerformanceMonitor.ts
|
9025
|
+
|
9026
|
+
|
9027
|
+
/**
|
9028
|
+
* This class is used to monitor the performance of video processing, and report performance events.
|
9029
|
+
*/
|
9030
|
+
var VideoPerformanceMonitor = /** @class */ (function () {
|
9031
|
+
function VideoPerformanceMonitor(reportPerformanceEvent) {
|
9032
|
+
var _this = this;
|
9033
|
+
this.reportPerformanceEvent = reportPerformanceEvent;
|
9034
|
+
this.isFirstFrameProcessed = false;
|
9035
|
+
this.frameProcessTimeLimit = 100;
|
9036
|
+
this.frameProcessingStartedAt = 0;
|
9037
|
+
this.frameProcessingTimeCost = 0;
|
9038
|
+
this.processedFrameCount = 0;
|
9039
|
+
this.performanceStatistics = new VideoPerformanceStatistics(VideoPerformanceMonitor.distributionBinSize, function (result) {
|
9040
|
+
return _this.reportPerformanceEvent('video.performance.performanceDataGenerated', [result]);
|
9041
|
+
});
|
9042
|
+
}
|
9043
|
+
/**
|
9044
|
+
* Start to check frame processing time intervally
|
9045
|
+
* and report performance event if the average frame processing time is too long.
|
9046
|
+
*/
|
9047
|
+
VideoPerformanceMonitor.prototype.startMonitorSlowFrameProcessing = function () {
|
9048
|
+
var _this = this;
|
9049
|
+
VideoFrameTick.setInterval(function () {
|
9050
|
+
if (_this.processedFrameCount === 0) {
|
9051
|
+
return;
|
9052
|
+
}
|
9053
|
+
var averageFrameProcessingTime = _this.frameProcessingTimeCost / _this.processedFrameCount;
|
9054
|
+
if (averageFrameProcessingTime > _this.frameProcessTimeLimit) {
|
9055
|
+
_this.reportPerformanceEvent('video.performance.frameProcessingSlow', [averageFrameProcessingTime]);
|
9056
|
+
}
|
9057
|
+
_this.frameProcessingTimeCost = 0;
|
9058
|
+
_this.processedFrameCount = 0;
|
9059
|
+
}, VideoPerformanceMonitor.calculateFPSInterval);
|
9060
|
+
};
|
9061
|
+
/**
|
9062
|
+
* Define the time limit of frame processing.
|
9063
|
+
* When the average frame processing time is longer than the time limit, a "video.performance.frameProcessingSlow" event will be reported.
|
9064
|
+
* @param timeLimit
|
9065
|
+
*/
|
9066
|
+
VideoPerformanceMonitor.prototype.setFrameProcessTimeLimit = function (timeLimit) {
|
9067
|
+
this.frameProcessTimeLimit = timeLimit;
|
9068
|
+
};
|
9069
|
+
/**
|
9070
|
+
* Call this function when the app starts to switch to the new video effect
|
9071
|
+
*/
|
9072
|
+
VideoPerformanceMonitor.prototype.reportApplyingVideoEffect = function (effectId, effectParam) {
|
9073
|
+
var _a, _b;
|
9074
|
+
if (((_a = this.applyingEffect) === null || _a === void 0 ? void 0 : _a.effectId) === effectId && ((_b = this.applyingEffect) === null || _b === void 0 ? void 0 : _b.effectParam) === effectParam) {
|
9075
|
+
return;
|
9076
|
+
}
|
9077
|
+
this.applyingEffect = {
|
9078
|
+
effectId: effectId,
|
9079
|
+
effectParam: effectParam,
|
9080
|
+
};
|
9081
|
+
this.appliedEffect = undefined;
|
9082
|
+
};
|
9083
|
+
/**
|
9084
|
+
* Call this function when the new video effect is ready
|
9085
|
+
*/
|
9086
|
+
VideoPerformanceMonitor.prototype.reportVideoEffectChanged = function (effectId, effectParam) {
|
9087
|
+
if (this.applyingEffect === undefined ||
|
9088
|
+
(this.applyingEffect.effectId !== effectId && this.applyingEffect.effectParam !== effectParam)) {
|
9089
|
+
// don't handle obsoleted event
|
9090
|
+
return;
|
9091
|
+
}
|
9092
|
+
this.appliedEffect = {
|
9093
|
+
effectId: effectId,
|
9094
|
+
effectParam: effectParam,
|
9095
|
+
};
|
9096
|
+
this.applyingEffect = undefined;
|
9097
|
+
this.isFirstFrameProcessed = false;
|
9098
|
+
};
|
9099
|
+
/**
|
9100
|
+
* Call this function when the app starts to process a video frame
|
9101
|
+
*/
|
9102
|
+
VideoPerformanceMonitor.prototype.reportStartFrameProcessing = function (frameWidth, frameHeight) {
|
9103
|
+
VideoFrameTick.tick();
|
9104
|
+
if (!this.appliedEffect) {
|
9105
|
+
return;
|
9106
|
+
}
|
9107
|
+
this.frameProcessingStartedAt = performance.now();
|
9108
|
+
this.performanceStatistics.processStarts(this.appliedEffect.effectId, frameWidth, frameHeight, this.appliedEffect.effectParam);
|
9109
|
+
};
|
9110
|
+
/**
|
9111
|
+
* Call this function when the app finishes successfully processing a video frame
|
9112
|
+
*/
|
9113
|
+
VideoPerformanceMonitor.prototype.reportFrameProcessed = function () {
|
9114
|
+
var _a;
|
9115
|
+
if (!this.appliedEffect) {
|
9116
|
+
return;
|
9117
|
+
}
|
9118
|
+
this.processedFrameCount++;
|
9119
|
+
this.frameProcessingTimeCost += performance.now() - this.frameProcessingStartedAt;
|
9120
|
+
this.performanceStatistics.processEnds();
|
9121
|
+
if (!this.isFirstFrameProcessed) {
|
9122
|
+
this.isFirstFrameProcessed = true;
|
9123
|
+
this.reportPerformanceEvent('video.performance.firstFrameProcessed', [
|
9124
|
+
Date.now(),
|
9125
|
+
this.appliedEffect.effectId,
|
9126
|
+
(_a = this.appliedEffect) === null || _a === void 0 ? void 0 : _a.effectParam,
|
9127
|
+
]);
|
9128
|
+
}
|
9129
|
+
};
|
9130
|
+
/**
|
9131
|
+
* Call this function when the app starts to get the texture stream
|
9132
|
+
*/
|
9133
|
+
VideoPerformanceMonitor.prototype.reportGettingTextureStream = function (streamId) {
|
9134
|
+
this.gettingTextureStreamStartedAt = performance.now();
|
9135
|
+
this.currentStreamId = streamId;
|
9136
|
+
};
|
9137
|
+
/**
|
9138
|
+
* Call this function when the app finishes successfully getting the texture stream
|
9139
|
+
*/
|
9140
|
+
VideoPerformanceMonitor.prototype.reportTextureStreamAcquired = function () {
|
9141
|
+
if (this.gettingTextureStreamStartedAt !== undefined) {
|
9142
|
+
var timeTaken = performance.now() - this.gettingTextureStreamStartedAt;
|
9143
|
+
this.reportPerformanceEvent('video.performance.textureStreamAcquired', [this.currentStreamId, timeTaken]);
|
9144
|
+
}
|
9145
|
+
};
|
9146
|
+
VideoPerformanceMonitor.distributionBinSize = 1000;
|
9147
|
+
VideoPerformanceMonitor.calculateFPSInterval = 1000;
|
9148
|
+
return VideoPerformanceMonitor;
|
9149
|
+
}());
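Taken together, the monitor's flow is: `reportStartFrameProcessing` stamps the start time (and drives `VideoFrameTick.tick()`), `reportFrameProcessed` accumulates the cost, and the interval started by `startMonitorSlowFrameProcessing` emits `video.performance.frameProcessingSlow` whenever the running average exceeds the limit (100 ms by default, adjustable via `setFrameProcessTimeLimit`). A self-contained sketch of the averaging check, mirroring the interval callback above (not the library's code):

```typescript
// Accumulate per-frame cost, then compare the average against the limit.
let frameProcessingTimeCost = 0;
let processedFrameCount = 0;
const frameProcessTimeLimit = 100; // ms, the default shown in the constructor

function onFrameProcessed(durationMs: number): void {
  frameProcessingTimeCost += durationMs;
  processedFrameCount += 1;
}

function checkSlowProcessing(report: (avgMs: number) => void): void {
  if (processedFrameCount === 0) {
    return;
  }
  const average = frameProcessingTimeCost / processedFrameCount;
  if (average > frameProcessTimeLimit) {
    report(average); // maps to the 'video.performance.frameProcessingSlow' event
  }
  frameProcessingTimeCost = 0;
  processedFrameCount = 0;
}
```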
|
9150
|
+
|
9151
|
+
|
9152
|
+
;// CONCATENATED MODULE: ./src/public/videoEffects.ts
|
9153
|
+
var videoEffects_assign = (undefined && undefined.__assign) || function () {
|
9154
|
+
videoEffects_assign = Object.assign || function(t) {
|
8886
9155
|
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
8887
9156
|
s = arguments[i];
|
8888
9157
|
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
|
@@ -8890,9 +9159,9 @@ var video_assign = (undefined && undefined.__assign) || function () {
|
|
8890
9159
|
}
|
8891
9160
|
return t;
|
8892
9161
|
};
|
8893
|
-
return
|
9162
|
+
return videoEffects_assign.apply(this, arguments);
|
8894
9163
|
};
|
8895
|
-
var
|
9164
|
+
var videoEffects_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
|
8896
9165
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
8897
9166
|
return new (P || (P = Promise))(function (resolve, reject) {
|
8898
9167
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
@@ -8901,7 +9170,7 @@ var video_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _ar
|
|
8901
9170
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
8902
9171
|
});
|
8903
9172
|
};
|
8904
|
-
var
|
9173
|
+
var videoEffects_generator = (undefined && undefined.__generator) || function (thisArg, body) {
|
8905
9174
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
8906
9175
|
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
8907
9176
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
@@ -8928,7 +9197,7 @@ var video_generator = (undefined && undefined.__generator) || function (thisArg,
|
|
8928
9197
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
8929
9198
|
}
|
8930
9199
|
};
|
8931
|
-
var
|
9200
|
+
var videoEffects_rest = (undefined && undefined.__rest) || function (s, e) {
|
8932
9201
|
var t = {};
|
8933
9202
|
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
8934
9203
|
t[p] = s[p];
|
@@ -8951,8 +9220,8 @@ var video_rest = (undefined && undefined.__rest) || function (s, e) {
|
|
8951
9220
|
* Namespace to video extensibility of the SDK
|
8952
9221
|
* @beta
|
8953
9222
|
*/
|
8954
|
-
var
|
8955
|
-
(function (
|
9223
|
+
var videoEffects;
|
9224
|
+
(function (videoEffects) {
|
8956
9225
|
var videoPerformanceMonitor = inServerSideRenderingEnvironment()
|
8957
9226
|
? undefined
|
8958
9227
|
: new VideoPerformanceMonitor(sendMessageToParent);
|
@@ -8964,7 +9233,7 @@ var video;
|
|
8964
9233
|
(function (VideoFrameFormat) {
|
8965
9234
|
/** Video format used for encoding and decoding YUV color data in video streaming and storage applications. */
|
8966
9235
|
VideoFrameFormat["NV12"] = "NV12";
|
8967
|
-
})(VideoFrameFormat =
|
9236
|
+
})(VideoFrameFormat = videoEffects.VideoFrameFormat || (videoEffects.VideoFrameFormat = {}));
|
8968
9237
|
/**
|
8969
9238
|
* Video effect change type enum
|
8970
9239
|
* @beta
|
@@ -8979,7 +9248,7 @@ var video;
|
|
8979
9248
|
* Disable the video effect
|
8980
9249
|
*/
|
8981
9250
|
EffectChangeType["EffectDisabled"] = "EffectDisabled";
|
8982
|
-
})(EffectChangeType =
|
9251
|
+
})(EffectChangeType = videoEffects.EffectChangeType || (videoEffects.EffectChangeType = {}));
|
8983
9252
|
/**
|
8984
9253
|
* Predefined failure reasons for preparing the selected video effect
|
8985
9254
|
* @beta
|
@@ -8995,7 +9264,7 @@ var video;
|
|
8995
9264
|
* The effect can't be initialized
|
8996
9265
|
*/
|
8997
9266
|
EffectFailureReason["InitializationFailure"] = "InitializationFailure";
|
8998
|
-
})(EffectFailureReason =
|
9267
|
+
})(EffectFailureReason = videoEffects.EffectFailureReason || (videoEffects.EffectFailureReason = {}));
|
8999
9268
|
/**
|
9000
9269
|
* Register callbacks to process the video frames if the host supports it.
|
9001
9270
|
* @beta
|
@@ -9005,7 +9274,7 @@ var video;
|
|
9005
9274
|
*
|
9006
9275
|
* @example
|
9007
9276
|
* ```typescript
|
9008
|
-
*
|
9277
|
+
* videoEffects.registerForVideoFrame({
|
9009
9278
|
* videoFrameHandler: async (videoFrameData) => {
|
9010
9279
|
* const originalFrame = videoFrameData.videoFrame as VideoFrame;
|
9011
9280
|
* try {
|
@@ -9028,7 +9297,7 @@ var video;
|
|
9028
9297
|
* }
|
9029
9298
|
* },
|
9030
9299
|
* config: {
|
9031
|
-
* format:
|
9300
|
+
* format: videoEffects.VideoPixelFormat.NV12,
|
9032
9301
|
* }
|
9033
9302
|
* });
|
9034
9303
|
* ```
|
@@ -9056,7 +9325,7 @@ var video;
|
|
9056
9325
|
}
|
9057
9326
|
videoPerformanceMonitor === null || videoPerformanceMonitor === void 0 ? void 0 : videoPerformanceMonitor.startMonitorSlowFrameProcessing();
|
9058
9327
|
}
|
9059
|
-
|
9328
|
+
videoEffects.registerForVideoFrame = registerForVideoFrame;
|
9060
9329
|
/**
|
9061
9330
|
* Video extension should call this to notify host that the current selected effect parameter changed.
|
9062
9331
|
* If it's pre-meeting, host will call videoEffectCallback immediately then use the videoEffect.
|
@@ -9072,7 +9341,7 @@ var video;
|
|
9072
9341
|
}
|
9073
9342
|
sendMessageToParent('video.videoEffectChanged', [effectChangeType, effectId]);
|
9074
9343
|
}
|
9075
|
-
|
9344
|
+
videoEffects.notifySelectedVideoEffectChanged = notifySelectedVideoEffectChanged;
|
9076
9345
|
/**
|
9077
9346
|
* Register a callback to be notified when a new video effect is applied.
|
9078
9347
|
* @beta
|
@@ -9086,7 +9355,7 @@ var video;
|
|
9086
9355
|
registerHandler('video.effectParameterChange', createEffectParameterChangeCallback(callback, videoPerformanceMonitor), false);
|
9087
9356
|
sendMessageToParent('video.registerForVideoEffect');
|
9088
9357
|
}
|
9089
|
-
|
9358
|
+
videoEffects.registerForVideoEffect = registerForVideoEffect;
|
9090
9359
|
/**
|
9091
9360
|
* Sending notification to host finished the video frame processing, now host can render this video frame
|
9092
9361
|
* or pass the video frame to next one in video pipeline
|
@@ -9117,16 +9386,16 @@ var video;
|
|
9117
9386
|
/** A host should support either mediaStream or sharedFrame sub-capability to support the video capability */
|
9118
9387
|
(!!runtime.supports.video.mediaStream || !!runtime.supports.video.sharedFrame));
|
9119
9388
|
}
|
9120
|
-
|
9389
|
+
videoEffects.isSupported = isSupported;
|
9121
9390
|
function registerForMediaStream(videoFrameHandler, config) {
|
9122
9391
|
var _this = this;
|
9123
9392
|
ensureInitialized(runtime, FrameContexts.sidePanel);
|
9124
9393
|
if (!isSupported() || !doesSupportMediaStream()) {
|
9125
9394
|
throw errorNotSupportedOnPlatform;
|
9126
9395
|
}
|
9127
|
-
registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return
|
9396
|
+
registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return videoEffects_awaiter(_this, void 0, void 0, function () {
|
9128
9397
|
var streamId, monitoredVideoFrameHandler;
|
9129
|
-
return
|
9398
|
+
return videoEffects_generator(this, function (_a) {
|
9130
9399
|
switch (_a.label) {
|
9131
9400
|
case 0:
|
9132
9401
|
streamId = mediaStreamInfo.streamId;
|
@@ -9142,9 +9411,9 @@ var video;
|
|
9142
9411
|
}
|
9143
9412
|
function createMonitoredVideoFrameHandler(videoFrameHandler, videoPerformanceMonitor) {
|
9144
9413
|
var _this = this;
|
9145
|
-
return function (videoFrameData) { return
|
9414
|
+
return function (videoFrameData) { return videoEffects_awaiter(_this, void 0, void 0, function () {
|
9146
9415
|
var originalFrame, processedFrame;
|
9147
|
-
return
|
9416
|
+
return videoEffects_generator(this, function (_a) {
|
9148
9417
|
switch (_a.label) {
|
9149
9418
|
case 0:
|
9150
9419
|
originalFrame = videoFrameData.videoFrame;
|
@@ -9183,8 +9452,8 @@ var video;
|
|
9183
9452
|
}
|
9184
9453
|
else {
|
9185
9454
|
// The host may pass the VideoFrame with the old definition which has `data` instead of `videoFrameBuffer`
|
9186
|
-
var data = videoBufferData.data, newVideoBufferData =
|
9187
|
-
return
|
9455
|
+
var data = videoBufferData.data, newVideoBufferData = videoEffects_rest(videoBufferData, ["data"]);
|
9456
|
+
return videoEffects_assign(videoEffects_assign({}, newVideoBufferData), { videoFrameBuffer: data });
|
9188
9457
|
}
|
9189
9458
|
}
|
9190
9459
|
function doesSupportMediaStream() {
|
@@ -9201,7 +9470,7 @@ var video;
|
|
9201
9470
|
var _a;
|
9202
9471
|
return ensureInitialized(runtime, FrameContexts.sidePanel) && !!((_a = runtime.supports.video) === null || _a === void 0 ? void 0 : _a.sharedFrame);
|
9203
9472
|
}
|
9204
|
-
})(
|
9473
|
+
})(videoEffects || (videoEffects = {})); //end of video namespace
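The net effect of this hunk is a rename of the public namespace from `video` to `videoEffects` (see the `var video;` context in the hunk headers); the registration flow, capability checks, and message names are otherwise unchanged. A consumer-side sketch based on the JSDoc example embedded above — the handler bodies are illustrative, not prescriptive:

```typescript
import { app, videoEffects } from '@microsoft/teams-js';

async function setUpVideoPipeline(): Promise<void> {
  await app.initialize();
  // Must run in the sidePanel frame context; isSupported() requires initialization.
  if (!videoEffects.isSupported()) {
    return; // host supports neither the mediaStream nor the sharedFrame sub-capability
  }
  videoEffects.registerForVideoFrame({
    videoFrameHandler: async (frameData) => {
      // Illustrative pass-through; a real app would apply its effect here.
      return frameData.videoFrame as VideoFrame;
    },
    videoBufferHandler: (bufferData, notifyVideoFrameProcessed) => {
      // Legacy shared-frame path: tell the host the buffer has been handled.
      notifyVideoFrameProcessed();
    },
    config: { format: videoEffects.VideoFrameFormat.NV12 },
  });
}
```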
|
9205
9474
|
|
9206
9475
|
;// CONCATENATED MODULE: ./src/public/search.ts
|
9207
9476
|
|
@@ -9894,7 +10163,8 @@ function shareDeepLink(deepLinkParameters) {
|
|
9894
10163
|
function executeDeepLink(deepLink, onComplete) {
|
9895
10164
|
ensureInitialized(runtime, FrameContexts.content, FrameContexts.sidePanel, FrameContexts.settings, FrameContexts.task, FrameContexts.stage, FrameContexts.meetingStage);
|
9896
10165
|
onComplete = onComplete ? onComplete : getGenericOnCompleteHandler();
|
9897
|
-
app
|
10166
|
+
app
|
10167
|
+
.openLink(deepLink)
|
9898
10168
|
.then(function () {
|
9899
10169
|
onComplete(true);
|
9900
10170
|
})
|
@@ -9957,7 +10227,8 @@ function returnFocus(navigateForward) {
|
|
9957
10227
|
function navigateToTab(tabInstance, onComplete) {
|
9958
10228
|
ensureInitialized(runtime);
|
9959
10229
|
onComplete = onComplete ? onComplete : getGenericOnCompleteHandler();
|
9960
|
-
pages.tabs
|
10230
|
+
pages.tabs
|
10231
|
+
.navigateToTab(tabInstance)
|
9961
10232
|
.then(function () {
|
9962
10233
|
onComplete(true);
|
9963
10234
|
})
|
@@ -9981,7 +10252,8 @@ function navigateToTab(tabInstance, onComplete) {
|
|
9981
10252
|
function navigateCrossDomain(url, onComplete) {
|
9982
10253
|
ensureInitialized(runtime, FrameContexts.content, FrameContexts.sidePanel, FrameContexts.settings, FrameContexts.remove, FrameContexts.task, FrameContexts.stage, FrameContexts.meetingStage);
|
9983
10254
|
onComplete = onComplete ? onComplete : getGenericOnCompleteHandler();
|
9984
|
-
pages
|
10255
|
+
pages
|
10256
|
+
.navigateCrossDomain(url)
|
9985
10257
|
.then(function () {
|
9986
10258
|
onComplete(true);
|
9987
10259
|
})
|
@@ -10001,7 +10273,8 @@ function navigateCrossDomain(url, onComplete) {
|
|
10001
10273
|
function navigateBack(onComplete) {
|
10002
10274
|
ensureInitialized(runtime);
|
10003
10275
|
onComplete = onComplete ? onComplete : getGenericOnCompleteHandler();
|
10004
|
-
pages.backStack
|
10276
|
+
pages.backStack
|
10277
|
+
.navigateBack()
|
10005
10278
|
.then(function () {
|
10006
10279
|
onComplete(true);
|
10007
10280
|
})
|
@@ -10065,7 +10338,8 @@ var settings;
|
|
10065
10338
|
function setSettings(instanceSettings, onComplete) {
|
10066
10339
|
ensureInitialized(runtime, FrameContexts.content, FrameContexts.settings, FrameContexts.sidePanel);
|
10067
10340
|
onComplete = onComplete ? onComplete : getGenericOnCompleteHandler();
|
10068
|
-
pages.config
|
10341
|
+
pages.config
|
10342
|
+
.setConfig(instanceSettings)
|
10069
10343
|
.then(function () {
|
10070
10344
|
onComplete(true);
|
10071
10345
|
})
|
@@ -12154,8 +12428,8 @@ var teams;
|
|
12154
12428
|
})(fullTrust = teams.fullTrust || (teams.fullTrust = {}));
|
12155
12429
|
})(teams || (teams = {}));
|
12156
12430
|
|
12157
|
-
;// CONCATENATED MODULE: ./src/private/
|
12158
|
-
var
|
12431
|
+
;// CONCATENATED MODULE: ./src/private/videoEffectsEx.ts
|
12432
|
+
var videoEffectsEx_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
|
12159
12433
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
12160
12434
|
return new (P || (P = Promise))(function (resolve, reject) {
|
12161
12435
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
@@ -12164,7 +12438,7 @@ var videoEx_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _
|
|
12164
12438
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
12165
12439
|
});
|
12166
12440
|
};
|
12167
|
-
var
|
12441
|
+
var videoEffectsEx_generator = (undefined && undefined.__generator) || function (thisArg, body) {
|
12168
12442
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
12169
12443
|
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
12170
12444
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
@@ -12208,8 +12482,8 @@ var videoEx_generator = (undefined && undefined.__generator) || function (thisAr
|
|
12208
12482
|
* @internal
|
12209
12483
|
* Limited to Microsoft-internal use
|
12210
12484
|
*/
|
12211
|
-
var
|
12212
|
-
(function (
|
12485
|
+
var videoEffectsEx;
|
12486
|
+
(function (videoEffectsEx) {
|
12213
12487
|
var videoPerformanceMonitor = inServerSideRenderingEnvironment()
|
12214
12488
|
? undefined
|
12215
12489
|
: new VideoPerformanceMonitor(sendMessageToParent);
|
@@ -12225,7 +12499,7 @@ var videoEx;
|
|
12225
12499
|
(function (ErrorLevel) {
|
12226
12500
|
ErrorLevel["Fatal"] = "fatal";
|
12227
12501
|
ErrorLevel["Warn"] = "warn";
|
12228
|
-
})(ErrorLevel =
|
12502
|
+
})(ErrorLevel = videoEffectsEx.ErrorLevel || (videoEffectsEx.ErrorLevel = {}));
|
12229
12503
|
/**
|
12230
12504
|
* @hidden
|
12231
12505
|
* Register to process video frames
|
@@ -12250,9 +12524,9 @@ var videoEx;
|
|
12250
12524
|
if (ensureInitialized(runtime, FrameContexts.sidePanel)) {
|
12251
12525
|
registerHandler('video.setFrameProcessTimeLimit', function (timeLimit) { return videoPerformanceMonitor === null || videoPerformanceMonitor === void 0 ? void 0 : videoPerformanceMonitor.setFrameProcessTimeLimit(timeLimit); }, false);
|
12252
12526
|
if ((_a = runtime.supports.video) === null || _a === void 0 ? void 0 : _a.mediaStream) {
|
12253
|
-
registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return
|
12527
|
+
registerHandler('video.startVideoExtensibilityVideoStream', function (mediaStreamInfo) { return videoEffectsEx_awaiter(_this, void 0, void 0, function () {
|
12254
12528
|
var streamId, metadataInTexture, handler, _a;
|
12255
|
-
return
|
12529
|
+
return videoEffectsEx_generator(this, function (_b) {
|
12256
12530
|
switch (_b.label) {
|
12257
12531
|
case 0:
|
12258
12532
|
streamId = mediaStreamInfo.streamId, metadataInTexture = mediaStreamInfo.metadataInTexture;
|
@@ -12296,12 +12570,12 @@ var videoEx;
|
|
12296
12570
|
videoPerformanceMonitor === null || videoPerformanceMonitor === void 0 ? void 0 : videoPerformanceMonitor.startMonitorSlowFrameProcessing();
|
12297
12571
|
}
|
12298
12572
|
}
|
12299
|
-
|
12573
|
+
videoEffectsEx.registerForVideoFrame = registerForVideoFrame;
|
12300
12574
|
function createMonitoredVideoFrameHandler(videoFrameHandler, videoPerformanceMonitor) {
|
12301
12575
|
var _this = this;
|
12302
|
-
return function (receivedVideoFrame) { return
|
12576
|
+
return function (receivedVideoFrame) { return videoEffectsEx_awaiter(_this, void 0, void 0, function () {
|
12303
12577
|
var originalFrame, processedFrame;
|
12304
|
-
return
|
12578
|
+
return videoEffectsEx_generator(this, function (_a) {
|
12305
12579
|
switch (_a.label) {
|
12306
12580
|
case 0:
|
12307
12581
|
originalFrame = receivedVideoFrame.videoFrame;
|
@@ -12340,7 +12614,7 @@ var videoEx;
|
|
12340
12614
|
}
|
12341
12615
|
sendMessageToParent('video.videoEffectChanged', [effectChangeType, effectId, effectParam]);
|
12342
12616
|
}
|
12343
|
-
|
12617
|
+
videoEffectsEx.notifySelectedVideoEffectChanged = notifySelectedVideoEffectChanged;
|
12344
12618
|
/**
|
12345
12619
|
* @hidden
|
12346
12620
|
* Register the video effect callback, host uses this to notify the video extension the new video effect will by applied
|
@@ -12358,7 +12632,7 @@ var videoEx;
|
|
12358
12632
|
registerHandler('video.effectParameterChange', createEffectParameterChangeCallback(callback, videoPerformanceMonitor), false);
|
12359
12633
|
sendMessageToParent('video.registerForVideoEffect');
|
12360
12634
|
}
|
12361
|
-
|
12635
|
+
videoEffectsEx.registerForVideoEffect = registerForVideoEffect;
|
12362
12636
|
/**
|
12363
12637
|
* @hidden
|
12364
12638
|
* Send personalized effects to Teams client
|
@@ -12369,12 +12643,12 @@ var videoEx;
|
|
12369
12643
|
*/
|
12370
12644
|
function updatePersonalizedEffects(effects) {
|
12371
12645
|
ensureInitialized(runtime, FrameContexts.sidePanel);
|
12372
|
-
if (!
|
12646
|
+
if (!videoEffects.isSupported()) {
|
12373
12647
|
throw errorNotSupportedOnPlatform;
|
12374
12648
|
}
|
12375
12649
|
sendMessageToParent('video.personalizedEffectsChanged', [effects]);
|
12376
12650
|
}
|
12377
|
-
|
12651
|
+
videoEffectsEx.updatePersonalizedEffects = updatePersonalizedEffects;
|
12378
12652
|
/**
|
12379
12653
|
* @hidden
|
12380
12654
|
*
|
@@ -12390,9 +12664,9 @@ var videoEx;
|
|
12390
12664
|
*/
|
12391
12665
|
function isSupported() {
|
12392
12666
|
ensureInitialized(runtime);
|
12393
|
-
return
|
12667
|
+
return videoEffects.isSupported();
|
12394
12668
|
}
|
12395
|
-
|
12669
|
+
videoEffectsEx.isSupported = isSupported;
|
12396
12670
|
/**
|
12397
12671
|
* @hidden
|
12398
12672
|
* Sending notification to host finished the video frame processing, now host can render this video frame
|
@@ -12431,13 +12705,13 @@ var videoEx;
|
|
12431
12705
|
*/
|
12432
12706
|
function notifyFatalError(errorMessage) {
|
12433
12707
|
ensureInitialized(runtime);
|
12434
|
-
if (!
|
12708
|
+
if (!videoEffects.isSupported()) {
|
12435
12709
|
throw errorNotSupportedOnPlatform;
|
12436
12710
|
}
|
12437
12711
|
notifyError(errorMessage, ErrorLevel.Fatal);
|
12438
12712
|
}
|
12439
|
-
|
12440
|
-
})(
|
12713
|
+
videoEffectsEx.notifyFatalError = notifyFatalError;
|
12714
|
+
})(videoEffectsEx || (videoEffectsEx = {}));
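The private `videoEx` namespace receives the same rename, to `videoEffectsEx`, and its guards (`isSupported`, `updatePersonalizedEffects`, `notifyFatalError`) now delegate to the public `videoEffects.isSupported()`. A self-contained sketch of the error-severity pattern shown in `notifyFatalError` (not the library's code; the exact host message name is not shown in this hunk):

```typescript
// The internal namespace reports errors to the host with an explicit severity,
// 'fatal' meaning the video pipeline cannot continue.
enum ErrorLevel {
  Fatal = 'fatal',
  Warn = 'warn',
}

function notifyError(errorMessage: string, errorLevel: ErrorLevel = ErrorLevel.Warn): void {
  // In the bundle this ends up in a sendMessageToParent call to the host;
  // logging stands in for that here.
  console.log(`video error (${errorLevel}): ${errorMessage}`);
}

function notifyFatalError(errorMessage: string): void {
  notifyError(errorMessage, ErrorLevel.Fatal);
}
```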
|
12441
12715
|
|
12442
12716
|
;// CONCATENATED MODULE: ./src/private/index.ts
|
12443
12717
|
|