@streamplace/components 0.7.21 → 0.7.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/chat/chat-message.js +5 -4
- package/dist/components/chat/chat.js +14 -4
- package/dist/components/mobile-player/webrtc-diagnostics.js +67 -13
- package/dist/lib/system-messages.js +1 -0
- package/dist/livestream-store/chat.js +2 -0
- package/dist/livestream-store/stream-key.js +1 -0
- package/dist/livestream-store/websocket-consumer.js +4 -1
- package/dist/streamplace-store/stream.js +2 -0
- package/node-compile-cache/v22.15.0-x64-efe9a9df-0/37be0eec +0 -0
- package/package.json +4 -4
- package/src/components/chat/chat-message.tsx +5 -4
- package/src/components/chat/chat.tsx +20 -4
- package/src/components/mobile-player/webrtc-diagnostics.tsx +73 -15
- package/src/lib/system-messages.ts +1 -0
- package/src/livestream-store/chat.tsx +2 -0
- package/src/livestream-store/stream-key.tsx +1 -0
- package/src/livestream-store/websocket-consumer.tsx +4 -1
- package/src/streamplace-store/stream.tsx +2 -0
- package/tsconfig.tsbuildinfo +1 -1

package/dist/components/chat/chat-message.js
CHANGED

@@ -48,7 +48,8 @@ exports.RenderChatMessage = (0, react_1.memo)(function RenderChatMessage({ item,
             hour12: false,
         });
     }, []);
-    …
+    const replyTo = item.replyTo || null;
+    return ((0, jsx_runtime_1.jsxs)(jsx_runtime_1.Fragment, { children: [replyTo && showReply && ((0, jsx_runtime_1.jsx)(react_native_1.View, { style: [
                     atoms_1.gap.all[2],
                     ui_1.layout.flex.row,
                     { minWidth: 0, maxWidth: "100%" },
@@ -62,12 +63,12 @@ exports.RenderChatMessage = (0, react_1.memo)(function RenderChatMessage({ item,
                     atoms_1.mr[4],
                     { minWidth: 0, overflow: "hidden" },
                 ], children: [(0, jsx_runtime_1.jsxs)(text_1.Text, { style: {
-                        color: getRgbColor(…
+                        color: getRgbColor(replyTo.chatProfile?.color),
                         fontWeight: "thin",
-                    }, children: ["@",…
+                    }, children: ["@", replyTo.author.handle] }), " ", (0, jsx_runtime_1.jsx)(text_1.Text, { style: {
                         color: ui_1.colors.gray[300],
                         fontStyle: "italic",
-                    }, children:…
+                    }, children: replyTo.record.text })] }) })), (0, jsx_runtime_1.jsxs)(react_native_1.View, { style: [
                     atoms_1.gap.all[2],
                     ui_1.layout.flex.row,
                     { minWidth: 0, maxWidth: "100%" },

package/dist/components/chat/chat.js
CHANGED

@@ -122,9 +122,7 @@ const ChatLine = (0, react_1.memo)(({ item, canModerate, }) => {
                 isHovered && atoms_1.bg.gray[950],
             ], onPointerEnter: handleHoverIn, onPointerLeave: handleHoverOut, children: [(0, jsx_runtime_1.jsx)(react_native_1.Pressable, { style: [{ minWidth: 0, maxWidth: "100%" }], children: (0, jsx_runtime_1.jsx)(chat_message_1.RenderChatMessage, { item: item }) }), (0, jsx_runtime_1.jsx)(ActionsBar, { item: item, visible: isHovered, hoverTimeoutRef: hoverTimeoutRef })] }));
     }
-    return ((0, jsx_runtime_1.jsx)(jsx_runtime_1.Fragment, { children: (0, jsx_runtime_1.jsx)(ReanimatedSwipeable_1.default, { containerStyle: [atoms_1.py[1]], friction: 2, enableTrackpadTwoFingerGesture: true, rightThreshold: 40, leftThreshold: 40, renderRightActions: react_native_1.Platform.OS === "android" ? undefined : RightAction, renderLeftActions: react_native_1.Platform.OS === "android" ? undefined : LeftAction, overshootFriction: 9, ref: (…
-        swipeableRef.current = ref;
-    }, onSwipeableOpen: (r) => {
+    return ((0, jsx_runtime_1.jsx)(jsx_runtime_1.Fragment, { children: (0, jsx_runtime_1.jsx)(ReanimatedSwipeable_1.default, { containerStyle: [atoms_1.py[1]], friction: 2, enableTrackpadTwoFingerGesture: true, rightThreshold: 40, leftThreshold: 40, renderRightActions: react_native_1.Platform.OS === "android" ? undefined : RightAction, renderLeftActions: react_native_1.Platform.OS === "android" ? undefined : LeftAction, overshootFriction: 9, ref: swipeableRef, onSwipeableOpen: (r) => {
             if (r === (react_native_1.Platform.OS === "android" ? "right" : "left")) {
                 setReply(item);
             }
@@ -140,11 +138,23 @@ const ChatLine = (0, react_1.memo)(({ item, canModerate, }) => {
});
function Chat({ shownMessages = SHOWN_MSGS, style: propsStyle, canModerate = false, ...props }) {
    const chat = (0, __1.useChat)();
+    const [isScrolledUp, setIsScrolledUp] = (0, react_1.useState)(false);
+    const handleScroll = (event) => {
+        const { contentOffset } = event.nativeEvent;
+        const scrolledUp = contentOffset.y > 20; // threshold
+        if (scrolledUp !== isScrolledUp) {
+            setIsScrolledUp(scrolledUp);
+            // Dismiss keyboard when scrolled up
+            if (scrolledUp && react_native_1.Platform.OS !== "web") {
+                react_native_1.Keyboard.dismiss();
+            }
+        }
+    };
    if (!chat)
        return ((0, jsx_runtime_1.jsx)(__1.View, { style: [atoms_1.flex.shrink[1], { minWidth: 0, maxWidth: "100%" }], children: (0, jsx_runtime_1.jsx)(__1.Text, { children: "Loading chaat..." }) }));
    return ((0, jsx_runtime_1.jsxs)(__1.View, { style: [atoms_1.flex.shrink[1], { minWidth: 0, maxWidth: "100%" }].concat(propsStyle || []), children: [(0, jsx_runtime_1.jsx)(react_native_gesture_handler_1.FlatList, { style: [
            atoms_1.flex.grow[1],
            atoms_1.flex.shrink[1],
            { minWidth: 0, maxWidth: "100%" },
-        ], data: chat.slice(0, shownMessages), inverted: true, keyExtractor: keyExtractor, renderItem: ({ item, index }) => ((0, jsx_runtime_1.jsx)(ChatLine, { item: item, canModerate: canModerate })), removeClippedSubviews: true, maxToRenderPerBatch: 10, initialNumToRender: 10, updateCellsBatchingPeriod: 50 }), (0, jsx_runtime_1.jsx)(mod_view_1.ModView, {})] }));
+        ], data: chat.slice(0, shownMessages), inverted: true, keyExtractor: keyExtractor, renderItem: ({ item, index }) => ((0, jsx_runtime_1.jsx)(ChatLine, { item: item, canModerate: canModerate })), removeClippedSubviews: true, maxToRenderPerBatch: 10, initialNumToRender: 10, updateCellsBatchingPeriod: 50, onScroll: handleScroll, scrollEventThrottle: 16 }), (0, jsx_runtime_1.jsx)(mod_view_1.ModView, {})] }));
}

package/dist/components/mobile-player/webrtc-diagnostics.js
CHANGED

@@ -11,12 +11,29 @@ function useWebRTCDiagnostics() {
        rtcSessionDescription: false,
        getUserMedia: false,
        getDisplayMedia: false,
+        isHwH264Supported: false,
        errors: [],
        warnings: [],
    });
    (0, react_1.useEffect)(() => {
        const errors = [];
        const warnings = [];
+        const checkH264Support = async () => {
+            try {
+                const pc = new RTCPeerConnection();
+                const offer = await pc.createOffer();
+                pc.close();
+                if (offer.sdp) {
+                    const h264Match = offer.sdp.search(/rtpmap:([0-9]+) H264/g);
+                    return h264Match !== -1;
+                }
+                return false;
+            }
+            catch (error) {
+                console.warn("Failed to check H.264 support:", error);
+                return false;
+            }
+        };
        // Check if we're in a browser environment
        if (typeof window === "undefined") {
            errors.push("Running in non-browser environment");
@@ -27,6 +44,7 @@ function useWebRTCDiagnostics() {
                rtcSessionDescription: false,
                getUserMedia: false,
                getDisplayMedia: false,
+                isHwH264Supported: false,
                errors,
                warnings,
            });
@@ -71,20 +89,42 @@ function useWebRTCDiagnostics() {
            warnings.push("Safari may have limited WebRTC codec support");
        }
        const browserSupport = rtcPeerConnection && rtcSessionDescription;
-        setDiagnostics({
-            done: true,
-            browserSupport,
-            rtcPeerConnection,
-            rtcSessionDescription,
-            getUserMedia,
-            getDisplayMedia,
-            errors,
-            warnings,
-        });
+        // Check H.264 support asynchronously
+        if (rtcPeerConnection) {
+            checkH264Support().then((isHwH264Supported) => {
+                if (!isHwH264Supported) {
+                    warnings.push("H.264 hardware acceleration is not supported\n In Firefox, try enabling 'media.webrtc.hw.h264.enabled' in about:config");
+                }
+                setDiagnostics({
+                    done: true,
+                    browserSupport,
+                    rtcPeerConnection,
+                    rtcSessionDescription,
+                    getUserMedia,
+                    getDisplayMedia,
+                    isHwH264Supported,
+                    errors,
+                    warnings,
+                });
+            });
+        }
+        else {
+            setDiagnostics({
+                done: true,
+                browserSupport,
+                rtcPeerConnection,
+                rtcSessionDescription,
+                getUserMedia,
+                getDisplayMedia,
+                isHwH264Supported: false,
+                errors,
+                warnings,
+            });
+        }
    }, []);
    return diagnostics;
}
-function logWebRTCDiagnostics() {
+async function logWebRTCDiagnostics() {
    console.group("WebRTC Diagnostics");
    // Log browser support
    console.log("RTCPeerConnection:", !!window.RTCPeerConnection);
@@ -95,14 +135,28 @@ function logWebRTCDiagnostics() {
    console.log("User Agent:", navigator.userAgent);
    console.log("Protocol:", location.protocol);
    console.log("Host:", location.hostname);
-    …
+    console.groupEnd();
    if (window.RTCPeerConnection) {
        try {
            const pc = new RTCPeerConnection();
-
+            // Check H.264 support
+            try {
+                const offer = await pc.createOffer({ offerToReceiveVideo: true });
+                const isHwH264Supported = offer.sdp
+                    ? offer.sdp.search(/rtpmap:([0-9]+) H264/g) !== -1
+                    : false;
+                console.group("WebRTC Peer Connection Test");
+                console.log("RTCPeerConnection creation: ✓ Success");
+                console.log("H.264 support:", isHwH264Supported ? "✓ Supported" : "✗ Not supported");
+            }
+            catch (error) {
+                console.group("WebRTC Peer Connection Test");
+                console.error("H.264 check failed:", error);
+            }
            pc.close();
        }
        catch (error) {
+            console.group("WebRTC Peer Connection Test");
            console.error("RTCPeerConnection creation: ✗ Failed", error);
        }
    }

package/dist/lib/system-messages.js
CHANGED

@@ -32,6 +32,7 @@ const createSystemMessage = (type, text, metadata, date = new Date()) => {
        indexedAt: now.toISOString(),
        chatProfile: {
            color: { red: 128, green: 128, blue: 128 }, // Gray color for system messages
+            $type: "place.stream.chat.profile",
        },
    };
};

package/dist/livestream-store/chat.js
CHANGED

@@ -50,6 +50,7 @@ const useCreateChatMessage = () => {
        const rt = new api_1.RichText({ text: msg.text });
        await rt.detectFacets(pdsAgent);
        const record = {
+            $type: "place.stream.chat.message",
            text: msg.text,
            createdAt: new Date().toISOString(),
            streamer: streamerProfile.did,
@@ -226,6 +227,7 @@ const reduceChatIncremental = (state, newMessages, blocks, hideUris = []) => {
            processedMessage = {
                ...message,
                replyTo: {
+                    $type: "place.stream.chat.defs#messageView",
                    cid: parentMsg.cid,
                    uri: parentMsg.uri,
                    author: parentMsg.author,

package/dist/livestream-store/websocket-consumer.js
CHANGED

@@ -8,7 +8,7 @@ const chat_1 = require("./chat");
const problems_1 = require("./problems");
const MAX_RECENT_SEGMENTS = 10;
const handleWebSocketMessages = (state, messages) => {
-    for (…
+    for (let message of messages) {
        if (streamplace_1.PlaceStreamLivestream.isLivestreamView(message)) {
            const newLivestream = message;
            const oldLivestream = state.livestream;
@@ -27,12 +27,14 @@ const handleWebSocketMessages = (state, messages) => {
            };
        }
        else if (streamplace_1.PlaceStreamLivestream.isViewerCount(message)) {
+            message = message;
            state = {
                ...state,
                viewers: message.count,
            };
        }
        else if (streamplace_1.PlaceStreamChatDefs.isMessageView(message)) {
+            message = message;
            // Explicitly map MessageView to MessageViewHydrated
            const hydrated = {
                uri: message.uri,
@@ -64,6 +66,7 @@ const handleWebSocketMessages = (state, messages) => {
            state = (0, chat_1.reduceChat)(state, [], [block], []);
        }
        else if (streamplace_1.PlaceStreamDefs.isRenditions(message)) {
+            message = message;
            state = {
                ...state,
                renditions: message.renditions,

package/dist/streamplace-store/stream.js
CHANGED

@@ -179,6 +179,7 @@ function useCreateStreamRecord() {
        platVersion = (0, browser_1.getBrowserName)(window.navigator.userAgent);
    }
    const record = {
+        $type: "place.stream.livestream",
        title: title,
        url: finalUrl,
        createdAt: new Date().toISOString(),
@@ -230,6 +231,7 @@ function useUpdateStreamRecord(customUrl = null) {
        }
    }
    const record = {
+        $type: "place.stream.livestream",
        title: title,
        url: finalUrl,
        createdAt: new Date().toISOString(),

package/node-compile-cache/v22.15.0-x64-efe9a9df-0/37be0eec
Binary file

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@streamplace/components",
-  "version": "0.7.21",
+  "version": "0.7.25",
   "description": "Streamplace React (Native) Components",
   "main": "dist/index.js",
   "types": "src/index.tsx",
@@ -20,7 +20,7 @@
     "tsup": "^8.5.0"
   },
   "dependencies": {
-    "@atproto/api": "^0.…
+    "@atproto/api": "^0.16.7",
     "@atproto/crypto": "^0.4.4",
     "@emoji-mart/react": "^1.1.1",
     "@gorhom/bottom-sheet": "^5.1.6",
@@ -40,7 +40,7 @@
     "react-native-svg": "^15.0.0",
     "react-native-webrtc": "git+https://github.com/streamplace/react-native-webrtc.git#6b8472a771ac47f89217d327058a8a4124a6ae56",
     "react-use-websocket": "^4.13.0",
-    "streamplace": "0.7.…
+    "streamplace": "0.7.25",
     "viem": "^2.21.44",
     "zustand": "^5.0.5"
   },
@@ -52,5 +52,5 @@
     "start": "tsc --watch --preserveWatchOutput",
     "prepare": "tsc"
   },
-  "gitHead": "…
+  "gitHead": "288afabcb270c01ae8012e2a5cd9d75d5e1aae28"
 }

package/src/components/chat/chat-message.tsx
CHANGED

@@ -105,9 +105,10 @@ export const RenderChatMessage = memo(
       hour12: false,
     });
   }, []);
+  const replyTo = (item.replyTo as ChatMessageViewHydrated) || null;
   return (
     <>
-      {…
+      {replyTo && showReply && (
         <View
           style={[
             gap.all[2],
@@ -130,11 +131,11 @@ export const RenderChatMessage = memo(
           >
             <Text
               style={{
-                color: getRgbColor(…
+                color: getRgbColor(replyTo.chatProfile?.color),
                 fontWeight: "thin",
               }}
             >
-              @{(…
+              @{(replyTo.author as any).handle}
             </Text>{" "}
             <Text
               style={{
@@ -142,7 +143,7 @@ export const RenderChatMessage = memo(
                 fontStyle: "italic",
               }}
             >
-              {…
+              {replyTo.record.text}
             </Text>
           </Text>
         </View>

package/src/components/chat/chat.tsx
CHANGED

@@ -1,6 +1,6 @@
 import { Ellipsis, Reply } from "lucide-react-native";
 import { ComponentProps, memo, useEffect, useRef, useState } from "react";
-import { Platform, Pressable } from "react-native";
+import { Keyboard, Platform, Pressable } from "react-native";
 import { FlatList } from "react-native-gesture-handler";
 import Swipeable, {
   SwipeableMethods,
@@ -223,9 +223,7 @@ const ChatLine = memo(
        }
        renderLeftActions={Platform.OS === "android" ? undefined : LeftAction}
        overshootFriction={9}
-        ref={…
-          swipeableRef.current = ref;
-        }}
+        ref={swipeableRef}
        onSwipeableOpen={(r) => {
          if (r === (Platform.OS === "android" ? "right" : "left")) {
            setReply(item);
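
The ref change above swaps an inline callback ref for the ref object itself. A minimal sketch of the two equivalent styles — the component name Row is illustrative; only the Swipeable default export and SwipeableMethods type come from react-native-gesture-handler, matching this file's imports:

    import { useRef } from "react";
    import Swipeable, {
      SwipeableMethods,
    } from "react-native-gesture-handler/ReanimatedSwipeable";

    function Row() {
      const swipeableRef = useRef<SwipeableMethods | null>(null);
      // Before: a callback ref wrapping a plain assignment, e.g.
      //   <Swipeable ref={(ref) => { swipeableRef.current = ref; }} />
      // After: pass the ref object directly; React performs the assignment.
      return <Swipeable ref={swipeableRef} />;
    }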
@@ -258,6 +256,22 @@ export function Chat({
   canModerate?: boolean;
 }) {
   const chat = useChat();
+  const [isScrolledUp, setIsScrolledUp] = useState(false);
+
+  const handleScroll = (event: any) => {
+    const { contentOffset } = event.nativeEvent;
+
+    const scrolledUp = contentOffset.y > 20; // threshold
+
+    if (scrolledUp !== isScrolledUp) {
+      setIsScrolledUp(scrolledUp);
+
+      // Dismiss keyboard when scrolled up
+      if (scrolledUp && Platform.OS !== "web") {
+        Keyboard.dismiss();
+      }
+    }
+  };
 
   if (!chat)
     return (
@@ -288,6 +302,8 @@ export function Chat({
           maxToRenderPerBatch={10}
           initialNumToRender={10}
           updateCellsBatchingPeriod={50}
+          onScroll={handleScroll}
+          scrollEventThrottle={16}
         />
         <ModView />
       </View>
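
Because this FlatList is inverted, contentOffset.y grows as the user scrolls back through history, so the `> 20` check means "scrolled away from the newest message". A self-contained sketch of the same pattern as a reusable hook — hook and constant names are illustrative, not part of the package:

    import { useState } from "react";
    import { Keyboard, Platform } from "react-native";
    import type { NativeScrollEvent, NativeSyntheticEvent } from "react-native";

    const SCROLLED_UP_THRESHOLD = 20; // px of offset before we count as "scrolled up"

    export function useDismissKeyboardOnScroll() {
      const [isScrolledUp, setIsScrolledUp] = useState(false);

      const onScroll = (event: NativeSyntheticEvent<NativeScrollEvent>) => {
        const scrolledUp = event.nativeEvent.contentOffset.y > SCROLLED_UP_THRESHOLD;
        if (scrolledUp !== isScrolledUp) {
          setIsScrolledUp(scrolledUp);
          // Only native platforms have a software keyboard to dismiss.
          if (scrolledUp && Platform.OS !== "web") {
            Keyboard.dismiss();
          }
        }
      };

      return { isScrolledUp, onScroll };
    }

Wire it up as `<FlatList onScroll={onScroll} scrollEventThrottle={16} … />`; the throttle asks iOS for scroll events roughly every 16 ms (about 60 Hz) instead of the default single event per scroll.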

package/src/components/mobile-player/webrtc-diagnostics.tsx
CHANGED

@@ -7,6 +7,7 @@ export interface WebRTCDiagnostics {
   rtcSessionDescription: boolean;
   getUserMedia: boolean;
   getDisplayMedia: boolean;
+  isHwH264Supported: boolean;
   errors: string[];
   warnings: string[];
 }
@@ -19,6 +20,7 @@ export function useWebRTCDiagnostics(): WebRTCDiagnostics {
    rtcSessionDescription: false,
    getUserMedia: false,
    getDisplayMedia: false,
+    isHwH264Supported: false,
    errors: [],
    warnings: [],
  });
@@ -27,6 +29,23 @@ export function useWebRTCDiagnostics(): WebRTCDiagnostics {
    const errors: string[] = [];
    const warnings: string[] = [];

+    const checkH264Support = async (): Promise<boolean> => {
+      try {
+        const pc = new RTCPeerConnection();
+        const offer = await pc.createOffer();
+        pc.close();
+
+        if (offer.sdp) {
+          const h264Match = offer.sdp.search(/rtpmap:([0-9]+) H264/g);
+          return h264Match !== -1;
+        }
+        return false;
+      } catch (error) {
+        console.warn("Failed to check H.264 support:", error);
+        return false;
+      }
+    };
+
    // Check if we're in a browser environment
    if (typeof window === "undefined") {
      errors.push("Running in non-browser environment");
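
The check works by generating a throwaway SDP offer and searching it for an rtpmap attribute advertising an H264 payload type (e.g. "a=rtpmap:102 H264/90000"). One caveat: an offer created on a connection with no tracks or transceivers may come back without any video m-line, so a more defensive probe adds a receive-only video transceiver first. A sketch under that assumption, with a hypothetical helper name:

    // Hypothetical helper: will the browser offer the given video codec?
    async function offersVideoCodec(codec: string): Promise<boolean> {
      const pc = new RTCPeerConnection();
      try {
        // Force a video m-line into the offer even with no local tracks.
        pc.addTransceiver("video", { direction: "recvonly" });
        const offer = await pc.createOffer();
        return new RegExp(`rtpmap:\\d+ ${codec}`).test(offer.sdp ?? "");
      } finally {
        pc.close();
      }
    }

    // e.g. await offersVideoCodec("H264"); await offersVideoCodec("VP9");

The logging helper later in this file reaches the same goal by passing { offerToReceiveVideo: true } to createOffer.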
@@ -37,6 +56,7 @@ export function useWebRTCDiagnostics(): WebRTCDiagnostics {
      rtcSessionDescription: false,
      getUserMedia: false,
      getDisplayMedia: false,
+      isHwH264Supported: false,
      errors,
      warnings,
    });
@@ -105,22 +125,45 @@ export function useWebRTCDiagnostics(): WebRTCDiagnostics {

    const browserSupport = rtcPeerConnection && rtcSessionDescription;

-    setDiagnostics({
-      done: true,
-      browserSupport,
-      rtcPeerConnection,
-      rtcSessionDescription,
-      getUserMedia,
-      getDisplayMedia,
-      errors,
-      warnings,
-    });
+    // Check H.264 support asynchronously
+    if (rtcPeerConnection) {
+      checkH264Support().then((isHwH264Supported) => {
+        if (!isHwH264Supported) {
+          warnings.push(
+            "H.264 hardware acceleration is not supported\n In Firefox, try enabling 'media.webrtc.hw.h264.enabled' in about:config",
+          );
+        }
+        setDiagnostics({
+          done: true,
+          browserSupport,
+          rtcPeerConnection,
+          rtcSessionDescription,
+          getUserMedia,
+          getDisplayMedia,
+          isHwH264Supported,
+          errors,
+          warnings,
+        });
+      });
+    } else {
+      setDiagnostics({
+        done: true,
+        browserSupport,
+        rtcPeerConnection,
+        rtcSessionDescription,
+        getUserMedia,
+        getDisplayMedia,
+        isHwH264Supported: false,
+        errors,
+        warnings,
+      });
+    }
  }, []);

  return diagnostics;
}

-export function logWebRTCDiagnostics() {
+export async function logWebRTCDiagnostics() {
  console.group("WebRTC Diagnostics");

  // Log browser support
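
Note that logWebRTCDiagnostics is now async (it awaits createOffer internally). Fire-and-forget call sites keep working, but awaiting it keeps the grouped console output ordered deterministically:

    await logWebRTCDiagnostics();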
@@ -133,17 +176,32 @@ export function logWebRTCDiagnostics() {
  console.log("User Agent:", navigator.userAgent);
  console.log("Protocol:", location.protocol);
  console.log("Host:", location.hostname);
-
-  // Test basic WebRTC functionality
+  console.groupEnd();
  if (window.RTCPeerConnection) {
    try {
      const pc = new RTCPeerConnection();
-
+      // Check H.264 support
+      try {
+        const offer = await pc.createOffer({ offerToReceiveVideo: true });
+        const isHwH264Supported = offer.sdp
+          ? offer.sdp.search(/rtpmap:([0-9]+) H264/g) !== -1
+          : false;
+        console.group("WebRTC Peer Connection Test");
+        console.log("RTCPeerConnection creation: ✓ Success");
+        console.log(
+          "H.264 support:",
+          isHwH264Supported ? "✓ Supported" : "✗ Not supported",
+        );
+      } catch (error) {
+        console.group("WebRTC Peer Connection Test");
+        console.error("H.264 check failed:", error);
+      }
+
      pc.close();
    } catch (error) {
+      console.group("WebRTC Peer Connection Test");
      console.error("RTCPeerConnection creation: ✗ Failed", error);
    }
  }
-
  console.groupEnd();
}

package/src/livestream-store/chat.tsx
CHANGED

@@ -76,6 +76,7 @@ export const useCreateChatMessage = () => {
    await rt.detectFacets(pdsAgent);

    const record: PlaceStreamChatMessage.Record = {
+      $type: "place.stream.chat.message",
      text: msg.text,
      createdAt: new Date().toISOString(),
      streamer: streamerProfile.did,
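
Several hunks in this release add an explicit $type discriminator ("place.stream.chat.message", "place.stream.chat.profile", "place.stream.livestream", "place.stream.chat.defs#messageView") when constructing records and views, likely prompted by the @atproto/api bump to ^0.16.7: atproto records are stored with a $type naming their lexicon schema, and newer generated Record types carry it as an explicit member. A minimal sketch of writing such a record — the agent setup and helper are illustrative, only the lexicon NSID comes from this diff:

    import { Agent } from "@atproto/api";

    async function postChatMessage(agent: Agent, text: string) {
      // The record's $type must match the collection's lexicon NSID.
      const record = {
        $type: "place.stream.chat.message",
        text,
        createdAt: new Date().toISOString(),
      };
      return agent.com.atproto.repo.createRecord({
        repo: agent.assertDid,
        collection: "place.stream.chat.message",
        record,
      });
    }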
@@ -295,6 +296,7 @@ export const reduceChatIncremental = (
      processedMessage = {
        ...message,
        replyTo: {
+          $type: "place.stream.chat.defs#messageView",
          cid: parentMsg.cid,
          uri: parentMsg.uri,
          author: parentMsg.author,

package/src/livestream-store/websocket-consumer.tsx
CHANGED

@@ -20,7 +20,7 @@ export const handleWebSocketMessages = (
  state: LivestreamState,
  messages: any[],
): LivestreamState => {
-  for (…
+  for (let message of messages) {
    if (PlaceStreamLivestream.isLivestreamView(message)) {
      const newLivestream = message as LivestreamViewHydrated;
      const oldLivestream = state.livestream;
@@ -41,11 +41,13 @@ export const handleWebSocketMessages = (
        livestream: newLivestream,
      };
    } else if (PlaceStreamLivestream.isViewerCount(message)) {
+      message = message as PlaceStreamLivestream.ViewerCount;
      state = {
        ...state,
        viewers: message.count,
      };
    } else if (PlaceStreamChatDefs.isMessageView(message)) {
+      message = message as PlaceStreamChatDefs.MessageView;
      // Explicitly map MessageView to MessageViewHydrated
      const hydrated: ChatMessageViewHydrated = {
        uri: message.uri,
@@ -74,6 +76,7 @@ export const handleWebSocketMessages = (
      const block = message as PlaceStreamDefs.BlockView;
      state = reduceChat(state, [], [block], []);
    } else if (PlaceStreamDefs.isRenditions(message)) {
+      message = message as PlaceStreamDefs.Renditions;
      state = {
        ...state,
        renditions: message.renditions,
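
These `message = message as T` reassignments are type-level only: the compiled dist output shows them as bare `message = message;` self-assignments, so they exist purely to give the `any`-typed loop variable a concrete type inside each branch. An equivalent style that avoids the runtime no-op is binding the cast to a fresh const, sketched here with illustrative stand-in types:

    type ViewerCount = { $type: string; count: number };

    // Stand-in for the generated PlaceStreamLivestream guard.
    function isViewerCount(msg: unknown): msg is ViewerCount {
      return (msg as ViewerCount)?.count !== undefined;
    }

    function reduce(messages: any[]) {
      for (const message of messages) {
        if (isViewerCount(message)) {
          // A const binding carries the narrowed type without emitting
          // a self-assignment in the compiled output.
          const viewerCount: ViewerCount = message;
          console.log(viewerCount.count);
        }
      }
    }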

package/src/streamplace-store/stream.tsx
CHANGED

@@ -248,6 +248,7 @@ export function useCreateStreamRecord() {
  }

  const record: PlaceStreamLivestream.Record = {
+    $type: "place.stream.livestream",
    title: title,
    url: finalUrl,
    createdAt: new Date().toISOString(),
@@ -313,6 +314,7 @@ export function useUpdateStreamRecord(customUrl: string | null = null) {
  }

  const record: PlaceStreamLivestream.Record = {
+    $type: "place.stream.livestream",
    title: title,
    url: finalUrl,
    createdAt: new Date().toISOString(),