@adriansteffan/reactive 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +18 -0
- package/.prettierrc +5 -0
- package/Dockerfile +20 -0
- package/README.md +68 -0
- package/bin/setup.js +100 -0
- package/dist/mod.d.ts +102 -0
- package/dist/reactivepsych.es.js +71241 -0
- package/dist/reactivepsych.umd.js +120 -0
- package/dist/style.css +5 -0
- package/dist/tailwind.config.js +33 -0
- package/package.json +75 -0
- package/postcss.config.js +6 -0
- package/src/components/experiment.tsx +156 -0
- package/src/components/experimentprovider.tsx +28 -0
- package/src/components/mastermindlewrapper.tsx +662 -0
- package/src/components/microphonecheck.tsx +167 -0
- package/src/components/quest.tsx +102 -0
- package/src/components/text.tsx +45 -0
- package/src/components/upload.tsx +149 -0
- package/src/components/voicerecorder.tsx +346 -0
- package/src/index.css +74 -0
- package/src/mod.tsx +14 -0
- package/src/utils/common.ts +80 -0
- package/src/utils/request.ts +25 -0
- package/src/vite-env.d.ts +1 -0
- package/tailwind.config.js +33 -0
- package/template/.dockerignore +5 -0
- package/template/.eslintrc.cjs +18 -0
- package/template/.prettierrc +5 -0
- package/template/Dockerfile +25 -0
- package/template/README.md +102 -0
- package/template/backend/package-lock.json +2398 -0
- package/template/backend/package.json +31 -0
- package/template/backend/src/backend.ts +99 -0
- package/template/backend/tsconfig.json +110 -0
- package/template/docker-compose.yaml +13 -0
- package/template/index.html +15 -0
- package/template/package-lock.json +6031 -0
- package/template/package.json +48 -0
- package/template/postcss.config.js +6 -0
- package/template/public/Atkinson_Hyperlegible/AtkinsonHyperlegible-Bold.ttf +0 -0
- package/template/public/Atkinson_Hyperlegible/AtkinsonHyperlegible-BoldItalic.ttf +0 -0
- package/template/public/Atkinson_Hyperlegible/AtkinsonHyperlegible-Italic.ttf +0 -0
- package/template/public/Atkinson_Hyperlegible/AtkinsonHyperlegible-Regular.ttf +0 -0
- package/template/public/Atkinson_Hyperlegible/OFL.txt +93 -0
- package/template/src/App.tsx +116 -0
- package/template/src/index.css +3 -0
- package/template/src/main.tsx +14 -0
- package/template/tailwind.config.js +7 -0
- package/template/tsconfig.json +25 -0
- package/template/tsconfig.node.json +11 -0
- package/template/vite.config.ts +24 -0
- package/tsconfig.json +28 -0
- package/tsconfig.node.json +12 -0
- package/vite.config.ts +48 -0
|
@@ -0,0 +1,346 @@
|
|
|
1
|
+
import React, { useState, useRef, useEffect } from 'react';
import { HiMicrophone, HiStop, HiTrash } from 'react-icons/hi2';

// Props for the live waveform display: the active microphone capture stream.
interface AudioVisualizerProps {
  stream: MediaStream;
}

// Shape of a finished recording as held by the question object.
interface RecordingData {
  blob: Blob; // raw recorded audio (created as 'audio/webm' below)
  url: string; // object URL created from `blob` for local playback
  timestamp: string; // ISO-8601 time the recording was saved
}
|
|
13
|
+
|
|
14
|
+
// I quickly AI-genned this viz so that the participants have some feedback if their audio is being picked up - probably highly inaccurate, but that should not matter at all
|
|
15
|
+
const AudioVisualizer: React.FC<AudioVisualizerProps> = ({ stream }) => {
|
|
16
|
+
const canvasRef = useRef<HTMLCanvasElement | null>(null);
|
|
17
|
+
const animationRef = useRef<number | null>(null);
|
|
18
|
+
const analyserRef = useRef<AnalyserNode | null>(null);
|
|
19
|
+
const previousDataRef = useRef<number[]>([]);
|
|
20
|
+
|
|
21
|
+
useEffect(() => {
|
|
22
|
+
if (!stream || !canvasRef.current) return;
|
|
23
|
+
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
24
|
+
//@ts-ignore
|
|
25
|
+
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
|
|
26
|
+
const analyser = audioContext.createAnalyser();
|
|
27
|
+
const source = audioContext.createMediaStreamSource(stream);
|
|
28
|
+
|
|
29
|
+
// Increased FFT size for smoother data
|
|
30
|
+
analyser.fftSize = 2048;
|
|
31
|
+
source.connect(analyser);
|
|
32
|
+
analyserRef.current = analyser;
|
|
33
|
+
|
|
34
|
+
const canvas = canvasRef.current;
|
|
35
|
+
const ctx = canvas.getContext('2d', { alpha: false });
|
|
36
|
+
if (!ctx) return;
|
|
37
|
+
|
|
38
|
+
ctx.imageSmoothingEnabled = true;
|
|
39
|
+
ctx.imageSmoothingQuality = 'high';
|
|
40
|
+
|
|
41
|
+
const dpr = window.devicePixelRatio || 1;
|
|
42
|
+
const rect = canvas.getBoundingClientRect();
|
|
43
|
+
|
|
44
|
+
canvas.width = rect.width * dpr;
|
|
45
|
+
canvas.height = rect.height * dpr;
|
|
46
|
+
ctx.scale(dpr, dpr);
|
|
47
|
+
|
|
48
|
+
canvas.style.width = `${rect.width}px`;
|
|
49
|
+
canvas.style.height = `${rect.height}px`;
|
|
50
|
+
|
|
51
|
+
const dataArray = new Uint8Array(analyser.frequencyBinCount);
|
|
52
|
+
previousDataRef.current = Array(analyser.frequencyBinCount).fill(128);
|
|
53
|
+
|
|
54
|
+
// Enhanced smoothing function with weighted average
|
|
55
|
+
const smoothValue = (current: number, previous: number) => {
|
|
56
|
+
const weight = 0.08; // Lower = smoother, higher = more responsive
|
|
57
|
+
return previous + (current - previous) * weight;
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
// Function to compute running average for additional smoothing
|
|
61
|
+
const averageValues = (data: number[], windowSize: number = 3) => {
|
|
62
|
+
const result = new Array(data.length).fill(0);
|
|
63
|
+
|
|
64
|
+
for (let i = 0; i < data.length; i++) {
|
|
65
|
+
let sum = 0;
|
|
66
|
+
let count = 0;
|
|
67
|
+
|
|
68
|
+
for (
|
|
69
|
+
let j = Math.max(0, i - windowSize);
|
|
70
|
+
j < Math.min(data.length, i + windowSize + 1);
|
|
71
|
+
j++
|
|
72
|
+
) {
|
|
73
|
+
sum += data[j];
|
|
74
|
+
count++;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
result[i] = sum / count;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
return result;
|
|
81
|
+
};
|
|
82
|
+
|
|
83
|
+
const draw = () => {
|
|
84
|
+
const width = rect.width;
|
|
85
|
+
const height = rect.height;
|
|
86
|
+
const centerY = height / 2;
|
|
87
|
+
|
|
88
|
+
animationRef.current = requestAnimationFrame(draw);
|
|
89
|
+
analyser.getByteTimeDomainData(dataArray);
|
|
90
|
+
|
|
91
|
+
// Apply initial smoothing
|
|
92
|
+
const smoothedData = Array.from(dataArray).map((value, i) =>
|
|
93
|
+
smoothValue(value, previousDataRef.current[i]),
|
|
94
|
+
);
|
|
95
|
+
|
|
96
|
+
// Apply running average smoothing
|
|
97
|
+
const averagedData = averageValues(smoothedData);
|
|
98
|
+
|
|
99
|
+
// Update previous data
|
|
100
|
+
previousDataRef.current = averagedData;
|
|
101
|
+
|
|
102
|
+
ctx.fillStyle = '#ffffff';
|
|
103
|
+
ctx.fillRect(0, 0, width * dpr, height * dpr);
|
|
104
|
+
|
|
105
|
+
// Draw the center line
|
|
106
|
+
ctx.beginPath();
|
|
107
|
+
ctx.strokeStyle = 'rgba(0, 0, 0, 0.1)';
|
|
108
|
+
ctx.lineWidth = 1;
|
|
109
|
+
ctx.moveTo(0, centerY);
|
|
110
|
+
ctx.lineTo(width, centerY);
|
|
111
|
+
ctx.stroke();
|
|
112
|
+
|
|
113
|
+
// Draw waveform
|
|
114
|
+
ctx.beginPath();
|
|
115
|
+
ctx.lineWidth = 2;
|
|
116
|
+
ctx.strokeStyle = 'rgba(0, 0, 0, 0.85)';
|
|
117
|
+
|
|
118
|
+
const skipPoints = 4;
|
|
119
|
+
const points: [number, number][] = [];
|
|
120
|
+
|
|
121
|
+
for (let i = 0; i < averagedData.length; i += skipPoints) {
|
|
122
|
+
const x = (i / averagedData.length) * width;
|
|
123
|
+
const normalizedValue = (averagedData[i] - 128) / 128;
|
|
124
|
+
const y = centerY + normalizedValue * height * 4.0;
|
|
125
|
+
points.push([x, y]);
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Draw smooth curve through points
|
|
129
|
+
if (points.length > 0) {
|
|
130
|
+
ctx.moveTo(points[0][0], points[0][1]);
|
|
131
|
+
|
|
132
|
+
for (let i = 1; i < points.length - 2; i++) {
|
|
133
|
+
const xc = (points[i][0] + points[i + 1][0]) / 2;
|
|
134
|
+
const yc = (points[i][1] + points[i + 1][1]) / 2;
|
|
135
|
+
ctx.quadraticCurveTo(points[i][0], points[i][1], xc, yc);
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// For the last two points
|
|
139
|
+
if (points.length > 2) {
|
|
140
|
+
const last = points.length - 1;
|
|
141
|
+
ctx.quadraticCurveTo(
|
|
142
|
+
points[last - 1][0],
|
|
143
|
+
points[last - 1][1],
|
|
144
|
+
points[last][0],
|
|
145
|
+
points[last][1],
|
|
146
|
+
);
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
ctx.stroke();
|
|
151
|
+
};
|
|
152
|
+
|
|
153
|
+
draw();
|
|
154
|
+
|
|
155
|
+
return () => {
|
|
156
|
+
if (animationRef.current) {
|
|
157
|
+
cancelAnimationFrame(animationRef.current);
|
|
158
|
+
}
|
|
159
|
+
source.disconnect();
|
|
160
|
+
audioContext.close();
|
|
161
|
+
};
|
|
162
|
+
}, [stream]);
|
|
163
|
+
|
|
164
|
+
return (
|
|
165
|
+
<canvas
|
|
166
|
+
ref={canvasRef}
|
|
167
|
+
width={300}
|
|
168
|
+
height={60}
|
|
169
|
+
className='mx-auto rounded-lg bg-white shadow-sm'
|
|
170
|
+
/>
|
|
171
|
+
);
|
|
172
|
+
};
|
|
173
|
+
|
|
174
|
+
/**
 * Stand-alone voice recorder: captures microphone audio via MediaRecorder,
 * shows a live waveform while recording, and offers playback / discard once a
 * take exists. A saved take is handed to the caller through
 * `handleSaveVoiceData`; a discard is signalled through `handleDiscardVoiceData`.
 */
export const VoiceRecorder = ({
  question,
  handleSaveVoiceData,
  handleDiscardVoiceData,
}: {
  // Holder for the current recording; `value` is reset to null on discard.
  question: {
    value: RecordingData | null;
  };
  handleSaveVoiceData: (data: object) => void;
  handleDiscardVoiceData: () => void;
}) => {
  const [isRecording, setIsRecording] = useState<boolean>(false);
  const [audioUrl, setAudioUrl] = useState<string | null>(null); // object URL for playback
  const [audioStream, setAudioStream] = useState<MediaStream | null>(null);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<BlobPart[]>([]); // audio chunks of the take in progress

  // Requests microphone access (honoring a globally stashed input-device id)
  // and starts a MediaRecorder session on the resulting stream.
  const startRecording = async () => {
    try {
      /*This is really hacky but it works and there is a deadline, we should find a way to pass around such values in the future */
      // NOTE(review): presumably set by the microphone-check screen — confirm.
      /*eslint-disable-next-line @typescript-eslint/no-explicit-any*/
      const deviceId = (window as any).audioInputId;
      const constraints: MediaStreamConstraints = {
        audio: deviceId
          ? {
              deviceId: { exact: deviceId },
            }
          : true,
      };

      const stream = await navigator.mediaDevices.getUserMedia(constraints);

      setAudioStream(stream);
      mediaRecorderRef.current = new MediaRecorder(stream);

      chunksRef.current = [];

      // Collect audio chunks as the recorder produces them.
      mediaRecorderRef.current.ondataavailable = (e: BlobEvent) => {
        if (e.data.size > 0) {
          chunksRef.current.push(e.data);
        }
      };

      // By the time onstop fires, all chunks have been delivered: assemble
      // the blob, expose it for playback, and hand it to the caller.
      mediaRecorderRef.current.onstop = () => {
        const blob = new Blob(chunksRef.current, { type: 'audio/webm' });
        const url = URL.createObjectURL(blob);
        setAudioUrl(url);
        setAudioStream(null);
        saveRecording(blob, url);
      };

      mediaRecorderRef.current.start();

      setIsRecording(true);
    } catch (err) {
      console.error('Error accessing microphone:', err);
    }
  };

  // Stops the recorder (triggering onstop above) and releases the mic tracks
  // so the browser's recording indicator goes away.
  const stopRecording = () => {
    if (mediaRecorderRef.current && isRecording) {
      mediaRecorderRef.current.stop();
      mediaRecorderRef.current.stream.getTracks().forEach((track) => track.stop());
      setIsRecording(false);
    }
  };

  // Base64-encodes the finished blob and forwards the full recording payload
  // to the caller.
  const saveRecording = async (blob: Blob, url: string) => {
    if (blob && url) {
      const base64Data = await new Promise<string>((resolve) => {
        const reader = new FileReader();
        reader.onloadend = () => {
          // Remove data URL prefix (e.g., "data:audio/webm;base64,")
          const base64 = reader.result?.toString().split(',')[1] || '';
          resolve(base64);
        };
        reader.readAsDataURL(blob);
      });

      handleSaveVoiceData({
        blob: blob,
        // dont change this type, since the upload function depends on it while looking for audio. we might want to refactor this at some point
        type: 'audiorecording',
        url: url,
        data64: base64Data,
        timestamp: new Date().toISOString(),
      });
    }
  };

  // Drops the current take: revokes the object URL, clears local and
  // caller-held state.
  const discardRecording = () => {
    if (audioUrl) {
      URL.revokeObjectURL(audioUrl);
    }
    setAudioUrl(null);
    // NOTE(review): mutating the prop object directly is unusual for React —
    // presumably the caller reads `question.value` imperatively; confirm.
    question.value = null;
    handleDiscardVoiceData();
  };

  return (
    <div className='flex flex-col items-center space-y-4 p-4 bg-white'>
      {/* Recording button (record / stop toggle); hidden once a take exists */}
      {!audioUrl && (
        <button
          onClick={isRecording ? stopRecording : startRecording}
          className={`flex items-center justify-center space-x-2 p-4 rounded-full border-2 border-black shadow-[2px_2px_0px_rgba(0,0,0,1)] hover:translate-x-[2px] hover:translate-y-[2px] hover:shadow-none cursor-pointer
          ${isRecording ? 'bg-red-500 hover:bg-red-600' : ''}
          text-white transition-colors duration-200`}
          aria-label={isRecording ? 'Stop Recording' : 'Start Recording'}
        >
          {isRecording ? (
            <HiStop className='w-6 h-6' />
          ) : (
            <HiMicrophone className='w-6 h-6 text-black' />
          )}
        </button>
      )}

      {/* Audio visualizer (live waveform while the mic is open) */}
      {isRecording && audioStream && (
        <div className='w-full max-w-md mx-auto'>
          <AudioVisualizer stream={audioStream} />
        </div>
      )}

      {/* Recording status indicator */}
      {isRecording && (
        <div className='flex items-center space-x-2'>
          <div className='w-3 h-3 bg-red-500 rounded-full animate-pulse'></div>
          <span className='text-sm text-black'>Recording...</span>
        </div>
      )}

      {/* Audio player and action buttons, shown once a take is finished */}
      {audioUrl && !isRecording && (
        <div className='flex flex-col items-center space-y-4 w-full max-w-md'>
          {/* Two <source> entries: mp4 listed first for Safari, webm for the rest */}
          <audio controls preload='none' className='w-full' playsInline>
            <source src={audioUrl} type='audio/mp4' />
            <source src={audioUrl} type='audio/webm' />
            Your browser does not support the audio element.
          </audio>

          <div className='flex space-x-4'>
            <button
              onClick={discardRecording}
              className='border-2 border-black shadow-[2px_2px_0px_rgba(0,0,0,1)] hover:translate-x-[2px] hover:translate-y-[2px] hover:shadow-none cursor-pointer flex items-center space-x-2 px-4 py-2 text-black
              rounded-xl transition-colors duration-200'
            >
              <HiTrash className='w-4 h-4' />
              <span>Discard</span>
            </button>
          </div>
        </div>
      )}
    </div>
  );
};
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
export default function VoicerecorderQuestionComponent({
|
|
335
|
+
setValue,
|
|
336
|
+
}: {
|
|
337
|
+
setValue: (data: object | null) => void;
|
|
338
|
+
}) {
|
|
339
|
+
return (
|
|
340
|
+
<VoiceRecorder
|
|
341
|
+
question={{ value: null }}
|
|
342
|
+
handleSaveVoiceData={(data) => setValue(data)}
|
|
343
|
+
handleDiscardVoiceData={() => setValue(null)}
|
|
344
|
+
/>
|
|
345
|
+
);
|
|
346
|
+
}
|
package/src/index.css
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
@tailwind base;
@tailwind components;
@tailwind utilities;


:root {
  /* Project-wide default typeface, exposed as a CSS variable. */
  --font-atkinson: 'Atkinson Hyperlegible', system-ui, sans-serif;
}

/* Suppress the iOS long-press callout on every element. */
* {
  -webkit-touch-callout: none;
}

@layer base {
  body {
    /* Prevent accidental text selection across the whole experiment UI. */
    @apply select-none;
  }
}

/* Atkinson Hyperlegible faces, served from /public (license: OFL.txt). */
@font-face {
  font-family: 'Atkinson Hyperlegible';
  src: url('/Atkinson_Hyperlegible/AtkinsonHyperlegible-Regular.ttf') format('truetype');
  font-weight: 400;
  font-style: normal;
}

@font-face {
  font-family: 'Atkinson Hyperlegible';
  src: url('/Atkinson_Hyperlegible/AtkinsonHyperlegible-Bold.ttf') format('truetype');
  font-weight: 700;
  font-style: normal;
}

@font-face {
  font-family: 'Atkinson Hyperlegible';
  src: url('/Atkinson_Hyperlegible/AtkinsonHyperlegible-Italic.ttf') format('truetype');
  font-weight: 400;
  font-style: italic;
}

@font-face {
  font-family: 'Atkinson Hyperlegible';
  src: url('/Atkinson_Hyperlegible/AtkinsonHyperlegible-BoldItalic.ttf') format('truetype');
  font-weight: 700;
  font-style: italic;
}

/* customization for the surveyjs elements - a bit hacky and broken */
.sd-root-modern {
  background-color: white !important;
  /* Subtle dotted-paper backdrop. */
  background-image: radial-gradient(#e5e7eb 1px, transparent 1px);
  background-size: 16px 16px;
  position: relative; /* Make sure position is set for proper rendering */
  font-family: 'Atkinson Hyperlegible', sans-serif; /* Added this line */
}

/* Force the font onto every surveyjs descendant as well. */
.sd-root-modern * {
  font-family: 'Atkinson Hyperlegible', sans-serif;
}

/* for the rating group panel, maybe we can move that into the component at some point*/
.sd-row,
.sd-clearfix {
  padding-bottom: 20px !important;
}

/* fix pet peeve in surveyjs: center inline image-picker items on narrow screens */
@media (max-width: 639px) {
  .sd-imagepicker__item--inline {
    margin-left: auto;
    margin-right: auto;
    margin-top: 10px;
  }
}
|
package/src/mod.tsx
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Public entry point of the package: pulls in the base stylesheet and
// re-exports the components, helpers and shared types.
import './index.css';
import Text from './components/text';
import MicCheck from './components/microphonecheck';
import Quest from './components/quest';
import Upload from './components/upload';
import ExperimentProvider from './components/experimentprovider';
import Experiment from './components/experiment';
import { shuffle, BaseComponentProps, ExperimentConfig } from './utils/common';
import MasterMindleWrapper from './components/mastermindlewrapper';

export { Text, MicCheck, Quest, Upload, MasterMindleWrapper, Experiment, ExperimentProvider, shuffle };
export type { BaseComponentProps, ExperimentConfig };
export * from './utils/common';
|
|
14
|
+
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
// Current time from the monotonic performance clock, rounded to whole
// milliseconds. Intended for measuring durations, not wall-clock dates.
export function now() {
  return Math.round(performance.now());
}
|
|
4
|
+
|
|
5
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
6
|
+
export function shuffle(array: any[]) {
|
|
7
|
+
for (let i = array.length - 1; i >= 0; i--) {
|
|
8
|
+
const j = Math.floor(Math.random() * (i + 1));
|
|
9
|
+
[array[i], array[j]] = [array[j], array[i]];
|
|
10
|
+
}
|
|
11
|
+
return array;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
// Generic type for all data structures
// One logged event in the study's trial/event stream.
export interface StudyEvent {
  index: number; // position of the event within the recorded sequence
  type: string; // event/component category
  name: string;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data: any; // event payload; shape depends on the emitting component
  start: number; // start/end timestamps — presumably now() values (ms); confirm
  end: number;
  duration: number;
}

// A file to be sent to the backend, with its body encoded per `encoding`.
export interface FileUpload {
  filename: string;
  content: string;
  encoding: 'base64' | 'utf8';
}

// Top-level experiment display options.
export interface ExperimentConfig {
  showProgressBar: boolean;
}

// Props shared by every experiment screen component.
export interface BaseComponentProps {
  next: (data: object) => void; // callback invoked with the component's result data
  data?: object;
  metaData?: object;
}
|
|
40
|
+
|
|
41
|
+
type ParamType = 'string' | 'number' | 'boolean' | 'array' | 'json';
|
|
42
|
+
type ParamValue<T extends ParamType> = T extends 'number'
|
|
43
|
+
? number | undefined
|
|
44
|
+
: T extends 'boolean'
|
|
45
|
+
? boolean | undefined
|
|
46
|
+
: T extends 'array' | 'json'
|
|
47
|
+
? any | undefined
|
|
48
|
+
: string | undefined;
|
|
49
|
+
|
|
50
|
+
export function getParam<T extends ParamType>(
|
|
51
|
+
name: string,
|
|
52
|
+
defaultValue: ParamValue<T> | undefined,
|
|
53
|
+
type: T = 'string' as T,
|
|
54
|
+
): ParamValue<T> | undefined {
|
|
55
|
+
const value = new URLSearchParams(window.location.search).get(name);
|
|
56
|
+
if (!value) return defaultValue;
|
|
57
|
+
if (value.toLowerCase() === 'undefined') return undefined;
|
|
58
|
+
|
|
59
|
+
const conversions: Record<ParamType, (v: string) => any> = {
|
|
60
|
+
string: (v) => v,
|
|
61
|
+
number: (v) => Number(v) || defaultValue,
|
|
62
|
+
boolean: (v) => v.toLowerCase() === 'true',
|
|
63
|
+
array: (v) => {
|
|
64
|
+
try {
|
|
65
|
+
return JSON.parse(v);
|
|
66
|
+
} catch {
|
|
67
|
+
return defaultValue;
|
|
68
|
+
}
|
|
69
|
+
},
|
|
70
|
+
json: (v) => {
|
|
71
|
+
try {
|
|
72
|
+
return JSON.parse(v);
|
|
73
|
+
} catch {
|
|
74
|
+
return defaultValue;
|
|
75
|
+
}
|
|
76
|
+
},
|
|
77
|
+
};
|
|
78
|
+
|
|
79
|
+
return conversions[type](value);
|
|
80
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
const BACKEND_ROUTE = "/backend";
|
|
2
|
+
|
|
3
|
+
export async function post(endpoint: string, body: object | FormData) {
|
|
4
|
+
if (body instanceof FormData) {
|
|
5
|
+
return await fetch(`${BACKEND_ROUTE}${endpoint}`, {
|
|
6
|
+
method: 'POST',
|
|
7
|
+
body,
|
|
8
|
+
});
|
|
9
|
+
} else {
|
|
10
|
+
return fetch(`${BACKEND_ROUTE}${endpoint}`, {
|
|
11
|
+
method: 'POST',
|
|
12
|
+
headers: { 'Content-Type': 'application/json' },
|
|
13
|
+
body: JSON.stringify(body),
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function get(endpoint: string) {
|
|
19
|
+
const res = await fetch(`${BACKEND_ROUTE}${endpoint}`);
|
|
20
|
+
return res;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export async function getJson(endpoint: string) {
|
|
24
|
+
return await (await get(endpoint)).json();
|
|
25
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/// <reference types="vite/client" />
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
/** @type {import('tailwindcss').Config} */
export default {
  // Scan built output (dist) too so class names used by the compiled library
  // survive Tailwind's purge step.
  content: ['./index.html', './src/**/*.{js,ts,jsx,tsx}', './dist/**/*.{js,ts,jsx,tsx}'],
  theme: {
    // Atkinson Hyperlegible is the project-wide default; the @font-face
    // declarations live in src/index.css.
    fontFamily: {
      sans: ['Atkinson Hyperlegible', 'sans-serif'],
      atkinson: ['Atkinson Hyperlegible', 'sans-serif'],
    },
    extend: {
      keyframes: {
        // Slide content down slightly while fading it in.
        slideDown: {
          '0%': {
            transform: 'translateY(-10px)',
            opacity: '0',
          },
          '100%': {
            transform: 'translateY(0)',
            opacity: '1',
          },
        },
        // Plain fade-in.
        fadeIn: {
          '0%': { opacity: '0' },
          '100%': { opacity: '1' },
        },
      },
      animation: {
        slideDown: 'slideDown 0.8s ease-out forwards',
        fadeIn: 'fadeIn 0.5s ease-out forwards',
      },
    },
  },
  plugins: [],
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// ESLint setup for the package (CommonJS because ESLint loads it directly).
module.exports = {
  root: true,
  env: { browser: true, es2020: true, node: true, commonjs: true },
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:react-hooks/recommended',
  ],
  // Skip build output; this config file itself is plain CJS, not TS.
  ignorePatterns: ['dist', '.eslintrc.cjs'],
  parser: '@typescript-eslint/parser',
  plugins: ['react-refresh'],
  rules: {
    // Warn when a module's exports would break React Fast Refresh.
    'react-refresh/only-export-components': [
      'warn',
      { allowConstantExport: true },
    ],
  },
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Stage 1: build the frontend bundle.
FROM node:20-alpine as frontend-builder
WORKDIR /app
COPY package*.json ./

RUN npm install

COPY . .
RUN npm run build


# Stage 2: build the backend and serve the frontend's static files from it.
FROM node:20-alpine
WORKDIR /app

COPY backend/package*.json ./
RUN npm install

COPY backend/ ./
RUN npm run build
RUN npm install -g pm2

# The compiled frontend is served by the backend from ./static.
COPY --from=frontend-builder /app/dist ./static

EXPOSE 8001

# pm2-runtime keeps the node process in the foreground for Docker.
CMD ["npx", "pm2-runtime", "./dist/backend.js"]
|