@harmonia-core/ui 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +267 -0
- package/dist/capacity/animation.d.ts +77 -0
- package/dist/capacity/animation.d.ts.map +1 -0
- package/dist/capacity/constants.d.ts +119 -0
- package/dist/capacity/constants.d.ts.map +1 -0
- package/dist/capacity/feedback.d.ts +55 -0
- package/dist/capacity/feedback.d.ts.map +1 -0
- package/dist/capacity/fields/field-manager.d.ts +45 -0
- package/dist/capacity/fields/field-manager.d.ts.map +1 -0
- package/dist/capacity/index.d.ts +15 -0
- package/dist/capacity/index.d.ts.map +1 -0
- package/dist/capacity/index.js +1313 -0
- package/dist/capacity/index.js.map +1 -0
- package/dist/capacity/index.mjs +1267 -0
- package/dist/capacity/index.mjs.map +1 -0
- package/dist/capacity/mode.d.ts +50 -0
- package/dist/capacity/mode.d.ts.map +1 -0
- package/dist/capacity/prediction/hooks.d.ts +11 -0
- package/dist/capacity/prediction/hooks.d.ts.map +1 -0
- package/dist/capacity/prediction/pattern-extractor.d.ts +26 -0
- package/dist/capacity/prediction/pattern-extractor.d.ts.map +1 -0
- package/dist/capacity/prediction/pattern-store.d.ts +35 -0
- package/dist/capacity/prediction/pattern-store.d.ts.map +1 -0
- package/dist/capacity/prediction/prediction-engine.d.ts +39 -0
- package/dist/capacity/prediction/prediction-engine.d.ts.map +1 -0
- package/dist/capacity/prediction/types.d.ts +24 -0
- package/dist/capacity/prediction/types.d.ts.map +1 -0
- package/dist/capacity/provider.d.ts +119 -0
- package/dist/capacity/provider.d.ts.map +1 -0
- package/dist/capacity/signals/aggregator.d.ts +38 -0
- package/dist/capacity/signals/aggregator.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/environment-detector.d.ts +31 -0
- package/dist/capacity/signals/detectors/environment-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/input-detector.d.ts +23 -0
- package/dist/capacity/signals/detectors/input-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/interaction-detector.d.ts +27 -0
- package/dist/capacity/signals/detectors/interaction-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/scroll-detector.d.ts +35 -0
- package/dist/capacity/signals/detectors/scroll-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/session-detector.d.ts +23 -0
- package/dist/capacity/signals/detectors/session-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/time-detector.d.ts +20 -0
- package/dist/capacity/signals/detectors/time-detector.d.ts.map +1 -0
- package/dist/capacity/signals/detectors/types.d.ts +25 -0
- package/dist/capacity/signals/detectors/types.d.ts.map +1 -0
- package/dist/capacity/signals/signal-bus.d.ts +50 -0
- package/dist/capacity/signals/signal-bus.d.ts.map +1 -0
- package/dist/capacity/types.d.ts +239 -0
- package/dist/capacity/types.d.ts.map +1 -0
- package/dist/capacity/utils/index.d.ts +7 -0
- package/dist/capacity/utils/index.d.ts.map +1 -0
- package/dist/capacity/utils/typography.d.ts +176 -0
- package/dist/capacity/utils/typography.d.ts.map +1 -0
- package/dist/components/ambient-field-monitor.d.ts +10 -0
- package/dist/components/ambient-field-monitor.d.ts.map +1 -0
- package/dist/components/capacity-controls.d.ts +15 -0
- package/dist/components/capacity-controls.d.ts.map +1 -0
- package/dist/components/capacity-demo-card.d.ts +13 -0
- package/dist/components/capacity-demo-card.d.ts.map +1 -0
- package/dist/components/index.d.ts +18 -0
- package/dist/components/index.d.ts.map +1 -0
- package/dist/components/index.js +1703 -0
- package/dist/components/index.js.map +1 -0
- package/dist/components/index.mjs +1688 -0
- package/dist/components/index.mjs.map +1 -0
- package/dist/components/ui/badge.d.ts +8 -0
- package/dist/components/ui/badge.d.ts.map +1 -0
- package/dist/components/ui/button.d.ts +10 -0
- package/dist/components/ui/button.d.ts.map +1 -0
- package/dist/components/ui/card.d.ts +10 -0
- package/dist/components/ui/card.d.ts.map +1 -0
- package/dist/components/ui/select.d.ts +6 -0
- package/dist/components/ui/select.d.ts.map +1 -0
- package/dist/components/ui/slider.d.ts +14 -0
- package/dist/components/ui/slider.d.ts.map +1 -0
- package/package.json +98 -0
|
@@ -0,0 +1,1313 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var react = require('react');
|
|
4
|
+
var jsxRuntime = require('react/jsx-runtime');
|
|
5
|
+
|
|
6
|
+
// --- Bundler (esbuild) runtime helpers -------------------------------------
// Lowered forms of object spread (`{ ...a, ...b }`) and async/await.

var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;

// Set obj[key] = value; when the key already exists it is redefined as a
// normal enumerable/configurable/writable data property (spread semantics).
var __defNormalProp = (obj, key, value) =>
  key in obj
    ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value })
    : (obj[key] = value);

// Runtime form of `{ ...a, ...b }`: copies b's own enumerable string keys
// and own enumerable symbols onto a. Returns a. A nullish b is a no-op.
var __spreadValues = (a, b) => {
  const source = b || {};
  for (const key in source) {
    if (__hasOwnProp.call(source, key)) __defNormalProp(a, key, source[key]);
  }
  if (__getOwnPropSymbols) {
    for (const sym of __getOwnPropSymbols(source)) {
      if (__propIsEnum.call(source, sym)) __defNormalProp(a, sym, source[sym]);
    }
  }
  return a;
};

// Copies full property descriptors from b onto a (preserves getters etc.).
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));

// Drives a generator as an async function: each yielded value is awaited and
// its resolution/rejection fed back in through next()/throw().
var __async = (__this, __arguments, generator) =>
  new Promise((resolve, reject) => {
    const step = (result) =>
      result.done
        ? resolve(result.value)
        : Promise.resolve(result.value).then(fulfilled, rejected);
    const fulfilled = (value) => {
      try {
        step(generator.next(value));
      } catch (e) {
        reject(e);
      }
    };
    const rejected = (value) => {
      try {
        step(generator.throw(value));
      } catch (e) {
        reject(e);
      }
    };
    step((generator = generator.apply(__this, __arguments)).next());
  });
|
|
45
|
+
|
|
46
|
+
// lib/capacity/constants.ts

// Golden ratio and its inverse — used for spacing/scale relationships.
var PHI = 1.618033988749895;
var PHI_INVERSE = 0.618033988749895;

// Fibonacci steps available for rhythm/sizing scales.
var FIBONACCI = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144];

// Sonic feedback frequencies (Hz), keyed by register.
var FEEDBACK_FREQUENCIES = {
  low: 396, // Foundation/root elements
  mid: 528, // Primary interactive content
  high: 741 // Dynamic/feedback elements
};

// Defaults for smoothing and trend derivation of field values.
var DEFAULT_FIELD_CONFIG = {
  smoothing: 0.15, // Exponential smoothing factor
  velocityThreshold: 0.05, // Min velocity to register as trend
  debounceMs: 100 // Debounce rapid changes
};

// Starting capacity estimates used before any signals arrive.
var DEFAULT_USER_CAPACITY = {
  cognitive: 0.7,
  temporal: 0.7,
  emotional: 0.7
};

// Starting emotional state.
var DEFAULT_EMOTIONAL_STATE = {
  valence: 0.3, // > 0.15 (with emotional > 0.6) triggers expressive motion mode
  arousal: 0.5
};

// Default per-channel component response to capacity changes.
var DEFAULT_COMPONENT_RESPONSE = {
  visual: {
    opacityRange: [0.4, 1],
    scaleRange: [0.95, 1]
  },
  spatial: {
    densityRange: [0.6, 1],
    spacingMultiplier: [1, PHI]
  },
  sonic: {
    enabled: false // Opt-in
  },
  semantic: {
    verbosityLevel: "concise",
    urgencyFraming: "neutral"
  }
};

// Duration/easing tokens per motion mode.
var MOTION_TOKENS = {
  off: {
    durationFast: 0,
    durationBase: 0,
    durationSlow: 0,
    easing: "linear",
    // Essential transitions still allowed (opacity, focus rings)
    essentialDuration: 100,
    essentialEasing: "ease-out"
  },
  soothing: {
    durationFast: 0, // No fast motion -- everything is slow and rhythmic
    durationBase: 800,
    durationSlow: 1200,
    easing: "ease-in-out", // Smooth, no sharp edges
    essentialDuration: 200,
    essentialEasing: "ease-in-out"
  },
  subtle: {
    durationFast: 100,
    durationBase: 200,
    durationSlow: 350,
    easing: "ease-out",
    essentialDuration: 150,
    essentialEasing: "ease-out"
  },
  expressive: {
    durationFast: 200,
    durationBase: 400,
    durationSlow: 700,
    easing: "cubic-bezier(0.34, 1.56, 0.64, 1)", // Spring-like overshoot
    essentialDuration: 150,
    essentialEasing: "ease-out"
  }
};
|
|
132
|
+
|
|
133
|
+
// lib/capacity/feedback.ts

// Vibration patterns (ms on/off alternation) for navigator.vibrate.
var HAPTIC_PATTERNS = {
  /** Short tap — confirm/select */
  tap: [8],
  /** Two pulses — toggle/switch */
  toggle: [8, 50, 8],
  /** Gentle pulse — ambient/ambient confirmation */
  pulse: [15, 30, 15],
  /** Error/warning — three quick */
  error: [50, 30, 50, 30, 50]
};

/**
 * Fire a haptic pattern; silently no-ops where the Vibration API is missing
 * (SSR, desktop browsers without vibration hardware).
 * @param {"tap"|"toggle"|"pulse"|"error"} [pattern="tap"] pattern key
 */
function triggerHaptic(pattern = "tap") {
  const canVibrate = typeof navigator !== "undefined" && "vibrate" in navigator;
  if (canVibrate) {
    navigator.vibrate(HAPTIC_PATTERNS[pattern]);
  }
}
|
|
149
|
+
// Lazily-created shared AudioContext (browsers limit concurrent contexts).
var _audioCtx = null;

/**
 * Return the shared AudioContext, creating or reviving it on demand.
 * Returns null during SSR or when the Web Audio API is unavailable/blocked.
 */
function getAudioContext() {
  if (typeof window === "undefined") return null;
  try {
    if (!_audioCtx || _audioCtx.state === "closed") {
      _audioCtx = new AudioContext();
    }
    if (_audioCtx.state === "suspended") {
      // FIX: resume() returns a Promise; swallow rejections (e.g. autoplay
      // policy before a user gesture) so they don't surface as unhandled
      // promise rejections. The context simply stays suspended.
      _audioCtx.resume().catch(() => {});
    }
    return _audioCtx;
  } catch (e) {
    return null;
  }
}

/**
 * Play a short sine tone for ambient feedback; no-op without an AudioContext.
 * @param {number} frequency tone frequency in Hz
 * @param {number} [duration=120] tone length in ms
 * @param {number} [volume=0.06] peak gain — kept low by design
 */
function playSonicFeedback(frequency, duration = 120, volume = 0.06) {
  const ctx = getAudioContext();
  if (!ctx) return;
  const oscillator = ctx.createOscillator();
  const gainNode = ctx.createGain();
  oscillator.connect(gainNode);
  gainNode.connect(ctx.destination);
  oscillator.type = "sine";
  oscillator.frequency.setValueAtTime(frequency, ctx.currentTime);
  // Quick 15 ms attack, then linear decay to zero over `duration` — avoids
  // audible clicks at tone start/end.
  gainNode.gain.setValueAtTime(0, ctx.currentTime);
  gainNode.gain.linearRampToValueAtTime(volume, ctx.currentTime + 0.015);
  gainNode.gain.linearRampToValueAtTime(0, ctx.currentTime + duration / 1e3);
  oscillator.start(ctx.currentTime);
  oscillator.stop(ctx.currentTime + duration / 1e3 + 0.02);
}
|
|
179
|
+
/**
 * Map an arousal pace to its feedback frequency register:
 * "activated" → high, "calm" → low, anything else → mid.
 */
function getFrequencyForPace(pace) {
  switch (pace) {
    case "activated":
      return FEEDBACK_FREQUENCIES.high;
    case "calm":
      return FEEDBACK_FREQUENCIES.low;
    default:
      return FEEDBACK_FREQUENCIES.mid;
  }
}

/** Convenience wrapper: play a tone whose pitch matches the given pace. */
function playPacedSonic(pace, duration) {
  const frequency = getFrequencyForPace(pace);
  playSonicFeedback(frequency, duration);
}
|
|
187
|
+
|
|
188
|
+
// lib/capacity/fields/field-manager.ts

/**
 * Derive the overall "energy" field as the geometric mean of the three
 * capacity dimensions — any single depleted dimension drags the whole down.
 */
function deriveEnergyField(capacity) {
  const product = capacity.cognitive * capacity.temporal * capacity.emotional;
  return Math.pow(product, 1 / 3);
}
|
|
193
|
+
/**
 * Derive ambient attention demand: the less temporal capacity the user has,
 * the more of their attention is assumed to be already claimed.
 */
function deriveAttentionField(capacity) {
  const temporalRelief = capacity.temporal * 0.5;
  return 1 - temporalRelief;
}
|
|
196
|
+
/** The emotional-valence field mirrors the raw valence reading directly. */
function deriveEmotionalValenceField(state) {
  const { valence } = state;
  return valence;
}
|
|
199
|
+
/**
 * Wrap a raw field value with change metadata (timestamp, trend, velocity).
 * Trend/velocity are only derived when both the new and previous values are
 * numeric; otherwise trend stays "stable" and velocity is undefined.
 */
function createFieldValue(value, previousValue) {
  const now = Date.now();
  const lastChange = previousValue?.lastChange ?? now;
  const timeDelta = (now - lastChange) / 1e3; // seconds since last change
  let trend = "stable";
  let velocity;
  const bothNumeric =
    typeof value === "number" && previousValue && typeof previousValue.value === "number";
  if (bothNumeric) {
    velocity = timeDelta > 0 ? (value - previousValue.value) / timeDelta : 0;
    if (Math.abs(velocity) > DEFAULT_FIELD_CONFIG.velocityThreshold) {
      trend = velocity > 0 ? "rising" : "falling";
    }
  }
  return { value, lastChange: now, trend, velocity };
}
|
|
220
|
+
// Central store for the ambient capacity context: derives the energy /
// attention / emotional-valence fields from raw inputs and fans updates
// out to subscribers. Context objects are replaced, never mutated.
var FieldManagerClass = class {
  constructor() {
    this.listeners = new Set();
    this.config = DEFAULT_FIELD_CONFIG;
    const capacity = DEFAULT_USER_CAPACITY;
    const emotional = DEFAULT_EMOTIONAL_STATE;
    this.context = {
      energy: createFieldValue(deriveEnergyField(capacity)),
      attention: createFieldValue(deriveAttentionField(capacity)),
      emotionalValence: createFieldValue(deriveEmotionalValenceField(emotional)),
      userCapacity: capacity,
      emotionalState: emotional
    };
  }
  /**
   * Get current ambient context (read-only)
   */
  getContext() {
    return this.context;
  }
  /**
   * Update user capacity (Phase 1 slider system writes here).
   * Merges the partial update and re-derives energy and attention fields.
   */
  updateCapacity(capacity) {
    const merged = { ...this.context.userCapacity, ...capacity };
    this.context = {
      ...this.context,
      userCapacity: merged,
      energy: createFieldValue(deriveEnergyField(merged), this.context.energy),
      attention: createFieldValue(deriveAttentionField(merged), this.context.attention)
    };
    this.notifyListeners();
  }
  /**
   * Update emotional state (Phase 1 slider system writes here).
   * Merges the partial update and re-derives the valence field.
   */
  updateEmotionalState(state) {
    const merged = { ...this.context.emotionalState, ...state };
    this.context = {
      ...this.context,
      emotionalState: merged,
      emotionalValence: createFieldValue(deriveEmotionalValenceField(merged), this.context.emotionalValence)
    };
    this.notifyListeners();
  }
  /**
   * Subscribe to field changes; returns an unsubscribe function.
   */
  subscribe(listener) {
    this.listeners.add(listener);
    return () => {
      this.listeners.delete(listener);
    };
  }
  /**
   * Notify all listeners of field changes. A throwing listener is logged
   * and must not prevent the remaining listeners from running.
   */
  notifyListeners() {
    for (const listener of this.listeners) {
      try {
        listener(this.context);
      } catch (error) {
        console.error("[v0] Field listener error:", error);
      }
    }
  }
  /**
   * Merge a partial field-configuration update.
   */
  updateConfig(config) {
    this.config = { ...this.config, ...config };
  }
  /**
   * Get current field configuration
   */
  getConfig() {
    return this.config;
  }
};
|
|
297
|
+
// Module-level singleton: provider/hook code shares this one instance.
var FieldManager = new FieldManagerClass();
|
|
298
|
+
|
|
299
|
+
// lib/capacity/mode.ts

/**
 * Map a raw capacity field reading onto discrete presentation settings
 * (density, guidance, motion, contrast, choiceLoad, focus, pace).
 */
function deriveMode(field) {
  const { cognitive, temporal, emotional, valence } = field;
  const lowCognitive = cognitive < 0.4;
  const highCognitive = cognitive > 0.7;
  const lowTemporal = temporal < 0.4;

  // Information density and decision load scale with cognitive/temporal room.
  const density = lowCognitive ? "low" : highCognitive ? "high" : "medium";
  const choiceLoad = lowTemporal ? "minimal" : "normal";
  const guidance = lowCognitive ? "high" : lowTemporal ? "medium" : "low";

  // Motion: off when emotional capacity is nearly gone, soothing when low,
  // expressive only with high emotional capacity AND positive valence.
  let motion;
  if (emotional < 0.15) {
    motion = "off";
  } else if (emotional < 0.4) {
    motion = "soothing";
  } else if (emotional > 0.6 && valence > 0.15) {
    motion = "expressive";
  } else {
    motion = "subtle";
  }

  const contrast = valence < -0.15 ? "boosted" : "standard";

  let focus;
  if (motion === "off") {
    focus = "default";
  } else if (lowCognitive) {
    focus = "guided";
  } else if (!highCognitive) {
    focus = "gentle";
  } else {
    focus = "default";
  }

  const arousal = field.arousal ?? 0.5; // missing arousal reads as neutral
  const pace = arousal < 0.35 ? "calm" : arousal > 0.65 ? "activated" : "neutral";

  return { density, guidance, motion, contrast, choiceLoad, focus, pace };
}
|
|
320
|
+
/**
 * Summarise raw capacity inputs as a human-readable mode label.
 * Order matters: Exploratory and Minimal take precedence over Focused,
 * and Calm is the fall-through.
 */
function deriveModeLabel(inputs) {
  const { cognitive, temporal, emotional } = inputs;
  const exploratory = cognitive > 0.6 && emotional > 0.6;
  if (exploratory) return "Exploratory";
  const minimal = cognitive < 0.4 && temporal < 0.4;
  if (minimal) return "Minimal";
  const focused = cognitive >= 0.55 && temporal >= 0.55;
  if (focused) return "Focused";
  return "Calm";
}
|
|
333
|
+
/** OKLCH badge colour per mode label; grey fallback for unknown labels. */
function getModeBadgeColor(label) {
  const palette = {
    Calm: "oklch(0.65 0.15 220)", // Soft blue
    Focused: "oklch(0.68 0.16 45)", // Primary rust
    Exploratory: "oklch(0.65 0.2 135)", // Toxic green
    Minimal: "oklch(0.55 0.1 280)" // Muted purple
  };
  // Own-property check so inherited keys (e.g. "toString") still fall back.
  return Object.prototype.hasOwnProperty.call(palette, label)
    ? palette[label]
    : "oklch(0.5 0 0)";
}
|
|
351
|
+
|
|
352
|
+
// lib/capacity/signals/detectors/time-detector.ts
// Medium weight — time of day is significant but a broad generalisation.
var TimeDetector = class {
  constructor() {
    this.name = "TimeDetector";
    this.weight = 0.6;
  }
  /**
   * Detects and returns SignalReadings based on the current time and day.
   * Returns two readings: cognitive (hour-of-day) and temporal
   * (weekday/weekend).
   */
  detect() {
    const now = new Date();
    const hour = now.getHours();
    const dayOfWeek = now.getDay();
    const ts = now.getTime();

    // Hour-of-day alertness curve (population average, not personalised).
    let cognitiveValue = 0.7; // default covers 6-9 and 12-14
    if (hour >= 9 && hour < 12) {
      cognitiveValue = 0.8; // morning peak
    } else if (hour >= 14 && hour < 17) {
      cognitiveValue = 0.6; // post-lunch dip
    } else if (hour >= 17 && hour < 20) {
      cognitiveValue = 0.5; // evening wind-down
    } else if (hour >= 20 || hour < 6) {
      cognitiveValue = 0.3; // late night
    }

    const isWeekday = dayOfWeek >= 1 && dayOfWeek <= 5;
    const temporalValue = isWeekday ? 0.7 : 0.9;

    return [
      {
        dimension: "cognitive",
        value: cognitiveValue,
        confidence: 0.7, // Medium — population average, not personalised
        timestamp: ts,
        detectorName: this.name
      },
      {
        dimension: "temporal",
        value: temporalValue,
        confidence: 0.6, // Slightly lower — weekday/weekend is a coarser signal
        timestamp: ts,
        detectorName: this.name
      }
    ];
  }
};
|
|
401
|
+
|
|
402
|
+
// lib/capacity/signals/detectors/session-detector.ts
var SessionDetector = class {
  constructor() {
    this.name = "SessionDetector";
    this.weight = 0.7;
    this.sessionStartTime = Date.now();
  }
  /**
   * Detects and returns a SignalReading based on the current session
   * duration: the longer the session, the less fresh time capacity the user
   * is assumed to have.
   *
   * @returns {SignalReading[]} single temporal reading
   */
  detect() {
    const now = Date.now();
    const minutes = (now - this.sessionStartTime) / 6e4;
    let temporalValue;
    let confidence;
    if (minutes < 15) {
      temporalValue = 0.9; // fresh session
      confidence = 0.8;
    } else if (minutes < 60) {
      temporalValue = 0.7;
      confidence = 0.7;
    } else if (minutes < 180) {
      temporalValue = 0.5;
      confidence = 0.6;
    } else {
      temporalValue = 0.3; // marathon session — likely fatigued
      confidence = 0.7;
    }
    return [{
      dimension: "temporal",
      value: temporalValue,
      confidence,
      timestamp: now,
      detectorName: this.name
    }];
  }
};
|
|
442
|
+
|
|
443
|
+
// lib/capacity/signals/detectors/scroll-detector.ts
var DEBOUNCE_TIME_MS = 100;

var ScrollDetector = class {
  constructor() {
    this.name = "ScrollDetector";
    this.weight = 0.5; // Moderate — scroll velocity hints at engagement or frustration
    this.lastScrollY = 0;
    this.lastScrollTime = 0;
    this.scrollVelocity = 0;
    this.timeoutId = null;
    /**
     * Debounced scroll handler: samples position after DEBOUNCE_TIME_MS of
     * quiet and converts travelled distance into px/second velocity.
     * @private
     */
    this.handleScroll = () => {
      if (this.timeoutId) {
        clearTimeout(this.timeoutId);
      }
      this.timeoutId = setTimeout(() => {
        const now = Date.now();
        const y = window.scrollY;
        const travelled = Math.abs(y - this.lastScrollY);
        const elapsed = now - this.lastScrollTime;
        this.scrollVelocity = elapsed > 0 ? (travelled / elapsed) * 1e3 : 0;
        this.lastScrollY = y;
        this.lastScrollTime = now;
      }, DEBOUNCE_TIME_MS);
    };
    if (typeof window !== "undefined") {
      window.addEventListener("scroll", this.handleScroll, { passive: true });
    }
  }
  /**
   * Map the latest scroll velocity onto a cognitive-capacity reading:
   * very fast scrolling reads as skimming/overload, moderate as engagement.
   *
   * @returns {SignalReading[]} single cognitive reading
   */
  detect() {
    const now = Date.now();
    const velocity = this.scrollVelocity;
    let cognitiveValue = 0.5;
    let confidence = 0.5;
    if (velocity > 1500) {
      cognitiveValue = 0.4; // frantic skimming
      confidence = 0.6;
    } else if (velocity > 500) {
      cognitiveValue = 0.7; // active scanning
      confidence = 0.8;
    } else if (velocity > 50) {
      cognitiveValue = 0.6; // steady reading
      confidence = 0.7;
    }
    return [{
      dimension: "cognitive",
      value: cognitiveValue,
      confidence,
      timestamp: now,
      detectorName: this.name
    }];
  }
  /**
   * Remove the scroll listener and cancel any pending debounce timer.
   */
  destroy() {
    if (typeof window !== "undefined") {
      window.removeEventListener("scroll", this.handleScroll);
      if (this.timeoutId) {
        clearTimeout(this.timeoutId);
      }
    }
  }
};
|
|
523
|
+
|
|
524
|
+
// lib/capacity/signals/detectors/interaction-detector.ts
var IDLE_THRESHOLD_MS = 15e3;
var CLICK_WINDOW_MS = 6e4;

var InteractionDetector = class {
  constructor() {
    this.name = "InteractionDetector";
    this.weight = 0.7;
    this.lastMouseMoveTime = 0;
    this.lastClickTime = 0;
    this.lastClickPosition = null;
    this.clickHistory = []; // rolling 60-second window of { time, distance }
    this.idleTimer = null;
    this.isIdle = false;
    // Arm (or re-arm) the idle timer; any activity marks the user active.
    this.resetIdleTimer = () => {
      if (this.idleTimer) clearTimeout(this.idleTimer);
      this.isIdle = false;
      this.idleTimer = setTimeout(() => {
        this.isIdle = true;
      }, IDLE_THRESHOLD_MS);
    };
    this.handleMouseMove = () => {
      this.lastMouseMoveTime = Date.now();
      this.resetIdleTimer();
    };
    // Record click time plus distance from the previous click: tight rapid
    // clusters suggest focused work, long jumps suggest wandering.
    this.handleClick = (event) => {
      this.resetIdleTimer();
      const now = Date.now();
      this.lastClickTime = now;
      let distance = 0;
      if (this.lastClickPosition) {
        const dx = event.clientX - this.lastClickPosition.x;
        const dy = event.clientY - this.lastClickPosition.y;
        distance = Math.sqrt(dx * dx + dy * dy);
      }
      this.lastClickPosition = { x: event.clientX, y: event.clientY };
      this.clickHistory.push({ time: now, distance });
    };
    if (typeof window !== "undefined") {
      window.addEventListener("mousemove", this.handleMouseMove, { passive: true });
      window.addEventListener("click", this.handleClick, { passive: true });
    }
    this.resetIdleTimer();
  }
  /**
   * Map recent pointer activity onto a cognitive-capacity reading:
   * idle → disengaged; rapid tight clicking → highly focused;
   * large wandering click distances → low focus.
   */
  detect() {
    const now = Date.now();
    const cutoff = now - CLICK_WINDOW_MS;
    this.clickHistory = this.clickHistory.filter((click) => click.time >= cutoff);
    const clicks = this.clickHistory.length;
    const totalDistance = this.clickHistory.reduce((sum, click) => sum + click.distance, 0);
    const avgDistance = clicks > 0 ? totalDistance / clicks : 0;
    const sinceLastClick = now - this.lastClickTime;

    let cognitiveValue = 0.5;
    let confidence = 0.5;
    if (this.isIdle) {
      cognitiveValue = 0.4;
      confidence = 0.6;
    } else if (sinceLastClick < 500 && clicks > 5 && avgDistance < 20) {
      cognitiveValue = 0.9; // rapid, precise clicking
      confidence = 0.9;
    } else if (sinceLastClick < 1500 && clicks > 1 && avgDistance < 50) {
      cognitiveValue = 0.7;
      confidence = 0.7;
    } else if (avgDistance > 100) {
      cognitiveValue = 0.3; // wandering pointer
      confidence = 0.6;
    }
    return [{
      dimension: "cognitive",
      value: cognitiveValue,
      confidence,
      timestamp: now,
      detectorName: this.name
    }];
  }
  /** Detach listeners and cancel the idle timer. */
  destroy() {
    if (typeof window !== "undefined") {
      window.removeEventListener("mousemove", this.handleMouseMove);
      window.removeEventListener("click", this.handleClick);
    }
    if (this.idleTimer) clearTimeout(this.idleTimer);
  }
};
|
|
609
|
+
|
|
610
|
+
// lib/capacity/signals/detectors/input-detector.ts
var TYPING_SPEED_SAMPLE_SIZE = 10;
var ERROR_CHECK_WINDOW = 5e3;

var InputDetector = class {
  constructor() {
    this.name = "InputDetector";
    this.weight = 0.6;
    this.keyPressTimes = []; // rolling sample of the last N keydown timestamps
    this.errorTimes = []; // timestamps of recent Backspace/Delete presses
    this.handleKeyDown = (event) => {
      const now = Date.now();
      this.keyPressTimes.push(now);
      if (this.keyPressTimes.length > TYPING_SPEED_SAMPLE_SIZE) {
        this.keyPressTimes.shift();
      }
      const isCorrection = event.key === "Backspace" || event.key === "Delete";
      if (isCorrection) {
        this.errorTimes.push(now);
      }
    };
    if (typeof window !== "undefined") {
      window.addEventListener("keydown", this.handleKeyDown, { passive: true });
    }
  }
  /**
   * Infer cognitive capacity from typing cadence and correction rate:
   * fast error-free typing reads as flow, slow or error-heavy input as
   * strain.
   */
  detect() {
    const now = Date.now();
    // Keystrokes-per-minute over the sampled window.
    let typingSpeedCPM = 0;
    if (this.keyPressTimes.length > 1) {
      const span = this.keyPressTimes[this.keyPressTimes.length - 1] - this.keyPressTimes[0];
      if (span > 0) {
        typingSpeedCPM = (this.keyPressTimes.length / span) * 6e4;
      }
    }
    const cutoff = now - ERROR_CHECK_WINDOW;
    this.errorTimes = this.errorTimes.filter((t) => t >= cutoff);
    const recentErrorCount = this.errorTimes.length;

    let cognitiveValue;
    let confidence;
    if (typingSpeedCPM > 100 && recentErrorCount === 0) {
      cognitiveValue = 0.9; // fluent, error-free typing
      confidence = 0.8;
    } else if (typingSpeedCPM > 40 && recentErrorCount <= 1) {
      cognitiveValue = 0.7;
      confidence = 0.7;
    } else if (recentErrorCount > 2 || typingSpeedCPM < 20) {
      cognitiveValue = 0.4; // slow or correction-heavy input
      confidence = 0.6;
    } else {
      cognitiveValue = 0.6;
      confidence = 0.5;
    }
    return [{
      dimension: "cognitive",
      value: cognitiveValue,
      confidence,
      timestamp: now,
      detectorName: this.name
    }];
  }
  /** Detach the keydown listener. */
  destroy() {
    if (typeof window !== "undefined") {
      window.removeEventListener("keydown", this.handleKeyDown);
    }
  }
};
|
|
675
|
+
|
|
676
|
+
// lib/capacity/signals/detectors/environment-detector.ts
var EnvironmentDetector = class {
  constructor() {
    this.name = "EnvironmentDetector";
    this.weight = 0.8; // High weight — these are explicit user preferences
    this.mqlReducedMotion = null;
    this.mqlDarkMode = null;
    // Intentional no-op: readings are pulled on demand in detect(); the
    // stored reference is what lets destroy() unregister cleanly.
    this.handleChange = () => {
    };
    if (typeof window !== "undefined") {
      this.mqlReducedMotion = window.matchMedia("(prefers-reduced-motion: reduce)");
      this.mqlDarkMode = window.matchMedia("(prefers-color-scheme: dark)");
      this.mqlReducedMotion.addEventListener("change", this.handleChange);
      this.mqlDarkMode.addEventListener("change", this.handleChange);
    }
  }
  /**
   * Returns two readings:
   * - temporal: based on prefers-reduced-motion (low → less time pressure on animations)
   * - emotional: based on prefers-color-scheme (dark → slightly lower emotional load)
   */
  detect() {
    const now = Date.now();
    const reducedMotion = this.mqlReducedMotion
      ? this.mqlReducedMotion.matches
      : typeof window !== "undefined" && window.matchMedia("(prefers-reduced-motion: reduce)").matches;
    const darkMode = this.mqlDarkMode
      ? this.mqlDarkMode.matches
      : typeof window !== "undefined" && window.matchMedia("(prefers-color-scheme: dark)").matches;
    return [
      {
        dimension: "temporal",
        // prefers-reduced-motion → user may have lower tolerance for demanding UIs
        value: reducedMotion ? 0.3 : 0.8,
        confidence: 0.9,
        timestamp: now,
        detectorName: this.name
      },
      {
        dimension: "emotional",
        // Dark mode preference → slightly lower emotional capacity or reduced-stimulation preference
        value: darkMode ? 0.6 : 0.7,
        confidence: 0.9,
        timestamp: now,
        detectorName: this.name
      }
    ];
  }
  /**
   * Removes the event listeners registered in the constructor.
   * Uses the stored refs so the same function reference is unregistered.
   */
  destroy() {
    this.mqlReducedMotion?.removeEventListener("change", this.handleChange);
    this.mqlDarkMode?.removeEventListener("change", this.handleChange);
    this.mqlReducedMotion = null;
    this.mqlDarkMode = null;
  }
};
|
|
733
|
+
|
|
734
|
+
// lib/capacity/signals/aggregator.ts
var _SignalAggregator = class _SignalAggregator {
  constructor() {
    // One instance of each built-in detector; each contributes weighted
    // readings per capacity dimension.
    this.detectors = [
      new TimeDetector(),
      new SessionDetector(),
      new ScrollDetector(),
      new InteractionDetector(),
      new InputDetector(),
      new EnvironmentDetector()
    ];
  }
  /**
   * Collects signal readings from all detectors and aggregates them into a
   * confidence-weighted CapacityField.
   *
   * Detectors are independent of each other, so they are queried in
   * parallel with Promise.all instead of being awaited one-by-one.
   * Dimensions with no contributing readings fall back to a neutral value
   * (0.5 for capacities, 0 for valence).
   */
  async aggregateSignals() {
    // detect() may return a plain array or a promise of one; Promise.all
    // resolves both uniformly.
    const perDetector = await Promise.all(this.detectors.map((detector) => detector.detect()));
    const readings = [];
    for (const detectorReadings of perDetector) {
      readings.push(...detectorReadings);
    }
    const weightedSums = { cognitive: 0, temporal: 0, emotional: 0, valence: 0 };
    const totalWeights = { cognitive: 0, temporal: 0, emotional: 0, valence: 0 };
    for (const reading of readings) {
      // Each reading contributes value × (its confidence × detector weight).
      const effectiveWeight = reading.confidence * this.getDetectorWeight(reading.dimension, reading.detectorName);
      weightedSums[reading.dimension] += reading.value * effectiveWeight;
      totalWeights[reading.dimension] += effectiveWeight;
    }
    return {
      cognitive: totalWeights.cognitive > 0 ? weightedSums.cognitive / totalWeights.cognitive : 0.5,
      temporal: totalWeights.temporal > 0 ? weightedSums.temporal / totalWeights.temporal : 0.5,
      emotional: totalWeights.emotional > 0 ? weightedSums.emotional / totalWeights.emotional : 0.5,
      valence: totalWeights.valence > 0 ? weightedSums.valence / totalWeights.valence : 0
    };
  }
  /**
   * Returns the effective weight for a detector/dimension pair.
   * Checks DIMENSION_WEIGHTS first; falls back to detector.weight (0 if
   * the detector is unknown or declares no weight).
   */
  getDetectorWeight(dimension, detectorName) {
    const overrides = _SignalAggregator.DIMENSION_WEIGHTS[detectorName];
    if (overrides && overrides[dimension] !== void 0) {
      return overrides[dimension];
    }
    const detector = this.detectors.find((d) => d.name === detectorName);
    return detector && detector.weight != null ? detector.weight : 0;
  }
  /** Cleans up all detector resources (event listeners, timers). */
  destroy() {
    for (const detector of this.detectors) {
      if (typeof detector.destroy === "function") {
        detector.destroy();
      }
    }
  }
};
/**
 * Per-detector, per-dimension weight overrides.
 * Falls back to detector.weight for any unlisted combination.
 *
 * Rationale for asymmetries:
 * - TimeDetector: cognitive signal is stronger (diurnal pattern) than temporal
 *   (weekday/weekend is coarser)
 * - EnvironmentDetector: emotional signal (color scheme) is a stronger explicit
 *   preference than temporal (reduced-motion)
 */
_SignalAggregator.DIMENSION_WEIGHTS = {
  TimeDetector: { cognitive: 0.6, temporal: 0.5 },
  EnvironmentDetector: { emotional: 0.8, temporal: 0.7 },
  InteractionDetector: { cognitive: 0.7 },
  InputDetector: { cognitive: 0.6 },
  SessionDetector: { temporal: 0.7 },
  ScrollDetector: { cognitive: 0.5 }
};
var SignalAggregator = _SignalAggregator;
|
|
819
|
+
// React context carrying capacity state + controls; defaults to null so
// useCapacityContext can throw when no CapacityProvider is mounted.
var CapacityContext = react.createContext(null);
// Smoothing factor for the auto-mode exponential moving average (applyEMA):
// each new sample contributes 20%, the running value keeps 80%.
var AUTO_EMA_ALPHA = 0.2;
|
|
821
|
+
/**
 * Exponential moving average over all four field dimensions.
 * Each dimension becomes prev * (1 - alpha) + next * alpha.
 */
function applyEMA(prev, next, alpha) {
  const keep = 1 - alpha;
  const blend = (a, b) => a * keep + b * alpha;
  return {
    cognitive: blend(prev.cognitive, next.cognitive),
    temporal: blend(prev.temporal, next.temporal),
    emotional: blend(prev.emotional, next.emotional),
    valence: blend(prev.valence, next.valence)
  };
}
|
|
829
|
+
/**
 * React provider that owns the capacity state machine.
 * - Subscribes to FieldManager for context updates.
 * - In auto mode, polls a SignalAggregator every 2s, smooths readings with
 *   an EMA (AUTO_EMA_ALPHA) and pushes the result into FieldManager.
 * - Manual updates (updateCapacity/updateEmotionalState) switch auto mode off.
 */
function CapacityProvider({ children }) {
  const [context, setContext] = react.useState(() => FieldManager.getContext());
  const [isAutoMode, setIsAutoMode] = react.useState(true);
  const [hapticEnabled, setHapticEnabled] = react.useState(false);
  const [sonicEnabled, setSonicEnabled] = react.useState(false);
  // Refs (not state): internal to the polling loop, must not trigger renders.
  const isFirstAggregationComplete = react.useRef(false);
  const smoothedFieldRef = react.useRef(null);
  const aggregatorRef = react.useRef(null);
  // Mount: create the aggregator and mirror FieldManager's context into state.
  react.useEffect(() => {
    aggregatorRef.current = new SignalAggregator();
    const unsubscribe = FieldManager.subscribe((newContext) => {
      setContext(newContext);
    });
    return () => {
      unsubscribe();
      if (aggregatorRef.current) {
        aggregatorRef.current.destroy();
      }
    };
  }, []);
  // Auto-mode polling loop; restarted whenever auto mode toggles.
  react.useEffect(() => {
    let intervalId;
    if (isAutoMode && aggregatorRef.current) {
      // Reset smoothing so stale values from a previous auto session
      // don't bleed into this one.
      isFirstAggregationComplete.current = false;
      smoothedFieldRef.current = null;
      intervalId = setInterval(() => __async(null, null, function* () {
        var _a;
        try {
          const suggestedField = yield aggregatorRef.current.aggregateSignals();
          if (!isFirstAggregationComplete.current) {
            // First tick only seeds the EMA.
            // NOTE(review): the seed value is never pushed to FieldManager,
            // so the first applied update happens on the second tick (~4s in)
            // — confirm this delay is intentional.
            isFirstAggregationComplete.current = true;
            smoothedFieldRef.current = suggestedField;
          } else {
            smoothedFieldRef.current = applyEMA(
              (_a = smoothedFieldRef.current) != null ? _a : suggestedField,
              suggestedField,
              AUTO_EMA_ALPHA
            );
            const smoothed = smoothedFieldRef.current;
            FieldManager.updateCapacity({
              cognitive: smoothed.cognitive,
              temporal: smoothed.temporal,
              emotional: smoothed.emotional
            });
            FieldManager.updateEmotionalState({
              valence: smoothed.valence
            });
          }
        } catch (error) {
          // Best-effort: a failed aggregation tick is logged, not fatal.
          console.warn("[CapacityProvider] Signal aggregation failed:", error);
        }
      }), 2e3);
    }
    return () => {
      if (intervalId) {
        clearInterval(intervalId);
      }
    };
  }, [isAutoMode]);
  // Manual capacity update — taking manual control disables auto mode.
  const updateCapacity = react.useCallback((capacity) => {
    if (isAutoMode) {
      setIsAutoMode(false);
    }
    FieldManager.updateCapacity(capacity);
  }, [isAutoMode]);
  // Manual emotional-state update — also disables auto mode.
  const updateEmotionalState = react.useCallback((state) => {
    if (isAutoMode) {
      setIsAutoMode(false);
    }
    FieldManager.updateEmotionalState(state);
  }, [isAutoMode]);
  // Writes a whole CapacityField at once; does NOT toggle auto mode
  // (used internally / by programmatic field pushes).
  const updateCapacityField = react.useCallback((field) => {
    FieldManager.updateCapacity({
      cognitive: field.cognitive,
      temporal: field.temporal,
      emotional: field.emotional
    });
    FieldManager.updateEmotionalState({
      valence: field.valence
    });
  }, []);
  const toggleAutoMode = react.useCallback(() => {
    setIsAutoMode((prev) => !prev);
  }, []);
  return /* @__PURE__ */ jsxRuntime.jsx(CapacityContext.Provider, { value: {
    context,
    updateCapacity,
    updateEmotionalState,
    isAutoMode,
    toggleAutoMode,
    updateCapacityField,
    hapticEnabled,
    sonicEnabled,
    setHapticEnabled,
    setSonicEnabled
  }, children });
}
|
|
926
|
+
/**
 * Accessor for the capacity context; throws when called outside a
 * CapacityProvider (the context default is null).
 */
function useCapacityContext() {
  const value = react.useContext(CapacityContext);
  if (!value) {
    throw new Error("useCapacityContext must be used within CapacityProvider");
  }
  return value;
}
|
|
933
|
+
/** Selects the energy field from the capacity context. */
function useEnergyField() {
  return useCapacityContext().context.energy;
}
/** Selects the attention field from the capacity context. */
function useAttentionField() {
  return useCapacityContext().context.attention;
}
/** Selects the emotional-valence field from the capacity context. */
function useEmotionalValenceField() {
  return useCapacityContext().context.emotionalValence;
}
|
|
945
|
+
/** Exposes only the mutating controls of the capacity context. */
function useFieldControls() {
  const ctx = useCapacityContext();
  return {
    updateCapacity: ctx.updateCapacity,
    updateEmotionalState: ctx.updateEmotionalState,
    isAutoMode: ctx.isAutoMode,
    toggleAutoMode: ctx.toggleAutoMode,
    updateCapacityField: ctx.updateCapacityField
  };
}
|
|
949
|
+
/**
 * Tracks the OS-level prefers-reduced-motion media query.
 * Starts false, syncs on mount (effect only — SSR-safe), and follows
 * subsequent "change" events until unmount.
 */
function usePrefersReducedMotion() {
  const [reduced, setReduced] = react.useState(false);
  react.useEffect(() => {
    const mql = window.matchMedia("(prefers-reduced-motion: reduce)");
    const sync = (event) => setReduced(event.matches);
    setReduced(mql.matches);
    mql.addEventListener("change", sync);
    return () => mql.removeEventListener("change", sync);
  }, []);
  return reduced;
}
|
|
962
|
+
/**
 * Flattens the capacity context into a CapacityField and derives the
 * current UI mode from it via deriveMode.
 */
function useDerivedMode() {
  const { context } = useCapacityContext();
  const { userCapacity, emotionalState } = context;
  const field = {
    cognitive: userCapacity.cognitive,
    temporal: userCapacity.temporal,
    emotional: userCapacity.emotional,
    valence: emotionalState.valence,
    arousal: emotionalState.arousal
  };
  return { field, mode: deriveMode(field) };
}
|
|
974
|
+
/**
 * Resolves the effective motion mode: the mode derived from the current
 * capacity field, overridden to "off" when the user prefers reduced motion.
 * Returns the mode, its MOTION_TOKENS entry, and the raw preference flag.
 *
 * Fix: the original re-ran deriveMode(field) even though useDerivedMode
 * already returns deriveMode(field) for the same field — the duplicate
 * computation is removed.
 */
function useEffectiveMotion() {
  const { mode } = useDerivedMode();
  const prefersReducedMotion = usePrefersReducedMotion();
  // Reduced-motion preference always wins over the derived mode.
  const effectiveMode = prefersReducedMotion ? "off" : mode.motion;
  return {
    mode: effectiveMode,
    tokens: MOTION_TOKENS[effectiveMode],
    prefersReducedMotion
  };
}
|
|
985
|
+
/**
 * Haptic/sonic feedback controls plus a fire(pattern) trigger.
 * fire is a stable callback that re-creates only when the enable flags or
 * the current pace change; each channel fires only when enabled.
 */
function useFeedback() {
  const ctx = useCapacityContext();
  const { mode } = useDerivedMode();
  const fire = react.useCallback(
    (pattern = "tap") => {
      if (ctx.hapticEnabled) triggerHaptic(pattern);
      if (ctx.sonicEnabled) playPacedSonic(mode.pace);
    },
    [ctx.hapticEnabled, ctx.sonicEnabled, mode.pace]
  );
  return {
    hapticEnabled: ctx.hapticEnabled,
    sonicEnabled: ctx.sonicEnabled,
    setHapticEnabled: ctx.setHapticEnabled,
    setSonicEnabled: ctx.setSonicEnabled,
    fire
  };
}
|
|
994
|
+
/**
 * Motion tokens scaled by the current pace:
 * calm → durations ×1.5, activated → ×0.65, otherwise unchanged.
 * Reduced-motion preference forces the "calm" pace.
 */
function usePacedMotionTokens() {
  const { mode } = useDerivedMode();
  const { mode: effectiveMotion, tokens: baseTokens, prefersReducedMotion } = useEffectiveMotion();
  const effectivePace = prefersReducedMotion ? "calm" : mode.pace;
  let multiplier;
  if (effectivePace === "calm") {
    multiplier = 1.5;
  } else if (effectivePace === "activated") {
    multiplier = 0.65;
  } else {
    multiplier = 1;
  }
  const tokens = Object.assign({}, baseTokens, {
    durationFast: Math.round(baseTokens.durationFast * multiplier),
    durationBase: Math.round(baseTokens.durationBase * multiplier),
    durationSlow: Math.round(baseTokens.durationSlow * multiplier)
  });
  return { mode: effectiveMotion, pace: effectivePace, tokens };
}
|
|
1009
|
+
|
|
1010
|
+
// lib/capacity/animation.ts
// Entrance animation class names per preset × motion mode.
var ENTRANCE_PRESETS = {
  /** Liquid organic morph -> gentle scale fade -> soft bloom -> none */
  morph: { expressive: "morph-fade-in", subtle: "sacred-fade", soothing: "bloom", off: "" },
  /** Spinning vortex -> gentle scale fade -> soft bloom -> none */
  vortex: { expressive: "vortex-reveal", subtle: "sacred-fade", soothing: "bloom", off: "" },
  /** Spiral in from corner -> soft bloom -> soft bloom -> none */
  spiral: { expressive: "spiral-in", subtle: "bloom", soothing: "bloom", off: "" }
};
/**
 * Entrance CSS class for a preset under a given motion mode.
 * Empty once the entrance has already played.
 */
function entranceClass(motion, preset, hasPlayed) {
  return hasPlayed ? "" : ENTRANCE_PRESETS[preset][motion];
}
|
|
1023
|
+
/** Hover CSS class for a motion mode ("" for "off" / unknown modes). */
function hoverClass(motion) {
  switch (motion) {
    case "expressive":
      return "hover-expand";
    case "subtle":
    case "soothing":
      return "hover-lift";
    default:
      return "";
  }
}
|
|
1028
|
+
/**
 * Ambient animation class: expressive mode allows any type; soothing mode
 * allows only the gentle "breathe"/"float" types; everything else is off.
 */
function ambientClass(motion, type) {
  if (motion === "expressive") {
    return type;
  }
  const gentleType = type === "breathe" || type === "float";
  return motion === "soothing" && gentleType ? type : "";
}
|
|
1033
|
+
/** List-item entrance class per motion mode ("" for "off" / unknown). */
function listItemClass(motion) {
  switch (motion) {
    case "expressive":
      return "helix-rise";
    case "subtle":
    case "soothing":
      return "sacred-fade";
    default:
      return "";
  }
}
|
|
1038
|
+
/** Focus-beacon classes per focus mode ("" for anything else). */
function focusBeaconClass(focus) {
  return focus === "guided"
    ? "attention-beacon focus-highlight"
    : focus === "gentle"
      ? "gentle-beacon gentle-highlight"
      : "";
}
|
|
1043
|
+
/** Focus text-emphasis class per focus mode ("" for anything else). */
function focusTextClass(focus) {
  return focus === "guided" ? "attention-text" : focus === "gentle" ? "gentle-text" : "";
}
|
|
1048
|
+
|
|
1049
|
+
// lib/capacity/utils/typography.ts
// Golden-ratio (φ) typography scale plus energy/attention adjustments.
var BASE_FONT_SIZE = 16;
// Hard floor for any computed size (px) — keeps text readable after bias/jitter.
var MIN_FONT_SIZE = 14;
// Maximum random size variation: ±5% (see getFontSize).
var JITTER_FACTOR = 0.05;
// φ-exponent per typographic role; fed to modularScale(step, base).
var SCALE_STEPS = {
  h1: 4,
  // φ^4 ≈ 6.85x base
  h2: 3,
  // φ^3 ≈ 4.24x base
  h3: 2,
  // φ^2 ≈ 2.62x base
  h4: 1,
  // φ^1 ≈ 1.62x base
  body: 0,
  // φ^0 = 1x base
  label: -0.5,
  // φ^-0.5 ≈ 0.79x base
  caption: -1
  // φ^-1 ≈ 0.62x base
};
// Multiplier applied to font size per energy level.
var ENERGY_BIAS = {
  low: 1.05,
  // +5% for better readability when tired
  medium: 1,
  // Neutral
  high: 0.95
  // -5% for higher density when alert
};
// Numeric font-weight per attention level.
var ATTENTION_WEIGHT = {
  low: 400,
  // Regular
  medium: 450,
  // Medium
  high: 500
  // Medium-bold for focus
};
// Letter-spacing (em) per attention level.
var ATTENTION_TRACKING = {
  low: 0.02,
  // Loose tracking for comfortable reading
  medium: 0,
  // Normal
  high: -0.01
  // Tight tracking for focus
};
|
|
1093
|
+
/**
 * Golden-ratio modular scale: base × φ^step.
 * step may be negative or fractional (see SCALE_STEPS).
 */
function modularScale(step, base = BASE_FONT_SIZE) {
  return base * PHI ** step;
}
|
|
1096
|
+
/**
 * Pixel font size for a typographic role:
 * φ-scaled base size × optional ±5% random jitter × energy bias,
 * floored at minSize.
 * @param role    key of SCALE_STEPS (h1 … caption)
 * @param energy  "low" | "medium" | "high" (ENERGY_BIAS multiplier)
 * @param options { base?, jitter?, minSize? }
 */
function getFontSize(role, energy = "medium", options) {
  const opts = options || {};
  const base = opts.base !== void 0 ? opts.base : BASE_FONT_SIZE;
  const jitter = opts.jitter !== void 0 ? opts.jitter : true;
  const minSize = opts.minSize !== void 0 ? opts.minSize : MIN_FONT_SIZE;
  const scaled = modularScale(SCALE_STEPS[role], base);
  // Random jitter of up to ±JITTER_FACTOR for organic variation.
  const offset = jitter ? (Math.random() - 0.5) * 2 * JITTER_FACTOR : 0;
  const jittered = scaled * (1 + offset);
  return Math.max(jittered * ENERGY_BIAS[energy], minSize);
}
|
|
1105
|
+
/**
 * Numeric font-weight for an attention level
 * (ATTENTION_WEIGHT: low 400, medium 450, high 500).
 */
function getFontWeight(attention = "medium") {
  return ATTENTION_WEIGHT[attention];
}
/**
 * Letter-spacing (em) for an attention level
 * (ATTENTION_TRACKING: positive = looser, negative = tighter).
 */
function getLetterSpacing(attention = "medium") {
  return ATTENTION_TRACKING[attention];
}
|
|
1111
|
+
/**
 * Unitless line-height per typographic role; headings tighter, body looser.
 */
function getLineHeight(role) {
  const LINE_HEIGHTS = {
    h1: 1.2,
    h2: 1.25,
    h3: 1.3,
    h4: 1.35,
    body: 1.5,
    label: 1.4,
    caption: 1.45
  };
  return LINE_HEIGHTS[role];
}
|
|
1123
|
+
/**
 * Bundles the four typography helpers into a ready-to-use CSS style object
 * (fontSize in px, letterSpacing in em).
 */
function getTypographyStyles(role, energy = "medium", attention = "medium") {
  // Same evaluation order as before: getFontSize consumes Math.random.
  const fontSize = `${getFontSize(role, energy)}px`;
  const fontWeight = getFontWeight(attention);
  const lineHeight = getLineHeight(role);
  const letterSpacing = `${getLetterSpacing(attention)}em`;
  return { fontSize, fontWeight, lineHeight, letterSpacing };
}
|
|
1131
|
+
/**
 * CSS clamp() expression that scales a role's size (jitter disabled) up to
 * 1.2× across the 320px–1920px viewport range.
 */
function getFluidFontSize(role, energy = "medium") {
  const min = getFontSize(role, energy, { jitter: false });
  const max = min * 1.2;
  const delta = max - min;
  return `clamp(${min}px, ${min}px + (${delta}) * ((100vw - 320px) / 1600), ${max}px)`;
}
|
|
1136
|
+
// Spacing scale: each Fibonacci number × a 4px base unit.
var SPACING_BASE = 4;
// NOTE(review): FIBONACCI is defined elsewhere in this file; its exact
// values are not visible here.
var SPACING_SCALE = FIBONACCI.map((f) => f * SPACING_BASE);
|
|
1138
|
+
/**
 * Spacing value for a scale step, clamped into SPACING_SCALE's bounds.
 * @param step index into SPACING_SCALE (clamped to [0, length-1])
 * @param unit "px" (default, string), "rem" (string, trailing zeros
 *             trimmed), or "raw" (number of pixels)
 */
function getSpacing(step, unit = "px") {
  const index = Math.max(0, Math.min(step, SPACING_SCALE.length - 1));
  const px = SPACING_SCALE[index];
  switch (unit) {
    case "raw":
      return px;
    case "rem":
      // 16px root assumption; strip trailing zeros ("1.2500" -> "1.25").
      return `${(px / 16).toFixed(4).replace(/\.?0+$/, "")}rem`;
    default:
      return `${px}px`;
  }
}
|
|
1145
|
+
/**
 * Named spacing set shifted by density: "low" moves every step down one
 * notch, "high" up one notch, anything else is neutral.
 */
function getProportionalSpacing(density) {
  let shift;
  if (density === "low") {
    shift = -1;
  } else if (density === "high") {
    shift = 1;
  } else {
    shift = 0;
  }
  return {
    xs: getSpacing(2 + shift),
    sm: getSpacing(3 + shift),
    md: getSpacing(5 + shift),
    lg: getSpacing(7 + shift),
    gap: getSpacing(4 + shift)
  };
}
|
|
1155
|
+
/** φ raised to the given power (steps may be negative/fractional). */
function phiRatio(steps) {
  return PHI ** steps;
}
|
|
1158
|
+
|
|
1159
|
+
// lib/capacity/signals/signal-bus.ts
// Priority-aware pub/sub bus: signals are queued and drained in order,
// with "critical" signals placed at the front of the queue.
var SignalBusClass = class {
  constructor() {
    // type -> Set of handler functions
    this.handlers = /* @__PURE__ */ new Map();
    // Pending signals awaiting dispatch (drained by processQueue).
    this.signalQueue = [];
    // Re-entrancy guard: emits that occur while draining are appended and
    // picked up by the already-running while-loop in processQueue.
    this.processing = false;
  }
  /**
   * Emit a signal to all subscribed handlers
   * @param priority "critical" jumps the queue; everything else is FIFO.
   */
  emit(type, payload, priority = "normal", source) {
    const signal = {
      type,
      payload,
      timestamp: Date.now(),
      priority,
      source
    };
    if (priority === "critical") {
      this.signalQueue.unshift(signal);
    } else {
      this.signalQueue.push(signal);
    }
    this.processQueue();
  }
  /**
   * Subscribe to a specific signal type
   * @returns an unsubscribe function; empty handler sets are pruned from
   *          the map so getHandlerCount reflects live subscriptions only.
   */
  subscribe(type, handler) {
    if (!this.handlers.has(type)) {
      this.handlers.set(type, /* @__PURE__ */ new Set());
    }
    const handlers = this.handlers.get(type);
    handlers.add(handler);
    return () => {
      handlers.delete(handler);
      if (handlers.size === 0) {
        this.handlers.delete(type);
      }
    };
  }
  /**
   * Subscribe to multiple signal types with same handler
   * @returns a single unsubscribe function covering all types.
   */
  subscribeMultiple(types, handler) {
    const unsubscribers = types.map((type) => this.subscribe(type, handler));
    return () => {
      unsubscribers.forEach((unsub) => unsub());
    };
  }
  /**
   * Process signal queue
   * Drains signals in order; a throwing handler is logged and does not
   * prevent the remaining handlers/signals from running.
   */
  processQueue() {
    return __async(this, null, function* () {
      // Already draining (re-entrant emit) or nothing to do.
      if (this.processing || this.signalQueue.length === 0) {
        return;
      }
      this.processing = true;
      while (this.signalQueue.length > 0) {
        const signal = this.signalQueue.shift();
        const handlers = this.handlers.get(signal.type);
        if (handlers) {
          handlers.forEach((handler) => {
            try {
              handler(signal);
            } catch (error) {
              console.error(`[v0] Signal handler error for "${signal.type}":`, error);
            }
          });
        }
      }
      this.processing = false;
    });
  }
  /**
   * Get count of handlers for a signal type
   */
  getHandlerCount(type) {
    var _a, _b;
    return (_b = (_a = this.handlers.get(type)) == null ? void 0 : _a.size) != null ? _b : 0;
  }
  /**
   * Clear all handlers (useful for testing)
   */
  clear() {
    this.handlers.clear();
    this.signalQueue = [];
  }
};
|
|
1249
|
+
// Package-wide singleton bus instance.
var SignalBus = new SignalBusClass();
// Well-known signal type strings used with SignalBus.emit/subscribe.
var SIGNAL_TYPES = {
  // Field changes
  FIELD_ENERGY_CHANGED: "field:energy:changed",
  FIELD_ATTENTION_CHANGED: "field:attention:changed",
  FIELD_VALENCE_CHANGED: "field:valence:changed",
  // User interactions
  USER_INTERACTION_START: "user:interaction:start",
  USER_INTERACTION_END: "user:interaction:end",
  USER_FOCUS_CHANGED: "user:focus:changed",
  // Component lifecycle
  COMPONENT_MOUNTED: "component:mounted",
  COMPONENT_UNMOUNTED: "component:unmounted",
  // Accessibility
  A11Y_ANNOUNCE: "a11y:announce",
  A11Y_FOCUS_TRAP: "a11y:focus:trap"
};
|
|
1266
|
+
|
|
1267
|
+
// CommonJS export manifest for @harmonia-core/ui (generated bundle).
exports.CapacityProvider = CapacityProvider;
exports.DEFAULT_COMPONENT_RESPONSE = DEFAULT_COMPONENT_RESPONSE;
exports.FEEDBACK_FREQUENCIES = FEEDBACK_FREQUENCIES;
exports.FIBONACCI = FIBONACCI;
exports.FieldManager = FieldManager;
exports.HAPTIC_PATTERNS = HAPTIC_PATTERNS;
exports.MOTION_TOKENS = MOTION_TOKENS;
exports.PHI = PHI;
exports.PHI_INVERSE = PHI_INVERSE;
exports.SIGNAL_TYPES = SIGNAL_TYPES;
exports.SPACING_SCALE = SPACING_SCALE;
exports.SignalBus = SignalBus;
exports.ambientClass = ambientClass;
exports.deriveMode = deriveMode;
exports.deriveModeLabel = deriveModeLabel;
exports.entranceClass = entranceClass;
exports.focusBeaconClass = focusBeaconClass;
exports.focusTextClass = focusTextClass;
exports.getFluidFontSize = getFluidFontSize;
exports.getFontSize = getFontSize;
exports.getFontWeight = getFontWeight;
exports.getFrequencyForPace = getFrequencyForPace;
exports.getLetterSpacing = getLetterSpacing;
exports.getLineHeight = getLineHeight;
exports.getModeBadgeColor = getModeBadgeColor;
exports.getProportionalSpacing = getProportionalSpacing;
exports.getSpacing = getSpacing;
exports.getTypographyStyles = getTypographyStyles;
exports.hoverClass = hoverClass;
exports.listItemClass = listItemClass;
exports.modularScale = modularScale;
exports.phiRatio = phiRatio;
exports.playPacedSonic = playPacedSonic;
exports.playSonicFeedback = playSonicFeedback;
exports.triggerHaptic = triggerHaptic;
exports.useAttentionField = useAttentionField;
exports.useCapacityContext = useCapacityContext;
exports.useDerivedMode = useDerivedMode;
exports.useEffectiveMotion = useEffectiveMotion;
exports.useEmotionalValenceField = useEmotionalValenceField;
exports.useEnergyField = useEnergyField;
exports.useFeedback = useFeedback;
exports.useFieldControls = useFieldControls;
exports.usePacedMotionTokens = usePacedMotionTokens;
exports.usePrefersReducedMotion = usePrefersReducedMotion;
//# sourceMappingURL=index.js.map
// NOTE(review): sourceMappingURL comment is duplicated — likely a build artifact.
//# sourceMappingURL=index.js.map
|