@dropout-ai/runtime 0.2.4 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.js +229 -254
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@dropout-ai/runtime",
-   "version": "0.2.4",
+   "version": "0.2.5",
    "description": "Invisible Node.js runtime for capturing AI interactions.",
    "main": "src/index.js",
    "scripts": {
package/src/index.js CHANGED
@@ -1,307 +1,282 @@
- import crypto from "crypto";
+ /**
+  * @dropout-ai/runtime
+  * Role: Passive observer inside the user’s app.
+  *
+  * Runtime never reasons, never interprets, never stores intelligence.
+  * It only: Observes, Normalizes, Emits signals.
+  */

- const originalFetch = global.fetch;
+ const crypto = require('crypto');

+ // --- A. Session Boundary (CRITICAL) ---
+ const GLOBAL_OBJ = typeof window !== 'undefined' ? window : global;

- const sessionId =
-   global.__dropout_session_id__ ||
-   (global.__dropout_session_id__ = crypto.randomUUID());
-
- // Internal telemetry sender
- async function safeSendTelemetry(payload) {
+ function generateSessionId() {
    try {
-     await originalFetch('http://localhost:4000/capture', {
-       method: 'POST',
-       headers: { 'Content-Type': 'application/json' },
-       body: JSON.stringify(payload)
-     });
+     return crypto.randomUUID();
    } catch (e) {
-     // Fail silently
+     return 'sess_' + Math.random().toString(36).substring(2, 12) + Date.now().toString(36);
    }
  }

- const NEGATIVE_FEEDBACK = [
-   "not helpful",
-   "dont just dump",
-   "you are worst",
-   "you are stupid",
-   "are you sure",
-   "i thought you are intelligent",
-   "let it be"
- ];
-
- const ADAPTATION_REQUESTS = [
-   "help me understand",
-   "explain better",
-   "dont just dump facts",
-   "in simple way",
-   "explain it properly"
- ];
+ // One session = one continuous user attempt
+ // Resets on page reload (Browser) or process restart (Node)
+ if (!GLOBAL_OBJ.__dropout_session_id__) {
+   GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+ }

  let turnIndex = 0;
- let lastPrompt = null;
-
- function getSimilarity(text1, text2) {
-   if (!text1 || !text2) return 0;
-   const words1 = new Set(text1.toLowerCase().split(/\s+/));
-   const words2 = new Set(text2.toLowerCase().split(/\s+/));
-   const intersection = new Set([...words1].filter(x => words2.has(x)));
-   return intersection.size / Math.min(words1.size, words2.size);
- }

- function getIntentHash(text) {
-   if (!text) return null;
-   return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+ // --- D. Runtime guarantees ---
+ let config = {
+   maxOutputBytes: 32768,
+   captureEndpoint: 'http://localhost:4000/capture',
+   configEndpoint: 'http://localhost:4000/config',
+   privacyMode: (typeof process !== 'undefined' && process.env.DROPOUT_PRIVACY_MODE) || 'safe'
+ };
+
+ // Zero user config required: Non-blocking remote config fetch
+ setTimeout(async () => {
+   const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+   if (typeof fetchFn !== 'function') return;
+   try {
+     const resp = await fetchFn(config.configEndpoint);
+     const remote = await resp.json();
+     if (remote && typeof remote === 'object') {
+       config = Object.assign(config, remote);
+     }
+   } catch (e) { }
+ }, 0);
+
+ /**
+  * Telemetry Emitter (Non-blocking, Fire-and-forget)
+  */
+ function emit(payload) {
+   const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+   if (typeof fetchFn !== 'function') return;
+
+   setTimeout(() => {
+     fetchFn(config.captureEndpoint, {
+       method: 'POST',
+       headers: { 'Content-Type': 'application/json' },
+       body: JSON.stringify(payload),
+       keepalive: true
+     }).catch(() => { });
+   }, 0);
  }

- const CAPABILITY_LIMITATIONS = [
-   "i cannot access", "i don't have access", "i do not have access",
-   "cannot browse", "real-time information", "as an ai", "my knowledge cutoff",
-   "i'm sorry, but i don't", "i am sorry, but i don't"
- ];
+ // --- C. Privacy Mode Helpers ---

- const CHALLENGE_PATTERNS = ["but ", "why ", "how ", "cant you", "can't you", "are you sure", "openai", "chatgpt"];
+ function getSemanticHash(text) {
+   if (!text) return null;
+   try {
+     return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+   } catch (e) {
+     return 'hash_err';
+   }
+ }

- function detectSignals(text, output = "") {
-   const t = (text || "").toLowerCase();
-   const o = (output || "").toLowerCase();
+ function getMarkers(text) {
+   if (!text) return null;
    return {
-     negativeFeedback: NEGATIVE_FEEDBACK.some(p => t.includes(p)) ? 1 : 0,
-     adaptationRequest: ADAPTATION_REQUESTS.some(p => t.includes(p)) ? 1 : 0,
-     userChallenge: CHALLENGE_PATTERNS.some(p => t.includes(p)) ? 1 : 0,
-     capabilityLimitation: CAPABILITY_LIMITATIONS.some(p => o.includes(p)) ? 1 : 0
+     chars: text.length,
+     words: text.split(/\s+/).length,
+     lines: text.split('\n').length,
+     has_code: /```/.test(text),
+     has_list: /^\s*[-*•\d+.]/m.test(text)
    };
  }

- (function () {
-   "use strict";
-   // 1. Guard global.fetch existence and idempotency
-   if (typeof global.fetch !== 'function' || global.fetch.__dropout_patched__) {
-     return;
-   }
+ const PATTERNS = {
+   negative: ["wrong", "bad", "not helpful", "incorrect", "stupid", "error", "worst"],
+   positive: ["thanks", "good", "perfect", "correct", "great", "helpful"],
+   struggle: ["but", "why", "stop", "don't", "no", "again", "explain"]
+ };

-   // 2. Default Configuration
-   let config = {
-     version: 1,
-     captureOutput: true,
-     maxOutputBytes: 20000,
-     enabledProviders: ['openai'],
-     privacyMode: 'safe', // 'safe' | 'full'
-     captureContent: false
-   };
+ function getFlags(text) {
+   if (!text) return [];
+   const t = text.toLowerCase();
+   const flags = [];
+   if (PATTERNS.negative.some(p => t.includes(p))) flags.push('neg');
+   if (PATTERNS.positive.some(p => t.includes(p))) flags.push('pos');
+   if (PATTERNS.struggle.some(p => t.includes(p))) flags.push('clash');
+   return flags;
+ }

-   // 3. Remote Config Fetch (Non-blocking, fire-and-forget)
-   setTimeout(async () => {
-     try {
-       const resp = await originalFetch('http://localhost:4000/config');
-       const remoteConfig = await resp.json();
-       if (remoteConfig && typeof remoteConfig === 'object') {
-         config = Object.assign({}, config, remoteConfig);
-       }
-     } catch (e) {
-       // Fail silently, use defaults
-     }
-   }, 0);
+ // --- B. Event Capture (Provider-agnostic) ---

+ function normalize(url, body) {
+   let provider = 'unknown';
+   let model = 'unknown';

-   // 5. Detection Helpers
-   function matchesKnownProvider(url) {
-     return (
-       url &&
-       (
-         url.includes('openai.com/v1') ||
-         url.includes('api.anthropic.com') ||
-         url.includes('generativelanguage.googleapis.com') ||
-         url.includes('api-inference.huggingface.co')
-       )
-     );
+   if (url) {
+     const u = url.toLowerCase();
+     if (u.includes('openai.com')) provider = 'openai';
+     else if (u.includes('anthropic.com')) provider = 'anthropic';
+     else if (u.includes('google.com') || u.includes('generative')) provider = 'google';
+     else if (u.includes('groq.com')) provider = 'groq';
+     else if (u.includes('localhost') || u.includes('127.0.0.1')) provider = 'local';
    }

-   function matchesGenericLLMPayload(init) {
+   if (body) {
      try {
-       if (!init || typeof init.body !== 'string') return false;
-
-       const body = JSON.parse(init.body);
-
-       // Common LLM request shapes
-       return (
-         body.messages ||
-         body.prompt ||
-         body.input
-       );
-     } catch (e) {
-       return false;
-     }
+       const parsed = typeof body === 'string' ? JSON.parse(body) : body;
+       model = parsed.model || model;
+       if (provider === 'unknown' && (parsed.messages || parsed.prompt || parsed.input)) {
+         provider = 'heuristic';
+       }
+     } catch (e) { }
    }

-   // 6. The Monkey Patch
-   global.fetch = async function (input, init) {
+   return { provider, model };
+ }
+
+ /**
+  * The Monkey Patch
+  */
+ if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patched__) {
+   GLOBAL_OBJ.__dropout_original_fetch__ = GLOBAL_OBJ.fetch;
+
+   GLOBAL_OBJ.fetch = async function (input, init) {
      const start = Date.now();
-     const response = await originalFetch(input, init);
+     const url = typeof input === 'string' ? input : (input && input.url);
+
+     const isAI = url && (
+       url.includes('openai.com') ||
+       url.includes('anthropic.com') ||
+       url.includes('generative') ||
+       url.includes('groq.com') ||
+       (init && init.body && (init.body.includes('"model"') || init.body.includes('"messages"')))
+     );

-     try {
-       // 7. Detection Logic (Dumb detection + Config Check)
-       const url = typeof input === 'string' ? input : (input && input.url);
-       const isLikelyAI =
-         matchesKnownProvider(url) ||
-         matchesGenericLLMPayload(init);
-
-       if (url && isLikelyAI) {
-         const latency = Date.now() - start;
-         const contentType = response.headers.get('content-type') || '';
-         const isStream = contentType.includes('text/event-stream');
-
-         // 7. Safe Body Capture (Best effort)
-         let promptText = undefined;
-         let model = undefined;
-
-         if (init && typeof init.body === 'string') {
-           promptText = init.body;
-           try {
-             const bodyJson = JSON.parse(promptText);
-             model = bodyJson.model;
-           } catch (e) { }
-         }
+     const response = await GLOBAL_OBJ.__dropout_original_fetch__(input, init);

-         let outputText = undefined;
-         if (!isStream && config.captureOutput) {
-           try {
-             const cloned = response.clone();
-             outputText = await cloned.text();
-             // 8. Hard Capping (Infra Rule: Trust but verify)
-             if (outputText && outputText.length > config.maxOutputBytes) {
-               outputText = outputText.slice(0, config.maxOutputBytes);
-             }
-           } catch (e) { }
-         }
+     if (isAI) {
+       const latency = Date.now() - start;
+       const turn = turnIndex++;
+       const { provider, model } = normalize(url, init && init.body);

-         const currentSignals = detectSignals(promptText, outputText);
-         const similarity = getSimilarity(lastPrompt, promptText);
-         const intentHash = getIntentHash(promptText);
-
-         const isSafe = config.privacyMode === 'safe' && !config.captureContent;
-
-         const payload = {
-           provider: url.includes('openai.com') ? 'openai' : 'unknown',
-           confidence: matchesKnownProvider(url) ? 'high' : 'heuristic',
-           url,
-           model,
-           latency_ms: latency,
-           timestamp: Math.floor(Date.now() / 1000),
-           session_id: sessionId,
-           turn_index: turnIndex++,
-           intent_hash: intentHash,
-           similarity_prev: similarity,
-           negative_feedback: currentSignals.negativeFeedback,
-           adaptation_request: currentSignals.adaptationRequest,
-           user_challenge: currentSignals.userChallenge,
-           capability_limitation: currentSignals.capabilityLimitation,
-           // Redact content if in safe mode
-           prompt: isSafe ? undefined : promptText,
-           output: isSafe ? undefined : outputText,
-         };
-
-         lastPrompt = promptText;
-
-         // 9. Fire-and-forget
-         setTimeout(() => safeSendTelemetry(payload), 0);
+       let pText = "";
+       if (init && init.body) {
+         try { pText = typeof init.body === 'string' ? init.body : JSON.stringify(init.body); } catch (e) { }
        }
-     } catch (e) { }
+
+       let oText = "";
+       try {
+         const cloned = response.clone();
+         oText = await cloned.text();
+         if (oText && oText.length > config.maxOutputBytes) {
+           oText = oText.slice(0, config.maxOutputBytes);
+         }
+       } catch (e) { }
+
+       const payload = {
+         session_id: GLOBAL_OBJ.__dropout_session_id__,
+         timestamp: Date.now(),
+         latency,
+         turn_position: turn,
+         provider,
+         model,
+         mode: config.privacyMode
+       };
+
+       if (config.privacyMode === 'full') {
+         payload.prompt = pText;
+         payload.output = oText;
+       } else {
+         payload.prompt_hash = getSemanticHash(pText);
+         payload.output_hash = getSemanticHash(oText);
+         payload.markers = { p: getMarkers(pText), o: getMarkers(oText) };
+         payload.flags = getFlags(pText + " " + oText);
+       }
+
+       emit(payload);
+     }

      return response;
    };

-   global.fetch.__dropout_patched__ = true;
- })();
+   GLOBAL_OBJ.fetch.__dropout_patched__ = true;
+ }

- // 11. Manual capture (combo mode)
+ /**
+  * Manual capture for framework-level integration
+  */
  async function capture(target, options = {}) {
    const start = Date.now();

-   // Support dropout.capture({ prompt, response, privacy: "full" })
-   if (typeof target === 'object' && target !== null && !target.then) {
-     const { prompt, response, privacy } = target;
-     const isSafe = (privacy || 'safe') === 'safe';
-
-     const currentSignals = detectSignals(prompt, response);
-     const similarity = getSimilarity(lastPrompt, prompt);
-     const intentHash = getIntentHash(prompt);
+   // Resolve target (function, promise, or static object)
+   let result;
+   if (typeof target === 'function') {
+     result = await target();
+   } else if (target && typeof target.then === 'function') {
+     result = await target;
+   } else {
+     // case: capture({ prompt, output })
+     const { prompt, output } = target;
+     const latency = options.latency || 0;
+     const turn = turnIndex++;
+     const mode = options.privacy || config.privacyMode;

      const payload = {
-       provider: 'manual-opt-in',
-       prompt: isSafe ? undefined : prompt,
-       output: isSafe ? undefined : response,
-       latency_ms: 0,
-       timestamp: Math.floor(Date.now() / 1000),
-       session_id: sessionId,
-       turn_index: turnIndex++,
-       intent_hash: intentHash,
-       similarity_prev: similarity,
-       negative_feedback: currentSignals.negativeFeedback,
-       adaptation_request: currentSignals.adaptationRequest,
-       user_challenge: currentSignals.userChallenge,
-       capability_limitation: currentSignals.capabilityLimitation,
-       mode: 'manual'
+       session_id: GLOBAL_OBJ.__dropout_session_id__,
+       timestamp: Date.now(),
+       latency,
+       turn_position: turn,
+       provider: options.provider || 'manual',
+       model: options.model || 'unknown',
+       mode
      };
-     lastPrompt = prompt;
-     setTimeout(() => safeSendTelemetry(payload), 0);
-     return response;
-   }
-
-   try {
-     const result =
-       typeof target === 'function'
-         ? await target()
-         : await Promise.resolve(target);
-
-     const isSafe = options.privacy !== 'full';

-     const promptText = options.prompt;
-     const resultText = typeof result === 'string'
-       ? result
-       : JSON.stringify(result).slice(0, 20000);
-
-     const currentSignals = detectSignals(promptText, resultText);
-     const similarity = getSimilarity(lastPrompt, promptText);
-     const intentHash = getIntentHash(promptText);
+     if (mode === 'full') {
+       payload.prompt = prompt;
+       payload.output = output;
+     } else {
+       payload.prompt_hash = getSemanticHash(prompt);
+       payload.output_hash = getSemanticHash(output);
+       payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
+       payload.flags = getFlags((prompt || "") + " " + (output || ""));
+     }

-     const payload = {
-       provider: options.provider || 'manual',
-       model: options.model,
-       prompt: isSafe ? undefined : promptText,
-       output: isSafe ? undefined : resultText,
-       latency_ms: Date.now() - start,
-       timestamp: Math.floor(Date.now() / 1000),
-       session_id: sessionId,
-       turn_index: turnIndex++,
-       intent_hash: intentHash,
-       similarity_prev: similarity,
-       negative_feedback: currentSignals.negativeFeedback,
-       adaptation_request: currentSignals.adaptationRequest,
-       user_challenge: currentSignals.userChallenge,
-       capability_limitation: currentSignals.capabilityLimitation,
-       mode: 'manual'
-     };
+     emit(payload);
+     return output;
+   }

-     lastPrompt = promptText;
-     setTimeout(() => safeSendTelemetry(payload), 0);
-     return result;
-   } catch (error) {
-     const payload = {
-       provider: options.provider || 'manual',
-       error: error?.message || String(error),
-       latency_ms: Date.now() - start,
-       timestamp: Math.floor(Date.now() / 1000),
-       mode: 'manual',
-       session_id: sessionId,
-       turn_index: turnIndex++
-     };
+   // Wrapped execution capture
+   const latency = Date.now() - start;
+   const turn = turnIndex++;
+   const mode = options.privacy || config.privacyMode;
+   const prompt = options.prompt;
+   const output = typeof result === 'string' ? result : JSON.stringify(result);
+
+   const payload = {
+     session_id: GLOBAL_OBJ.__dropout_session_id__,
+     timestamp: Date.now(),
+     latency,
+     turn_position: turn,
+     provider: options.provider || 'manual',
+     model: options.model || 'unknown',
+     mode
+   };

-     setTimeout(() => safeSendTelemetry(payload), 0);
-     throw error;
+   if (mode === 'full') {
+     payload.prompt = prompt;
+     payload.output = output;
+   } else {
+     payload.prompt_hash = getSemanticHash(prompt);
+     payload.output_hash = getSemanticHash(output);
+     payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
+     payload.flags = getFlags((prompt || "") + " " + (output || ""));
    }
+
+   emit(payload);
+   return result;
  }

  module.exports = {
-   capture
+   capture,
+   reset: () => {
+     GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+     turnIndex = 0;
+   }
  };
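
For orientation, a minimal consumer-side sketch of the 0.2.5 surface exported above (`capture` and `reset`). The prompt text, model name, and wrapped handler below are illustrative placeholders, not part of the package, and telemetry still targets the hard-coded localhost:4000 endpoints unless the remote config overrides them.

// Hypothetical usage of @dropout-ai/runtime 0.2.5 (sketch only).
// Requiring the module monkey-patches global fetch as a side effect when fetch exists.
const { capture, reset } = require('@dropout-ai/runtime');

async function demo() {
  // Static capture: pass an already-known prompt/output pair.
  // In the default 'safe' mode the emitted payload carries prompt_hash/output_hash,
  // markers, and flags instead of raw text.
  await capture(
    { prompt: 'Explain closures', output: 'A closure is a function plus its scope.' }, // placeholder text
    { provider: 'manual', model: 'example-model' }                                      // placeholder model
  );

  // Wrapped capture: the runtime awaits the function, measures latency,
  // serializes a non-string result, and returns it to the caller.
  const result = await capture(
    async () => ({ text: 'generated answer' }),     // placeholder handler
    { prompt: 'Explain closures', privacy: 'full' } // 'full' sends raw prompt/output
  );

  // reset() rotates the session id and zeroes the turn counter.
  reset();
  return result;
}

demo();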