@dropout-ai/runtime 0.2.3 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.js +233 -244
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@dropout-ai/runtime",
-  "version": "0.2.3",
+  "version": "0.2.5",
   "description": "Invisible Node.js runtime for capturing AI interactions.",
   "main": "src/index.js",
   "scripts": {
package/src/index.js CHANGED
@@ -1,293 +1,282 @@
-import crypto from "crypto";
+/**
+ * @dropout-ai/runtime
+ * Role: Passive observer inside the user’s app.
+ *
+ * Runtime never reasons, never interprets, never stores intelligence.
+ * It only: Observes, Normalizes, Emits signals.
+ */

-const originalFetch = global.fetch;
+const crypto = require('crypto');

+// --- A. Session Boundary (CRITICAL) ---
+const GLOBAL_OBJ = typeof window !== 'undefined' ? window : global;

-const sessionId =
-  global.__dropout_session_id__ ||
-  (global.__dropout_session_id__ = crypto.randomUUID());
-
-const NEGATIVE_FEEDBACK = [
-  "not helpful",
-  "dont just dump",
-  "you are worst",
-  "you are stupid",
-  "are you sure",
-  "i thought you are intelligent",
-  "let it be"
-];
+function generateSessionId() {
+  try {
+    return crypto.randomUUID();
+  } catch (e) {
+    return 'sess_' + Math.random().toString(36).substring(2, 12) + Date.now().toString(36);
+  }
+}

-const ADAPTATION_REQUESTS = [
-  "help me understand",
-  "explain better",
-  "dont just dump facts",
-  "in simple way",
-  "explain it properly"
-];
+// One session = one continuous user attempt
+// Resets on page reload (Browser) or process restart (Node)
+if (!GLOBAL_OBJ.__dropout_session_id__) {
+  GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+}

 let turnIndex = 0;
-let lastPrompt = null;
-
-function getSimilarity(text1, text2) {
-  if (!text1 || !text2) return 0;
-  const words1 = new Set(text1.toLowerCase().split(/\s+/));
-  const words2 = new Set(text2.toLowerCase().split(/\s+/));
-  const intersection = new Set([...words1].filter(x => words2.has(x)));
-  return intersection.size / Math.min(words1.size, words2.size);
-}

-function getIntentHash(text) {
-  if (!text) return null;
-  return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+// --- D. Runtime guarantees ---
+let config = {
+  maxOutputBytes: 32768,
+  captureEndpoint: 'http://localhost:4000/capture',
+  configEndpoint: 'http://localhost:4000/config',
+  privacyMode: (typeof process !== 'undefined' && process.env.DROPOUT_PRIVACY_MODE) || 'safe'
+};
+
+// Zero user config required: Non-blocking remote config fetch
+setTimeout(async () => {
+  const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+  if (typeof fetchFn !== 'function') return;
+  try {
+    const resp = await fetchFn(config.configEndpoint);
+    const remote = await resp.json();
+    if (remote && typeof remote === 'object') {
+      config = Object.assign(config, remote);
+    }
+  } catch (e) { }
+}, 0);
+
+/**
+ * Telemetry Emitter (Non-blocking, Fire-and-forget)
+ */
+function emit(payload) {
+  const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+  if (typeof fetchFn !== 'function') return;
+
+  setTimeout(() => {
+    fetchFn(config.captureEndpoint, {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify(payload),
+      keepalive: true
+    }).catch(() => { });
+  }, 0);
 }

-const CAPABILITY_LIMITATIONS = [
-  "i cannot access", "i don't have access", "i do not have access",
-  "cannot browse", "real-time information", "as an ai", "my knowledge cutoff",
-  "i'm sorry, but i don't", "i am sorry, but i don't"
-];
+// --- C. Privacy Mode Helpers ---

-const CHALLENGE_PATTERNS = ["but ", "why ", "how ", "cant you", "can't you", "are you sure", "openai", "chatgpt"];
+function getSemanticHash(text) {
+  if (!text) return null;
+  try {
+    return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+  } catch (e) {
+    return 'hash_err';
+  }
+}

-function detectSignals(text, output = "") {
-  const t = (text || "").toLowerCase();
-  const o = (output || "").toLowerCase();
+function getMarkers(text) {
+  if (!text) return null;
   return {
-    negativeFeedback: NEGATIVE_FEEDBACK.some(p => t.includes(p)) ? 1 : 0,
-    adaptationRequest: ADAPTATION_REQUESTS.some(p => t.includes(p)) ? 1 : 0,
-    userChallenge: CHALLENGE_PATTERNS.some(p => t.includes(p)) ? 1 : 0,
-    capabilityLimitation: CAPABILITY_LIMITATIONS.some(p => o.includes(p)) ? 1 : 0
+    chars: text.length,
+    words: text.split(/\s+/).length,
+    lines: text.split('\n').length,
+    has_code: /```/.test(text),
+    has_list: /^\s*[-*•\d+.]/m.test(text)
   };
 }

-(function () {
-  // 1. Guard global.fetch existence and idempotency
-  if (typeof global.fetch !== 'function' || global.fetch.__dropout_patched__) {
-    return;
-  }
+const PATTERNS = {
+  negative: ["wrong", "bad", "not helpful", "incorrect", "stupid", "error", "worst"],
+  positive: ["thanks", "good", "perfect", "correct", "great", "helpful"],
+  struggle: ["but", "why", "stop", "don't", "no", "again", "explain"]
+};

-  // 2. Default Configuration
-  let config = {
-    version: 1,
-    captureOutput: true,
-    maxOutputBytes: 20000,
-    enabledProviders: ['openai'],
-    privacyMode: 'safe', // 'safe' | 'full'
-    captureContent: false
-  };
+function getFlags(text) {
+  if (!text) return [];
+  const t = text.toLowerCase();
+  const flags = [];
+  if (PATTERNS.negative.some(p => t.includes(p))) flags.push('neg');
+  if (PATTERNS.positive.some(p => t.includes(p))) flags.push('pos');
+  if (PATTERNS.struggle.some(p => t.includes(p))) flags.push('clash');
+  return flags;
+}

-  // 3. Remote Config Fetch (Non-blocking, fire-and-forget)
-  setTimeout(async () => {
-    try {
-      const resp = await originalFetch('http://localhost:4000/config');
-      const remoteConfig = await resp.json();
-      if (remoteConfig && typeof remoteConfig === 'object') {
-        config = Object.assign({}, config, remoteConfig);
-      }
-    } catch (e) {
-      // Fail silently, use defaults
-    }
-  }, 0);
+// --- B. Event Capture (Provider-agnostic) ---

+function normalize(url, body) {
+  let provider = 'unknown';
+  let model = 'unknown';

-  // 5. Detection Helpers
-  function matchesKnownProvider(url) {
-    return (
-      url &&
-      (
-        url.includes('openai.com/v1') ||
-        url.includes('api.anthropic.com') ||
-        url.includes('generativelanguage.googleapis.com') ||
-        url.includes('api-inference.huggingface.co')
-      )
-    );
+  if (url) {
+    const u = url.toLowerCase();
+    if (u.includes('openai.com')) provider = 'openai';
+    else if (u.includes('anthropic.com')) provider = 'anthropic';
+    else if (u.includes('google.com') || u.includes('generative')) provider = 'google';
+    else if (u.includes('groq.com')) provider = 'groq';
+    else if (u.includes('localhost') || u.includes('127.0.0.1')) provider = 'local';
   }

-  function matchesGenericLLMPayload(init) {
+  if (body) {
     try {
-      if (!init || typeof init.body !== 'string') return false;
-
-      const body = JSON.parse(init.body);
-
-      // Common LLM request shapes
-      return (
-        body.messages ||
-        body.prompt ||
-        body.input
-      );
-    } catch (e) {
-      return false;
-    }
+      const parsed = typeof body === 'string' ? JSON.parse(body) : body;
+      model = parsed.model || model;
+      if (provider === 'unknown' && (parsed.messages || parsed.prompt || parsed.input)) {
+        provider = 'heuristic';
+      }
+    } catch (e) { }
   }

-  // 6. The Monkey Patch
-  global.fetch = async function (input, init) {
+  return { provider, model };
+}
+
+/**
+ * The Monkey Patch
+ */
+if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patched__) {
+  GLOBAL_OBJ.__dropout_original_fetch__ = GLOBAL_OBJ.fetch;
+
+  GLOBAL_OBJ.fetch = async function (input, init) {
     const start = Date.now();
-    const response = await originalFetch(input, init);
+    const url = typeof input === 'string' ? input : (input && input.url);
+
+    const isAI = url && (
+      url.includes('openai.com') ||
+      url.includes('anthropic.com') ||
+      url.includes('generative') ||
+      url.includes('groq.com') ||
+      (init && init.body && (init.body.includes('"model"') || init.body.includes('"messages"')))
+    );

-    try {
-      // 7. Detection Logic (Dumb detection + Config Check)
-      const url = typeof input === 'string' ? input : (input && input.url);
-      const isLikelyAI =
-        matchesKnownProvider(url) ||
-        matchesGenericLLMPayload(init);
-
-      if (url && isLikelyAI) {
-        const latency = Date.now() - start;
-        const contentType = response.headers.get('content-type') || '';
-        const isStream = contentType.includes('text/event-stream');
-
-        // 7. Safe Body Capture (Best effort)
-        let promptText = undefined;
-        let model = undefined;
-
-        if (init && typeof init.body === 'string') {
-          promptText = init.body;
-          try {
-            const bodyJson = JSON.parse(promptText);
-            model = bodyJson.model;
-          } catch (e) { }
-        }
+    const response = await GLOBAL_OBJ.__dropout_original_fetch__(input, init);

-        let outputText = undefined;
-        if (!isStream && config.captureOutput) {
-          try {
-            const cloned = response.clone();
-            outputText = await cloned.text();
-            // 8. Hard Capping (Infra Rule: Trust but verify)
-            if (outputText && outputText.length > config.maxOutputBytes) {
-              outputText = outputText.slice(0, config.maxOutputBytes);
-            }
-          } catch (e) { }
-        }
+    if (isAI) {
+      const latency = Date.now() - start;
+      const turn = turnIndex++;
+      const { provider, model } = normalize(url, init && init.body);

-        const currentSignals = detectSignals(promptText, outputText);
-        const similarity = getSimilarity(lastPrompt, promptText);
-        const intentHash = getIntentHash(promptText);
-
-        const isSafe = config.privacyMode === 'safe' && !config.captureContent;
-
-        const payload = {
-          provider: url.includes('openai.com') ? 'openai' : 'unknown',
-          confidence: matchesKnownProvider(url) ? 'high' : 'heuristic',
-          url,
-          model,
-          latency_ms: latency,
-          timestamp: Math.floor(Date.now() / 1000),
-          session_id: sessionId,
-          turn_index: turnIndex++,
-          intent_hash: intentHash,
-          similarity_prev: similarity,
-          negative_feedback: currentSignals.negativeFeedback,
-          adaptation_request: currentSignals.adaptationRequest,
-          user_challenge: currentSignals.userChallenge,
-          capability_limitation: currentSignals.capabilityLimitation,
-          // Redact content if in safe mode
-          prompt: isSafe ? undefined : promptText,
-          output: isSafe ? undefined : outputText,
-        };
-
-        lastPrompt = promptText;
-
-        // 9. Fire-and-forget
-        setTimeout(() => safeSendTelemetry(payload), 0);
+      let pText = "";
+      if (init && init.body) {
+        try { pText = typeof init.body === 'string' ? init.body : JSON.stringify(init.body); } catch (e) { }
       }
-    } catch (e) { }
+
+      let oText = "";
+      try {
+        const cloned = response.clone();
+        oText = await cloned.text();
+        if (oText && oText.length > config.maxOutputBytes) {
+          oText = oText.slice(0, config.maxOutputBytes);
+        }
+      } catch (e) { }
+
+      const payload = {
+        session_id: GLOBAL_OBJ.__dropout_session_id__,
+        timestamp: Date.now(),
+        latency,
+        turn_position: turn,
+        provider,
+        model,
+        mode: config.privacyMode
+      };
+
+      if (config.privacyMode === 'full') {
+        payload.prompt = pText;
+        payload.output = oText;
+      } else {
+        payload.prompt_hash = getSemanticHash(pText);
+        payload.output_hash = getSemanticHash(oText);
+        payload.markers = { p: getMarkers(pText), o: getMarkers(oText) };
+        payload.flags = getFlags(pText + " " + oText);
+      }
+
+      emit(payload);
+    }

     return response;
   };

-  global.fetch.__dropout_patched__ = true;
-})();
+  GLOBAL_OBJ.fetch.__dropout_patched__ = true;
+}

-// 11. Manual capture (combo mode)
+/**
+ * Manual capture for framework-level integration
+ */
 async function capture(target, options = {}) {
   const start = Date.now();

-  // Support dropout.capture({ prompt, response, privacy: "full" })
-  if (typeof target === 'object' && target !== null && !target.then) {
-    const { prompt, response, privacy } = target;
-    const isSafe = (privacy || 'safe') === 'safe';
-
-    const currentSignals = detectSignals(prompt, response);
-    const similarity = getSimilarity(lastPrompt, prompt);
-    const intentHash = getIntentHash(prompt);
+  // Resolve target (function, promise, or static object)
+  let result;
+  if (typeof target === 'function') {
+    result = await target();
+  } else if (target && typeof target.then === 'function') {
+    result = await target;
+  } else {
+    // case: capture({ prompt, output })
+    const { prompt, output } = target;
+    const latency = options.latency || 0;
+    const turn = turnIndex++;
+    const mode = options.privacy || config.privacyMode;

     const payload = {
-      provider: 'manual-opt-in',
-      prompt: isSafe ? undefined : prompt,
-      output: isSafe ? undefined : response,
-      latency_ms: 0,
-      timestamp: Math.floor(Date.now() / 1000),
-      session_id: sessionId,
-      turn_index: turnIndex++,
-      intent_hash: intentHash,
-      similarity_prev: similarity,
-      negative_feedback: currentSignals.negativeFeedback,
-      adaptation_request: currentSignals.adaptationRequest,
-      user_challenge: currentSignals.userChallenge,
-      capability_limitation: currentSignals.capabilityLimitation,
-      mode: 'manual'
+      session_id: GLOBAL_OBJ.__dropout_session_id__,
+      timestamp: Date.now(),
+      latency,
+      turn_position: turn,
+      provider: options.provider || 'manual',
+      model: options.model || 'unknown',
+      mode
     };
-    lastPrompt = prompt;
-    setTimeout(() => safeSendTelemetry(payload), 0);
-    return response;
-  }

-  try {
-    const result =
-      typeof target === 'function'
-        ? await target()
-        : await Promise.resolve(target);
-
-    const isSafe = options.privacy !== 'full';
-
-    const promptText = options.prompt;
-    const resultText = typeof result === 'string'
-      ? result
-      : JSON.stringify(result).slice(0, 20000);
-
-    const currentSignals = detectSignals(promptText, resultText);
-    const similarity = getSimilarity(lastPrompt, promptText);
-    const intentHash = getIntentHash(promptText);
+    if (mode === 'full') {
+      payload.prompt = prompt;
+      payload.output = output;
+    } else {
+      payload.prompt_hash = getSemanticHash(prompt);
+      payload.output_hash = getSemanticHash(output);
+      payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
+      payload.flags = getFlags((prompt || "") + " " + (output || ""));
+    }

-    const payload = {
-      provider: options.provider || 'manual',
-      model: options.model,
-      prompt: isSafe ? undefined : promptText,
-      output: isSafe ? undefined : resultText,
-      latency_ms: Date.now() - start,
-      timestamp: Math.floor(Date.now() / 1000),
-      session_id: sessionId,
-      turn_index: turnIndex++,
-      intent_hash: intentHash,
-      similarity_prev: similarity,
-      negative_feedback: currentSignals.negativeFeedback,
-      adaptation_request: currentSignals.adaptationRequest,
-      user_challenge: currentSignals.userChallenge,
-      capability_limitation: currentSignals.capabilityLimitation,
-      mode: 'manual'
-    };
+    emit(payload);
+    return output;
+  }

-    lastPrompt = promptText;
-    setTimeout(() => safeSendTelemetry(payload), 0);
-    return result;
-  } catch (error) {
-    const payload = {
-      provider: options.provider || 'manual',
-      error: error?.message || String(error),
-      latency_ms: Date.now() - start,
-      timestamp: Math.floor(Date.now() / 1000),
-      mode: 'manual',
-      session_id: sessionId,
-      turn_index: turnIndex++
-    };
+  // Wrapped execution capture
+  const latency = Date.now() - start;
+  const turn = turnIndex++;
+  const mode = options.privacy || config.privacyMode;
+  const prompt = options.prompt;
+  const output = typeof result === 'string' ? result : JSON.stringify(result);
+
+  const payload = {
+    session_id: GLOBAL_OBJ.__dropout_session_id__,
+    timestamp: Date.now(),
+    latency,
+    turn_position: turn,
+    provider: options.provider || 'manual',
+    model: options.model || 'unknown',
+    mode
+  };

-    setTimeout(() => safeSendTelemetry(payload), 0);
-    throw error;
+  if (mode === 'full') {
+    payload.prompt = prompt;
+    payload.output = output;
+  } else {
+    payload.prompt_hash = getSemanticHash(prompt);
+    payload.output_hash = getSemanticHash(output);
+    payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
+    payload.flags = getFlags((prompt || "") + " " + (output || ""));
   }
+
+  emit(payload);
+  return result;
 }

 module.exports = {
-  capture
+  capture,
+  reset: () => {
+    GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+    turnIndex = 0;
+  }
 };
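
For orientation, the public surface after this version is the `capture` function plus a new `reset` helper, alongside the automatic fetch patch. Below is a minimal usage sketch based only on the shapes visible in the diff; the prompt text, provider/model values, and the `callMyModel` helper are illustrative assumptions, not part of the package:

const { capture, reset } = require('@dropout-ai/runtime');

async function demo() {
  // Static capture: pass an already-known prompt/output pair plus metadata
  await capture(
    { prompt: 'Explain HTTP caching', output: 'A cache stores copies of responses...' },
    { provider: 'openai', model: 'gpt-4o-mini', privacy: 'safe' }
  );

  // Wrapped capture: pass a function (or a promise); its result is returned unchanged
  const reply = await capture(
    () => callMyModel('Explain HTTP caching'), // callMyModel is a placeholder for your own call
    { prompt: 'Explain HTTP caching', provider: 'manual', privacy: 'full' }
  );

  // Start a new session boundary and reset the turn counter
  reset();
  return reply;
}

Per the diff, safe mode (the default) emits only hashes, markers, and flags, while full mode attaches the raw prompt and output to the payload.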