@dropout-ai/runtime 0.2.4 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.js +231 -263
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@dropout-ai/runtime",
- "version": "0.2.4",
+ "version": "0.2.6",
  "description": "Invisible Node.js runtime for capturing AI interactions.",
  "main": "src/index.js",
  "scripts": {
package/src/index.js CHANGED
@@ -1,307 +1,275 @@
- import crypto from "crypto";
+ /**
+ * @dropout-ai/runtime
+ * Role: Passive observer inside the user’s app.
+ *
+ * Runtime never reasons, never interprets, never stores intelligence.
+ * it only: Observes, Normalizes, Emits signals.
+ */

- const originalFetch = global.fetch;
+ const crypto = require('crypto');

+ // --- Identity & State ---
+ const GLOBAL_OBJ = typeof window !== 'undefined' ? window : global;

- const sessionId =
- global.__dropout_session_id__ ||
- (global.__dropout_session_id__ = crypto.randomUUID());
-
- // Internal telemetry sender
- async function safeSendTelemetry(payload) {
+ function generateSessionId() {
  try {
- await originalFetch('http://localhost:4000/capture', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify(payload)
- });
+ return crypto.randomUUID();
  } catch (e) {
- // Fail silently
+ return 'sess_' + Math.random().toString(36).substring(2, 12) + Date.now().toString(36);
  }
  }

- const NEGATIVE_FEEDBACK = [
- "not helpful",
- "dont just dump",
- "you are worst",
- "you are stupid",
- "are you sure",
- "i thought you are intelligent",
- "let it be"
- ];
-
- const ADAPTATION_REQUESTS = [
- "help me understand",
- "explain better",
- "dont just dump facts",
- "in simple way",
- "explain it properly"
- ];
+ if (!GLOBAL_OBJ.__dropout_session_id__) {
+ GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+ }

  let turnIndex = 0;
- let lastPrompt = null;
-
- function getSimilarity(text1, text2) {
- if (!text1 || !text2) return 0;
- const words1 = new Set(text1.toLowerCase().split(/\s+/));
- const words2 = new Set(text2.toLowerCase().split(/\s+/));
- const intersection = new Set([...words1].filter(x => words2.has(x)));
- return intersection.size / Math.min(words1.size, words2.size);
+ let lastPromptHash = null;
+ let lastResponseHash = null;
+
+ // --- Runtime Guarantees ---
+ let config = {
+ maxOutputBytes: 32768,
+ captureEndpoint: 'http://localhost:4000/capture',
+ configEndpoint: 'http://localhost:4000/config',
+ privacyMode: (typeof process !== 'undefined' && process.env.DROPOUT_PRIVACY_MODE) || 'safe'
+ };
+
+ // Remote config fetch (Non-blocking)
+ setTimeout(async () => {
+ const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+ if (typeof fetchFn !== 'function') return;
+ try {
+ const resp = await fetchFn(config.configEndpoint);
+ const remote = await resp.json();
+ if (remote && typeof remote === 'object') {
+ config = Object.assign(config, remote);
+ }
+ } catch (e) { }
+ }, 0);
+
+ /**
+ * Telemetry Emitter (Non-blocking, Fire-and-forget)
+ */
+ function emit(payload) {
+ const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
+ if (typeof fetchFn !== 'function') return;
+
+ setTimeout(() => {
+ fetchFn(config.captureEndpoint, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload),
+ keepalive: true
+ }).catch(() => { });
+ }, 0);
  }

- function getIntentHash(text) {
+ // --- Content Utilities ---
+
+ function hash(text) {
  if (!text) return null;
- return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+ try {
+ return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
+ } catch (e) {
+ return 'hash_err';
+ }
  }

- const CAPABILITY_LIMITATIONS = [
- "i cannot access", "i don't have access", "i do not have access",
- "cannot browse", "real-time information", "as an ai", "my knowledge cutoff",
- "i'm sorry, but i don't", "i am sorry, but i don't"
- ];
-
- const CHALLENGE_PATTERNS = ["but ", "why ", "how ", "cant you", "can't you", "are you sure", "openai", "chatgpt"];
-
- function detectSignals(text, output = "") {
- const t = (text || "").toLowerCase();
- const o = (output || "").toLowerCase();
- return {
- negativeFeedback: NEGATIVE_FEEDBACK.some(p => t.includes(p)) ? 1 : 0,
- adaptationRequest: ADAPTATION_REQUESTS.some(p => t.includes(p)) ? 1 : 0,
- userChallenge: CHALLENGE_PATTERNS.some(p => t.includes(p)) ? 1 : 0,
- capabilityLimitation: CAPABILITY_LIMITATIONS.some(p => o.includes(p)) ? 1 : 0
- };
- }
+ // --- Provider Normalization ---

- (function () {
- "use strict";
- // 1. Guard global.fetch existence and idempotency
- if (typeof global.fetch !== 'function' || global.fetch.__dropout_patched__) {
- return;
- }
+ function normalize(url, body) {
+ let provider = 'unknown';
+ let model = 'unknown';

- // 2. Default Configuration
- let config = {
- version: 1,
- captureOutput: true,
- maxOutputBytes: 20000,
- enabledProviders: ['openai'],
- privacyMode: 'safe', // 'safe' | 'full'
- captureContent: false
- };
+ if (url) {
+ const u = url.toLowerCase();
+ if (u.includes('openai.com')) provider = 'openai';
+ else if (u.includes('anthropic.com')) provider = 'anthropic';
+ else if (u.includes('google.com') || u.includes('generative')) provider = 'google';
+ else if (u.includes('groq.com')) provider = 'groq';
+ else if (u.includes('localhost') || u.includes('127.0.0.1')) provider = 'local';
+ }

- // 3. Remote Config Fetch (Non-blocking, fire-and-forget)
- setTimeout(async () => {
+ if (body) {
  try {
- const resp = await originalFetch('http://localhost:4000/config');
- const remoteConfig = await resp.json();
- if (remoteConfig && typeof remoteConfig === 'object') {
- config = Object.assign({}, config, remoteConfig);
+ const parsed = typeof body === 'string' ? JSON.parse(body) : body;
+ model = parsed.model || model;
+ if (provider === 'unknown' && (parsed.messages || parsed.prompt || parsed.input)) {
+ provider = 'heuristic';
  }
- } catch (e) {
- // Fail silently, use defaults
- }
- }, 0);
+ } catch (e) { }
+ }
+
+ return { provider, model };
+ }
+
+ // --- The Monkey Patch ---

+ if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patched__) {
+ GLOBAL_OBJ.__dropout_original_fetch__ = GLOBAL_OBJ.fetch;

- // 5. Detection Helpers
- function matchesKnownProvider(url) {
- return (
- url &&
- (
- url.includes('openai.com/v1') ||
- url.includes('api.anthropic.com') ||
- url.includes('generativelanguage.googleapis.com') ||
- url.includes('api-inference.huggingface.co')
- )
+ GLOBAL_OBJ.fetch = async function (input, init) {
+ const url = typeof input === 'string' ? input : (input && input.url);
+ const isAI = url && (
+ url.includes('openai.com') ||
+ url.includes('anthropic.com') ||
+ url.includes('generative') ||
+ url.includes('groq.com') ||
+ (init && init.body && (init.body.includes('"model"') || init.body.includes('"messages"')))
  );
- }

- function matchesGenericLLMPayload(init) {
+ if (!isAI) return GLOBAL_OBJ.__dropout_original_fetch__(input, init);
+
+ const start = Date.now();
+ const turn = turnIndex++;
+ const { provider, model } = normalize(url, init && init.body);
+
+ // --- 1. Emit Request Event ---
+ let pText = "";
+ if (init && init.body) {
+ try { pText = typeof init.body === 'string' ? init.body : JSON.stringify(init.body); } catch (e) { }
+ }
+ const pHash = hash(pText);
+ const isRetry = pHash && pHash === lastPromptHash;
+
+ const requestEvent = {
+ identity: {
+ session_id: GLOBAL_OBJ.__dropout_session_id__,
+ turn_index: turn,
+ direction: 'request',
+ turn_role: 'user'
+ },
+ timing: {
+ created_at: Date.now()
+ },
+ provider_context: {
+ provider,
+ model
+ },
+ content: {
+ content_raw: config.privacyMode === 'full' ? pText : null,
+ content_hash: pHash
+ },
+ metadata_flags: {
+ retry_like: isRetry ? 1 : 0
+ }
+ };
+
+ emit(requestEvent);
+ lastPromptHash = pHash;
+
+ // Execute actual fetch
+ let response;
+ let oText = "";
  try {
- if (!init || typeof init.body !== 'string') return false;
-
- const body = JSON.parse(init.body);
-
- // Common LLM request shapes
- return (
- body.messages ||
- body.prompt ||
- body.input
- );
- } catch (e) {
- return false;
+ response = await GLOBAL_OBJ.__dropout_original_fetch__(input, init);
+ } catch (err) {
+ // Re-throw after giving it a chance to be reported if needed (though runtime usually silent)
+ throw err;
  }
- }

- // 6. The Monkey Patch
- global.fetch = async function (input, init) {
- const start = Date.now();
- const response = await originalFetch(input, init);
+ const latency = Date.now() - start;

+ // --- 2. Emit Response Event ---
  try {
- // 7. Detection Logic (Dumb detection + Config Check)
- const url = typeof input === 'string' ? input : (input && input.url);
- const isLikelyAI =
- matchesKnownProvider(url) ||
- matchesGenericLLMPayload(init);
-
- if (url && isLikelyAI) {
- const latency = Date.now() - start;
- const contentType = response.headers.get('content-type') || '';
- const isStream = contentType.includes('text/event-stream');
-
- // 7. Safe Body Capture (Best effort)
- let promptText = undefined;
- let model = undefined;
-
- if (init && typeof init.body === 'string') {
- promptText = init.body;
- try {
- const bodyJson = JSON.parse(promptText);
- model = bodyJson.model;
- } catch (e) { }
- }
-
- let outputText = undefined;
- if (!isStream && config.captureOutput) {
- try {
- const cloned = response.clone();
- outputText = await cloned.text();
- // 8. Hard Capping (Infra Rule: Trust but verify)
- if (outputText && outputText.length > config.maxOutputBytes) {
- outputText = outputText.slice(0, config.maxOutputBytes);
- }
- } catch (e) { }
- }
-
- const currentSignals = detectSignals(promptText, outputText);
- const similarity = getSimilarity(lastPrompt, promptText);
- const intentHash = getIntentHash(promptText);
-
- const isSafe = config.privacyMode === 'safe' && !config.captureContent;
-
- const payload = {
- provider: url.includes('openai.com') ? 'openai' : 'unknown',
- confidence: matchesKnownProvider(url) ? 'high' : 'heuristic',
- url,
- model,
- latency_ms: latency,
- timestamp: Math.floor(Date.now() / 1000),
- session_id: sessionId,
- turn_index: turnIndex++,
- intent_hash: intentHash,
- similarity_prev: similarity,
- negative_feedback: currentSignals.negativeFeedback,
- adaptation_request: currentSignals.adaptationRequest,
- user_challenge: currentSignals.userChallenge,
- capability_limitation: currentSignals.capabilityLimitation,
- // Redact content if in safe mode
- prompt: isSafe ? undefined : promptText,
- output: isSafe ? undefined : outputText,
- };
-
- lastPrompt = promptText;
-
- // 9. Fire-and-forget
- setTimeout(() => safeSendTelemetry(payload), 0);
+ const cloned = response.clone();
+ oText = await cloned.text();
+ if (oText && oText.length > config.maxOutputBytes) {
+ oText = oText.slice(0, config.maxOutputBytes);
  }
  } catch (e) { }

+ const oHash = hash(oText);
+ const isNonAdaptive = oHash && oHash === lastResponseHash;
+
+ const responseEvent = {
+ identity: {
+ session_id: GLOBAL_OBJ.__dropout_session_id__,
+ turn_index: turn,
+ direction: 'response',
+ turn_role: 'assistant'
+ },
+ timing: {
+ created_at: Date.now(),
+ latency_ms: latency
+ },
+ provider_context: {
+ provider,
+ model
+ },
+ content: {
+ content_raw: config.privacyMode === 'full' ? oText : null,
+ content_hash: oHash
+ },
+ metadata_flags: {
+ non_adaptive_response: isNonAdaptive ? 1 : 0,
+ turn_boundary_confirmed: 1
+ }
+ };
+
+ emit(responseEvent);
+ lastResponseHash = oHash;
+
  return response;
  };

- global.fetch.__dropout_patched__ = true;
- })();
+ GLOBAL_OBJ.fetch.__dropout_patched__ = true;
+ }

- // 11. Manual capture (combo mode)
+ /**
+ * Manual capture for framework-level integration
+ */
  async function capture(target, options = {}) {
  const start = Date.now();
-
- // Support dropout.capture({ prompt, response, privacy: "full" })
- if (typeof target === 'object' && target !== null && !target.then) {
- const { prompt, response, privacy } = target;
- const isSafe = (privacy || 'safe') === 'safe';
-
- const currentSignals = detectSignals(prompt, response);
- const similarity = getSimilarity(lastPrompt, prompt);
- const intentHash = getIntentHash(prompt);
-
- const payload = {
- provider: 'manual-opt-in',
- prompt: isSafe ? undefined : prompt,
- output: isSafe ? undefined : response,
- latency_ms: 0,
- timestamp: Math.floor(Date.now() / 1000),
- session_id: sessionId,
- turn_index: turnIndex++,
- intent_hash: intentHash,
- similarity_prev: similarity,
- negative_feedback: currentSignals.negativeFeedback,
- adaptation_request: currentSignals.adaptationRequest,
- user_challenge: currentSignals.userChallenge,
- capability_limitation: currentSignals.capabilityLimitation,
- mode: 'manual'
- };
- lastPrompt = prompt;
- setTimeout(() => safeSendTelemetry(payload), 0);
- return response;
+ const turn = turnIndex++;
+ const mode = options.privacy || config.privacyMode;
+
+ let prompt, output, latency_ms = options.latency || 0;
+
+ if (typeof target === 'function' || (target && typeof target.then === 'function')) {
+ prompt = options.prompt;
+ const result = typeof target === 'function' ? await target() : await target;
+ output = typeof result === 'string' ? result : JSON.stringify(result);
+ latency_ms = Date.now() - start;
+ } else {
+ prompt = target.prompt;
+ output = target.output;
  }

- try {
- const result =
- typeof target === 'function'
- ? await target()
- : await Promise.resolve(target);
-
- const isSafe = options.privacy !== 'full';
-
- const promptText = options.prompt;
- const resultText = typeof result === 'string'
- ? result
- : JSON.stringify(result).slice(0, 20000);
-
- const currentSignals = detectSignals(promptText, resultText);
- const similarity = getSimilarity(lastPrompt, promptText);
- const intentHash = getIntentHash(promptText);
-
- const payload = {
- provider: options.provider || 'manual',
- model: options.model,
- prompt: isSafe ? undefined : promptText,
- output: isSafe ? undefined : resultText,
- latency_ms: Date.now() - start,
- timestamp: Math.floor(Date.now() / 1000),
- session_id: sessionId,
- turn_index: turnIndex++,
- intent_hash: intentHash,
- similarity_prev: similarity,
- negative_feedback: currentSignals.negativeFeedback,
- adaptation_request: currentSignals.adaptationRequest,
- user_challenge: currentSignals.userChallenge,
- capability_limitation: currentSignals.capabilityLimitation,
- mode: 'manual'
- };
-
- lastPrompt = promptText;
- setTimeout(() => safeSendTelemetry(payload), 0);
- return result;
- } catch (error) {
- const payload = {
- provider: options.provider || 'manual',
- error: error?.message || String(error),
- latency_ms: Date.now() - start,
- timestamp: Math.floor(Date.now() / 1000),
- mode: 'manual',
- session_id: sessionId,
- turn_index: turnIndex++
- };
-
- setTimeout(() => safeSendTelemetry(payload), 0);
- throw error;
- }
+ const pHash = hash(prompt);
+ const oHash = hash(output);
+
+ // Emit Request
+ emit({
+ identity: { session_id: GLOBAL_OBJ.__dropout_session_id__, turn_index: turn, direction: 'request', turn_role: 'user' },
+ timing: { created_at: Date.now() },
+ provider_context: { provider: options.provider || 'manual', model: options.model || 'unknown' },
+ content: { content_raw: mode === 'full' ? prompt : null, content_hash: pHash },
+ metadata_flags: { retry_like: pHash === lastPromptHash ? 1 : 0 }
+ });
+
+ // Emit Response
+ emit({
+ identity: { session_id: GLOBAL_OBJ.__dropout_session_id__, turn_index: turn, direction: 'response', turn_role: 'assistant' },
+ timing: { created_at: Date.now(), latency_ms },
+ provider_context: { provider: options.provider || 'manual', model: options.model || 'unknown' },
+ content: { content_raw: mode === 'full' ? output : null, content_hash: oHash },
+ metadata_flags: { non_adaptive_response: oHash === lastResponseHash ? 1 : 0, turn_boundary_confirmed: 1 }
+ });
+
+ lastPromptHash = pHash;
+ lastResponseHash = oHash;
+
+ return output;
  }

  module.exports = {
- capture
+ capture,
+ reset: () => {
+ GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
+ turnIndex = 0;
+ lastPromptHash = null;
+ lastResponseHash = null;
+ }
  };
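
For reference, the 0.2.6 export surface shown in the diff is `capture(target, options)` plus a new `reset()`. A minimal usage sketch, assuming a consumer app with the package installed and the default local capture endpoint reachable; the surrounding application code (usage-sketch.js, main, the stand-in async call) is hypothetical, while the function names and option keys come from the diff above:

// usage-sketch.js — hypothetical consumer of the 0.2.6 API shown above
const dropout = require('@dropout-ai/runtime'); // requiring the package patches global fetch

async function main() {
  // Wrap an arbitrary async call; prompt/provider/model are supplied via options
  const output = await dropout.capture(
    async () => 'example model output',            // stand-in for a real LLM call
    { prompt: 'Summarize this log', provider: 'manual', model: 'example-model' }
  );
  console.log(output);

  // Or report an already-completed exchange directly
  await dropout.capture({ prompt: 'hi', output: 'hello' });

  // Start a fresh session: new session_id, turn_index back to 0
  dropout.reset();
}

main();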