@contractspec/example.product-intent 1.57.0 → 1.59.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/.turbo/turbo-build.log +21 -96
  2. package/.turbo/turbo-prebuild.log +1 -0
  3. package/CHANGELOG.md +29 -0
  4. package/dist/example.d.ts +2 -6
  5. package/dist/example.d.ts.map +1 -1
  6. package/dist/example.js +27 -37
  7. package/dist/index.d.ts +6 -4
  8. package/dist/index.d.ts.map +1 -0
  9. package/dist/index.js +342 -4
  10. package/dist/load-evidence.d.ts +13 -17
  11. package/dist/load-evidence.d.ts.map +1 -1
  12. package/dist/load-evidence.js +307 -68
  13. package/dist/load-evidence.test.d.ts +2 -0
  14. package/dist/load-evidence.test.d.ts.map +1 -0
  15. package/dist/node/example.js +28 -0
  16. package/dist/node/index.js +342 -0
  17. package/dist/node/load-evidence.js +313 -0
  18. package/dist/node/posthog-signals.js +248 -0
  19. package/dist/node/script.js +512 -0
  20. package/dist/node/sync-actions.js +491 -0
  21. package/dist/posthog-signals.d.ts +15 -19
  22. package/dist/posthog-signals.d.ts.map +1 -1
  23. package/dist/posthog-signals.js +222 -178
  24. package/dist/script.d.ts +2 -1
  25. package/dist/script.d.ts.map +1 -0
  26. package/dist/script.js +493 -152
  27. package/dist/sync-actions.d.ts +2 -1
  28. package/dist/sync-actions.d.ts.map +1 -0
  29. package/dist/sync-actions.js +466 -128
  30. package/package.json +57 -27
  31. package/tsdown.config.js +1 -2
  32. package/.turbo/turbo-build$colon$bundle.log +0 -99
  33. package/dist/example.js.map +0 -1
  34. package/dist/libs/analytics/dist/funnel/analyzer.js +0 -64
  35. package/dist/libs/analytics/dist/funnel/analyzer.js.map +0 -1
  36. package/dist/libs/analytics/dist/types.d.ts +0 -22
  37. package/dist/libs/analytics/dist/types.d.ts.map +0 -1
  38. package/dist/load-evidence.js.map +0 -1
  39. package/dist/posthog-signals.js.map +0 -1
  40. package/dist/script.js.map +0 -1
  41. package/dist/sync-actions.js.map +0 -1
  42. package/tsconfig.tsbuildinfo +0 -1
@@ -0,0 +1,512 @@
1
+ // src/posthog-signals.ts
2
+ import { FunnelAnalyzer } from "@contractspec/lib.analytics/funnel";
3
+ import { PosthogAnalyticsProvider } from "@contractspec/integration.providers-impls/impls/posthog";
4
// Gathers PostHog-derived evidence chunks: an event-volume summary, an
// optional funnel analysis and an optional feature-flag listing.
// The three loaders are independent of each other, so they now run in
// parallel via Promise.all instead of sequentially; loaders that decline
// (return null) are filtered out, preserving the original
// summary → funnel → flags ordering of the result.
async function loadPosthogEvidenceChunks(options) {
  const range = resolveRange(options.dateRange);
  const results = await Promise.all([
    loadEventSummary(options, range),
    loadFunnelEvidence(options, range),
    loadFeatureFlagEvidence(options)
  ]);
  return results.filter((chunk) => chunk !== null);
}
21
// Summarises event volumes over the date range with a single HogQL
// aggregation (count per event, descending, capped at `limit`). Returns an
// evidence chunk, or null when the reader lacks queryHogQL support or the
// query yields no rows.
async function loadEventSummary(options, range) {
  if (!options.reader.queryHogQL)
    return null;
  // Optional per-event filter; supplies both a where-clause fragment and the
  // bound parameter values it references.
  const eventFilter = buildEventFilter(options.eventNames);
  const limit = options.limit ?? 10;
  const result = await options.reader.queryHogQL({
    // Query lines are assembled as an array so the optional filter clause can
    // be dropped via filter(Boolean) before joining with newlines.
    query: [
      "select",
      " event as eventName,",
      " count() as total",
      "from events",
      "where timestamp >= {dateFrom} and timestamp < {dateTo}",
      eventFilter.clause ? `and ${eventFilter.clause}` : "",
      "group by eventName",
      "order by total desc",
      `limit ${limit}`
    ].filter(Boolean).join(`
`),
    values: {
      dateFrom: range.from.toISOString(),
      dateTo: range.to.toISOString(),
      ...eventFilter.values
    }
  });
  const rows = mapRows(result);
  if (rows.length === 0)
    return null;
  // One "- <event>: <count>" bullet per aggregated row.
  const lines = rows.map((row) => {
    const name = asString(row.eventName) ?? "unknown";
    const total = asNumber(row.total);
    return `- ${name}: ${total}`;
  });
  return {
    chunkId: `posthog:event_summary:${range.from.toISOString()}`,
    text: [
      `PostHog event summary (${range.from.toISOString()} → ${range.to.toISOString()}):`,
      ...lines
    ].join(`
`),
    meta: {
      source: "posthog",
      kind: "event_summary",
      dateFrom: range.from.toISOString(),
      dateTo: range.to.toISOString()
    }
  };
}
68
// Fetches raw events for each configured funnel step, feeds them to
// FunnelAnalyzer and renders per-step conversion/drop-off bullets. Returns
// null when no funnel is configured, the reader cannot list events, or no
// events came back for any step.
async function loadFunnelEvidence(options, range) {
  if (!options.funnel)
    return null;
  if (!options.reader.getEvents)
    return null;
  const events = [];
  const eventNames = options.funnel.steps.map((step) => step.eventName);
  // Events are fetched per step name, sequentially, and flattened into one
  // list for the analyzer.
  for (const eventName of eventNames) {
    const response = await options.reader.getEvents({
      event: eventName,
      dateRange: {
        from: range.from,
        to: range.to
      },
      limit: options.limit ?? 500
    });
    response.results.forEach((event) => {
      events.push({
        name: event.event,
        userId: event.distinctId,
        // tenantId is only carried through when it is a plain string property.
        tenantId: typeof event.properties?.tenantId === "string" ? event.properties.tenantId : undefined,
        timestamp: event.timestamp,
        properties: event.properties
      });
    });
  }
  if (events.length === 0)
    return null;
  const analyzer = new FunnelAnalyzer;
  const analysis = analyzer.analyze(events, options.funnel);
  // One bullet per funnel step with its count and rates as reported by the
  // analyzer (format of the rate values is defined by FunnelAnalyzer).
  const lines = analysis.steps.map((step) => {
    return `- ${step.step.eventName}: ${step.count} (conversion ${step.conversionRate}, drop-off ${step.dropOffRate})`;
  });
  return {
    chunkId: `posthog:funnel:${options.funnel.name}`,
    text: [`PostHog funnel analysis — ${options.funnel.name}:`, ...lines].join(`
`),
    meta: {
      source: "posthog",
      kind: "funnel",
      funnelName: options.funnel.name,
      dateFrom: range.from.toISOString(),
      dateTo: range.to.toISOString()
    }
  };
}
114
// Produces an evidence chunk listing up to 10 feature flags and whether each
// is active. Returns null when flags were not requested, the reader cannot
// list flags, or no flags exist.
async function loadFeatureFlagEvidence(options) {
  if (!options.includeFeatureFlags || !options.reader.getFeatureFlags) {
    return null;
  }
  const response = await options.reader.getFeatureFlags({ limit: 10 });
  const flags = response.results;
  if (flags.length === 0) {
    return null;
  }
  const flagLines = [];
  for (const flag of flags) {
    const flagKey = flag.key ?? "unknown";
    const state = flag.active ? "active" : "inactive";
    flagLines.push(`- ${flagKey}: ${state}`);
  }
  return {
    chunkId: "posthog:feature_flags",
    text: ["PostHog feature flags:", ...flagLines].join("\n"),
    meta: {
      source: "posthog",
      kind: "feature_flags"
    }
  };
}
137
// Normalises an optional { from, to } range into concrete Date objects.
// Date instances pass through unchanged, other truthy values are parsed via
// the Date constructor, and the defaults are "30 days ago" through "now".
function resolveRange(dateRange) {
  const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
  const now = new Date();
  let from;
  if (dateRange?.from instanceof Date) {
    from = dateRange.from;
  } else if (dateRange?.from) {
    from = new Date(dateRange.from);
  } else {
    from = new Date(now.getTime() - THIRTY_DAYS_MS);
  }
  let to;
  if (dateRange?.to instanceof Date) {
    to = dateRange.to;
  } else if (dateRange?.to) {
    to = new Date(dateRange.to);
  } else {
    to = now;
  }
  return { from, to };
}
143
// Translates an optional list of event names into a HogQL where-clause
// fragment plus its bound parameter values ({event0}, {event1}, ...).
// Missing or empty input yields an empty object (no filtering).
function buildEventFilter(eventNames) {
  if (!eventNames?.length) {
    return {};
  }
  const values = {};
  const clauses = [];
  eventNames.forEach((eventName, index) => {
    const placeholder = `event${index}`;
    values[placeholder] = eventName;
    clauses.push(`event = {${placeholder}}`);
  });
  // A single name needs no parenthesised "or" grouping.
  const clause = clauses.length === 1 ? clauses[0] : `(${clauses.join(" or ")})`;
  return { clause, values };
}
162
// Converts a columnar HogQL result ({ columns, results: rows[] }) into an
// array of plain records keyed by column name. Non-array rows are skipped
// and a malformed result yields an empty array.
function mapRows(result) {
  const { columns, results } = result;
  if (!Array.isArray(results) || !Array.isArray(columns)) {
    return [];
  }
  const records = [];
  for (const row of results) {
    if (!Array.isArray(row)) {
      continue;
    }
    const record = {};
    for (let i = 0; i < columns.length; i += 1) {
      record[columns[i]] = row[i];
    }
    records.push(record);
  }
  return records;
}
177
// Coerces a cell value to a non-empty string: non-blank strings pass through
// unchanged, numbers are stringified, anything else yields null.
function asString(value) {
  switch (typeof value) {
    case "string":
      return value.trim() ? value : null;
    case "number":
      return String(value);
    default:
      return null;
  }
}
184
// Coerces a cell value to a finite number: finite numbers pass through,
// numeric strings are parsed, everything else (including NaN/Infinity)
// falls back to 0.
function asNumber(value) {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : 0;
  }
  if (typeof value === "string" && value.trim() !== "") {
    const parsed = Number(value);
    if (Number.isFinite(parsed)) {
      return parsed;
    }
  }
  return 0;
}
194
// Builds PostHog evidence-loading options from environment variables.
// Requires POSTHOG_PROJECT_ID and POSTHOG_PERSONAL_API_KEY; returns null
// when either is missing so callers can skip PostHog evidence entirely.
// Optional knobs: POSTHOG_HOST, POSTHOG_EVIDENCE_LOOKBACK_DAYS,
// POSTHOG_EVIDENCE_LIMIT, POSTHOG_EVIDENCE_EVENTS (csv),
// POSTHOG_EVIDENCE_FUNNEL_STEPS (csv) and POSTHOG_EVIDENCE_FEATURE_FLAGS.
function resolvePosthogEvidenceOptionsFromEnv(options = {}) {
  const projectId = process.env.POSTHOG_PROJECT_ID;
  const personalApiKey = process.env.POSTHOG_PERSONAL_API_KEY;
  if (!projectId || !personalApiKey)
    return null;
  const lookbackDays = resolveNumberEnv("POSTHOG_EVIDENCE_LOOKBACK_DAYS", options.defaultLookbackDays ?? 30);
  const limit = resolveNumberEnv("POSTHOG_EVIDENCE_LIMIT", options.defaultLimit ?? 10);
  const now = new Date;
  // Date range: lookbackDays before now, through now.
  const from = new Date(now.getTime() - lookbackDays * 24 * 60 * 60 * 1000);
  const eventNames = resolveCsvEnv("POSTHOG_EVIDENCE_EVENTS");
  const funnelSteps = resolveCsvEnv("POSTHOG_EVIDENCE_FUNNEL_STEPS");
  // Funnel steps become step_1..step_N in csv order; no csv means no funnel.
  const funnel = funnelSteps && funnelSteps.length ? {
    name: "posthog-evidence-funnel",
    steps: funnelSteps.map((eventName, index) => ({
      id: `step_${index + 1}`,
      eventName
    }))
  } : undefined;
  const reader = new PosthogAnalyticsProvider({
    host: process.env.POSTHOG_HOST,
    projectId,
    personalApiKey
  });
  return {
    reader,
    dateRange: { from, to: now },
    eventNames,
    limit,
    funnel,
    // Feature flags are included by default unless explicitly disabled.
    includeFeatureFlags: resolveBooleanEnv("POSTHOG_EVIDENCE_FEATURE_FLAGS", true)
  };
}
226
// Reads a comma-separated env var into an array of trimmed, non-empty
// strings; returns undefined when the variable is unset or empty.
function resolveCsvEnv(key) {
  const raw = process.env[key];
  if (!raw) {
    return undefined;
  }
  const items = [];
  for (const part of raw.split(",")) {
    const trimmed = part.trim();
    if (trimmed) {
      items.push(trimmed);
    }
  }
  return items;
}
232
// Reads a numeric env var, returning the fallback when the variable is
// unset/empty or does not parse to a finite number.
function resolveNumberEnv(key, fallback) {
  const raw = process.env[key];
  if (!raw) {
    return fallback;
  }
  const parsed = Number(raw);
  if (!Number.isFinite(parsed)) {
    return fallback;
  }
  return parsed;
}
239
// Reads a boolean env var: "true" (any casing) is true, any other non-empty
// value is false, and an unset or empty variable yields the fallback.
// Treating the empty string as unset fixes an inconsistency with the sibling
// resolveNumberEnv/resolveCsvEnv helpers, which already fall back on empty
// values; previously KEY="" silently produced false even when the fallback
// was true.
function resolveBooleanEnv(key, fallback) {
  const value = process.env[key];
  if (!value) {
    return fallback;
  }
  return value.toLowerCase() === "true";
}
245
+
246
+ // src/load-evidence.ts
247
+ import fs from "node:fs";
248
+ import path from "node:path";
249
+ import { fileURLToPath } from "node:url";
250
// Directory containing this module (ESM equivalent of __dirname).
var MODULE_DIR = path.dirname(fileURLToPath(import.meta.url));
// Default root folder holding evidence transcripts, relative to the module.
var DEFAULT_EVIDENCE_ROOT = path.join(MODULE_DIR, "../evidence");
// Sub-directories of the evidence root scanned for markdown transcripts.
var DEFAULT_TRANSCRIPT_DIRS = ["interviews", "tickets", "public"];
// Default number of characters per transcript chunk.
var DEFAULT_CHUNK_SIZE = 800;
254
// Removes a leading YAML front-matter block ("---" ... "---") from a
// transcript, returning the remaining body with leading whitespace trimmed.
// Fix: front matter must start at the very beginning of the document. The
// previous implementation searched for the first "---" anywhere in the text,
// so a file with no front matter but containing "---" in its body had real
// content stripped away. Documents without a leading delimiter, or with an
// unterminated one, are returned unchanged.
function stripYamlFrontMatter(contents) {
  if (!contents.startsWith("---"))
    return contents;
  const end = contents.indexOf("---", 3);
  if (end === -1)
    return contents;
  return contents.slice(end + 3).trimStart();
}
263
// Splits a trimmed transcript into fixed-size character chunks. Chunk ids
// follow the pattern "<fileId>#c_NN" with a zero-padded, zero-based index;
// whitespace-only input produces no chunks.
function chunkTranscript(fileId, text, chunkSize) {
  const clean = text.trim();
  const chunks = [];
  let index = 0;
  for (let offset = 0; offset < clean.length; offset += chunkSize) {
    chunks.push({
      chunkId: `${fileId}#c_${String(index).padStart(2, "0")}`,
      text: clean.slice(offset, offset + chunkSize),
      meta: { source: fileId }
    });
    index += 1;
  }
  return chunks;
}
277
// Walks the transcript directories under the evidence root, reads every
// markdown file, strips YAML front matter and chunks the remaining text.
// Missing directories are skipped silently; non-.md files are ignored.
function loadEvidenceChunks(options = {}) {
  const evidenceRoot = options.evidenceRoot ?? DEFAULT_EVIDENCE_ROOT;
  const transcriptDirs = options.transcriptDirs ?? DEFAULT_TRANSCRIPT_DIRS;
  const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE;
  const allChunks = [];
  for (const dirName of transcriptDirs) {
    const dirPath = path.join(evidenceRoot, dirName);
    if (!fs.existsSync(dirPath)) {
      continue;
    }
    const markdownFiles = fs.readdirSync(dirPath).filter((fileName) => path.extname(fileName).toLowerCase() === ".md");
    for (const fileName of markdownFiles) {
      const contents = fs.readFileSync(path.join(dirPath, fileName), "utf8");
      const body = stripYamlFrontMatter(contents);
      // Chunk ids are derived from the file name without its extension.
      const fileId = path.parse(fileName).name;
      allChunks.push(...chunkTranscript(fileId, body, chunkSize));
    }
  }
  return allChunks;
}
300
// Loads the static markdown evidence and, when PostHog options are supplied,
// appends live PostHog-derived evidence chunks after the transcript chunks.
async function loadEvidenceChunksWithSignals(options = {}) {
  const transcriptChunks = loadEvidenceChunks(options);
  if (!options.posthog) {
    return transcriptChunks;
  }
  const signalChunks = await loadPosthogEvidenceChunks(options.posthog);
  return transcriptChunks.concat(signalChunks);
}
307
+
308
+ // src/script.ts
309
+ import fs2 from "node:fs";
310
+ import path2 from "node:path";
311
+ import { fileURLToPath as fileURLToPath2 } from "node:url";
312
+ import { createAgentJsonRunner } from "@contractspec/lib.ai-agent";
313
+ import {
314
+ extractEvidence,
315
+ generateTickets,
316
+ groupProblems,
317
+ impactEngine,
318
+ suggestPatch
319
+ } from "@contractspec/lib.product-intent-utils";
320
// The discovery question every pipeline stage is grounded in.
var QUESTION = "Which activation and onboarding friction should we prioritize next?";
// Fallback AI configuration, overridable via environment variables.
var DEFAULT_PROVIDER = "openai";
var DEFAULT_MODEL = "gpt-5.2";
var DEFAULT_TEMPERATURE = 0;
var DEFAULT_MAX_ATTEMPTS = 2;
// Directory of this module (ESM __dirname) and the repo root derived from it.
var MODULE_DIR2 = path2.dirname(fileURLToPath2(import.meta.url));
var REPO_ROOT = path2.resolve(MODULE_DIR2, "../../../..");
// Repo-relative files scanned by the deterministic impact engine.
var REPO_SCAN_FILES = [
  "packages/examples/product-intent/src/load-evidence.ts",
  "packages/examples/product-intent/src/script.ts",
  "packages/libs/contracts/src/product-intent/contract-patch-intent.ts",
  "packages/libs/contracts/src/product-intent/spec.ts",
  "packages/libs/product-intent-utils/src/impact-engine.ts"
];
334
// Reads each listed repo-relative file that exists under root, returning
// { path, content } records in list order; missing files are skipped.
function collectRepoFiles(root, files) {
  const found = [];
  files.forEach((relativePath) => {
    const absolutePath = path2.join(root, relativePath);
    if (fs2.existsSync(absolutePath)) {
      found.push({
        path: relativePath,
        content: fs2.readFileSync(absolutePath, "utf8")
      });
    }
  });
  return found;
}
345
// Determines the AI provider from CONTRACTSPEC_AI_PROVIDER / AI_PROVIDER
// (case-insensitive), defaulting to DEFAULT_PROVIDER. Throws when the value
// is not one of the supported providers.
function resolveProviderName() {
  const configured = process.env.CONTRACTSPEC_AI_PROVIDER ?? process.env.AI_PROVIDER ?? DEFAULT_PROVIDER;
  const providerName = configured.toLowerCase();
  const supported = [
    "openai",
    "anthropic",
    "mistral",
    "gemini",
    "ollama"
  ];
  if (supported.includes(providerName)) {
    return providerName;
  }
  throw new Error(`Unsupported AI provider '${configured}'. Allowed: ${supported.join(", ")}`);
}
360
// Looks up the API key env var for the given provider. gemini accepts either
// GOOGLE_API_KEY or GEMINI_API_KEY (in that precedence order) and ollama
// needs no key, so undefined is returned for it.
function resolveApiKey(provider) {
  const env = process.env;
  if (provider === "openai") {
    return env.OPENAI_API_KEY;
  }
  if (provider === "anthropic") {
    return env.ANTHROPIC_API_KEY;
  }
  if (provider === "mistral") {
    return env.MISTRAL_API_KEY;
  }
  if (provider === "gemini") {
    return env.GOOGLE_API_KEY ?? env.GEMINI_API_KEY;
  }
  return undefined;
}
374
// Reads the sampling temperature from CONTRACTSPEC_AI_TEMPERATURE /
// AI_TEMPERATURE, defaulting when the variable is unset/empty or not a number.
function resolveTemperature() {
  const raw = process.env.CONTRACTSPEC_AI_TEMPERATURE ?? process.env.AI_TEMPERATURE;
  if (!raw) {
    return DEFAULT_TEMPERATURE;
  }
  const parsed = Number.parseFloat(raw);
  if (Number.isNaN(parsed)) {
    return DEFAULT_TEMPERATURE;
  }
  return parsed;
}
381
// Reads the retry budget from CONTRACTSPEC_AI_MAX_ATTEMPTS / AI_MAX_ATTEMPTS.
// Invalid values fall back to the default and the result is clamped to at
// least one attempt.
function resolveMaxAttempts() {
  const raw = process.env.CONTRACTSPEC_AI_MAX_ATTEMPTS ?? process.env.AI_MAX_ATTEMPTS;
  if (!raw) {
    return DEFAULT_MAX_ATTEMPTS;
  }
  const parsed = Number.parseInt(raw, 10);
  if (Number.isNaN(parsed)) {
    return DEFAULT_MAX_ATTEMPTS;
  }
  return Math.max(1, parsed);
}
388
// Writes contents to <dir>/<name> as UTF-8 and returns the full path written.
function writeArtifact(dir, name, contents) {
  const artifactPath = path2.join(dir, name);
  fs2.writeFileSync(artifactPath, contents, "utf8");
  return artifactPath;
}
393
// Creates a pipeline logger that appends one JSON line per log entry to
// <logDir>/trace.jsonl. Large payloads (prompt/response/error) are written
// out as separate artifact files and referenced from the trace record by
// repo-relative path instead of being inlined.
function createPipelineLogger(logDir, runId) {
  const tracePath = path2.join(logDir, "trace.jsonl");
  return {
    log(entry) {
      // Artifact files share a stage/attempt/phase base name.
      const baseName = `${entry.stage}-attempt-${entry.attempt}-${entry.phase}`;
      const payload = {
        runId,
        stage: entry.stage,
        phase: entry.phase,
        attempt: entry.attempt,
        timestamp: entry.timestamp
      };
      if (entry.prompt) {
        payload.promptPath = path2.relative(REPO_ROOT, writeArtifact(logDir, `${baseName}.prompt.txt`, entry.prompt));
      }
      if (entry.response) {
        payload.responsePath = path2.relative(REPO_ROOT, writeArtifact(logDir, `${baseName}.response.txt`, entry.response));
      }
      if (entry.error) {
        payload.errorPath = path2.relative(REPO_ROOT, writeArtifact(logDir, `${baseName}.error.txt`, entry.error));
      }
      // Append the JSONL record with a trailing newline.
      fs2.appendFileSync(tracePath, `${JSON.stringify(payload)}
`, "utf8");
    }
  };
}
419
// Runs the end-to-end product-intent pipeline: env/config resolution →
// evidence loading (markdown transcripts + optional PostHog signals) →
// evidence extraction → problem grouping → ticket generation → patch intent
// for the top ticket → deterministic impact analysis over selected repo files.
// Intermediate results are printed as JSON and a JSONL trace is written per run.
async function main() {
  const provider = resolveProviderName();
  const temperature = resolveTemperature();
  const maxAttempts = resolveMaxAttempts();
  const apiKey = resolveApiKey(provider);
  const proxyUrl = process.env.CONTRACTSPEC_AI_PROXY_URL;
  const organizationId = process.env.CONTRACTSPEC_ORG_ID;
  const baseUrl = process.env.OLLAMA_BASE_URL;
  // NOTE(review): DEFAULT_MODEL is only applied for the mistral provider;
  // other providers fall through to their runner default — confirm intended.
  const model = process.env.CONTRACTSPEC_AI_MODEL ?? process.env.AI_MODEL ?? (provider === "mistral" ? DEFAULT_MODEL : undefined);
  // Any one of api key, proxy url or org id is accepted as a credential source.
  if (provider !== "ollama" && !apiKey && !proxyUrl && !organizationId) {
    throw new Error(`Missing API credentials for ${provider}. Set the provider API key or CONTRACTSPEC_AI_PROXY_URL.`);
  }
  // Filesystem-safe run id derived from the current timestamp.
  const runId = new Date().toISOString().replace(/[:.]/g, "-");
  const logDir = path2.join(MODULE_DIR2, "../logs", `run-${runId}`);
  fs2.mkdirSync(logDir, { recursive: true });
  const logger = createPipelineLogger(logDir, runId);
  const modelRunner = await createAgentJsonRunner({
    provider: {
      provider,
      model,
      apiKey,
      baseUrl,
      proxyUrl,
      organizationId
    },
    temperature,
    system: "You are a product discovery analyst. Respond with strict JSON only and use exact quotes for citations."
  });
  console.log(`AI provider: ${provider}`);
  console.log(`Model: ${model ?? "(provider default)"}`);
  console.log(`Temperature: ${temperature}`);
  console.log(`Max attempts: ${maxAttempts}`);
  console.log(`Trace log: ${path2.relative(REPO_ROOT, logDir)}/trace.jsonl`);
  // PostHog evidence is optional and driven entirely by env configuration.
  const posthogEvidence = resolvePosthogEvidenceOptionsFromEnv();
  const evidenceChunks = await loadEvidenceChunksWithSignals({
    posthog: posthogEvidence ?? undefined
  });
  console.log(`Loaded ${evidenceChunks.length} evidence chunks`);
  // Stage 1: extract cited findings relevant to QUESTION.
  const findings = await extractEvidence(evidenceChunks, QUESTION, {
    maxFindings: 12,
    modelRunner,
    logger,
    maxAttempts
  });
  console.log(`
Evidence findings:
`);
  console.log(JSON.stringify(findings, null, 2));
  // Stage 2: cluster the findings into problem statements.
  const problems = await groupProblems(findings, QUESTION, {
    modelRunner,
    logger,
    maxAttempts
  });
  console.log(`
Problems:
`);
  console.log(JSON.stringify(problems, null, 2));
  // Stage 3: turn problems + findings into actionable tickets.
  const tickets = await generateTickets(problems, findings, QUESTION, {
    modelRunner,
    logger,
    maxAttempts
  });
  console.log(`
Tickets:
`);
  console.log(JSON.stringify(tickets, null, 2));
  if (!tickets[0]) {
    console.log(`
No tickets generated.`);
    return;
  }
  // Stage 4: draft a contract patch intent for the top-ranked ticket only.
  const patchIntent = await suggestPatch(tickets[0], {
    modelRunner,
    logger,
    maxAttempts
  });
  console.log(`
Patch intent:
`);
  console.log(JSON.stringify(patchIntent, null, 2));
  // Stage 5: deterministic (non-LLM) impact scan over a fixed file allowlist.
  const repoFiles = collectRepoFiles(REPO_ROOT, REPO_SCAN_FILES);
  const impact = impactEngine(patchIntent, {
    repoFiles,
    maxHitsPerChange: 3
  });
  console.log(`
Impact report (deterministic):
`);
  console.log(JSON.stringify(impact, null, 2));
}
509
// Entry point: report failures and signal a non-zero exit via exitCode
// (rather than process.exit) so pending writes can flush.
main().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});