@systima/aiact-audit-log 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/COMPLIANCE.md +180 -0
- package/LICENSE +21 -0
- package/README.md +406 -0
- package/dist/ai-sdk/index.cjs +102 -0
- package/dist/ai-sdk/index.cjs.map +1 -0
- package/dist/ai-sdk/index.d.cts +341 -0
- package/dist/ai-sdk/index.d.ts +341 -0
- package/dist/ai-sdk/index.js +77 -0
- package/dist/ai-sdk/index.js.map +1 -0
- package/dist/ai-sdk/middleware/index.cjs +259 -0
- package/dist/ai-sdk/middleware/index.cjs.map +1 -0
- package/dist/ai-sdk/middleware/index.d.cts +323 -0
- package/dist/ai-sdk/middleware/index.d.ts +323 -0
- package/dist/ai-sdk/middleware/index.js +236 -0
- package/dist/ai-sdk/middleware/index.js.map +1 -0
- package/dist/cli/index.js +3332 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/index.cjs +1385 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +501 -0
- package/dist/index.d.ts +501 -0
- package/dist/index.js +1342 -0
- package/dist/index.js.map +1 -0
- package/dist/prompt-F4GUFYMH.js +755 -0
- package/dist/prompt-F4GUFYMH.js.map +1 -0
- package/package.json +91 -0
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
// src/ai-sdk/middleware/index.ts
|
|
2
|
+
import {
|
|
3
|
+
wrapLanguageModel
|
|
4
|
+
} from "ai";
|
|
5
|
+
|
|
6
|
+
// src/context.ts
import { AsyncLocalStorage } from "async_hooks";

/**
 * Module-level AsyncLocalStorage holding the audit context
 * (decisionId and optional metadata) for the current async chain.
 */
var auditStorage = new AsyncLocalStorage();

/**
 * Read the audit context bound to the current async execution chain.
 *
 * @returns The active context object, or undefined when no context
 *   has been established for this chain.
 */
function getAuditContext() {
  const activeContext = auditStorage.getStore();
  return activeContext;
}
|
|
12
|
+
|
|
13
|
+
// src/utils/uuid.ts
import { randomBytes } from "crypto";

/**
 * Generate a UUIDv7: 48-bit big-endian Unix-millisecond timestamp in
 * bytes 0-5, version/variant bits per RFC 9562, random bits elsewhere.
 *
 * @returns A lowercase canonical UUID string (8-4-4-4-12 hex groups).
 */
function generateUUIDv7() {
  const timestampMs = Date.now();
  const bytes = new Uint8Array(16);
  // Bytes 0-5: milliseconds since epoch, most significant byte first.
  // Float division followed by `&` truncates via ToInt32, matching a shift.
  for (let i = 0; i < 6; i++) {
    bytes[5 - i] = timestampMs / 2 ** (8 * i) & 255;
  }
  // Bytes 6-15: cryptographically random filler.
  bytes.set(randomBytes(10), 6);
  // High nibble of byte 6 = 0b0111 (version 7).
  bytes[6] = bytes[6] & 15 | 112;
  // Top two bits of byte 8 = 0b10 (RFC 9562 variant).
  bytes[8] = bytes[8] & 63 | 128;
  return formatUUID(bytes);
}

/**
 * Render 16 bytes as a canonical hyphenated UUID string.
 *
 * @param bytes - A 16-element Uint8Array.
 * @returns The 36-character UUID representation.
 */
function formatUUID(bytes) {
  const hex = [...bytes].map((b) => b.toString(16).padStart(2, "0")).join("");
  const groups = [
    hex.slice(0, 8),
    hex.slice(8, 12),
    hex.slice(12, 16),
    hex.slice(16, 20),
    hex.slice(20)
  ];
  return groups.join("-");
}
|
|
34
|
+
|
|
35
|
+
// src/ai-sdk/middleware/index.ts

/**
 * Wrap a Vercel AI SDK language model so that every generate and stream
 * call is automatically recorded through the supplied audit logger.
 *
 * @param model - The language model to wrap (read for modelId/provider
 *   fallbacks and passed to wrapLanguageModel).
 * @param options - Capture configuration:
 *   - logger: audit logger; its async `log` method receives one entry per
 *     inference (and per tool call).
 *   - captureInputs (default true): when false, the serialised prompt is
 *     replaced by an empty hash-typed placeholder.
 *   - captureOutputs (default true): when false, output text is likewise
 *     replaced by a hash-typed placeholder (or null when there is no text).
 *   - captureToolCalls: unless explicitly false, each tool call in a
 *     generate result is logged as a separate "tool_call" event.
 *   - captureParameters: unless explicitly false, sampling parameters are
 *     copied into each inference entry.
 * @returns The wrapped model returned by wrapLanguageModel.
 */
function auditMiddleware(model, options) {
  const {
    logger,
    captureInputs = true,
    captureOutputs = true
  } = options;
  // Middleware object per the AI SDK v1 middleware interface
  // (middlewareVersion: "v1", wrapGenerate/wrapStream hooks).
  const middleware = {
    middlewareVersion: "v1",
    wrapGenerate: async ({ doGenerate, params }) => {
      const startTime = Date.now();
      const context = getAuditContext();
      // Reuse the ambient decisionId when one is set on the async chain;
      // otherwise mint a fresh time-ordered id for this call.
      const decisionId = context?.decisionId ?? generateUUIDv7();
      try {
        const result = await doGenerate();
        const latencyMs = Date.now() - startTime;
        // Prefer the provider-reported model id from the response over the
        // id of the wrapped model object.
        const responseModelId = extractModelId(result);
        const text = extractText(result);
        const usage = extractUsage(result);
        const finishReason = extractFinishReason(result);
        await logger.log({
          decisionId,
          eventType: "inference",
          modelId: responseModelId ?? model.modelId ?? null,
          // NOTE(review): assumes response model ids look like
          // "provider/model" — ids without "/" yield the whole id here.
          providerId: responseModelId?.split("/")[0] ?? model.provider ?? null,
          input: captureInputs ? { value: serialisePrompt(params.prompt) } : { value: "", type: "hash" },
          // When outputs are suppressed, a hash placeholder is logged only
          // if there actually was text; otherwise output is null.
          output: captureOutputs ? { value: text, finishReason } : text ? { value: "", type: "hash", finishReason } : null,
          latencyMs,
          usage,
          parameters: options.captureParameters !== false ? extractParams(params) : null,
          error: null,
          captureMethod: "middleware",
          metadata: context?.metadata
        });
        // Tool calls are logged as separate events sharing the decisionId,
        // sequentially (each log awaited before the next).
        if (options.captureToolCalls !== false) {
          const toolCalls = extractToolCalls(result);
          for (const tc of toolCalls) {
            await logger.log({
              decisionId,
              eventType: "tool_call",
              modelId: responseModelId ?? model.modelId ?? null,
              providerId: null,
              input: { value: JSON.stringify(tc.args) },
              output: null,
              latencyMs: null,
              usage: null,
              parameters: null,
              error: null,
              toolCall: {
                toolName: tc.toolName,
                toolArgs: tc.args
              },
              captureMethod: "middleware",
              metadata: context?.metadata
            });
          }
        }
        return result;
      } catch (error) {
        // Log the failed inference (name/message of the thrown value) and
        // rethrow so callers still see the original error.
        const latencyMs = Date.now() - startTime;
        await logger.log({
          decisionId,
          eventType: "inference",
          modelId: model.modelId ?? null,
          providerId: model.provider ?? null,
          input: captureInputs ? { value: serialisePrompt(params.prompt) } : { value: "", type: "hash" },
          output: null,
          latencyMs,
          usage: null,
          parameters: options.captureParameters !== false ? extractParams(params) : null,
          error: {
            code: error.name ?? "UNKNOWN",
            message: error.message ?? String(error)
          },
          captureMethod: "middleware",
          metadata: context?.metadata
        });
        throw error;
      }
    },
    wrapStream: async ({ doStream, params }) => {
      const startTime = Date.now();
      const context = getAuditContext();
      const decisionId = context?.decisionId ?? generateUUIDv7();
      try {
        const result = await doStream();
        const { stream, ...rest } = result;
        // Accumulators filled while the consumer drains the stream; the
        // audit entry is written once, in flush(), when the stream closes.
        const chunks = [];
        let streamUsage = null;
        let streamFinishReason;
        let streamModelId = null;
        const loggingStream = stream.pipeThrough(
          new TransformStream({
            transform(chunk, controller) {
              // Collect text deltas so the full output can be logged.
              if (chunk.type === "text-delta") {
                chunks.push(chunk.textDelta);
              }
              // The "finish" chunk carries usage, finish reason, and the
              // provider-reported model id.
              if (chunk.type === "finish") {
                const finishChunk = chunk;
                streamUsage = finishChunk.usage ?? null;
                streamFinishReason = finishChunk.finishReason;
                const response = finishChunk.response;
                streamModelId = response?.modelId ?? null;
              }
              // Pass every chunk through unchanged to the consumer.
              controller.enqueue(chunk);
            },
            async flush() {
              // Latency here spans doStream() start through stream close,
              // i.e. it includes the consumer's read time.
              const latencyMs = Date.now() - startTime;
              const fullText = chunks.join("");
              await logger.log({
                decisionId,
                eventType: "inference",
                modelId: streamModelId ?? model.modelId ?? null,
                providerId: streamModelId?.split("/")[0] ?? model.provider ?? null,
                input: captureInputs ? { value: serialisePrompt(params.prompt) } : { value: "", type: "hash" },
                output: captureOutputs ? { value: fullText, finishReason: streamFinishReason } : fullText ? { value: "", type: "hash", finishReason: streamFinishReason } : null,
                latencyMs,
                usage: streamUsage,
                parameters: options.captureParameters !== false ? extractParams(params) : null,
                error: null,
                captureMethod: "middleware",
                metadata: context?.metadata
              });
            }
          })
        );
        return { stream: loggingStream, ...rest };
      } catch (error) {
        // This catch covers failures of doStream() itself; errors raised
        // mid-stream propagate through the piped stream instead.
        const latencyMs = Date.now() - startTime;
        await logger.log({
          decisionId,
          eventType: "inference",
          modelId: model.modelId ?? null,
          providerId: model.provider ?? null,
          input: captureInputs ? { value: serialisePrompt(params.prompt) } : { value: "", type: "hash" },
          output: null,
          latencyMs,
          usage: null,
          parameters: options.captureParameters !== false ? extractParams(params) : null,
          error: {
            code: error.name ?? "UNKNOWN",
            message: error.message ?? String(error)
          },
          captureMethod: "middleware",
          metadata: context?.metadata
        });
        throw error;
      }
    }
  };
  return wrapLanguageModel({ model, middleware });
}
|
|
187
|
+
/**
 * Serialise a prompt for audit storage: strings pass through untouched,
 * anything else is JSON-stringified, and values that cannot be JSON
 * encoded (e.g. circular structures) fall back to String().
 *
 * @param prompt - The prompt value from the call params.
 * @returns The serialised prompt.
 */
function serialisePrompt(prompt) {
  if (typeof prompt === "string") {
    return prompt;
  }
  let serialised;
  try {
    serialised = JSON.stringify(prompt);
  } catch {
    serialised = String(prompt);
  }
  return serialised;
}
|
|
195
|
+
/**
 * Read the provider-reported model id off a generate result's response.
 *
 * @param result - A generate result object.
 * @returns The model id string, or null when absent.
 */
function extractModelId(result) {
  return result.response?.modelId ?? null;
}
|
|
199
|
+
/**
 * Read the text output of a generate result.
 *
 * @param result - A generate result object.
 * @returns The text, or an empty string when null/undefined.
 */
function extractText(result) {
  const { text } = result;
  return text ?? "";
}
|
|
202
|
+
/**
 * Read the finish reason off a generate result.
 *
 * @param result - A generate result object.
 * @returns The finish reason, or undefined when not present.
 */
function extractFinishReason(result) {
  const { finishReason } = result;
  return finishReason;
}
|
|
205
|
+
/**
 * Normalise token usage from a generate result.
 *
 * @param result - A generate result object.
 * @returns An object with promptTokens / completionTokens / totalTokens
 *   (missing counters default to 0), or null when the result has no usage.
 */
function extractUsage(result) {
  const { usage } = result;
  if (!usage) {
    return null;
  }
  const { promptTokens, completionTokens, totalTokens } = usage;
  return {
    promptTokens: promptTokens ?? 0,
    completionTokens: completionTokens ?? 0,
    totalTokens: totalTokens ?? 0
  };
}
|
|
214
|
+
/**
 * Normalise tool calls from a generate result into { toolName, args }
 * pairs for audit logging.
 *
 * Missing names default to "unknown" and missing args to {} so the log
 * entries never carry undefined fields.
 *
 * @param result - A generate result object.
 * @returns An array of normalised tool calls (empty when the result has
 *   no tool-call array).
 */
function extractToolCalls(result) {
  const toolCalls = result.toolCalls;
  // Array.isArray is already false for null/undefined, so the previous
  // separate `!toolCalls` guard was redundant.
  if (!Array.isArray(toolCalls)) return [];
  return toolCalls.map((tc) => ({
    toolName: tc.toolName ?? "unknown",
    args: tc.args ?? {}
  }));
}
|
|
222
|
+
/**
 * Copy the audit-relevant sampling parameters off a call's params object.
 *
 * Only parameters that are explicitly set (not undefined) are included,
 * so the audit record distinguishes "unset" from falsy values such as
 * temperature: 0 or seed: 0.
 *
 * @param params - The AI SDK call parameters.
 * @returns An object containing the subset of audited parameters that
 *   were set (possibly empty).
 */
function extractParams(params) {
  const auditedKeys = [
    "temperature",
    "maxTokens",
    "topP",
    "topK",
    "frequencyPenalty",
    "presencePenalty",
    "seed"
  ];
  const extracted = {};
  for (const key of auditedKeys) {
    if (params[key] !== void 0) extracted[key] = params[key];
  }
  // The former `length > 0 ? extracted : {}` tail was redundant: when no
  // keys were copied, `extracted` already IS an empty object.
  return extracted;
}
|
|
233
|
+
export {
|
|
234
|
+
auditMiddleware
|
|
235
|
+
};
|
|
236
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/ai-sdk/middleware/index.ts","../../../src/context.ts","../../../src/utils/uuid.ts"],"sourcesContent":["/**\n * AI SDK middleware — automatic capture for Vercel AI SDK models.\n *\n * Wraps a LanguageModelV1 and automatically logs every inference call.\n * This is the primary mechanism for satisfying Article 12(1)'s\n * \"automatic recording\" requirement for inference events.\n *\n * Usage:\n * const model = auditMiddleware(anthropic('claude-sonnet-4-5-20250929'), { logger })\n * const result = await generateText({ model, prompt })\n * // ^ Inference is automatically logged\n */\n\nimport {\n wrapLanguageModel,\n type LanguageModelV1,\n type LanguageModelV1Middleware,\n} from 'ai'\nimport type { AuditLogger } from '../../logger.js'\nimport { getAuditContext } from '../../context.js'\nimport { generateUUIDv7 } from '../../utils/uuid.js'\n\nexport interface AuditMiddlewareOptions {\n logger: AuditLogger\n captureInputs?: boolean\n captureOutputs?: boolean\n captureToolCalls?: boolean\n captureParameters?: boolean\n}\n\nexport function auditMiddleware(\n model: LanguageModelV1,\n options: AuditMiddlewareOptions,\n): LanguageModelV1 {\n const {\n logger,\n captureInputs = true,\n captureOutputs = true,\n } = options\n\n const middleware: LanguageModelV1Middleware = {\n middlewareVersion: 'v1',\n\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n const context = getAuditContext()\n const decisionId = context?.decisionId ?? generateUUIDv7()\n\n try {\n const result = await doGenerate()\n const latencyMs = Date.now() - startTime\n\n const responseModelId = extractModelId(result)\n const text = extractText(result)\n const usage = extractUsage(result)\n const finishReason = extractFinishReason(result)\n\n await logger.log({\n decisionId,\n eventType: 'inference',\n modelId: responseModelId ?? model.modelId ?? null,\n providerId: responseModelId?.split('/')[0] ?? model.provider ?? 
null,\n input: captureInputs\n ? { value: serialisePrompt(params.prompt) }\n : { value: '', type: 'hash' },\n output: captureOutputs\n ? { value: text, finishReason }\n : text\n ? { value: '', type: 'hash', finishReason }\n : null,\n latencyMs,\n usage,\n parameters: options.captureParameters !== false\n ? extractParams(params)\n : null,\n error: null,\n captureMethod: 'middleware',\n metadata: context?.metadata,\n })\n\n if (options.captureToolCalls !== false) {\n const toolCalls = extractToolCalls(result)\n for (const tc of toolCalls) {\n await logger.log({\n decisionId,\n eventType: 'tool_call',\n modelId: responseModelId ?? model.modelId ?? null,\n providerId: null,\n input: { value: JSON.stringify(tc.args) },\n output: null,\n latencyMs: null,\n usage: null,\n parameters: null,\n error: null,\n toolCall: {\n toolName: tc.toolName,\n toolArgs: tc.args,\n },\n captureMethod: 'middleware',\n metadata: context?.metadata,\n })\n }\n }\n\n return result\n } catch (error) {\n const latencyMs = Date.now() - startTime\n\n await logger.log({\n decisionId,\n eventType: 'inference',\n modelId: model.modelId ?? null,\n providerId: model.provider ?? null,\n input: captureInputs\n ? { value: serialisePrompt(params.prompt) }\n : { value: '', type: 'hash' },\n output: null,\n latencyMs,\n usage: null,\n parameters: options.captureParameters !== false\n ? extractParams(params)\n : null,\n error: {\n code: (error as Error).name ?? 'UNKNOWN',\n message: (error as Error).message ?? String(error),\n },\n captureMethod: 'middleware',\n metadata: context?.metadata,\n })\n\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n const context = getAuditContext()\n const decisionId = context?.decisionId ?? 
generateUUIDv7()\n\n try {\n const result = await doStream()\n const { stream, ...rest } = result\n\n const chunks: string[] = []\n let streamUsage: { promptTokens: number; completionTokens: number; totalTokens: number } | null = null\n let streamFinishReason: string | undefined\n let streamModelId: string | null = null\n\n const loggingStream = stream.pipeThrough(\n new TransformStream({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n chunks.push(chunk.textDelta)\n }\n if (chunk.type === 'finish') {\n const finishChunk = chunk as Record<string, unknown>\n streamUsage = finishChunk.usage as typeof streamUsage ?? null\n streamFinishReason = finishChunk.finishReason as string | undefined\n const response = finishChunk.response as Record<string, unknown> | undefined\n streamModelId = response?.modelId as string ?? null\n }\n controller.enqueue(chunk)\n },\n async flush() {\n const latencyMs = Date.now() - startTime\n const fullText = chunks.join('')\n\n await logger.log({\n decisionId,\n eventType: 'inference',\n modelId: streamModelId ?? model.modelId ?? null,\n providerId: streamModelId?.split('/')[0] ?? model.provider ?? null,\n input: captureInputs\n ? { value: serialisePrompt(params.prompt) }\n : { value: '', type: 'hash' },\n output: captureOutputs\n ? { value: fullText, finishReason: streamFinishReason }\n : fullText\n ? { value: '', type: 'hash', finishReason: streamFinishReason }\n : null,\n latencyMs,\n usage: streamUsage,\n parameters: options.captureParameters !== false\n ? extractParams(params)\n : null,\n error: null,\n captureMethod: 'middleware',\n metadata: context?.metadata,\n })\n },\n }),\n )\n\n return { stream: loggingStream, ...rest }\n } catch (error) {\n const latencyMs = Date.now() - startTime\n\n await logger.log({\n decisionId,\n eventType: 'inference',\n modelId: model.modelId ?? null,\n providerId: model.provider ?? null,\n input: captureInputs\n ? 
{ value: serialisePrompt(params.prompt) }\n : { value: '', type: 'hash' },\n output: null,\n latencyMs,\n usage: null,\n parameters: options.captureParameters !== false\n ? extractParams(params)\n : null,\n error: {\n code: (error as Error).name ?? 'UNKNOWN',\n message: (error as Error).message ?? String(error),\n },\n captureMethod: 'middleware',\n metadata: context?.metadata,\n })\n\n throw error\n }\n },\n }\n\n return wrapLanguageModel({ model, middleware })\n}\n\n// ── Extraction helpers ──────────────────────────────────────\n\nfunction serialisePrompt(prompt: unknown): string {\n if (typeof prompt === 'string') return prompt\n try {\n return JSON.stringify(prompt)\n } catch {\n return String(prompt)\n }\n}\n\nfunction extractModelId(result: Record<string, unknown>): string | null {\n const response = result.response as Record<string, unknown> | undefined\n return (response?.modelId as string) ?? null\n}\n\nfunction extractText(result: Record<string, unknown>): string {\n return (result.text as string) ?? ''\n}\n\nfunction extractFinishReason(result: Record<string, unknown>): string | undefined {\n return result.finishReason as string | undefined\n}\n\nfunction extractUsage(result: Record<string, unknown>): {\n promptTokens: number\n completionTokens: number\n totalTokens: number\n} | null {\n const usage = result.usage as Record<string, unknown> | undefined\n if (!usage) return null\n return {\n promptTokens: (usage.promptTokens as number) ?? 0,\n completionTokens: (usage.completionTokens as number) ?? 0,\n totalTokens: (usage.totalTokens as number) ?? 0,\n }\n}\n\nfunction extractToolCalls(result: Record<string, unknown>): Array<{\n toolName: string\n args: Record<string, unknown>\n}> {\n const toolCalls = result.toolCalls as Array<Record<string, unknown>> | undefined\n if (!toolCalls || !Array.isArray(toolCalls)) return []\n\n return toolCalls.map((tc) => ({\n toolName: (tc.toolName as string) ?? 'unknown',\n args: (tc.args as Record<string, unknown>) ?? 
{},\n }))\n}\n\nfunction extractParams(params: Record<string, unknown>): Record<string, unknown> {\n const extracted: Record<string, unknown> = {}\n if (params.temperature !== undefined) extracted['temperature'] = params.temperature\n if (params.maxTokens !== undefined) extracted['maxTokens'] = params.maxTokens\n if (params.topP !== undefined) extracted['topP'] = params.topP\n if (params.topK !== undefined) extracted['topK'] = params.topK\n if (params.frequencyPenalty !== undefined) extracted['frequencyPenalty'] = params.frequencyPenalty\n if (params.presencePenalty !== undefined) extracted['presencePenalty'] = params.presencePenalty\n if (params.seed !== undefined) extracted['seed'] = params.seed\n return Object.keys(extracted).length > 0 ? extracted : {}\n}\n","/**\n * AsyncLocalStorage-based context propagation for audit logging.\n *\n * Allows decision IDs and metadata to flow through async call chains\n * without manual threading. This reduces the integration burden that\n * leads to coverage gaps.\n */\n\nimport { AsyncLocalStorage } from 'node:async_hooks'\n\nexport interface AuditContext {\n decisionId: string\n parentDecisionId?: string\n metadata?: Record<string, string | number | boolean>\n}\n\nconst auditStorage = new AsyncLocalStorage<AuditContext>()\n\nexport function withAuditContext<T>(\n context: AuditContext,\n callback: () => T | Promise<T>,\n): T | Promise<T> {\n return auditStorage.run(context, callback)\n}\n\nexport function getAuditContext(): AuditContext | undefined {\n return auditStorage.getStore()\n}\n\nexport class MissingDecisionIdError extends Error {\n constructor() {\n super(\n 'decisionId is required. 
Either provide it explicitly in the log entry, ' +\n 'or wrap the call in withAuditContext({ decisionId: \"...\" }, callback).',\n )\n this.name = 'MissingDecisionIdError'\n }\n}\n","import { randomBytes } from 'node:crypto'\n\n/**\n * Generate a UUIDv7 (time-ordered, RFC 9562).\n *\n * UUIDv7 embeds a Unix timestamp in the most significant 48 bits,\n * making IDs naturally time-ordered and sortable without parsing\n * timestamps. The remaining bits are random for uniqueness.\n *\n * Format: tttttttt-tttt-7rrr-Rrrr-rrrrrrrrrrrr\n * t = timestamp bits (48-bit ms since epoch)\n * 7 = version nibble\n * R = variant bits (10xx)\n * r = random bits\n */\nexport function generateUUIDv7(): string {\n const now = Date.now()\n const bytes = new Uint8Array(16)\n\n const randomPart = randomBytes(10)\n bytes.set(randomPart, 6)\n\n bytes[0] = (now / 2 ** 40) & 0xff\n bytes[1] = (now / 2 ** 32) & 0xff\n bytes[2] = (now / 2 ** 24) & 0xff\n bytes[3] = (now / 2 ** 16) & 0xff\n bytes[4] = (now / 2 ** 8) & 0xff\n bytes[5] = now & 0xff\n\n bytes[6] = (bytes[6] & 0x0f) | 0x70\n bytes[8] = (bytes[8] & 0x3f) | 0x80\n\n return formatUUID(bytes)\n}\n\nfunction formatUUID(bytes: Uint8Array): string {\n const hex = Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join('')\n return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20, 32)}`\n}\n\nconst UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-7[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/\n\nexport function isValidUUIDv7(id: string): boolean {\n return UUID_PATTERN.test(id)\n}\n\nexport function extractTimestampFromUUIDv7(id: string): number {\n const hex = id.replace(/-/g, '').slice(0, 12)\n return parseInt(hex, 
16)\n}\n"],"mappings":";AAaA;AAAA,EACE;AAAA,OAGK;;;ACTP,SAAS,yBAAyB;AAQlC,IAAM,eAAe,IAAI,kBAAgC;AASlD,SAAS,kBAA4C;AAC1D,SAAO,aAAa,SAAS;AAC/B;;;AC3BA,SAAS,mBAAmB;AAerB,SAAS,iBAAyB;AACvC,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,QAAQ,IAAI,WAAW,EAAE;AAE/B,QAAM,aAAa,YAAY,EAAE;AACjC,QAAM,IAAI,YAAY,CAAC;AAEvB,QAAM,CAAC,IAAK,MAAM,KAAK,KAAM;AAC7B,QAAM,CAAC,IAAK,MAAM,KAAK,KAAM;AAC7B,QAAM,CAAC,IAAK,MAAM,KAAK,KAAM;AAC7B,QAAM,CAAC,IAAK,MAAM,KAAK,KAAM;AAC7B,QAAM,CAAC,IAAK,MAAM,KAAK,IAAK;AAC5B,QAAM,CAAC,IAAI,MAAM;AAEjB,QAAM,CAAC,IAAK,MAAM,CAAC,IAAI,KAAQ;AAC/B,QAAM,CAAC,IAAK,MAAM,CAAC,IAAI,KAAQ;AAE/B,SAAO,WAAW,KAAK;AACzB;AAEA,SAAS,WAAW,OAA2B;AAC7C,QAAM,MAAM,MAAM,KAAK,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,EAAE;AAC7E,SAAO,GAAG,IAAI,MAAM,GAAG,CAAC,CAAC,IAAI,IAAI,MAAM,GAAG,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC,IAAI,IAAI,MAAM,IAAI,EAAE,CAAC;AAC9G;;;AFRO,SAAS,gBACd,OACA,SACiB;AACjB,QAAM;AAAA,IACJ;AAAA,IACA,gBAAgB;AAAA,IAChB,iBAAiB;AAAA,EACnB,IAAI;AAEJ,QAAM,aAAwC;AAAA,IAC5C,mBAAmB;AAAA,IAEnB,cAAc,OAAO,EAAE,YAAY,OAAO,MAAM;AAC9C,YAAM,YAAY,KAAK,IAAI;AAC3B,YAAM,UAAU,gBAAgB;AAChC,YAAM,aAAa,SAAS,cAAc,eAAe;AAEzD,UAAI;AACF,cAAM,SAAS,MAAM,WAAW;AAChC,cAAM,YAAY,KAAK,IAAI,IAAI;AAE/B,cAAM,kBAAkB,eAAe,MAAM;AAC7C,cAAM,OAAO,YAAY,MAAM;AAC/B,cAAM,QAAQ,aAAa,MAAM;AACjC,cAAM,eAAe,oBAAoB,MAAM;AAE/C,cAAM,OAAO,IAAI;AAAA,UACf;AAAA,UACA,WAAW;AAAA,UACX,SAAS,mBAAmB,MAAM,WAAW;AAAA,UAC7C,YAAY,iBAAiB,MAAM,GAAG,EAAE,CAAC,KAAK,MAAM,YAAY;AAAA,UAChE,OAAO,gBACH,EAAE,OAAO,gBAAgB,OAAO,MAAM,EAAE,IACxC,EAAE,OAAO,IAAI,MAAM,OAAO;AAAA,UAC9B,QAAQ,iBACJ,EAAE,OAAO,MAAM,aAAa,IAC5B,OACE,EAAE,OAAO,IAAI,MAAM,QAAQ,aAAa,IACxC;AAAA,UACN;AAAA,UACA;AAAA,UACA,YAAY,QAAQ,sBAAsB,QACtC,cAAc,MAAM,IACpB;AAAA,UACJ,OAAO;AAAA,UACP,eAAe;AAAA,UACf,UAAU,SAAS;AAAA,QACrB,CAAC;AAED,YAAI,QAAQ,qBAAqB,OAAO;AACtC,gBAAM,YAAY,iBAAiB,MAAM;AACzC,qBAAW,MAAM,WAAW;AAC1B,kBAAM,OAAO,IAAI;AAAA,cACf;AAAA,cACA,WAAW;AAAA,cACX,SAAS,mBAAmB,MAAM,WAAW;AAAA,cAC7C,YAAY;AAAA,cACZ,OAAO,EAAE,OAAO,KAAK,UAAU,GAAG,IAAI,EAAE;AAAA,cACxC,QAA
Q;AAAA,cACR,WAAW;AAAA,cACX,OAAO;AAAA,cACP,YAAY;AAAA,cACZ,OAAO;AAAA,cACP,UAAU;AAAA,gBACR,UAAU,GAAG;AAAA,gBACb,UAAU,GAAG;AAAA,cACf;AAAA,cACA,eAAe;AAAA,cACf,UAAU,SAAS;AAAA,YACrB,CAAC;AAAA,UACH;AAAA,QACF;AAEA,eAAO;AAAA,MACT,SAAS,OAAO;AACd,cAAM,YAAY,KAAK,IAAI,IAAI;AAE/B,cAAM,OAAO,IAAI;AAAA,UACf;AAAA,UACA,WAAW;AAAA,UACX,SAAS,MAAM,WAAW;AAAA,UAC1B,YAAY,MAAM,YAAY;AAAA,UAC9B,OAAO,gBACH,EAAE,OAAO,gBAAgB,OAAO,MAAM,EAAE,IACxC,EAAE,OAAO,IAAI,MAAM,OAAO;AAAA,UAC9B,QAAQ;AAAA,UACR;AAAA,UACA,OAAO;AAAA,UACP,YAAY,QAAQ,sBAAsB,QACtC,cAAc,MAAM,IACpB;AAAA,UACJ,OAAO;AAAA,YACL,MAAO,MAAgB,QAAQ;AAAA,YAC/B,SAAU,MAAgB,WAAW,OAAO,KAAK;AAAA,UACnD;AAAA,UACA,eAAe;AAAA,UACf,UAAU,SAAS;AAAA,QACrB,CAAC;AAED,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IAEA,YAAY,OAAO,EAAE,UAAU,OAAO,MAAM;AAC1C,YAAM,YAAY,KAAK,IAAI;AAC3B,YAAM,UAAU,gBAAgB;AAChC,YAAM,aAAa,SAAS,cAAc,eAAe;AAEzD,UAAI;AACF,cAAM,SAAS,MAAM,SAAS;AAC9B,cAAM,EAAE,QAAQ,GAAG,KAAK,IAAI;AAE5B,cAAM,SAAmB,CAAC;AAC1B,YAAI,cAA8F;AAClG,YAAI;AACJ,YAAI,gBAA+B;AAEnC,cAAM,gBAAgB,OAAO;AAAA,UAC3B,IAAI,gBAAgB;AAAA,YAClB,UAAU,OAAO,YAAY;AAC3B,kBAAI,MAAM,SAAS,cAAc;AAC/B,uBAAO,KAAK,MAAM,SAAS;AAAA,cAC7B;AACA,kBAAI,MAAM,SAAS,UAAU;AAC3B,sBAAM,cAAc;AACpB,8BAAc,YAAY,SAA+B;AACzD,qCAAqB,YAAY;AACjC,sBAAM,WAAW,YAAY;AAC7B,gCAAgB,UAAU,WAAqB;AAAA,cACjD;AACA,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,YACA,MAAM,QAAQ;AACZ,oBAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,oBAAM,WAAW,OAAO,KAAK,EAAE;AAE/B,oBAAM,OAAO,IAAI;AAAA,gBACf;AAAA,gBACA,WAAW;AAAA,gBACX,SAAS,iBAAiB,MAAM,WAAW;AAAA,gBAC3C,YAAY,eAAe,MAAM,GAAG,EAAE,CAAC,KAAK,MAAM,YAAY;AAAA,gBAC9D,OAAO,gBACH,EAAE,OAAO,gBAAgB,OAAO,MAAM,EAAE,IACxC,EAAE,OAAO,IAAI,MAAM,OAAO;AAAA,gBAC9B,QAAQ,iBACJ,EAAE,OAAO,UAAU,cAAc,mBAAmB,IACpD,WACE,EAAE,OAAO,IAAI,MAAM,QAAQ,cAAc,mBAAmB,IAC5D;AAAA,gBACN;AAAA,gBACA,OAAO;AAAA,gBACP,YAAY,QAAQ,sBAAsB,QACtC,cAAc,MAAM,IACpB;AAAA,gBACJ,OAAO;AAAA,gBACP,eAAe;AAAA,gBACf,UAAU,SAAS;AAAA,cACrB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,QAAQ,eAAe,GAAG,KAAK;AAAA,MAC1C,SAAS,OAAO;AACd,cAAM,YAAY,KAAK,IAAI,IAAI;AAE/B,cAAM,OAAO,IAAI;AAAA,UACf;AAAA,UACA,WAAW;AAAA
,UACX,SAAS,MAAM,WAAW;AAAA,UAC1B,YAAY,MAAM,YAAY;AAAA,UAC9B,OAAO,gBACH,EAAE,OAAO,gBAAgB,OAAO,MAAM,EAAE,IACxC,EAAE,OAAO,IAAI,MAAM,OAAO;AAAA,UAC9B,QAAQ;AAAA,UACR;AAAA,UACA,OAAO;AAAA,UACP,YAAY,QAAQ,sBAAsB,QACtC,cAAc,MAAM,IACpB;AAAA,UACJ,OAAO;AAAA,YACL,MAAO,MAAgB,QAAQ;AAAA,YAC/B,SAAU,MAAgB,WAAW,OAAO,KAAK;AAAA,UACnD;AAAA,UACA,eAAe;AAAA,UACf,UAAU,SAAS;AAAA,QACrB,CAAC;AAED,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,SAAO,kBAAkB,EAAE,OAAO,WAAW,CAAC;AAChD;AAIA,SAAS,gBAAgB,QAAyB;AAChD,MAAI,OAAO,WAAW,SAAU,QAAO;AACvC,MAAI;AACF,WAAO,KAAK,UAAU,MAAM;AAAA,EAC9B,QAAQ;AACN,WAAO,OAAO,MAAM;AAAA,EACtB;AACF;AAEA,SAAS,eAAe,QAAgD;AACtE,QAAM,WAAW,OAAO;AACxB,SAAQ,UAAU,WAAsB;AAC1C;AAEA,SAAS,YAAY,QAAyC;AAC5D,SAAQ,OAAO,QAAmB;AACpC;AAEA,SAAS,oBAAoB,QAAqD;AAChF,SAAO,OAAO;AAChB;AAEA,SAAS,aAAa,QAIb;AACP,QAAM,QAAQ,OAAO;AACrB,MAAI,CAAC,MAAO,QAAO;AACnB,SAAO;AAAA,IACL,cAAe,MAAM,gBAA2B;AAAA,IAChD,kBAAmB,MAAM,oBAA+B;AAAA,IACxD,aAAc,MAAM,eAA0B;AAAA,EAChD;AACF;AAEA,SAAS,iBAAiB,QAGvB;AACD,QAAM,YAAY,OAAO;AACzB,MAAI,CAAC,aAAa,CAAC,MAAM,QAAQ,SAAS,EAAG,QAAO,CAAC;AAErD,SAAO,UAAU,IAAI,CAAC,QAAQ;AAAA,IAC5B,UAAW,GAAG,YAAuB;AAAA,IACrC,MAAO,GAAG,QAAoC,CAAC;AAAA,EACjD,EAAE;AACJ;AAEA,SAAS,cAAc,QAA0D;AAC/E,QAAM,YAAqC,CAAC;AAC5C,MAAI,OAAO,gBAAgB,OAAW,WAAU,aAAa,IAAI,OAAO;AACxE,MAAI,OAAO,cAAc,OAAW,WAAU,WAAW,IAAI,OAAO;AACpE,MAAI,OAAO,SAAS,OAAW,WAAU,MAAM,IAAI,OAAO;AAC1D,MAAI,OAAO,SAAS,OAAW,WAAU,MAAM,IAAI,OAAO;AAC1D,MAAI,OAAO,qBAAqB,OAAW,WAAU,kBAAkB,IAAI,OAAO;AAClF,MAAI,OAAO,oBAAoB,OAAW,WAAU,iBAAiB,IAAI,OAAO;AAChF,MAAI,OAAO,SAAS,OAAW,WAAU,MAAM,IAAI,OAAO;AAC1D,SAAO,OAAO,KAAK,SAAS,EAAE,SAAS,IAAI,YAAY,CAAC;AAC1D;","names":[]}
|