vericify 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,91 @@
1
+ import { mkdirSync, writeFileSync } from "node:fs";
2
+ import { dirname, resolve } from "node:path";
3
+ import { buildRunComparison, findRunById } from "../compare/engine.js";
4
+ import { hashText, isoNow, slugify } from "../core/util.js";
5
+ import { resolveStoreLayout } from "../store/paths.js";
6
+ import { writeJson } from "../core/fs.js";
7
+
8
/**
 * Render a published run artifact as a human-readable Markdown summary.
 * Sections mirror the JSON artifact: header metadata, current pulse,
 * adapters, an optional comparison block, and the last five checkpoints.
 */
function markdownFromArtifact(artifact) {
  // Checkpoint summaries can be multi-line; the summary only shows line one.
  const firstLine = (text) => text.split("\n")[0];
  const out = [];
  out.push(
    `# ${artifact.title}`,
    "",
    `Published: ${artifact.published_at}`,
    `Run ID: ${artifact.run.run_id}`,
    `Status: ${artifact.run.status}`,
    `Workspace: ${artifact.workspace.label ?? artifact.workspace.root_path}`,
    "",
    "## Current Pulse",
    "",
    `- Objective: ${artifact.run.objective ?? "-"}`,
    `- Latest checkpoint: ${artifact.latest_checkpoint?.process_summary.split("\n")[0] ?? "-"}`,
    `- Branches: ${artifact.run.branch_ids?.length ?? 0}`,
    `- Lanes: ${artifact.run.lane_ids?.length ?? 0}`,
    "",
    "## Adapters",
    ""
  );
  for (const adapter of artifact.adapter_profiles) {
    out.push(`- ${adapter.label}: ${adapter.detection_status}`);
  }
  out.push("");
  const report = artifact.compare_report;
  if (report) {
    out.push("## Comparison", "", `- Summary: ${report.summary}`);
    for (const row of report.divergence_explanations) out.push(`- Divergence: ${row}`);
    for (const row of report.recovery_explanations) out.push(`- Recovery: ${row}`);
    out.push("", "## Recommended Actions", "");
    for (const row of report.recommended_actions) out.push(`- ${row}`);
    out.push("");
  }
  out.push("## Recent Checkpoints", "");
  for (const checkpoint of artifact.recent_checkpoints.slice(-5)) {
    out.push(`- ${checkpoint.timestamp}: ${firstLine(checkpoint.process_summary)}`);
  }
  out.push("");
  return `${out.join("\n")}\n`;
}
51
+
52
/**
 * Assemble a publishable snapshot for a single run, optionally attaching a
 * comparison report against a second run from the same projection.
 * Throws when either run id cannot be resolved in `projected`.
 */
export function buildPublishedRunArtifact({ projected, runId, compareRunId, title }) {
  const run = findRunById(projected, runId);
  if (!run) throw new Error(`Unknown run id: ${runId}`);
  let compareReport;
  if (compareRunId) {
    const compareRun = findRunById(projected, compareRunId);
    if (!compareRun) throw new Error(`Unknown compare run id: ${compareRunId}`);
    compareReport = buildRunComparison(run, compareRun);
  }
  // Artifact id: slug of the title (or run id) plus a short time-salted hash.
  const slug = slugify(title ?? runId) || "artifact";
  const digest = hashText(`${runId}:${compareRunId ?? "none"}:${isoNow()}`).slice(0, 10);
  return {
    schema_version: "1.0.0",
    artifact_id: `pub-${slug}-${digest}`,
    published_at: isoNow(),
    title: title ?? `${run.run.title} run artifact`,
    workspace: projected.workspace,
    adapter_profiles: projected.adapter_profiles ?? [],
    run: run.run,
    run_summary: run.run_summary,
    latest_checkpoint: run.recent_checkpoints.at(-1),
    recent_checkpoints: run.recent_checkpoints,
    deltas: run.deltas,
    similarity: run.similarity,
    compare_report: compareReport,
  };
}
75
+
76
/**
 * Build a run artifact and persist it under the workspace's published root:
 * machine-readable run-artifact.json plus human-readable SUMMARY.md.
 * Returns the written paths together with the artifact itself.
 */
export function publishRunArtifact({ workspaceRoot, projected, runId, compareRunId, title }) {
  const artifact = buildPublishedRunArtifact({ projected, runId, compareRunId, title });
  const layout = resolveStoreLayout(workspaceRoot);
  const directory = resolve(layout.vericify.publishedRoot, artifact.artifact_id);
  const artifactPath = resolve(directory, "run-artifact.json");
  const summaryPath = resolve(directory, "SUMMARY.md");
  // Both files live directly in `directory`; create it once instead of
  // re-deriving it via dirname() and calling mkdirSync before each write.
  mkdirSync(directory, { recursive: true });
  writeJson(artifactPath, artifact);
  writeFileSync(summaryPath, markdownFromArtifact(artifact));
  return {
    artifact_path: artifactPath,
    summary_path: summaryPath,
    artifact,
  };
}
@@ -0,0 +1,95 @@
1
+ import { hashText } from "../core/util.js";
2
+
3
/**
 * Collapse whitespace to single spaces, lowercase, and return the set of all
 * k-length character shingles of the normalized text. Returns an empty set
 * when the normalized text is shorter than k.
 */
export function getShingles(text, k = 3) {
  const normalized = text.replace(/\s+/g, " ").toLowerCase();
  const result = new Set();
  const lastStart = normalized.length - k;
  for (let start = 0; start <= lastStart; start += 1) {
    result.add(normalized.slice(start, start + k));
  }
  return result;
}
11
+
12
/**
 * Jaccard similarity |A ∩ B| / |A ∪ B| of two sets. Defined as 1 when both
 * sets are empty and 0 when exactly one is empty.
 */
export function calculateJaccard(setA, setB) {
  if (setA.size === 0 || setB.size === 0) {
    return setA.size === setB.size ? 1 : 0;
  }
  // Iterate the smaller set for the membership probes.
  const [small, large] = setA.size <= setB.size ? [setA, setB] : [setB, setA];
  let shared = 0;
  for (const item of small) {
    if (large.has(item)) shared += 1;
  }
  return shared / (setA.size + setB.size - shared);
}
23
+
24
/**
 * Deterministic 32-bit linear congruential generator (Numerical Recipes
 * constants). Returns a closure yielding unsigned 32-bit integers.
 */
function seededRandom(seed) {
  const MULTIPLIER = 1664525;
  const INCREMENT = 1013904223;
  let state = seed >>> 0;
  return function next() {
    state = (Math.imul(state, MULTIPLIER) + INCREMENT) >>> 0;
    return state;
  };
}

/**
 * MinHash sketcher over string shingles.
 *
 * Each permutation is an affine map (a*h + b) mod p with p just above 2^32;
 * coefficients come from a seeded LCG so signatures are reproducible across
 * processes. Signature agreement approximates Jaccard similarity.
 */
export class MinHash {
  constructor(numPermutations = 64, seed = 42) {
    this.numPermutations = numPermutations;
    this.prime = 4294967311n; // first prime above 2^32 - 1
    this.maxHash = 4294967295; // 2^32 - 1
    this.permutations = [];
    const nextRandom = seededRandom(seed);
    for (let index = 0; index < numPermutations; index += 1) {
      // Coefficients in [1, maxHash] so `a` is never zero.
      const a = BigInt((nextRandom() % this.maxHash) + 1);
      const b = BigInt((nextRandom() % this.maxHash) + 1);
      this.permutations.push({ a, b });
    }
  }

  /** djb2 hash over UTF-16 code units, reduced to unsigned 32 bits. */
  hashString(text) {
    let accumulator = 5381;
    for (let index = 0; index < text.length; index += 1) {
      accumulator = (accumulator * 33 + text.charCodeAt(index)) >>> 0;
    }
    return accumulator;
  }

  /**
   * Column-wise minimum of every permuted shingle hash. Returns an all-zero
   * signature for an empty shingle set.
   */
  computeSignature(shingles) {
    if (shingles.size === 0) {
      return new Array(this.numPermutations).fill(0);
    }
    const signature = new Array(this.numPermutations).fill(Infinity);
    for (const shingle of shingles) {
      const hashed = BigInt(this.hashString(shingle));
      this.permutations.forEach(({ a, b }, column) => {
        const candidate = Number((a * hashed + b) % this.prime);
        if (candidate < signature[column]) signature[column] = candidate;
      });
    }
    return signature;
  }

  /**
   * Fraction of matching positions between two signatures; 0 when the
   * signatures have different lengths.
   */
  estimateSimilarity(left, right) {
    if (left.length !== right.length) return 0;
    let equalPositions = 0;
    left.forEach((value, index) => {
      if (value === right[index]) equalPositions += 1;
    });
    return equalPositions / left.length;
  }
}
78
+
79
// Shared hasher so every fingerprint in this process uses identical permutations.
const defaultHasher = new MinHash();

/**
 * Produce a stable similarity fingerprint for a text: its MinHash signature
 * over 3-character shingles plus an exact content hash.
 */
export function fingerprintText(text) {
  const basis = getShingles(text, 3);
  return {
    signature_version: "v1-minhash64",
    minhash_signature: defaultHasher.computeSignature(basis),
    content_hashes: [hashText(text)],
    jaccard_basis_size: basis.size,
  };
}

/**
 * Estimate the Jaccard similarity of two texts (0..1) by comparing their
 * MinHash signatures position-by-position.
 */
export function similarityFromText(leftText, rightText) {
  const leftPrint = fingerprintText(leftText);
  const rightPrint = fingerprintText(rightText);
  return defaultHasher.estimateSimilarity(leftPrint.minhash_signature, rightPrint.minhash_signature);
}
@@ -0,0 +1,47 @@
1
+ import { readJson, writeJson } from "../core/fs.js";
2
+ import { isoNow } from "../core/util.js";
3
+ import { resolveStoreLayout } from "./paths.js";
4
+
5
/**
 * Merge CLI input with an existing attachment record into a normalized
 * adapter attachment. `attached_at` is preserved from the existing record
 * while `updated_at` is always refreshed. Throws when no adapter id is
 * available from either source.
 */
export function normalizeAdapterAttachment(input, existing = {}) {
  const adapterId = String(input.adapter_id ?? existing.adapter_id ?? "").trim();
  if (!adapterId) throw new Error("Adapter attachments require --adapter.");
  const captureMode = String(input.capture_mode ?? existing.capture_mode ?? "manual").trim();
  return {
    adapter_id: adapterId,
    attached_at: existing.attached_at ?? isoNow(),
    updated_at: isoNow(),
    capture_mode: captureMode || "manual", // an all-whitespace value falls back too
    session_id: input.session_id ?? existing.session_id,
    label: input.label ?? existing.label,
    notes: input.notes ?? existing.notes,
  };
}
18
+
19
/**
 * Load the adapter-attachment registry for a workspace, falling back to an
 * empty versioned document when the file does not exist yet.
 */
export function readAdapterAttachments(workspaceRoot) {
  const { vericify } = resolveStoreLayout(workspaceRoot);
  const emptyRegistry = { version: 1, updated_at: null, attachments: [] };
  return readJson(vericify.adapterAttachments, emptyRegistry);
}
27
+
28
/**
 * Insert or update one adapter attachment and persist the registry, keeping
 * attachments sorted by adapter id. Returns the file path, the normalized
 * attachment, and the full written document.
 */
export function upsertAdapterAttachment(workspaceRoot, input) {
  const layout = resolveStoreLayout(workspaceRoot);
  const registry = readAdapterAttachments(workspaceRoot);
  const byId = new Map();
  for (const attachment of Array.isArray(registry.attachments) ? registry.attachments : []) {
    byId.set(attachment.adapter_id, attachment);
  }
  const requestedId = String(input.adapter_id ?? "").trim();
  const next = normalizeAdapterAttachment(input, byId.get(requestedId));
  byId.set(next.adapter_id, next);
  const value = {
    version: registry.version ?? 1,
    updated_at: isoNow(),
    attachments: [...byId.values()].sort((left, right) => left.adapter_id.localeCompare(right.adapter_id)),
  };
  writeJson(layout.vericify.adapterAttachments, value);
  return {
    path: layout.vericify.adapterAttachments,
    attachment: next,
    value,
  };
}
@@ -0,0 +1,38 @@
1
+ import { appendFileSync, existsSync, mkdirSync } from "node:fs";
2
+ import { dirname } from "node:path";
3
+ import { readJson, writeJson } from "../core/fs.js";
4
+
5
/**
 * A mirror path is only written when it (or at least its parent directory)
 * already exists — mirroring never creates a partner tree from scratch.
 */
export function shouldMirrorPath(path) {
  if (existsSync(path)) return true;
  return existsSync(dirname(path));
}
8
+
9
/**
 * Append one record to an NDJSON log as a single JSON line, creating the
 * parent directory on demand.
 */
export function appendNdjsonRecord(path, record) {
  mkdirSync(dirname(path), { recursive: true });
  const line = `${JSON.stringify(record)}\n`;
  appendFileSync(path, line);
}
13
+
14
/**
 * Read-modify-write a JSON document at `primary` via `update`, then apply
 * the same update to every eligible mirror path (see shouldMirrorPath).
 * Each mirror is updated from its own current contents rather than copied
 * from the primary. Returns the primary path, mirrored paths, and new value.
 */
export function writeJsonWithMirrors({ primary, mirrors, fallback, update }) {
  const next = update(readJson(primary, fallback));
  writeJson(primary, next);
  const mirrored_paths = [];
  for (const mirrorPath of mirrors) {
    if (!shouldMirrorPath(mirrorPath)) continue;
    writeJson(mirrorPath, update(readJson(mirrorPath, fallback)));
    mirrored_paths.push(mirrorPath);
  }
  return { path: primary, mirrored_paths, value: next };
}
28
+
29
/**
 * Append one NDJSON record to the primary log and to every eligible mirror
 * log. Returns the primary path, the mirrors actually written, and the record.
 */
export function appendNdjsonWithMirrors({ primary, mirrors, record }) {
  appendNdjsonRecord(primary, record);
  const mirrored_paths = mirrors.filter((mirrorPath) => shouldMirrorPath(mirrorPath));
  for (const mirrorPath of mirrored_paths) {
    appendNdjsonRecord(mirrorPath, record);
  }
  return { path: primary, mirrored_paths, record };
}
@@ -0,0 +1,64 @@
1
+ import { isoNow } from "../core/util.js";
2
+ import { resolveStoreLayout } from "./paths.js";
3
+ import { writeJsonWithMirrors } from "./common.js";
4
+
5
/**
 * Validate and merge a handoff update with its existing record, appending a
 * status-transition entry to the handoff's history. Id, from, to, title, and
 * status are all required (input or existing); `note` falls back to the
 * title and `actor` to the `from` party.
 */
export function normalizeHandoff(input, existing = {}) {
  const timestamp = input.timestamp ?? isoNow();
  // Required fields, validated in this order with their CLI flag names.
  const required = [
    ["handoff_id", input.handoff_id ?? existing.handoff_id, "--id"],
    ["from", input.from ?? existing.from, "--from"],
    ["to", input.to ?? existing.to, "--to"],
    ["title", input.title ?? existing.title, "--title"],
    ["status", input.status ?? existing.status, "--status"],
  ];
  const fields = {};
  for (const [key, rawValue, flag] of required) {
    const value = String(rawValue ?? "").trim();
    if (!value) throw new Error(`Handoffs require ${flag}.`);
    fields[key] = value;
  }
  const note = String(input.note ?? "").trim();
  const actor = String(input.actor ?? input.agent_id ?? fields.from).trim();
  const history = Array.isArray(existing.history) ? [...existing.history] : [];
  history.push({
    timestamp_utc: timestamp,
    status: fields.status,
    actor,
    note: note || fields.title,
  });
  return {
    handoff_id: fields.handoff_id,
    created_at: existing.created_at ?? timestamp,
    updated_at: timestamp,
    from: fields.from,
    to: fields.to,
    title: fields.title,
    task_type: input.task_type ?? existing.task_type,
    priority: input.priority ?? existing.priority,
    source_file: input.source_file ?? existing.source_file,
    status: fields.status,
    history,
  };
}
40
+
41
/**
 * Create or update a handoff in the workspace registry (mirrored to the
 * partner agent-state copy) and return the stored handoff record.
 */
export function upsertHandoff(workspaceRoot, input) {
  const layout = resolveStoreLayout(workspaceRoot);
  const handoffId = String(input.handoff_id ?? "").trim();
  if (!handoffId) throw new Error("Handoffs require --id.");
  const applyUpsert = (current) => ({
    version: current.version ?? 1,
    updated_at: isoNow(),
    handoffs: {
      ...(current.handoffs ?? {}),
      [handoffId]: normalizeHandoff(input, current.handoffs?.[handoffId] ?? {}),
    },
  });
  const result = writeJsonWithMirrors({
    primary: layout.vericify.handoffRegistry,
    mirrors: [layout.partner.handoffRegistry],
    fallback: { version: 1, updated_at: isoNow(), handoffs: {} },
    update: applyUpsert,
  });
  return {
    ...result,
    handoff: result.value.handoffs[handoffId],
  };
}
@@ -0,0 +1,40 @@
1
+ import { resolve } from "node:path";
2
+
3
/**
 * Relative paths (from the workspace root) of every store file vericify
 * reads or writes. `vericify` paths are owned by this tool; `partner` paths
 * are mirror locations inside the shared agent-state tree.
 * Frozen (including nested groups) so the shared layout constant cannot be
 * mutated at runtime.
 */
export const STORE_LAYOUT = Object.freeze({
  vericify: Object.freeze({
    adapterAttachments: ".vericify/adapters.json",
    handoffRegistry: ".vericify/handoffs.json",
    todoState: ".vericify/todo-state.json",
    runLedger: ".vericify/run-ledger.json",
    statusEvents: ".vericify/status-events.ndjson",
    processPosts: ".vericify/process-posts.json",
    publishedRoot: ".vericify/published",
    syncOutboxRoot: ".vericify/sync-outbox",
  }),
  partner: Object.freeze({
    handoffRegistry: "agent-state/handoff-registry.json",
    todoState: "agent-state/todo-state.json",
    runLedger: "agent-state/run-ledger.json",
    statusEvents: "agent-state/STATUS_EVENTS.ndjson",
    processPosts: "agent-state/vericify/process-posts.json",
  }),
});
22
+
23
/**
 * Resolve every relative store path in STORE_LAYOUT against the given
 * workspace root, preserving the vericify/partner grouping and key names.
 */
export function resolveStoreLayout(workspaceRoot) {
  const resolveGroup = (group) => {
    const resolved = {};
    for (const [key, relPath] of Object.entries(group)) {
      resolved[key] = resolve(workspaceRoot, relPath);
    }
    return resolved;
  };
  return {
    vericify: resolveGroup(STORE_LAYOUT.vericify),
    partner: resolveGroup(STORE_LAYOUT.partner),
  };
}
33
+
34
/**
 * Flat list of every absolute store path for the workspace: vericify-owned
 * paths first, then partner mirror paths.
 */
export function listStorePaths(workspaceRoot) {
  const { vericify, partner } = resolveStoreLayout(workspaceRoot);
  return Object.values(vericify).concat(Object.values(partner));
}
@@ -0,0 +1,46 @@
1
+ import { asArray, hashText, isoNow, slugify } from "../core/util.js";
2
+ import { resolveStoreLayout } from "./paths.js";
3
+ import { writeJsonWithMirrors } from "./common.js";
4
+
5
/**
 * Validate and normalize one run-ledger entry. `tool`, `category`, and
 * `message` are required; when no id is supplied, a stable one is derived
 * from timestamp, tool, and message.
 */
export function normalizeRunLedgerEntry(entry) {
  const timestamp = entry.timestamp_utc ?? isoNow();
  const tool = String(entry.tool ?? "").trim();
  const category = String(entry.category ?? "").trim();
  const message = String(entry.message ?? "").trim();
  if (!tool) throw new Error("Run-ledger entries require --tool.");
  if (!category) throw new Error("Run-ledger entries require --category.");
  if (!message) throw new Error("Run-ledger entries require --message.");
  let id = entry.id;
  if (id == null) {
    const slug = slugify(`${tool}-${category}`) || "entry";
    const digest = hashText(`${timestamp}:${tool}:${message}`).slice(0, 10);
    id = `ledger-${slug}-${digest}`;
  }
  return {
    id,
    timestamp_utc: timestamp,
    tool,
    category,
    message,
    artifacts: asArray(entry.artifacts),
    metadata: entry.metadata ?? {},
  };
}
23
+
24
/**
 * Normalize and append one entry to the run ledger (mirrored to the partner
 * copy), keeping entries sorted by timestamp. Returns the write result plus
 * the normalized entry.
 */
export function appendRunLedgerEntry(workspaceRoot, input) {
  const layout = resolveStoreLayout(workspaceRoot);
  const entry = normalizeRunLedgerEntry(input);
  const byTimestamp = (left, right) =>
    String(left.timestamp_utc).localeCompare(String(right.timestamp_utc));
  const result = writeJsonWithMirrors({
    primary: layout.vericify.runLedger,
    mirrors: [layout.partner.runLedger],
    fallback: { version: 1, updated_at: isoNow(), entries: [] },
    update: (current) => {
      const existing = Array.isArray(current.entries) ? current.entries : [];
      return {
        version: current.version ?? 1,
        updated_at: isoNow(),
        entries: [...existing, entry].sort(byTimestamp),
      };
    },
  });
  return { ...result, entry };
}
@@ -0,0 +1,39 @@
1
+ import { hashText, isoNow, slugify } from "../core/util.js";
2
+ import { resolveStoreLayout } from "./paths.js";
3
+ import { appendNdjsonWithMirrors } from "./common.js";
4
+
5
/**
 * Validate and normalize one status event. Source module, event type, and
 * status are required; schema version, timestamp, and event id get defaults
 * when absent.
 */
export function normalizeStatusEvent(event) {
  const timestamp = event.timestamp ?? isoNow();
  const sourceModule = String(event.source_module ?? "").trim();
  const eventType = String(event.event_type ?? "").trim();
  const status = String(event.status ?? "").trim();
  if (!sourceModule) throw new Error("Status events require --source-module.");
  if (!eventType) throw new Error("Status events require --event-type.");
  if (!status) throw new Error("Status events require --status.");
  let eventId = event.event_id;
  if (eventId == null) {
    const slug = slugify(`${sourceModule}-${eventType}`) || "event";
    const digest = hashText(`${timestamp}:${sourceModule}:${eventType}:${status}`).slice(0, 10);
    eventId = `evt-${slug}-${digest}`;
  }
  return {
    schema_version: event.schema_version ?? "1.0.0",
    event_id: eventId,
    trace_id: event.trace_id,
    timestamp,
    source_module: sourceModule,
    event_type: eventType,
    status,
    objective_id: event.objective_id,
    decision_id: event.decision_id,
    payload: event.payload ?? {},
  };
}
26
+
27
/**
 * Normalize a status event and append it to the NDJSON status log (with
 * partner mirror). Returns the write result plus the normalized event.
 */
export function appendStatusEvent(workspaceRoot, input) {
  const layout = resolveStoreLayout(workspaceRoot);
  const event = normalizeStatusEvent(input);
  return {
    ...appendNdjsonWithMirrors({
      primary: layout.vericify.statusEvents,
      mirrors: [layout.partner.statusEvents],
      record: event,
    }),
    event,
  };
}
@@ -0,0 +1,49 @@
1
+ import { asArray, isoNow, unique } from "../core/util.js";
2
+ import { resolveStoreLayout } from "./paths.js";
3
+ import { writeJsonWithMirrors } from "./common.js";
4
+
5
/**
 * Validate and merge a todo-node update with its existing record. Id, title,
 * and status are required; dependency ids are normalized to a de-duplicated
 * array.
 */
export function normalizeTodoNode(input, existing = {}) {
  const nodeId = String(input.id ?? existing.id ?? "").trim();
  const title = String(input.title ?? existing.title ?? "").trim();
  const status = String(input.status ?? existing.status ?? "").trim();
  if (!nodeId) throw new Error("Todo writes require --id.");
  if (!title) throw new Error("Todo writes require --title.");
  if (!status) throw new Error("Todo writes require --status.");
  const dependsOn = unique(asArray(input.depends_on ?? existing.depends_on ?? []));
  return {
    id: nodeId,
    title,
    section: input.section ?? existing.section,
    source_line: input.source_line ?? existing.source_line,
    depends_on: dependsOn,
    status,
    priority: input.priority ?? existing.priority,
  };
}
22
+
23
/**
 * Create or update a todo node in the workspace todo state (mirrored to the
 * partner copy), appending new ids to the stable ordering. Returns the
 * write result plus the stored node.
 */
export function upsertTodoNode(workspaceRoot, input) {
  const layout = resolveStoreLayout(workspaceRoot);
  const nodeId = String(input.id ?? "").trim();
  if (!nodeId) throw new Error("Todo writes require --id.");
  const result = writeJsonWithMirrors({
    primary: layout.vericify.todoState,
    mirrors: [layout.partner.todoState],
    fallback: { version: 1, updated_at: isoNow(), order: [], nodes: {} },
    update: (current) => {
      const nodes = { ...(current.nodes ?? {}) };
      nodes[nodeId] = normalizeTodoNode(input, nodes[nodeId] ?? {});
      const previousOrder = Array.isArray(current.order) ? current.order : [];
      return {
        version: current.version ?? 1,
        updated_at: isoNow(),
        source_todo_path: current.source_todo_path ?? input.source_todo_path,
        order: unique([...previousOrder, nodeId]),
        nodes,
      };
    },
  });
  return { ...result, node: result.value.nodes[nodeId] };
}
@@ -0,0 +1,29 @@
1
+ import { resolve } from "node:path";
2
+ import { writeJson } from "../core/fs.js";
3
+ import { hashText, isoNow, slugify } from "../core/util.js";
4
+ import { resolveStoreLayout } from "../store/paths.js";
5
+
6
/**
 * Queue a published artifact for cloud sync by writing a self-describing
 * outbox item into the workspace sync-outbox directory. Without an endpoint
 * the item is marked for manual export rather than HTTP push.
 */
export function enqueueSyncOutboxItem({ workspaceRoot, artifact, artifactPath, endpoint, target = "vericify-cloud-v1" }) {
  const layout = resolveStoreLayout(workspaceRoot);
  const queuedAt = isoNow();
  const slug = slugify(artifact?.run?.run_id ?? artifact?.artifact_id ?? "artifact");
  const digest = hashText(`${queuedAt}:${artifactPath}:${target}`).slice(0, 10);
  const item = {
    schema_version: "1.0.0",
    outbox_item_id: `sync-${slug}-${digest}`,
    queued_at: queuedAt,
    status: "queued",
    target,
    endpoint,
    workspace: artifact?.workspace,
    run_id: artifact?.run?.run_id,
    compare_run_id: artifact?.compare_report?.right_run_id,
    artifact_path: artifactPath,
    artifact_hash: hashText(JSON.stringify(artifact)),
    delivery_mode: endpoint ? "manual-http-push" : "manual-export",
  };
  const path = resolve(layout.vericify.syncOutboxRoot, `${item.outbox_item_id}.json`);
  writeJson(path, item);
  return { path, item };
}