inkdex 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/.claude/settings.local.json +15 -0
  2. package/.github/workflows/ci.yml +73 -0
  3. package/.github/workflows/release.yml +65 -0
  4. package/AGENTS.md +32 -0
  5. package/LICENSE +190 -0
  6. package/README.md +40 -0
  7. package/biome.json +43 -0
  8. package/dist/cli.d.ts +2 -0
  9. package/dist/cli.js +38 -0
  10. package/dist/embedder/embedder.d.ts +9 -0
  11. package/dist/embedder/embedder.js +39 -0
  12. package/dist/ingest/chunker.d.ts +7 -0
  13. package/dist/ingest/chunker.js +114 -0
  14. package/dist/ingest/index-docs.d.ts +2 -0
  15. package/dist/ingest/index-docs.js +78 -0
  16. package/dist/logger.d.ts +6 -0
  17. package/dist/logger.js +28 -0
  18. package/dist/search/search.d.ts +7 -0
  19. package/dist/search/search.js +70 -0
  20. package/dist/server.d.ts +2 -0
  21. package/dist/server.js +66 -0
  22. package/dist/store/db.d.ts +13 -0
  23. package/dist/store/db.js +149 -0
  24. package/dist/types.d.ts +14 -0
  25. package/dist/types.js +1 -0
  26. package/dist/version.d.ts +1 -0
  27. package/dist/version.js +13 -0
  28. package/inkdex-0.0.1.tgz +0 -0
  29. package/package.json +46 -0
  30. package/release.sh +33 -0
  31. package/src/cli.ts +45 -0
  32. package/src/embedder/embedder.ts +52 -0
  33. package/src/ingest/chunker.ts +158 -0
  34. package/src/ingest/index-docs.ts +120 -0
  35. package/src/logger.ts +39 -0
  36. package/src/search/search.ts +93 -0
  37. package/src/server.ts +96 -0
  38. package/src/store/db.ts +217 -0
  39. package/src/types.ts +16 -0
  40. package/src/version.ts +16 -0
  41. package/test/fixtures/docs/api.md +26 -0
  42. package/test/fixtures/docs/getting-started.md +13 -0
  43. package/test/helpers/index.ts +14 -0
  44. package/test/integration/embedder.test.ts +52 -0
  45. package/test/integration/server.test.ts +125 -0
  46. package/test/unit/chunker.test.ts +193 -0
  47. package/test/unit/db.test.ts +190 -0
  48. package/test/unit/index-docs.test.ts +120 -0
  49. package/test/unit/logger.test.ts +11 -0
  50. package/test/unit/search.test.ts +93 -0
  51. package/test/unit/version.test.ts +16 -0
  52. package/test-docs/api-reference.md +76 -0
  53. package/test-docs/deployment.md +55 -0
  54. package/test-docs/getting-started.md +52 -0
  55. package/tsconfig.json +18 -0
@@ -0,0 +1,190 @@
1
+ import assert from "node:assert";
2
+ import { rmSync } from "node:fs";
3
+ import { tmpdir } from "node:os";
4
+ import { join } from "node:path";
5
+ import { afterEach, beforeEach, describe, it } from "node:test";
6
+ import {
7
+ closeDb,
8
+ dbPath,
9
+ getAllChunks,
10
+ getAllDocumentHashes,
11
+ getChunkCount,
12
+ insertChunk,
13
+ openDb,
14
+ removeDocument,
15
+ runInTransaction,
16
+ searchFts,
17
+ setDocumentHash,
18
+ } from "../../src/store/db.js";
19
+
20
+ const TEST_DOCS_PATH = join(tmpdir(), "inkdex-test-db-" + process.pid);
21
+
22
+ describe("dbPath", () => {
23
+ it("should return the same path for the same docs directory", () => {
24
+ assert.strictEqual(dbPath("/home/user/docs"), dbPath("/home/user/docs"));
25
+ });
26
+
27
+ it("should return different paths for different docs directories", () => {
28
+ assert.notStrictEqual(
29
+ dbPath("/home/user/docs-a"),
30
+ dbPath("/home/user/docs-b"),
31
+ );
32
+ });
33
+
34
+ it("should end with .db", () => {
35
+ assert.ok(dbPath("/any/path").endsWith(".db"));
36
+ });
37
+ });
38
+
39
+ describe("db operations", () => {
40
+ beforeEach(() => {
41
+ openDb(TEST_DOCS_PATH);
42
+ });
43
+
44
+ afterEach(() => {
45
+ closeDb();
46
+ try {
47
+ rmSync(dbPath(TEST_DOCS_PATH), { force: true });
48
+ rmSync(`${dbPath(TEST_DOCS_PATH)}-wal`, { force: true });
49
+ rmSync(`${dbPath(TEST_DOCS_PATH)}-shm`, { force: true });
50
+ } catch {
51
+ // ignore cleanup errors
52
+ }
53
+ });
54
+
55
+ it("should start with zero chunks", () => {
56
+ assert.strictEqual(getChunkCount(), 0);
57
+ });
58
+
59
+ it("should start with no document hashes", () => {
60
+ const hashes = getAllDocumentHashes();
61
+ assert.deepStrictEqual(hashes, {});
62
+ });
63
+
64
+ it("should store and retrieve document hashes", () => {
65
+ setDocumentHash("readme.md", "abc123");
66
+ const hashes = getAllDocumentHashes();
67
+ assert.strictEqual(hashes["readme.md"], "abc123");
68
+ });
69
+
70
+ it("should upsert document hashes", () => {
71
+ setDocumentHash("readme.md", "abc123");
72
+ setDocumentHash("readme.md", "def456");
73
+ const hashes = getAllDocumentHashes();
74
+ assert.strictEqual(hashes["readme.md"], "def456");
75
+ });
76
+
77
+ it("should insert and retrieve chunks", () => {
78
+ setDocumentHash("doc.md", "hash1");
79
+ const embedding = [0.1, 0.2, 0.3];
80
+ insertChunk("doc.md", "Doc Title", "Section", "Some text", {}, embedding);
81
+
82
+ const chunks = getAllChunks();
83
+ assert.strictEqual(chunks.length, 1);
84
+ assert.strictEqual(chunks[0].path, "doc.md");
85
+ assert.strictEqual(chunks[0].fileHeading, "Doc Title");
86
+ assert.strictEqual(chunks[0].heading, "Section");
87
+ assert.strictEqual(chunks[0].text, "Some text");
88
+ assert.deepStrictEqual(chunks[0].metadata, {});
89
+ });
90
+
91
+ it("should round-trip embeddings through blob conversion", () => {
92
+ setDocumentHash("doc.md", "hash1");
93
+ const embedding = [0.1, 0.2, 0.3, -0.5, 1.0];
94
+ insertChunk("doc.md", "Title", "Heading", "text", {}, embedding);
95
+
96
+ const chunks = getAllChunks();
97
+ assert.strictEqual(chunks[0].embedding.length, embedding.length);
98
+ for (let i = 0; i < embedding.length; i++) {
99
+ assert.ok(
100
+ Math.abs(chunks[0].embedding[i] - embedding[i]) < 1e-6,
101
+ `embedding[${i}] mismatch`,
102
+ );
103
+ }
104
+ });
105
+
106
+ it("should preserve chunk metadata", () => {
107
+ setDocumentHash("doc.md", "hash1");
108
+ const meta = { title: "Test", version: "1.0" };
109
+ insertChunk("doc.md", "Title", "Heading", "text", meta, [0.1]);
110
+
111
+ const chunks = getAllChunks();
112
+ assert.deepStrictEqual(chunks[0].metadata, meta);
113
+ });
114
+
115
+ it("should count chunks correctly", () => {
116
+ setDocumentHash("doc.md", "hash1");
117
+ insertChunk("doc.md", "T", "H", "text1", {}, [0.1]);
118
+ insertChunk("doc.md", "T", "H", "text2", {}, [0.2]);
119
+ assert.strictEqual(getChunkCount(), 2);
120
+ });
121
+
122
+ it("should remove document and its chunks", () => {
123
+ setDocumentHash("doc.md", "hash1");
124
+ insertChunk("doc.md", "T", "H", "text", {}, [0.1]);
125
+
126
+ removeDocument("doc.md");
127
+ assert.strictEqual(getChunkCount(), 0);
128
+ assert.deepStrictEqual(getAllDocumentHashes(), {});
129
+ });
130
+
131
+ it("should only remove chunks for the specified document", () => {
132
+ setDocumentHash("a.md", "hash1");
133
+ setDocumentHash("b.md", "hash2");
134
+ insertChunk("a.md", "T", "H", "text-a", {}, [0.1]);
135
+ insertChunk("b.md", "T", "H", "text-b", {}, [0.2]);
136
+
137
+ removeDocument("a.md");
138
+ assert.strictEqual(getChunkCount(), 1);
139
+ assert.strictEqual(getAllChunks()[0].text, "text-b");
140
+ });
141
+
142
+ it("should search via FTS", () => {
143
+ setDocumentHash("doc.md", "hash1");
144
+ insertChunk("doc.md", "T", "H", "the quick brown fox", {}, [0.1]);
145
+ insertChunk("doc.md", "T", "H", "lazy sleeping dog", {}, [0.2]);
146
+
147
+ const results = searchFts("quick fox", 10);
148
+ assert.ok(results.length > 0);
149
+ });
150
+
151
+ it("should return empty for FTS with no matches", () => {
152
+ setDocumentHash("doc.md", "hash1");
153
+ insertChunk("doc.md", "T", "H", "hello world", {}, [0.1]);
154
+
155
+ const results = searchFts("zzzznotfound", 10);
156
+ assert.strictEqual(results.length, 0);
157
+ });
158
+
159
+ it("should return empty for empty FTS query", () => {
160
+ const results = searchFts("", 10);
161
+ assert.strictEqual(results.length, 0);
162
+ });
163
+
164
+ it("should handle FTS queries with special characters", () => {
165
+ setDocumentHash("doc.md", "hash1");
166
+ insertChunk("doc.md", "T", "H", "some text", {}, [0.1]);
167
+
168
+ const results = searchFts('AND OR NOT "quotes"', 10);
169
+ assert.ok(Array.isArray(results));
170
+ });
171
+
172
+ it("should commit transaction on success", () => {
173
+ setDocumentHash("doc.md", "hash1");
174
+ runInTransaction(() => {
175
+ insertChunk("doc.md", "T", "H", "text", {}, [0.1]);
176
+ });
177
+ assert.strictEqual(getChunkCount(), 1);
178
+ });
179
+
180
+ it("should rollback transaction on error", () => {
181
+ setDocumentHash("doc.md", "hash1");
182
+ assert.throws(() => {
183
+ runInTransaction(() => {
184
+ insertChunk("doc.md", "T", "H", "text", {}, [0.1]);
185
+ throw new Error("deliberate failure");
186
+ });
187
+ });
188
+ assert.strictEqual(getChunkCount(), 0);
189
+ });
190
+ });
@@ -0,0 +1,120 @@
1
+ import assert from "node:assert";
2
+ import { mkdirSync, rmSync, writeFileSync } from "node:fs";
3
+ import { tmpdir } from "node:os";
4
+ import { join } from "node:path";
5
+ import { afterEach, beforeEach, describe, it } from "node:test";
6
+ import type { Embedder } from "../../src/embedder/embedder.js";
7
+ import { indexDocs } from "../../src/ingest/index-docs.js";
8
+ import {
9
+ closeDb,
10
+ dbPath,
11
+ getAllChunks,
12
+ getAllDocumentHashes,
13
+ getChunkCount,
14
+ openDb,
15
+ } from "../../src/store/db.js";
16
+
17
+ const TEST_DIR = join(tmpdir(), "inkdex-test-index-" + process.pid);
18
+ const DOCS_DIR = join(TEST_DIR, "docs");
19
+
20
+ function createMockEmbedder(): Embedder {
21
+ return {
22
+ maxTokens: 256,
23
+ tokenize: (text: string) => text.split(/\s+/).map((_, i) => i),
24
+ embed: async (_text: string) => [0.1, 0.2, 0.3],
25
+ embedBatch: async (texts: string[]) => texts.map(() => [0.1, 0.2, 0.3]),
26
+ } as unknown as Embedder;
27
+ }
28
+
29
+ describe("indexDocs", () => {
30
+ beforeEach(() => {
31
+ mkdirSync(DOCS_DIR, { recursive: true });
32
+ openDb(DOCS_DIR);
33
+ });
34
+
35
+ afterEach(() => {
36
+ closeDb();
37
+ try {
38
+ rmSync(TEST_DIR, { recursive: true, force: true });
39
+ rmSync(dbPath(DOCS_DIR), { force: true });
40
+ rmSync(`${dbPath(DOCS_DIR)}-wal`, { force: true });
41
+ rmSync(`${dbPath(DOCS_DIR)}-shm`, { force: true });
42
+ } catch {
43
+ // ignore cleanup errors
44
+ }
45
+ });
46
+
47
+ it("should index markdown files", async () => {
48
+ writeFileSync(join(DOCS_DIR, "test.md"), "# Title\n\n## Section\n\nHello");
49
+ await indexDocs(createMockEmbedder(), DOCS_DIR);
50
+ assert.ok(getChunkCount() > 0);
51
+ });
52
+
53
+ it("should handle empty directory", async () => {
54
+ await indexDocs(createMockEmbedder(), DOCS_DIR);
55
+ assert.strictEqual(getChunkCount(), 0);
56
+ });
57
+
58
+ it("should skip unchanged files on second run", async () => {
59
+ writeFileSync(join(DOCS_DIR, "test.md"), "# Title\n\n## Section\n\nHello");
60
+ const embedder = createMockEmbedder();
61
+
62
+ await indexDocs(embedder, DOCS_DIR);
63
+ const countAfterFirst = getChunkCount();
64
+
65
+ await indexDocs(embedder, DOCS_DIR);
66
+ assert.strictEqual(getChunkCount(), countAfterFirst);
67
+ });
68
+
69
+ it("should re-index changed files", async () => {
70
+ const embedder = createMockEmbedder();
71
+ writeFileSync(join(DOCS_DIR, "test.md"), "# Title\n\n## Section\n\nOld");
72
+ await indexDocs(embedder, DOCS_DIR);
73
+
74
+ writeFileSync(
75
+ join(DOCS_DIR, "test.md"),
76
+ "# Title\n\n## Section\n\nUpdated content",
77
+ );
78
+ await indexDocs(embedder, DOCS_DIR);
79
+
80
+ const chunks = getAllChunks();
81
+ const texts = chunks.map((c) => c.text).join(" ");
82
+ assert.ok(texts.includes("Updated content"));
83
+ assert.ok(!texts.includes("Old"));
84
+ });
85
+
86
+ it("should remove deleted files from index", async () => {
87
+ const embedder = createMockEmbedder();
88
+ writeFileSync(join(DOCS_DIR, "a.md"), "# A\n\n## Section\n\nFile A");
89
+ writeFileSync(join(DOCS_DIR, "b.md"), "# B\n\n## Section\n\nFile B");
90
+ await indexDocs(embedder, DOCS_DIR);
91
+ assert.strictEqual(Object.keys(getAllDocumentHashes()).length, 2);
92
+
93
+ rmSync(join(DOCS_DIR, "b.md"));
94
+ await indexDocs(embedder, DOCS_DIR);
95
+
96
+ const hashes = getAllDocumentHashes();
97
+ assert.strictEqual(Object.keys(hashes).length, 1);
98
+ assert.ok(hashes["a.md"]);
99
+ });
100
+
101
+ it("should index files in subdirectories", async () => {
102
+ mkdirSync(join(DOCS_DIR, "sub"), { recursive: true });
103
+ writeFileSync(
104
+ join(DOCS_DIR, "sub", "nested.md"),
105
+ "# Nested\n\n## Section\n\nNested content",
106
+ );
107
+ await indexDocs(createMockEmbedder(), DOCS_DIR);
108
+ assert.ok(getChunkCount() > 0);
109
+ });
110
+
111
+ it("should store document hashes", async () => {
112
+ writeFileSync(join(DOCS_DIR, "test.md"), "# Title\n\n## Section\n\nHello");
113
+ await indexDocs(createMockEmbedder(), DOCS_DIR);
114
+
115
+ const hashes = getAllDocumentHashes();
116
+ assert.ok(hashes["test.md"]);
117
+ assert.strictEqual(typeof hashes["test.md"], "string");
118
+ assert.strictEqual(hashes["test.md"].length, 64); // SHA-256 hex
119
+ });
120
+ });
@@ -0,0 +1,11 @@
1
+ import assert from "node:assert";
2
+ import { describe, it } from "node:test";
3
+ import { logger } from "../../src/logger.js";
4
+
5
+ describe("logger", () => {
6
+ it("should return a logger instance", () => {
7
+ assert.ok(logger);
8
+ assert.strictEqual(typeof logger.info, "function");
9
+ assert.strictEqual(typeof logger.error, "function");
10
+ });
11
+ });
@@ -0,0 +1,93 @@
1
+ import assert from "node:assert";
2
+ import { describe, it } from "node:test";
3
+ import { cosineSimilarity, rankChunksHybrid } from "../../src/search/search.js";
4
+ import type { ChunkRow } from "../../src/types.js";
5
+
6
+ function makeChunk(id: number, embedding: number[]): ChunkRow {
7
+ return {
8
+ id,
9
+ path: `doc${id}.md`,
10
+ fileHeading: `Doc ${id}`,
11
+ heading: `Section ${id}`,
12
+ text: `text ${id}`,
13
+ metadata: {},
14
+ embedding,
15
+ };
16
+ }
17
+
18
+ describe("cosineSimilarity", () => {
19
+ it("should return 1 for identical vectors", () => {
20
+ const v = [1, 2, 3];
21
+ const sim = cosineSimilarity(v, v);
22
+ assert.ok(Math.abs(sim - 1) < 1e-6);
23
+ });
24
+
25
+ it("should return 0 for orthogonal vectors", () => {
26
+ const a = [1, 0, 0];
27
+ const b = [0, 1, 0];
28
+ const sim = cosineSimilarity(a, b);
29
+ assert.ok(Math.abs(sim) < 1e-6);
30
+ });
31
+
32
+ it("should return -1 for opposite vectors", () => {
33
+ const a = [1, 0];
34
+ const b = [-1, 0];
35
+ const sim = cosineSimilarity(a, b);
36
+ assert.ok(Math.abs(sim + 1) < 1e-6);
37
+ });
38
+ });
39
+
40
+ describe("rankChunksHybrid", () => {
41
+ const queryEmbedding = [1, 0, 0];
42
+ const chunks: ChunkRow[] = [
43
+ makeChunk(1, [1, 0, 0]), // best vector match
44
+ makeChunk(2, [0, 1, 0]), // orthogonal
45
+ makeChunk(3, [0.5, 0.5, 0]), // moderate vector match
46
+ ];
47
+
48
+ it("should boost results that appear in both rankings", () => {
49
+ // FTS ranks chunk 1 first too — both signals agree
50
+ const results = rankChunksHybrid(chunks, queryEmbedding, [1, 3, 2], 3);
51
+ assert.strictEqual(results[0].path, "doc1.md");
52
+ });
53
+
54
+ it("should include results from only one signal", () => {
55
+ // FTS returns no results — pure vector ranking
56
+ const results = rankChunksHybrid(chunks, queryEmbedding, [], 3);
57
+ assert.strictEqual(results.length, 3);
58
+ assert.strictEqual(results[0].path, "doc1.md");
59
+ });
60
+
61
+ it("should rank FTS-only match above absent chunks", () => {
62
+ // Chunk 4 only exists in FTS results (not in chunks array) — should be ignored
63
+ const results = rankChunksHybrid(chunks, queryEmbedding, [4, 2], 3);
64
+ assert.strictEqual(results.length, 3);
65
+ });
66
+
67
+ it("should respect limit", () => {
68
+ const results = rankChunksHybrid(chunks, queryEmbedding, [1, 2, 3], 1);
69
+ assert.strictEqual(results.length, 1);
70
+ });
71
+
72
+ it("should rank dual-signal result higher than single-signal", () => {
73
+ // Chunk 2 is orthogonal (bad vector) but FTS rank 1
74
+ // Chunk 1 is best vector but not in FTS
75
+ // Chunk 3 is moderate vector and FTS rank 2
76
+ const results = rankChunksHybrid(chunks, queryEmbedding, [2, 3], 3);
77
+ // Chunk 1: vector rank 1 only → 1/(60+1) = 0.01639
78
+ // Chunk 2: vector rank 3 + FTS rank 1 → 1/(60+3) + 1/(60+1) = 0.01587 + 0.01639 = 0.03226
79
+ // Chunk 3: vector rank 2 + FTS rank 2 → 1/(60+2) + 1/(60+2) = 0.01613 + 0.01613 = 0.03226
80
+ // Both chunk 2 and 3 (dual signal) should rank above chunk 1 (single signal)
81
+ const chunk1Idx = results.findIndex((r) => r.path === "doc1.md");
82
+ const chunk2Idx = results.findIndex((r) => r.path === "doc2.md");
83
+ const chunk3Idx = results.findIndex((r) => r.path === "doc3.md");
84
+ assert.ok(
85
+ chunk2Idx < chunk1Idx,
86
+ "dual-signal chunk 2 should rank above single-signal chunk 1",
87
+ );
88
+ assert.ok(
89
+ chunk3Idx < chunk1Idx,
90
+ "dual-signal chunk 3 should rank above single-signal chunk 1",
91
+ );
92
+ });
93
+ });
@@ -0,0 +1,16 @@
1
+ import assert from "node:assert";
2
+ import { describe, it } from "node:test";
3
+ import { getVersion } from "../../src/version.js";
4
+
5
+ describe("version", () => {
6
+ it("should return a version string", () => {
7
+ const version = getVersion();
8
+ assert.ok(version);
9
+ assert.strictEqual(typeof version, "string");
10
+ });
11
+
12
+ it("should match package.json version format", () => {
13
+ const version = getVersion();
14
+ assert.match(version, /^\d+\.\d+\.\d+$|^unknown$/);
15
+ });
16
+ });
@@ -0,0 +1,76 @@
1
+ # API Reference
2
+
3
+ ## Authentication
4
+
5
+ All API requests require a Bearer token in the Authorization header.
6
+
7
+ ```
8
+ Authorization: Bearer <your-api-key>
9
+ ```
10
+
11
+ Keys are generated from the dashboard under Settings > API Keys. Each key is scoped to a single project.
12
+
13
+ ## Endpoints
14
+
15
+ ### GET /api/users
16
+
17
+ Returns a paginated list of users.
18
+
19
+ **Query parameters:**
20
+
21
+ | Param | Type | Description |
22
+ |-------|------|-------------|
23
+ | page | number | Page number (default: 1) |
24
+ | limit | number | Results per page (default: 20, max: 100) |
25
+ | search | string | Filter by name or email |
26
+
27
+ **Response:**
28
+
29
+ ```json
30
+ {
31
+ "data": [
32
+ { "id": "u_123", "name": "Alice", "email": "alice@example.com" }
33
+ ],
34
+ "total": 42,
35
+ "page": 1
36
+ }
37
+ ```
38
+
39
+ ### POST /api/users
40
+
41
+ Create a new user.
42
+
43
+ **Request body:**
44
+
45
+ ```json
46
+ {
47
+ "name": "Bob",
48
+ "email": "bob@example.com",
49
+ "role": "member"
50
+ }
51
+ ```
52
+
53
+ **Response:** Returns the created user object with a generated `id`.
54
+
55
+ ### DELETE /api/users/:id
56
+
57
+ Delete a user by ID. Returns 204 on success. Requires admin role.
58
+
59
+ ## Rate Limiting
60
+
61
+ The API enforces a rate limit of 100 requests per minute per API key. Exceeding the limit returns a 429 status with a `Retry-After` header.
62
+
63
+ ## Error Responses
64
+
65
+ All errors follow a consistent format:
66
+
67
+ ```json
68
+ {
69
+ "error": {
70
+ "code": "not_found",
71
+ "message": "User not found"
72
+ }
73
+ }
74
+ ```
75
+
76
+ Common error codes: `bad_request`, `unauthorized`, `not_found`, `rate_limited`, `internal_error`.
@@ -0,0 +1,55 @@
1
+ # Deployment
2
+
3
+ ## Building for Production
4
+
5
+ Run the build command to generate optimized output:
6
+
7
+ ```bash
8
+ acme-cli build
9
+ ```
10
+
11
+ This compiles TypeScript, bundles assets, and writes everything to the `outDir` specified in your config (default: `dist/`).
12
+
13
+ ## Docker
14
+
15
+ A minimal Dockerfile for production:
16
+
17
+ ```dockerfile
18
+ FROM node:22-alpine
19
+ WORKDIR /app
20
+ COPY dist/ ./dist/
21
+ COPY package.json ./
22
+ RUN npm install --production
23
+ EXPOSE 3000
24
+ CMD ["node", "dist/index.js"]
25
+ ```
26
+
27
+ Build and run:
28
+
29
+ ```bash
30
+ docker build -t my-app .
31
+ docker run -p 3000:3000 my-app
32
+ ```
33
+
34
+ ## Environment Variables
35
+
36
+ | Variable | Required | Description |
37
+ |----------|----------|-------------|
38
+ | DATABASE_URL | Yes | PostgreSQL connection string |
39
+ | REDIS_URL | No | Redis URL for caching (falls back to in-memory) |
40
+ | API_KEY_SECRET | Yes | Secret used to sign API keys |
41
+ | LOG_LEVEL | No | Logging verbosity: debug, info, warn, error |
42
+
43
+ ## Health Check
44
+
45
+ The `/healthz` endpoint returns 200 when the service is ready. Use this for load balancer and Kubernetes liveness probes.
46
+
47
+ ## Rollback
48
+
49
+ If a deployment goes wrong, revert to the previous version:
50
+
51
+ ```bash
52
+ acme-cli rollback --to v1.2.3
53
+ ```
54
+
55
+ This restores the previous build artifacts and runs any reverse migrations automatically.
@@ -0,0 +1,52 @@
1
+ # Getting Started
2
+
3
+ ## Installation
4
+
5
+ Install the CLI globally using npm:
6
+
7
+ ```bash
8
+ npm install -g acme-cli
9
+ ```
10
+
11
+ Or use it directly with npx:
12
+
13
+ ```bash
14
+ npx acme-cli init my-project
15
+ ```
16
+
17
+ ## Quick Start
18
+
19
+ Create a new project by running the init command. This scaffolds a directory with a default configuration file and example templates.
20
+
21
+ ```bash
22
+ acme-cli init my-app
23
+ cd my-app
24
+ acme-cli dev
25
+ ```
26
+
27
+ The dev server starts on port 3000 by default. Open your browser to http://localhost:3000 to see the welcome page.
28
+
29
+ ## Configuration
30
+
31
+ All settings live in `acme.config.json` at the project root.
32
+
33
+ | Option | Type | Default | Description |
34
+ |--------|------|---------|-------------|
35
+ | port | number | 3000 | Dev server port |
36
+ | outDir | string | "dist" | Build output directory |
37
+ | minify | boolean | true | Minify production builds |
38
+
39
+ ## Project Structure
40
+
41
+ A typical project looks like this:
42
+
43
+ ```
44
+ my-app/
45
+ acme.config.json
46
+ src/
47
+ index.ts
48
+ routes/
49
+ components/
50
+ public/
51
+ favicon.ico
52
+ ```
package/tsconfig.json ADDED
@@ -0,0 +1,18 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "module": "ESNext",
5
+ "moduleResolution": "bundler",
6
+ "esModuleInterop": true,
7
+ "strict": true,
8
+ "skipLibCheck": true,
9
+ "outDir": "./dist",
10
+ "rootDir": "./src",
11
+ "declaration": true,
12
+ "resolveJsonModule": true,
13
+ "allowSyntheticDefaultImports": true,
14
+ "forceConsistentCasingInFileNames": true
15
+ },
16
+ "include": ["src/**/*"],
17
+ "exclude": ["node_modules", "dist", "test"]
18
+ }