fa-mcp-sdk 0.4.67 → 0.4.69

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,12 +15,13 @@ npm install fa-mcp-sdk
15
15
  | [01-getting-started](01-getting-started.md) | `initMcpServer()`, `McpServerData`, `IPromptData`, `IResourceData`, `AppConfig` | Starting new project |
16
16
  | [02-1-tools-and-api](02-1-tools-and-api.md) | Tool definitions, `toolHandler`, outbound webhooks, REST API with tsoa, OpenAPI/Swagger | Creating tools, REST endpoints, webhook callbacks |
17
17
  | [02-2-prompts-and-resources](02-2-prompts-and-resources.md) | Standard/custom prompts, resources, `requireAuth` | Configuring prompts/resources |
18
- | [03-configuration](03-configuration.md) | `appConfig`, YAML config, access points for external services, cache, PostgreSQL | Server configuration, external services, DB |
18
+ | [03-configuration](03-configuration.md) | `appConfig`, YAML config, access points for external services, cache | Server configuration, external services |
19
19
  | [04-authentication](04-authentication.md) | JWT, Basic auth, server tokens, `createAuthMW()`, Token Generator, CLI Token Generator, JWT Generation API | Authentication setup |
20
20
  | [05-ad-authorization](05-ad-authorization.md) | AD group authorization at HTTP/tool levels | AD group restrictions |
21
21
  | [06-utilities](06-utilities.md) | `ServerError`, `normalizeHeaders`, logging, Consul, graceful shutdown | Error handling, utilities |
22
22
  | [07-testing-and-operations](07-testing-and-operations.md) | Test clients (STDIO, HTTP, SSE, Streamable HTTP) | Testing, deployment |
23
23
  | [08-agent-tester-and-headless-api](08-agent-tester-and-headless-api.md) | Agent Tester, Headless API, structured logging, automated testing, UI `data-testid` reference | Agent-driven tool development, CLI automation, UI E2E tests |
24
+ | [09-database](09-database.md) | PostgreSQL sugar layer (`queryMAIN`, `execMAIN`, `getInsertSqlMAIN`, `getMergeSqlMAIN`, `mergeByBatch`), `pgvector`, secondary DBs | Database access, upserts, batching |
24
25
 
25
26
  ## Key Exports
26
27
 
@@ -35,7 +36,13 @@ import { createAuthMW, generateToken, getAuthHeadersForTests, TTokenType, genera
35
36
  import { formatToolResult, ToolExecutionError, ServerError, BaseMcpError, ValidationError, getTools } from 'fa-mcp-sdk';
36
37
 
37
38
  // Database & Cache
38
- import { queryMAIN, execMAIN, oneRowMAIN, checkMainDB, getCache } from 'fa-mcp-sdk';
39
+ import {
40
+ queryMAIN, queryRsMAIN, oneRowMAIN, execMAIN,
41
+ getInsertSqlMAIN, getMergeSqlMAIN, mergeByBatch,
42
+ checkMainDB, getMainDBConnectionStatus,
43
+ IQueryPgArgsCOptional,
44
+ getCache,
45
+ } from 'fa-mcp-sdk';
39
46
 
40
47
  // Utilities
41
48
  import { logger, fileLogger, Logger, trim, ppj, toError, toStr, normalizeHeaders } from 'fa-mcp-sdk';
@@ -62,65 +62,208 @@ const clientIP = headers?.['x-real-ip'] || headers?.['x-forwarded-for'];
62
62
  `IToolHandlerParams` includes `ITransportContext` fields (`transport`, `headers`, `payload`).
63
63
  See [ITransportContext](./02-2-prompts-and-resources.md#itransportcontext).
64
64
 
65
- ### Outbound Webhooks
65
+ ### Outbound Webhooks (`x-web-hook`)
66
66
 
67
- The SDK does not ship a built-in webhook — it is a **handler-level pattern** enabled by
68
- the fact that `params.headers` already carries every client header through to the tool.
69
- Use it when the caller should be notified of each tool result (audit, dashboards, CI
70
- chains). Reference implementation: `mcp-jira` (`src/tools/tools-manager.ts`,
71
- `callWebHook` + dispatch block).
67
+ Handler-level pattern. The SDK does **not** ship a built-in webhook dispatcher — it exposes
68
+ everything you need (`params.headers`, `appConfig`, `logger`) and leaves the policy to the project.
69
+ This section is the **canonical recipe**: implement it as written so every fa-mcp-sdk-based MCP
70
+ server behaves the same way for clients and downstream collectors.
72
71
 
73
- **Recipe:**
72
+ **What it is:** after every tool invocation the server can `POST` the tool result to an external
73
+ URL. Useful for audit trails, real-time dashboards, chaining MCP calls into CI/automation pipelines.
74
+ Opt-in per request (via header) and optionally per tool (via the response object). A failing webhook
75
+ **must never** fail the tool call.
74
76
 
75
- 1. **Declare the header** so Agent Tester and `use://http-headers` advertise it:
77
+ #### Contract (stable across all MCPs)
76
78
 
77
- ```typescript
78
- usedHttpHeaders: [
79
- { name: 'x-web-hook', description: 'URL to POST the tool result to.', isOptional: true },
80
- ],
81
- ```
79
+ **Inbound — precedence:**
82
80
 
83
- 2. **Dispatch inside the handler** — fire-and-forget, never throw, never block the reply:
81
+ | Source | Form | Precedence |
82
+ |---------------------|---------------------------------------------------------|------------|
83
+ | Per-tool override | `IToolResponse.hook: string` returned by the handler | wins |
84
+ | Per-request header | `x-web-hook: <http(s) URL>` | fallback |
84
85
 
85
- ```typescript
86
- import axios from 'axios';
87
- import { appConfig, logger, toStr, IToolHandlerParams } from 'fa-mcp-sdk';
86
+ If neither is present, no webhook fires.
88
87
 
89
- const URL_REGEX = /^https?:\/\/[^\s]+$/i;
88
+ **Outbound request:**
90
89
 
91
- const callWebHook = (url: string, tool: string, response: unknown, user?: string): void => {
92
- if (!URL_REGEX.test(url)) { return; }
93
- axios.post(url, { mcpName: appConfig.name, tool, user, response }, { timeout: 10_000 })
94
- .catch((err) => logger.warn(`Web-hook POST ${url} failed: ${toStr(err?.message || err)}`));
95
- };
90
+ - Method: `POST`, `Content-Type: application/json`, timeout 10 000 ms
91
+ - Body:
96
92
 
97
- export const handleToolCall = async (params: IToolHandlerParams) => {
98
- const { name, headers = {} } = params;
99
- const result = await runTool(params); // produce { text, json, hook? }
100
- const hookUrl = (result.hook || headers['x-web-hook'] || '').trim();
101
- if (hookUrl) { callWebHook(hookUrl, name, result.json, resolveUser(headers, params.payload)); }
102
- return formatToolResult(result.json);
103
- };
104
- ```
93
+ ```json
94
+ {
95
+ "mcpName": "<appConfig.name>",
96
+ "tool": "<tool_name>",
97
+ "user": "<caller-id-or-omitted>",
98
+ "response": { "...": "tool's full JSON result" }
99
+ }
100
+ ```
101
+
102
+ | Field | Description |
103
+ |------------|------------------------------------------------------------------------------|
104
+ | `mcpName` | `appConfig.name` — identifies which MCP sent the callback |
105
+ | `tool` | Name of the invoked tool |
106
+ | `user` | Best-effort caller identity (see *User resolution*); **omit** if unresolved |
107
+ | `response` | Full JSON returned by the tool handler (same payload sent to the client) |
108
+
109
+ Do **not** add ad-hoc fields on a per-project basis without versioning the body — downstream
110
+ collectors rely on this exact shape.
111
+
112
+ #### Implementation recipe
113
+
114
+ **1. Declare the header** so `use://http-headers`, Agent Tester, and tool-call introspection
115
+ advertise it:
116
+
117
+ ```typescript
118
+ // src/start.ts
119
+ usedHttpHeaders.push({
120
+ name: 'x-web-hook',
121
+ description:
122
+ 'Optional URL called via POST after each tool invocation. '
123
+ + 'Body: { mcpName, tool, user, response }. Fire-and-forget; failures are logged only.',
124
+ isOptional: true,
125
+ });
126
+ ```
127
+
128
+ **2. Add `hook?` to the internal tool-response type** (lets a handler override the URL per tool):
129
+
130
+ ```typescript
131
+ // src/_types_/tool.ts
132
+ export interface IToolResponse {
133
+ text: string;
134
+ json: Record<string, any>;
135
+ hook?: string; // per-tool URL override; takes precedence over x-web-hook header
136
+ }
137
+ ```
138
+
139
+ **3. Dispatcher — fire-and-forget, never throws:**
140
+
141
+ ```typescript
142
+ // src/tools/tools-manager.ts
143
+ import axios from 'axios';
144
+ import { appConfig, logger as lgr, toStr } from 'fa-mcp-sdk';
145
+
146
+ const logger = lgr.getSubLogger({ name: 'tools' });
147
+ const URL_REGEX = /^https?:\/\/[^\s]+$/i;
148
+
149
+ const callWebHook = (
150
+ url: string,
151
+ toolName: string,
152
+ json: Record<string, any>,
153
+ user?: string,
154
+ ): void => {
155
+ if (!URL_REGEX.test(url)) { return; } // silently drop garbage URLs
156
+ const body = { mcpName: appConfig.name, tool: toolName, response: json, user };
157
+ axios.post(url, body, { timeout: 10_000 })
158
+ .catch((err) => logger.warn(`Web-hook POST ${url} failed: ${toStr(err?.message || err)}`));
159
+ };
160
+ ```
161
+
162
+ Rules:
105
163
 
106
- 3. **Per-tool override (optional)** let a tool return its own `hook` URL that wins over
107
- the client header. Extend your internal tool-response type:
164
+ - **No `await`.** The webhook must not delay the MCP response.
165
+ - **No re-throws.** A 5xx, timeout, or DNS failure is a `warn` log, nothing more.
166
+ - **URL allow-list.** At minimum, require `http(s)://`. Add an internal-net allow-list via config
167
+ (e.g. `webhook.allowedHosts`) if the threat model requires it (see *Security*).
108
168
 
109
- ```typescript
110
- export interface IToolResponse { text: string; json: Record<string, any>; hook?: string; }
111
- ```
169
+ **4. Wire it into the tool-call entry point** — dispatch after the handler resolves and before
170
+ the result is returned:
112
171
 
113
- **Body contract** (recommended; keep stable across tools):
172
+ ```typescript
173
+ export const handleToolCall = async (params: IToolHandlerParams): Promise<any> => {
174
+ const { name: toolName, arguments: args, headers: mcpRequestHeaders = {} } = params;
175
+
176
+ const tool = (await getTools(mcpRequestHeaders)).get(toolName);
177
+ if (!tool?.handler) { throw new ToolExecutionError(toolName, `Unknown tool: ${toolName}`); }
178
+
179
+ const ctx: ToolContext = {
180
+ httpClient: createHttpClient(mcpRequestHeaders),
181
+ logger: logger.getSubLogger({ name: toolName }),
182
+ mcpRequestHeaders,
183
+ };
184
+
185
+ const toolResponse: IToolResponse = await tool.handler(args, ctx);
186
+
187
+ // ─── webhook dispatch (fire-and-forget) ─────────────────────────────────────
188
+ const hookUrl = (toolResponse?.hook || mcpRequestHeaders['x-web-hook'] || '').trim();
189
+ if (hookUrl) {
190
+ const syncUser = resolveActualUser(mcpRequestHeaders); // see step 5
191
+ if (syncUser) {
192
+ callWebHook(hookUrl, toolName, toolResponse.json, syncUser);
193
+ } else {
194
+ // Async user resolution — still fire-and-forget; do not block the tool response.
195
+ getCachedSelfUser(ctx.httpClient, mcpRequestHeaders)
196
+ .then((u) => callWebHook(hookUrl, toolName, toolResponse.json, u))
+ .catch(() => callWebHook(hookUrl, toolName, toolResponse.json)); // resolution failed → fire anyway, `user` omitted
197
+ }
198
+ }
199
+ // ────────────────────────────────────────────────────────────────────────────
200
+
201
+ return formatToolResult(toolResponse);
202
+ };
203
+ ```
114
204
 
115
- | Field | Description |
116
- |-------|-------------|
117
- | `mcpName` | `appConfig.name` — which MCP sent the callback |
118
- | `tool` | Tool name that was invoked |
119
- | `user` | Caller identity (JWT `payload.user`, auth header, or a lookup project-specific) |
120
- | `response` | Full JSON the tool produced |
205
+ **5. User resolution — best-effort, two-step.** The `user` field is what makes the webhook useful
206
+ for audit. Resolve carefully, but never let resolution fail the call.
207
+
208
+ - **Step A — Sync (preferred):** derive from headers / JWT payload / config without I/O
209
+ (e.g. JWT `payload.user`, a custom `x-actual-user` header your auth layer stamps, etc.).
210
+ - **Step B — Async fallback (only when sync returns nothing):** call the upstream "who am I"
211
+ endpoint with the same auth, **cache the result** (recommended TTL: 1 h, key by hashed
212
+ `Authorization`), and dedupe in-flight requests (thundering-herd protection).
213
+ - If both steps fail → **omit** the `user` field. Never invent a placeholder like `"unknown"`.
214
+
215
+ ```typescript
216
+ export function resolveActualUser (headers: Record<string, string>): string | undefined { /* … */ }
217
+
218
+ export const getCachedSelfUser = async (
219
+ httpClient: AxiosInstance,
220
+ headers: Record<string, string>,
221
+ ): Promise<string | undefined> => { /* GET /me, cache by hashed Authorization, dedupe */ };
222
+ ```
223
+
224
+ #### Per-tool override — when to use
225
+
226
+ A handler may force a specific webhook URL:
227
+
228
+ ```typescript
229
+ return { text, json, hook: 'https://collector.internal/special' };
230
+ ```
121
231
 
122
- **Rules of thumb:** validate the URL (`http(s)://…`), short timeout (≤10 s), catch+log
123
- only, **never** `await` the POST, and never let a webhook failure surface as a tool error.
232
+ Use sparingly. Legitimate cases:
233
+
234
+ - a long-running tool whose result feeds a fixed pipeline regardless of the client;
235
+ - a tool that should **never** webhook (e.g. read of a secret) — note that returning `hook: ''`
236
+ does **not** suppress the dispatch with the snippet above: `''` is falsy, so
237
+ `(toolResponse?.hook || header)` falls through to the client header. To force a skip, have the
238
+ handler strip the header from `ctx` or short-circuit `hookUrl` explicitly.
239
+
240
+ If neither applies, do not set `hook` — let the client decide.
241
+
242
+ #### Security
243
+
244
+ - **URL validation** — reject anything that does not match `http(s)://…`. For public-facing MCPs,
245
+ restrict to a configured allow-list (`webhook.allowedHosts` in `config/default.yaml`).
246
+ - **SSRF surface** — the webhook is a server-side `POST` to a client-supplied URL. Acceptable for
247
+ trusted MCP clients; not acceptable open on the internet without an allow-list.
248
+ - **No secrets in the body** — `response` is the same JSON the client already received. Do **not**
249
+ add credentials, raw tokens, or PII not present in the response.
250
+ - **No retries** — duplicate POSTs to a flaky collector are worse than a missed event. If the
251
+ collector needs guarantees, let it poll.
252
+ - **Logging** — log `tool`, target host, and outcome at `warn`/`debug`; **never** log the full body
253
+ at `info` level (audit log noise + potential PII).
254
+
255
+ #### Testing checklist
256
+
257
+ - [ ] Header declared in `usedHttpHeaders` and visible via the `use://http-headers` resource.
258
+ - [ ] Tool call **without** `x-web-hook` → no outbound POST.
259
+ - [ ] Tool call **with** valid `x-web-hook` → exactly one POST, body matches the contract above.
260
+ - [ ] Collector returns 500 → tool response still succeeds; one `warn` line in the log.
261
+ - [ ] Collector hangs → tool response returns within normal latency; POST aborts at 10 s.
262
+ - [ ] Malformed URL (`javascript:…`, missing scheme) → no POST, no error to client.
263
+ - [ ] Per-tool `hook` set → wins over the header.
264
+ - [ ] Sync user resolution hits → `user` populated immediately, no extra HTTP call.
265
+ - [ ] Sync empty, async succeeds → POST fires after `/me` resolves; tool response was not delayed.
266
+ - [ ] Both user paths fail → POST fires with `user` **field omitted** (not `null`, not `"unknown"`).
124
267
 
125
268
 
126
269
  ## REST API Endpoints
@@ -1,4 +1,4 @@
1
- # Configuration, Cache, and Database
1
+ # Configuration, Cache, and Access Points
2
2
 
3
3
  ## Custom Startup Diagnostics
4
4
 
@@ -362,151 +362,23 @@ cache.close();
362
362
  const data = await cache.getOrSet('key', async () => await fetchData(), 3600);
363
363
  ```
364
364
 
365
- ## Database Integration
365
+ ## Database
366
366
 
367
- To disable the use of the database, you need to set appConfig.db.postgres.dbs.main.host to an empty value.
368
- In this case, when the configuration is formed, appConfig.isMainDBUsed is set to false.
367
+ PostgreSQL integration (including the `MAIN` sugar layer `queryMAIN`, `execMAIN`, `getMergeSqlMAIN`,
368
+ `mergeByBatch`, `pgvector` support, etc.) is documented in [09-database.md](09-database.md).
369
369
 
370
+ Minimal config snippet (see [09-database.md](09-database.md) for the full reference):
370
371
 
371
- If you enable database support (`isMainDBUsed: true` in config):
372
-
373
- ```typescript
374
- import { queryMAIN, execMAIN, oneRowMAIN, queryRsMAIN, checkMainDB } from 'fa-mcp-sdk';
375
-
376
- // Check database connection. If there is no connection, the application stops
377
- await checkMainDB();
378
-
379
- // queryMAIN - the main function of executing SQL queries to the main database
380
-
381
- // Function Signature:
382
- const queryMAIN = async <R extends QueryResultRow = any> (
383
- arg: string | IQueryPgArgsCOptional,
384
- sqlValues?: any[],
385
- throwError = false,
386
- ): Promise<QueryResult<R> | undefined> {...}
387
-
388
- // Types used:
389
- export interface IQueryPgArgs {
390
- connectionId: string,
391
- poolConfig?: PoolConfig & IDbOptionsPg,
392
- client?: IPoolPg,
393
- sqlText: string,
394
- sqlValues?: any[],
395
- throwError?: boolean,
396
- prefix?: string,
397
- registerTypesFunctions?: IRegisterTypeFn[],
398
- }
399
- export interface IQueryPgArgsCOptional extends Omit<IQueryPgArgs, 'connectionId'> {
400
- connectionId?: string
401
- }
402
-
403
- // Examples of use
404
- const users1 = await queryMAIN('SELECT * FROM users WHERE active = $1', [true]);
405
- // Alternative use case
406
- const users2 = await queryMAIN({ sqlText: 'SELECT * FROM users WHERE active = $1', sqlValues: [true] });
407
-
408
-
409
- // execMAIN - execute SQL commands without returning result set
410
- // Function Signature:
411
- const execMAIN = async (
412
- arg: string | IQueryPgArgsCOptional,
413
- ): Promise<number | undefined> {...}
414
-
415
- // Examples:
416
- await execMAIN({ sqlText: 'INSERT INTO logs (message, created_at) VALUES ($1, $2)', sqlValues: ['Server started', new Date()] });
417
- await execMAIN({ sqlText: 'UPDATE users SET active = $1 WHERE id = $2', sqlValues: [false, userId] });
418
-
419
- // queryRsMAIN - execute SQL and return rows array directly
420
- // Function Signature:
421
- const queryRsMAIN = async <R extends QueryResultRow = any> (
422
- arg: string | IQueryPgArgsCOptional,
423
- sqlValues?: any[],
424
- throwError = false,
425
- ): Promise<R[] | undefined> {...}
426
-
427
- // Example:
428
- const users = await queryRsMAIN<User>('SELECT * FROM users WHERE active = $1', [true]);
429
-
430
- // oneRowMAIN - execute SQL and return single row
431
- // Function Signature:
432
- const oneRowMAIN = async <R extends QueryResultRow = any> (
433
- arg: string | IQueryPgArgsCOptional,
434
- sqlValues?: any[],
435
- throwError = false,
436
- ): Promise<R | undefined> {...}
437
-
438
- // Example:
439
- const user = await oneRowMAIN<User>('SELECT * FROM users WHERE id = $1', [userId]);
440
-
441
- // getMainDBConnectionStatus - check database connection status
442
- // Function Signature:
443
- const getMainDBConnectionStatus = async (): Promise<string> {...}
444
-
445
- // Possible return values: 'connected' | 'disconnected' | 'error' | 'db_not_used'
446
- const status = await getMainDBConnectionStatus();
447
-
448
- // checkMainDB - verify database connectivity (stops application if failed)
449
- // Function Signature:
450
- const checkMainDB = async (): Promise<void> {...}
451
-
452
- // Example:
453
- await checkMainDB(); // Throws or exits process if DB connection fails
454
-
455
- // getInsertSqlMAIN - generate INSERT SQL statement
456
- // Function Signature:
457
- const getInsertSqlMAIN = async <U extends TDBRecord = TDBRecord> (arg: {
458
- commonSchemaAndTable: string,
459
- recordset: TRecordSet<U>,
460
- excludeFromInsert?: string[],
461
- addOutputInserted?: boolean,
462
- isErrorOnConflict?: boolean,
463
- keepSerialFields?: boolean,
464
- }): Promise<string> {...}
465
-
466
- // Example:
467
- const insertSql = await getInsertSqlMAIN({
468
- commonSchemaAndTable: 'public.users',
469
- recordset: [{ name: 'John', email: 'john@example.com' }],
470
- addOutputInserted: true
471
- });
472
-
473
- // getMergeSqlMAIN - generate UPSERT (INSERT...ON CONFLICT) SQL statement
474
- // Function Signature:
475
- const getMergeSqlMAIN = async <U extends TDBRecord = TDBRecord> (arg: {
476
- commonSchemaAndTable: string,
477
- recordset: TRecordSet<U>,
478
- conflictFields?: string[],
479
- omitFields?: string[],
480
- updateFields?: string[],
481
- fieldsExcludedFromUpdatePart?: string[],
482
- noUpdateIfNull?: boolean,
483
- mergeCorrection?: (_sql: string) => string,
484
- returning?: string,
485
- }): Promise<string> {...}
486
-
487
- // Example:
488
- const mergeSql = await getMergeSqlMAIN({
489
- commonSchemaAndTable: 'public.users',
490
- recordset: [{ id: 1, name: 'John Updated', email: 'john@example.com' }],
491
- conflictFields: ['email'],
492
- returning: '*'
493
- });
494
-
495
- // mergeByBatch - execute merge operations in batches
496
- // Function Signature:
497
- const mergeByBatch = async <U extends TDBRecord = TDBRecord> (arg: {
498
- recordset: TRecordSet<U>,
499
- getMergeSqlFn: Function
500
- batchSize?: number
501
- }): Promise<any[]> {...}
502
-
503
- // Example:
504
- const results = await mergeByBatch({
505
- recordset: largeDataSet,
506
- getMergeSqlFn: (batch) => getMergeSqlMAIN({
507
- commonSchemaAndTable: 'public.users',
508
- recordset: batch
509
- }),
510
- batchSize: 500
511
- });
372
+ ```yaml
373
+ db:
374
+ postgres:
375
+ dbs:
376
+ main:
377
+ label: 'My Database'
378
+ host: '' # empty string disables DB (isMainDBUsed = false)
379
+ port: 5432
380
+ database: <database>
381
+ user: <user>
382
+ password: <password>
383
+ usedExtensions: [] # e.g. [pgvector]
512
384
  ```
@@ -0,0 +1,295 @@
1
+ # PostgreSQL Database
2
+
3
+ The SDK wraps [`af-db-ts`](https://www.npmjs.com/package/af-db-ts) with a thin sugar layer bound to a single
4
+ logical connection — `main`. All helper functions below are pre-configured with `connectionId = 'main'`,
5
+ automatically register `pgvector` when the extension is enabled, and normalize the call shape (SQL string or
6
+ full argument object).
7
+
8
+ For the vast majority of MCP servers **only the sugar layer is needed** — direct `af-db-ts` calls are
9
+ reserved for edge cases (secondary databases, transactions on an explicit client, cursor streaming,
10
+ cross-DB migration).
11
+
12
+ ## 1. Enabling / Disabling the Database
13
+
14
+ Database support is driven entirely by `config/*.yaml`. The SDK computes `appConfig.isMainDBUsed` at startup
15
+ based on whether a host is configured:
16
+
17
+ ```yaml
18
+ db:
19
+ postgres:
20
+ dbs:
21
+ main:
22
+ label: 'My Database' # shown in diagnostics and admin pages
23
+ host: '' # empty string disables DB (isMainDBUsed = false)
24
+ port: 5432
25
+ database: <database>
26
+ user: <user>
27
+ password: <password>
28
+ usedExtensions: [] # e.g. [pgvector]
29
+ ```
30
+
31
+ - `host: ''` — DB is disabled. `getMainDBConnectionStatus()` returns `'db_not_used'`; the `MAIN` helpers
32
+ are not meant to be called in this state.
33
+ - `host: <value>` — DB is enabled. Call `await checkMainDB()` early in startup so a misconfigured server
34
+ fails fast instead of returning 500s later.
35
+
36
+ ### Enabling `pgvector`
37
+
38
+ ```yaml
39
+ db:
40
+ postgres:
41
+ dbs:
42
+ main:
43
+ # ...
44
+ usedExtensions:
45
+ - pgvector
46
+ ```
47
+
48
+ When `pgvector` is listed, the SDK automatically injects `pgvector.registerType` into every `queryMAIN`
49
+ call, so `vector` columns come back as `number[]` with no per-call setup.
50
+
51
+ ## 2. Sugar Layer — the `MAIN` Family
52
+
53
+ All imports come from `fa-mcp-sdk`:
54
+
55
+ ```typescript
56
+ import {
57
+ queryMAIN, queryRsMAIN, oneRowMAIN, execMAIN,
58
+ getInsertSqlMAIN, getMergeSqlMAIN, mergeByBatch,
59
+ checkMainDB, getMainDBConnectionStatus,
60
+ IQueryPgArgsCOptional,
61
+ } from 'fa-mcp-sdk';
62
+ ```
63
+
64
+ Every query-style helper accepts **two call shapes**:
65
+
66
+ 1. `fn(sqlText, sqlValues?, throwError?)` — shortest form, preferred for most reads.
67
+ 2. `fn({ sqlText, sqlValues, throwError, client, ... })` — full `IQueryPgArgsCOptional` object, needed when
68
+ you want to pass `client` (external pool client for transactions), a log `prefix`, or other advanced
69
+ options.
70
+
71
+ ### 2.1. `queryMAIN<R>(arg, sqlValues?, throwError?)`
72
+
73
+ Returns the full `QueryResult<R>` (`rows`, `rowCount`, `fields`, …) or `undefined` on error when
74
+ `throwError=false`.
75
+
76
+ ```typescript
77
+ // Prepared parameters — always preferred for user input
78
+ const res = await queryMAIN<{ id: number; email: string }>(
79
+ `SELECT id, email FROM public.users WHERE active = $1 ORDER BY id`,
80
+ [true],
81
+ );
82
+ const firstEmail = res?.rows?.[0]?.email;
83
+
84
+ // Object form — e.g. inside an externally-opened transaction
85
+ await queryMAIN({ client, sqlText: `TRUNCATE TABLE public.staging;` });
86
+ ```
87
+
88
+ ### 2.2. `queryRsMAIN<R>(arg, sqlValues?, throwError?)`
89
+
90
+ "Rows only" — returns `R[] | undefined`. Use in ~90% of reads when metadata isn't needed.
91
+
92
+ ```typescript
93
+ const rows = await queryRsMAIN<{ userId: number }>(
94
+ `SELECT "userId" FROM public.sessions WHERE "expiresAt" > NOW()`,
95
+ );
96
+ const ids = new Set((rows || []).map((r) => r.userId));
97
+ ```
98
+
99
+ ### 2.3. `oneRowMAIN<R>(arg, sqlValues?, throwError?)`
100
+
101
+ Returns the first row or `undefined` — the most readable form for look-ups.
102
+
103
+ ```typescript
104
+ const user = await oneRowMAIN<{ id: number; role: string }>(
105
+ `SELECT id, role FROM public.users WHERE email = $1`,
106
+ [email],
107
+ );
108
+ if (!user) throw new Error('User not found');
109
+ ```
110
+
111
+ ### 2.4. `execMAIN(arg): Promise<number | undefined>`
112
+
113
+ For DDL/DML without consuming rows. Returns `rowCount` (or the **sum** of `rowCount` for batch SQL
114
+ concatenated with `;`). Handy for "how many rows did I affect" counters and for transaction primitives.
115
+
116
+ ```typescript
117
+ // Single statement — values go through `sqlValues` ($1, $2, …), never string interpolation (see checklist)
118
+ await execMAIN({ sqlText: `UPDATE public.jobs SET status = 'done' WHERE id = $1`, sqlValues: [jobId] });
119
+
120
+ // Batch UPDATE — sum of rowCount across ;-separated statements
121
+ const sqls = await Promise.all(items.map((it) => buildUpdateSql(it)));
122
+ const affected = await execMAIN(sqls.join('\n'));
123
+
124
+ // Transaction primitives — simple flow on the cached pool
125
+ try {
126
+ await execMAIN({ sqlText: 'BEGIN' });
127
+ // ... writes via queryMAIN / execMAIN ...
128
+ await execMAIN({ sqlText: 'COMMIT' });
129
+ } catch (err) {
130
+ await execMAIN({ sqlText: 'ROLLBACK' });
131
+ throw err;
132
+ }
133
+ ```
134
+
135
+ ### 2.5. `getInsertSqlMAIN<U>(arg): Promise<string>`
136
+
137
+ Generates an `INSERT` statement from table metadata — the recordset is filtered against the table schema,
138
+ so fields that don't exist in the table are silently dropped. Pair with `queryMAIN` to execute.
139
+
140
+ | Field | Purpose |
141
+ |------------------------|-----------------------------------------------------------------------------------|
142
+ | `commonSchemaAndTable` | `'schema.table'` |
143
+ | `recordset` | `TRecordSet<U>` — rows to insert |
144
+ | `excludeFromInsert` | Columns to skip (typically the auto-increment PK) |
145
+ | `addOutputInserted` | Append `RETURNING *` to get generated ids / defaults |
146
+ | `isErrorOnConflict` | Throw on uniqueness violation (default: swallowed) |
147
+ | `keepSerialFields` | Do **not** drop `serial` values from the recordset (used when migrating ids) |
148
+
149
+ ```typescript
150
+ const sql = await getInsertSqlMAIN({
151
+ commonSchemaAndTable: 'public.users',
152
+ recordset: [{ name: 'John', email: 'john@example.com' }],
153
+ excludeFromInsert: ['id'], // PK is auto-increment
154
+ addOutputInserted: true,
155
+ });
156
+ const res = await queryMAIN<{ id: number; name: string }>(sql, undefined, true);
157
+ const created = res?.rows?.[0];
158
+ ```
159
+
160
+ ### 2.6. `getMergeSqlMAIN<U>(arg): Promise<string>`
161
+
162
+ Generates an upsert — `INSERT ... ON CONFLICT (...) DO UPDATE ...`.
163
+
164
+ | Field | Purpose |
165
+ |--------------------------------|-----------------------------------------------------------------------------------------------|
166
+ | `commonSchemaAndTable` | `'schema.table'` |
167
+ | `recordset` | `TRecordSet<U>` — rows to upsert |
168
+ | `conflictFields` | Columns for `ON CONFLICT (...)`. Defaults to the PK |
169
+ | `omitFields` | Excluded from both `INSERT` and `UPDATE` (no effect when `updateFields` is set explicitly) |
170
+ | `updateFields` | If set — only these fields appear in `DO UPDATE` (minus `fieldsExcludedFromUpdatePart`) |
171
+ | `fieldsExcludedFromUpdatePart` | Present in `INSERT`, excluded from `UPDATE` — typical for `createdAt`, `createdBy` |
172
+ | `noUpdateIfNull` | Don't overwrite existing values with `NULL` — **critical for incremental syncs with partial payloads** |
173
+ | `mergeCorrection` | `(sql) => sql` — final rewrite hook |
174
+ | `returning` | `'*'` or quoted field list for `RETURNING` |
175
+
176
+ ```typescript
177
+ const mergeSql = await getMergeSqlMAIN({
178
+ commonSchemaAndTable: 'public.external_items',
179
+ recordset: batch,
180
+ noUpdateIfNull: true, // partial payload upsert
181
+ fieldsExcludedFromUpdatePart: ['createdBy', 'createdAt'],
182
+ });
183
+ await queryMAIN(mergeSql);
184
+ ```
185
+
186
+ ### 2.7. `mergeByBatch<U>({ recordset, getMergeSqlFn, batchSize? })`
187
+
188
+ Universal batched-upsert runner. Slices `recordset` into batches, calls `getMergeSqlFn(batch)` for each, and
189
+ executes the generated SQL through `queryMAIN`. Returns one entry per batch.
190
+
191
+ - Default `batchSize` is `999`; in practice **use 50–100 for wide rows** — you hit Postgres' parameter
192
+ limit or statement-size limit well before 999.
193
+ - **The runner mutates the input via `Array.prototype.splice`.** By the time it returns, `recordset` is
194
+ empty. Clone the array upfront if you need to retain the data.
195
+
196
+ ```typescript
197
+ const getMergeSqlFn = async (batch: TRecordSet) => getMergeSqlMAIN({
198
+ commonSchemaAndTable: 'public.publications',
199
+ recordset: batch,
200
+ noUpdateIfNull: true,
201
+ });
202
+ await mergeByBatch({ recordset: dataset, getMergeSqlFn, batchSize: 100 });
203
+ // dataset is now []
204
+ ```
205
+
206
+ ### 2.8. `checkMainDB()`
207
+
208
+ Startup liveness check. Runs `SELECT 1 FROM pg_catalog.pg_class LIMIT 1` — a neutral query that works on
209
+ any PostgreSQL instance. On failure (except under `NODE_ENV=test`) the process exits with code `1`. Call
210
+ it early in `start.ts` so misconfigured servers fail immediately.
211
+
212
+ ### 2.9. `getMainDBConnectionStatus()`
213
+
214
+ Returns one of `'connected' | 'disconnected' | 'error' | 'db_not_used'`. Safe to call from a `/health`
215
+ endpoint or admin page — never throws, never exits.
216
+
217
+ ## 3. Types
218
+
219
+ ```typescript
220
+ // Re-exported by the SDK
221
+ import { IQueryPgArgsCOptional } from 'fa-mcp-sdk';
222
+
223
+ // Directly from af-db-ts when you need them
224
+ import { IQueryPgArgs, TDBRecord, TRecordSet } from 'af-db-ts';
225
+ ```
226
+
227
+ - `IQueryPgArgs` — full query-arg shape used by `queryPg` directly; `connectionId` is required.
228
+ - `IQueryPgArgsCOptional` — what the `MAIN` helpers accept; `connectionId` is pre-filled by the SDK.
229
+ - `TDBRecord` — `Record<string, any>` — a generic row shape. Prefer concrete interfaces (`IUserRow`, …)
230
+ where they exist; use `TDBRecord` only when the row shape is not fixed.
231
+ - `TRecordSet<U extends TDBRecord = TDBRecord>` — the array shape expected by `getInsertSqlMAIN`,
232
+ `getMergeSqlMAIN`, and `mergeByBatch`.
233
+
234
+ ## 4. Decision Tree
235
+
236
+ ```
237
+ Need to talk to the main DB?
238
+ ├─ Yes → use the sugar layer
239
+ │ ├─ rows only (R[]) → queryRsMAIN
240
+ │ ├─ single row (R | undefined) → oneRowMAIN
241
+ │ ├─ full QueryResult (rowCount…) → queryMAIN
242
+ │ ├─ DDL / DML, no rows → execMAIN
243
+ │ ├─ generate INSERT SQL → getInsertSqlMAIN → queryMAIN
244
+ │ ├─ generate UPSERT SQL → getMergeSqlMAIN → queryMAIN
245
+ │ └─ batch upsert many rows → mergeByBatch + getMergeSqlMAIN
246
+ └─ No (secondary DB / low level) → direct af-db-ts imports
247
+ ├─ plain query → queryPg + IQueryPgArgs (wrap it, mirror pg-db.ts)
248
+ ├─ transaction / cursor → getPoolPg(<id>) + manual BEGIN/COMMIT/ROLLBACK
249
+ └─ cross-DB SQL generation → getInsertSqlPg / getMergeSqlPg / getUpdateSqlPg
250
+ ```
251
+
252
+ ## 5. Best-Practice Checklist
253
+
254
+ - [ ] Use the `MAIN` sugar for the main DB — reach for `queryPg` only when talking to a secondary database.
255
+ - [ ] Always pass user input through `sqlValues` (`$1`, `$2`, …) — no string concatenation.
256
+ - [ ] Type your rows: `queryMAIN<IUserRow>(...)`, `TRecordSet<IUserRow>` in SQL generators.
257
+ - [ ] For auto-increment tables: `excludeFromInsert: ['<pk>']` + `addOutputInserted: true` when you need
258
+ the generated id back.
259
+ - [ ] For incremental syncs of external sources with partial payloads: `noUpdateIfNull: true`; put audit
260
+ columns (`createdAt`, `createdBy`) into `fieldsExcludedFromUpdatePart`.
261
+ - [ ] For large recordsets go through `mergeByBatch` — remember it **mutates** the input.
262
+ - [ ] For transactions on the main DB the simplest form is
263
+ `execMAIN({ sqlText: 'BEGIN' | 'COMMIT' | 'ROLLBACK' })`. When you need a single physical client
264
+ across many operations, use `getPoolPg(...)` from `af-db-ts` and pass the resulting `client` through
265
+ the object form of the `MAIN` helpers.
266
+ - [ ] Never call `client.release()` on a client obtained from `getPoolPg` — pool lifecycle is owned by the
267
+ SDK and closed during graceful shutdown (via `closeAllPgConnectionsPg`).
268
+ - [ ] For writes whose success must be verified, pass `throwError = true` so failures surface instead of
269
+ silently returning `undefined`.
270
+ - [ ] Call `await checkMainDB()` early at startup; expose `getMainDBConnectionStatus()` from `/health`.
271
+
272
+ ## 6. Secondary Databases (advanced)
273
+
274
+ The SDK only exposes sugar for the single `main` connection. If your server needs extra databases, declare
275
+ them under `db.postgres.dbs.<alias>` and write a small wrapper mirroring `src/core/db/pg-db.ts` — set the
276
+ appropriate `connectionId` and, if needed, supply `registerTypesFunctions`. Typical cases: read-only
277
+ replicas, legacy sources, cross-service ETL jobs.
278
+
279
+ ```typescript
280
+ import { queryPg, IQueryPgArgs } from 'af-db-ts';
281
+ import type { QueryResult, QueryResultRow } from 'pg';
282
+
283
+ const SECONDARY = 'reporting'; // must match a key under db.postgres.dbs
284
+
285
+ export const queryReporting = async <R extends QueryResultRow = any> (
286
+ arg: string | Omit<IQueryPgArgs, 'connectionId'>,
287
+ sqlValues?: any[],
288
+ throwError = false,
289
+ ): Promise<QueryResult<R> | undefined> => {
290
+ const q: IQueryPgArgs = typeof arg === 'string'
291
+ ? { sqlText: arg, connectionId: SECONDARY, sqlValues, throwError }
292
+ : { ...arg, connectionId: SECONDARY };
293
+ return queryPg<R>(q);
294
+ };
295
+ ```
@@ -50,7 +50,7 @@
50
50
  "dependencies": {
51
51
  "@modelcontextprotocol/sdk": "^1.29.0",
52
52
  "dotenv": "^17.4.1",
53
- "fa-mcp-sdk": "^0.4.67"
53
+ "fa-mcp-sdk": "^0.4.69"
54
54
  },
55
55
  "devDependencies": {
56
56
  "@types/express": "^5.0.6",
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "fa-mcp-sdk",
3
3
  "productName": "FA MCP SDK",
4
- "version": "0.4.67",
4
+ "version": "0.4.69",
5
5
  "description": "Core infrastructure and templates for building Model Context Protocol (MCP) servers with TypeScript",
6
6
  "type": "module",
7
7
  "main": "dist/core/index.js",
@@ -1,23 +1,36 @@
1
1
  #!/usr/bin/env node
2
- import { cpSync, existsSync, rmSync } from 'fs';
3
- import { join } from 'path';
2
+ import { cpSync, existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'fs';
3
+ import { basename, dirname, join } from 'path';
4
4
 
5
5
  const templateDir = join(process.cwd(), './node_modules/fa-mcp-sdk/cli-template');
6
6
  const cwd = process.cwd();
7
7
 
8
8
  const targets = [
9
9
  { name: 'FA-MCP-SDK-DOC', src: join(templateDir, 'FA-MCP-SDK-DOC'), dest: join(cwd, 'FA-MCP-SDK-DOC') },
10
- { name: '.claude', src: join(templateDir, '.claude'), dest: join(cwd, '.claude') },
10
+ { name: '.claude', src: join(templateDir, '.claude'), dest: join(cwd, '.claude'), preserve: ['settings.json'] },
11
11
  ];
12
12
 
13
- for (const { name, src, dest } of targets) {
13
+ for (const { name, src, dest, preserve = [] } of targets) {
14
14
  if (!existsSync(src)) {
15
15
  console.error('Source not found:', src);
16
16
  process.exit(1);
17
17
  }
18
+ const saved = {};
19
+ for (const file of preserve) {
20
+ const p = join(dest, file);
21
+ if (existsSync(p)) saved[file] = readFileSync(p);
22
+ }
18
23
  if (existsSync(dest)) {
19
24
  rmSync(dest, { recursive: true });
20
25
  }
21
- cpSync(src, dest, { recursive: true });
26
+ cpSync(src, dest, {
27
+ recursive: true,
28
+ filter: (srcPath) => !preserve.includes(basename(srcPath)),
29
+ });
30
+ for (const [file, content] of Object.entries(saved)) {
31
+ const p = join(dest, file);
32
+ mkdirSync(dirname(p), { recursive: true });
33
+ writeFileSync(p, content);
34
+ }
22
35
  console.log(`${name} updated`);
23
36
  }