@jacobbubu/md-to-lark 1.0.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -48,6 +48,12 @@ LARK_TOKEN_TYPE=tenant
48
48
  LARK_FOLDER_TOKEN="xxx"
49
49
  ```
50
50
 
51
+ If you want the returned `documentUrl` to use a specific browser domain, also set this variable:
52
+
53
+ ```env
54
+ LARK_DOCUMENT_BASE_URL="https://li.feishu.cn"
55
+ ```
56
+
51
57
  Notes:
52
58
 
53
59
  - `--dry-run` still validates Feishu configuration first. It is not a zero-config mode.
@@ -65,6 +71,21 @@ This runs the full pipeline without actually writing to Feishu. After that looks
65
71
  npm run publish:md -- --input ./test-md/comp/comp.md
66
72
  ```
67
73
 
74
+ Successful CLI runs now print a JSON array to stdout. Each item contains:
75
+
76
+ - `documentId`
77
+ - `title`
78
+ - `status`
79
+ - `documentUrl`
80
+
81
+ Progress logs and exceptions are written to stderr.
82
+
83
+ `documentUrl` is built from `documentId` plus a document base URL:
84
+
85
+ - Prefer `--document-base-url`
86
+ - Otherwise use `LARK_DOCUMENT_BASE_URL`
87
+ - Otherwise fall back to the current compatibility derivation from `LARK_BASE_URL`
88
+
68
89
  ## Common Commands
69
90
 
70
91
  Basic publish:
@@ -1,6 +1,6 @@
1
1
  function usage() {
2
2
  return [
3
- 'Usage: npm run publish:md -- --input <file.md|dir> [--title <doc_title_or_prefix>] [--date-prefix|--no-date-prefix] [--preset <preset_name_or_module_path>] [--folder <folder_token>] [--doc <document_id>] [--download-remote-images|--no-download-remote-images] [--yt-dlp-path <path>] [--yt-dlp-cookies-path <path>] [--pipeline-cache-dir <dir>] [--mermaid-target <text-drawing|board>] [--mermaid-board-syntax-type <int>] [--mermaid-board-style-type <int>] [--mermaid-board-diagram-type <int>] [--dry-run] [--help|-h]',
3
+ 'Usage: npm run publish:md -- --input <file.md|dir> [--title <doc_title_or_prefix>] [--date-prefix|--no-date-prefix] [--preset <preset_name_or_module_path>] [--document-base-url <base_url>] [--folder <folder_token>] [--doc <document_id>] [--download-remote-images|--no-download-remote-images] [--yt-dlp-path <path>] [--yt-dlp-cookies-path <path>] [--pipeline-cache-dir <dir>] [--mermaid-target <text-drawing|board>] [--mermaid-board-syntax-type <int>] [--mermaid-board-style-type <int>] [--mermaid-board-diagram-type <int>] [--dry-run] [--help|-h]',
4
4
  '',
5
5
  'Options:',
6
6
  ' --input Markdown file path, or directory path (publish all *.md recursively).',
@@ -8,6 +8,7 @@ function usage() {
8
8
  ' --date-prefix Enable date prefix in final title: YYYYMMDD-<title>. Default: enabled.',
9
9
  ' --no-date-prefix Disable date prefix in final title.',
10
10
  ' --preset Optional preset module path (js/mjs/cjs/ts) or built-in name (e.g. medium). Used to transform markdown before publish pipeline.',
11
+ ' --document-base-url Base URL used to build documentUrl results (for example https://li.feishu.cn).',
11
12
  ' --folder Feishu folder token. Default: LARK_FOLDER_TOKEN from .env',
12
13
  ' --doc Existing Feishu document id (single-file only). If set, publish directly into this doc (and clear content first).',
13
14
  ' --download-remote-images Enable prepare-stage remote image pre-download + link rewrite.',
@@ -51,6 +52,7 @@ export function parsePublishMdArgs(argv, env = process.env) {
51
52
  let title = '';
52
53
  let titleDatePrefix;
53
54
  let presetPath = '';
55
+ let documentBaseUrl = '';
54
56
  let folderToken = (env.LARK_FOLDER_TOKEN ?? '').trim();
55
57
  let documentId;
56
58
  let downloadRemoteImages;
@@ -113,6 +115,14 @@ export function parsePublishMdArgs(argv, env = process.env) {
113
115
  i += 1;
114
116
  continue;
115
117
  }
118
+ if (arg === '--document-base-url') {
119
+ const value = argv[i + 1];
120
+ if (!value)
121
+ throw new Error('Missing value for --document-base-url.');
122
+ documentBaseUrl = value;
123
+ i += 1;
124
+ continue;
125
+ }
116
126
  if (arg === '--doc') {
117
127
  const value = argv[i + 1];
118
128
  if (!value)
@@ -209,6 +219,7 @@ export function parsePublishMdArgs(argv, env = process.env) {
209
219
  ...(title.trim() ? { title: title.trim() } : {}),
210
220
  ...(titleDatePrefix === undefined ? {} : { titleDatePrefix }),
211
221
  ...(presetPath.trim() ? { presetPath: presetPath.trim() } : {}),
222
+ ...(documentBaseUrl.trim() ? { documentBaseUrl: documentBaseUrl.trim() } : {}),
212
223
  folderToken,
213
224
  ...(documentId ? { documentId: documentId.trim() } : {}),
214
225
  ...(downloadRemoteImages === undefined ? {} : { downloadRemoteImages }),
@@ -70,10 +70,11 @@ export async function publishMdToLark(options, env = process.env) {
70
70
  const resolveTargetDocumentId = options.dryRun || normalizedDocumentId
71
71
  ? undefined
72
72
  : createFolderDocumentResolver(runtime, options);
73
+ const results = [];
73
74
  for (let index = 0; index < inputSet.markdownFiles.length; index += 1) {
74
75
  const markdownPath = inputSet.markdownFiles[index];
75
76
  const perFileOptions = normalizedDocumentId ? { ...options, documentId: normalizedDocumentId } : options;
76
- await processSingleMarkdownFile({
77
+ const result = await processSingleMarkdownFile({
77
78
  runtime,
78
79
  inputSet,
79
80
  options: perFileOptions,
@@ -81,11 +82,18 @@ export async function publishMdToLark(options, env = process.env) {
81
82
  index,
82
83
  ...(resolveTargetDocumentId ? { resolveTargetDocumentId } : {}),
83
84
  });
85
+ results.push({
86
+ documentId: result.documentId,
87
+ title: result.title,
88
+ status: result.status,
89
+ documentUrl: result.documentUrl,
90
+ });
84
91
  if (!options.dryRun && index < inputSet.markdownFiles.length - 1 && runtime.publishCooldownMs > 0) {
85
- console.log(`[${index + 1}/${inputSet.markdownFiles.length}] Cooldown ${runtime.publishCooldownMs}ms before next markdown...`);
92
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] Cooldown ${runtime.publishCooldownMs}ms before next markdown...`);
86
93
  await sleep(runtime.publishCooldownMs);
87
94
  }
88
95
  }
96
+ return results;
89
97
  }
90
98
  export async function runPublishMdToLarkCli(argv, env = process.env) {
91
99
  if (hasPublishMdHelpFlag(argv)) {
@@ -93,5 +101,6 @@ export async function runPublishMdToLarkCli(argv, env = process.env) {
93
101
  return;
94
102
  }
95
103
  const options = parsePublishMdArgs(argv, env);
96
- await publishMdToLark(options, env);
104
+ const results = await publishMdToLark(options, env);
105
+ process.stdout.write(`${JSON.stringify(results, null, 2)}\n`);
97
106
  }
@@ -1,6 +1,9 @@
1
1
/**
 * Remove all trailing slash characters from a string.
 *
 * Generalized from stripping a single trailing '/': an input such as
 * "https://x.com/docx//" previously kept one slash, so the downstream
 * endsWith('/docx') check missed the suffix and the final document URL
 * doubled up as ".../docx/docx/<id>". Stripping every trailing slash
 * makes the '/docx' suffix detection reliable.
 *
 * @param {string} input - String to trim (typically a URL or URL path).
 * @returns {string} input without any trailing '/' characters.
 */
function trimSlashSuffix(input) {
  return input.replace(/\/+$/, '');
}
4
/**
 * Strip a trailing '/docx' path segment from a base URL, if present.
 *
 * Lark document links end in '/docx/<id>', so a user-supplied base URL that
 * already carries the '/docx' suffix must lose it before the URL is rebuilt.
 *
 * @param {string} input - Candidate base URL (already slash-trimmed).
 * @returns {string} input without the trailing '/docx' segment.
 */
function trimDocxSuffix(input) {
  const suffix = '/docx';
  if (!input.endsWith(suffix)) {
    return input;
  }
  return input.slice(0, input.length - suffix.length);
}
4
7
  function assertNonEmpty(value, name) {
5
8
  if (!value) {
6
9
  throw new Error(`${name} is required.`);
@@ -34,3 +37,21 @@ export function createLarkClientConfigFromEnv(env) {
34
37
  userAccessToken,
35
38
  };
36
39
  }
40
/**
 * Derive the browser-facing document origin from an API base URL.
 *
 * Lark/Feishu API hosts carry an 'open.' prefix (e.g. open.feishu.cn) while
 * document pages live on the bare domain (feishu.cn), so the prefix is
 * stripped. Fix: use URL.host (hostname plus port) instead of URL.hostname,
 * so an explicit port on a self-hosted deployment is preserved — this also
 * matches normalizeLarkDocumentBaseUrl, which already keeps the port.
 *
 * @param {string} baseUrl - API base URL, e.g. "https://open.feishu.cn".
 * @returns {string} Origin string such as "https://feishu.cn".
 * @throws {TypeError} If baseUrl is not a valid absolute URL.
 */
function buildDocumentOrigin(baseUrl) {
  const resolved = new URL(baseUrl);
  // host = hostname[:port]; the 'open.' prefix, if any, is always at the front.
  const host = resolved.host.startsWith('open.') ? resolved.host.slice(5) : resolved.host;
  return `${resolved.protocol}//${host}`;
}
45
/**
 * Normalize a user-supplied document base URL into a canonical form:
 * protocol + host plus any extra path, with surrounding whitespace, trailing
 * slashes, and a trailing '/docx' segment removed.
 *
 * @param {string} baseUrl - Raw base URL (may carry whitespace, a trailing
 *   '/', or a '/docx' suffix).
 * @returns {string} Canonical base URL with no trailing slash.
 * @throws {TypeError} If baseUrl does not parse as an absolute URL.
 */
export function normalizeLarkDocumentBaseUrl(baseUrl) {
  const cleaned = trimDocxSuffix(trimSlashSuffix(baseUrl.trim()));
  const parsed = new URL(cleaned);
  const path = trimSlashSuffix(parsed.pathname);
  // Drop a bare-root path so "https://x.com/" and "https://x.com" agree.
  const keepPath = path !== '' && path !== '/';
  return `${parsed.protocol}//${parsed.host}${keepPath ? path : ''}`;
}
51
/**
 * Compute the fallback document base URL from the configured API base URL —
 * the compatibility path used when neither --document-base-url nor
 * LARK_DOCUMENT_BASE_URL is provided.
 *
 * @param {string} apiBaseUrl - API base URL, e.g. "https://open.feishu.cn".
 * @returns {string} Normalized browser-facing document base URL.
 */
export function deriveLarkDocumentBaseUrl(apiBaseUrl) {
  const origin = buildDocumentOrigin(apiBaseUrl);
  return normalizeLarkDocumentBaseUrl(origin);
}
54
/**
 * Build the browser URL for a Lark docx document.
 *
 * @param {string} baseUrl - Document base URL in any form accepted by
 *   normalizeLarkDocumentBaseUrl.
 * @param {string} documentId - Docx document id; URL-encoded defensively.
 * @returns {string} Full document URL, e.g. "https://feishu.cn/docx/<id>".
 */
export function buildLarkDocumentUrl(baseUrl, documentId) {
  const base = normalizeLarkDocumentBaseUrl(baseUrl);
  const encodedId = encodeURIComponent(documentId);
  return [base, 'docx', encodedId].join('/');
}
@@ -1,2 +1,2 @@
1
- export { createLarkClientConfigFromEnv } from './client.js';
1
+ export { buildLarkDocumentUrl, createLarkClientConfigFromEnv, deriveLarkDocumentBaseUrl, normalizeLarkDocumentBaseUrl, } from './client.js';
2
2
  export { getLarkBlockTypeName, LARK_BLOCK_TYPE_NAME } from './block-types.js';
@@ -76,7 +76,7 @@ export async function processSingleMarkdownFile(params) {
76
76
  index,
77
77
  total: inputSet.markdownFiles.length,
78
78
  env: runtime.env,
79
- log: (...args) => console.log(`[preset ${index + 1}/${inputSet.markdownFiles.length}]`, ...args.map((arg) => String(arg))),
79
+ log: (...args) => console.error(`[preset ${index + 1}/${inputSet.markdownFiles.length}]`, ...args.map((arg) => String(arg))),
80
80
  });
81
81
  }
82
82
  await writeSourceStage(stagePaths, sourceMarkdown, markdown, {
@@ -90,7 +90,7 @@ export async function processSingleMarkdownFile(params) {
90
90
  });
91
91
  markdown = prepareResult.preparedContent;
92
92
  await writePrepareStage(stagePaths, markdown, prepareResult);
93
- console.log(`[prepare ${index + 1}/${inputSet.markdownFiles.length}] rewritten=${prepareResult.rewrittenCount} downloaded=${prepareResult.downloadedCount} failed=${prepareResult.failedCount} log=${prepareResult.logFilePath}`);
93
+ console.error(`[prepare ${index + 1}/${inputSet.markdownFiles.length}] rewritten=${prepareResult.rewrittenCount} downloaded=${prepareResult.downloadedCount} failed=${prepareResult.failedCount} log=${prepareResult.logFilePath}`);
94
94
  const hast = await markdownToHast(markdown);
95
95
  await writeHastStage(stagePaths, hast);
96
96
  const h1RuleResult = options.title ? {} : applySingleH1TitleRule(hast);
@@ -126,6 +126,7 @@ export async function processSingleMarkdownFile(params) {
126
126
  sourcePath: path.resolve(markdownPath),
127
127
  title,
128
128
  documentId: null,
129
+ documentUrl: null,
129
130
  rootBlockId: null,
130
131
  createdAt: startedAt,
131
132
  finishedAt: new Date().toISOString(),
@@ -134,17 +135,18 @@ export async function processSingleMarkdownFile(params) {
134
135
  mediaTokenMappings: [],
135
136
  };
136
137
  await writePublishStageArtifact(stagePaths, dryRunArtifact);
137
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] input: ${markdownPath}`);
138
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] title: ${title}`);
139
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] blocks: ${Object.keys(last.blocks).length}`);
140
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] btt blocks: ${Object.keys(btt.flatBlocks).length}`);
141
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] mermaid patches: ${mermaidByBlockId.size}`);
142
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] mermaid target: ${runtime.mermaidRenderConfig.target}`);
143
- console.log(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] local assets: ${localAssetByBlockId.size}`);
138
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] input: ${markdownPath}`);
139
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] title: ${title}`);
140
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] blocks: ${Object.keys(last.blocks).length}`);
141
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] btt blocks: ${Object.keys(btt.flatBlocks).length}`);
142
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] mermaid patches: ${mermaidByBlockId.size}`);
143
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] mermaid target: ${runtime.mermaidRenderConfig.target}`);
144
+ console.error(`[dry-run ${index + 1}/${inputSet.markdownFiles.length}] local assets: ${localAssetByBlockId.size}`);
144
145
  return {
145
146
  stagePaths,
146
147
  title,
147
148
  documentId: null,
149
+ documentUrl: null,
148
150
  status: 'dry-run',
149
151
  };
150
152
  }
@@ -191,6 +193,7 @@ export async function processSingleMarkdownFile(params) {
191
193
  sourcePath: path.resolve(markdownPath),
192
194
  title,
193
195
  documentId: documentId || null,
196
+ documentUrl: documentId ? runtime.documentUrlFor(documentId) : null,
194
197
  rootBlockId,
195
198
  createdAt: startedAt,
196
199
  finishedAt: new Date().toISOString(),
@@ -202,11 +205,13 @@ export async function processSingleMarkdownFile(params) {
202
205
  await writePublishStageArtifact(stagePaths, failedArtifact);
203
206
  throw error;
204
207
  }
208
+ const documentUrl = runtime.documentUrlFor(documentId);
205
209
  const successArtifact = {
206
210
  status: 'published',
207
211
  sourcePath: path.resolve(markdownPath),
208
212
  title,
209
213
  documentId,
214
+ documentUrl,
210
215
  rootBlockId,
211
216
  createdAt: startedAt,
212
217
  finishedAt: new Date().toISOString(),
@@ -215,14 +220,16 @@ export async function processSingleMarkdownFile(params) {
215
220
  mediaTokenMappings,
216
221
  };
217
222
  await writePublishStageArtifact(stagePaths, successArtifact);
218
- console.log(`[${index + 1}/${inputSet.markdownFiles.length}] Published markdown: ${markdownPath}`);
219
- console.log(`[${index + 1}/${inputSet.markdownFiles.length}] Document ID: ${documentId}`);
220
- console.log(`[${index + 1}/${inputSet.markdownFiles.length}] Title: ${title}`);
221
- console.log(`[${index + 1}/${inputSet.markdownFiles.length}] stage-cache: ${stagePaths.rootDir} (00-source..05-publish)`);
223
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] Published markdown: ${markdownPath}`);
224
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] Document ID: ${documentId}`);
225
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] Document URL: ${documentUrl}`);
226
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] Title: ${title}`);
227
+ console.error(`[${index + 1}/${inputSet.markdownFiles.length}] stage-cache: ${stagePaths.rootDir} (00-source..05-publish)`);
222
228
  return {
223
229
  stagePaths,
224
230
  title,
225
231
  documentId,
232
+ documentUrl,
226
233
  status: 'published',
227
234
  };
228
235
  }
@@ -1,7 +1,7 @@
1
1
  import path from 'node:path';
2
2
  import * as lark from '@larksuiteoapi/node-sdk';
3
3
  import { DEFAULT_MERMAID_BOARD_SYNTAX_TYPE, normalizeMermaidRenderTarget, } from '../commands/publish-md/mermaid-render.js';
4
- import { createLarkClientConfigFromEnv } from '../lark/index.js';
4
+ import { buildLarkDocumentUrl, createLarkClientConfigFromEnv, deriveLarkDocumentBaseUrl, normalizeLarkDocumentBaseUrl, } from '../lark/index.js';
5
5
  import { RateLimiter } from '../shared/rate-limiter.js';
6
6
  function getSdkDomain(baseUrl) {
7
7
  const lower = baseUrl.toLowerCase();
@@ -71,6 +71,12 @@ export function buildPublishRuntime(options, env, markdownPreset) {
71
71
  const ytDlpPath = normalizeOptionalPath(options.ytDlpPath ?? env.YT_DLP_PATH);
72
72
  const ytDlpCookiesPath = normalizeOptionalPath(options.ytDlpCookiesPath ?? env.YT_DLP_COOKIES_PATH);
73
73
  const pipelineCacheRootDir = path.resolve(options.pipelineCacheDir ?? env.PIPELINE_CACHE_DIR ?? './out/pipeline-cache');
74
+ const documentBaseUrlCandidate = options.documentBaseUrl?.trim()
75
+ ? options.documentBaseUrl.trim()
76
+ : env.LARK_DOCUMENT_BASE_URL?.trim()
77
+ ? env.LARK_DOCUMENT_BASE_URL.trim()
78
+ : deriveLarkDocumentBaseUrl(config.baseUrl);
79
+ const documentBaseUrl = normalizeLarkDocumentBaseUrl(documentBaseUrlCandidate);
74
80
  const prepareTimeoutMs = toPositiveInt(Number((env.PREPARE_TIMEOUT_MS ?? '').trim())) ?? 15_000;
75
81
  const prepareMaxRetries = toNonNegativeInt(Number((env.PREPARE_MAX_RETRIES ?? '').trim())) ?? 3;
76
82
  const prepareBackoffBaseMs = toPositiveInt(Number((env.PREPARE_BACKOFF_BASE_MS ?? '').trim())) ?? 500;
@@ -95,6 +101,8 @@ export function buildPublishRuntime(options, env, markdownPreset) {
95
101
  return {
96
102
  env,
97
103
  markdownPreset,
104
+ documentBaseUrl,
105
+ documentUrlFor: (documentId) => buildLarkDocumentUrl(documentBaseUrl, documentId),
98
106
  authOptions,
99
107
  sdkClient,
100
108
  docxLimiter: new RateLimiter(docxLimiterIntervalMs),
@@ -121,13 +129,14 @@ export function buildPublishRuntime(options, env, markdownPreset) {
121
129
  };
122
130
  }
123
131
  export function logPublishRuntimeSummary(runtime, inputCount, inputMode) {
124
- console.log(`Resolved markdown files: ${inputCount} (${inputMode === 'single' ? 'single' : 'directory'})`);
125
- console.log(`Rate limits: docx=${runtime.docxLimiterIntervalMs}ms media=${runtime.mediaLimiterIntervalMs}ms cooldown=${runtime.publishCooldownMs}ms`);
126
- console.log(`Prepare: download_remote_images=${String(runtime.downloadRemoteImages)} yt_dlp=${runtime.ytDlpPath ? 'enabled' : 'disabled'}`);
127
- console.log(runtime.mermaidRenderConfig.target === 'board'
132
+ console.error(`Resolved markdown files: ${inputCount} (${inputMode === 'single' ? 'single' : 'directory'})`);
133
+ console.error(`Rate limits: docx=${runtime.docxLimiterIntervalMs}ms media=${runtime.mediaLimiterIntervalMs}ms cooldown=${runtime.publishCooldownMs}ms`);
134
+ console.error(`Prepare: download_remote_images=${String(runtime.downloadRemoteImages)} yt_dlp=${runtime.ytDlpPath ? 'enabled' : 'disabled'}`);
135
+ console.error(runtime.mermaidRenderConfig.target === 'board'
128
136
  ? `Mermaid: target=board syntax_type=${String(runtime.mermaidRenderConfig.board.syntaxType)} style_type=${String(runtime.mermaidRenderConfig.board.styleType ?? '(default)')} diagram_type=${String(runtime.mermaidRenderConfig.board.diagramType ?? '(default)')}`
129
137
  : 'Mermaid: target=text-drawing');
138
+ console.error(`Document URL base: ${runtime.documentBaseUrl}`);
130
139
  if (runtime.markdownPreset) {
131
- console.log(`Preset: ${runtime.markdownPreset.displayPath}`);
140
+ console.error(`Preset: ${runtime.markdownPreset.displayPath}`);
132
141
  }
133
142
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@jacobbubu/md-to-lark",
3
- "version": "1.0.0",
3
+ "version": "1.2.0",
4
4
  "description": "Publish Markdown to Feishu docs with a stable pipeline.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",