vidpipe 1.3.23 → 1.3.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -272,9 +272,6 @@ async function renameFile(oldPath, newPath) {
272
272
  }
273
273
  }
274
274
  }
275
- async function copyDirectory(src, dest) {
276
- await fsp.cp(src, dest, { recursive: true });
277
- }
278
275
  async function writeFileRaw(filePath, data, opts) {
279
276
  await fsp.writeFile(filePath, data, opts);
280
277
  }
@@ -580,7 +577,22 @@ function resolveConfig(cliOptions = {}) {
580
577
  process.env.GITHUB_TOKEN,
581
578
  globalConfig.credentials.githubToken
582
579
  ),
583
- MODEL_OVERRIDES: resolveModelOverrides()
580
+ MODEL_OVERRIDES: resolveModelOverrides(),
581
+ AZURE_STORAGE_ACCOUNT_NAME: resolveString(
582
+ cliOptions.azureStorageAccountName,
583
+ process.env.AZURE_STORAGE_ACCOUNT_NAME,
584
+ globalConfig.credentials.azureStorageAccountName
585
+ ),
586
+ AZURE_STORAGE_ACCOUNT_KEY: resolveString(
587
+ cliOptions.azureStorageAccountKey,
588
+ process.env.AZURE_STORAGE_ACCOUNT_KEY,
589
+ globalConfig.credentials.azureStorageAccountKey
590
+ ),
591
+ AZURE_CONTAINER_NAME: resolveString(
592
+ cliOptions.azureContainerName,
593
+ process.env.AZURE_CONTAINER_NAME,
594
+ "vidpipe"
595
+ )
584
596
  };
585
597
  }
586
598
  function resolveModelOverrides() {
@@ -1139,7 +1151,8 @@ var init_types2 = __esm({
1139
1151
  { stage: "short-posts" /* ShortPosts */, name: "Short Posts", stageNumber: 14 },
1140
1152
  { stage: "medium-clip-posts" /* MediumClipPosts */, name: "Medium Clip Posts", stageNumber: 15 },
1141
1153
  { stage: "queue-build" /* QueueBuild */, name: "Queue Build", stageNumber: 16 },
1142
- { stage: "blog" /* Blog */, name: "Blog", stageNumber: 17 }
1154
+ { stage: "blog" /* Blog */, name: "Blog", stageNumber: 17 },
1155
+ { stage: "cloud-upload" /* CloudUpload */, name: "Cloud Upload", stageNumber: 18 }
1143
1156
  ];
1144
1157
  TOTAL_STAGES = PIPELINE_STAGES.length;
1145
1158
  PLATFORM_CHAR_LIMITS = {
@@ -2218,7 +2231,8 @@ async function convertWithSmartLayout(inputPath, outputPath, config2, webcamOver
2218
2231
  const webcam = webcamOverride !== void 0 ? webcamOverride : await detectWebcamRegion(inputPath);
2219
2232
  if (!webcam) {
2220
2233
  logger_default.info(`[${label}] No webcam found, falling back to center-crop`);
2221
- return convertAspectRatio(inputPath, outputPath, fallbackRatio);
2234
+ const path = await convertAspectRatio(inputPath, outputPath, fallbackRatio);
2235
+ return { path, isSplitScreen: false };
2222
2236
  }
2223
2237
  const resolution = await getVideoResolution(inputPath);
2224
2238
  const margin = Math.round(resolution.width * 0.02);
@@ -2283,7 +2297,7 @@ async function convertWithSmartLayout(inputPath, outputPath, config2, webcamOver
2283
2297
  return;
2284
2298
  }
2285
2299
  logger_default.info(`[${label}] Complete: ${outputPath}`);
2286
- resolve4(outputPath);
2300
+ resolve4({ path: outputPath, isSplitScreen: true });
2287
2301
  });
2288
2302
  });
2289
2303
  }
@@ -2329,21 +2343,25 @@ async function generatePlatformVariants(inputPath, outputDir, slug, platforms =
2329
2343
  const suffix = ratio === "9:16" ? "portrait" : ratio === "4:5" ? "feed" : "square";
2330
2344
  const outPath = join(outputDir, `${slug}-${suffix}.mp4`);
2331
2345
  try {
2346
+ let isSplitScreen = false;
2332
2347
  if (ratio === "9:16") {
2333
2348
  if (options.useAgent) {
2334
2349
  logger_default.warn(`[generatePlatformVariants] LayoutAgent is disabled, falling back to ONNX pipeline`);
2335
2350
  }
2336
- await convertToPortraitSmart(inputPath, outPath, options.webcamOverride);
2351
+ const result = await convertToPortraitSmart(inputPath, outPath, options.webcamOverride);
2352
+ isSplitScreen = result.isSplitScreen;
2337
2353
  } else if (ratio === "1:1") {
2338
- await convertToSquareSmart(inputPath, outPath, options.webcamOverride);
2354
+ const result = await convertToSquareSmart(inputPath, outPath, options.webcamOverride);
2355
+ isSplitScreen = result.isSplitScreen;
2339
2356
  } else if (ratio === "4:5") {
2340
- await convertToFeedSmart(inputPath, outPath, options.webcamOverride);
2357
+ const result = await convertToFeedSmart(inputPath, outPath, options.webcamOverride);
2358
+ isSplitScreen = result.isSplitScreen;
2341
2359
  } else {
2342
2360
  await convertAspectRatio(inputPath, outPath, ratio);
2343
2361
  }
2344
2362
  const dims = DIMENSIONS[ratio];
2345
2363
  for (const p of associatedPlatforms) {
2346
- variants.push({ platform: p, aspectRatio: ratio, path: outPath, width: dims.width, height: dims.height });
2364
+ variants.push({ platform: p, aspectRatio: ratio, path: outPath, width: dims.width, height: dims.height, isSplitScreen });
2347
2365
  }
2348
2366
  } catch (err) {
2349
2367
  const message = err instanceof Error ? err.message : String(err);
@@ -3919,7 +3937,7 @@ var require_semaphore = __commonJS({
3919
3937
  Object.defineProperty(exports, "__esModule", { value: true });
3920
3938
  exports.Semaphore = void 0;
3921
3939
  var ral_1 = require_ral();
3922
- var Semaphore = class {
3940
+ var Semaphore2 = class {
3923
3941
  constructor(capacity = 1) {
3924
3942
  if (capacity <= 0) {
3925
3943
  throw new Error("Capacity must be greater than 0");
@@ -3976,7 +3994,7 @@ var require_semaphore = __commonJS({
3976
3994
  }
3977
3995
  }
3978
3996
  };
3979
- exports.Semaphore = Semaphore;
3997
+ exports.Semaphore = Semaphore2;
3980
3998
  }
3981
3999
  });
3982
4000
 
@@ -9247,9 +9265,9 @@ var init_BaseAgent = __esm({
9247
9265
  getUserInputHandler() {
9248
9266
  return void 0;
9249
9267
  }
9250
- /** Timeout for sendAndWait calls. Override in interactive agents that need longer timeouts. */
9268
+ /** Timeout for sendAndWait calls. 0 = no timeout. Override in subclasses if needed. */
9251
9269
  getTimeoutMs() {
9252
- return 3e5;
9270
+ return 0;
9253
9271
  }
9254
9272
  /**
9255
9273
  * Reset agent-specific state before a retry attempt.
@@ -9669,6 +9687,236 @@ Call the generate_thumbnail tool with a detailed, vivid prompt that will create
9669
9687
  }
9670
9688
  });
9671
9689
 
9690
+ // src/L3-services/postStore/postStore.ts
9691
+ function getQueueDir() {
9692
+ const { OUTPUT_DIR } = getConfig();
9693
+ return join(OUTPUT_DIR, "publish-queue");
9694
+ }
9695
+ function getPublishedDir() {
9696
+ const { OUTPUT_DIR } = getConfig();
9697
+ return join(OUTPUT_DIR, "published");
9698
+ }
9699
+ async function readQueueItem(folderPath, id) {
9700
+ const metadataPath = join(folderPath, "metadata.json");
9701
+ const postPath = join(folderPath, "post.md");
9702
+ try {
9703
+ const metadataRaw = await readTextFile(metadataPath);
9704
+ const metadata = JSON.parse(metadataRaw);
9705
+ let postContent = "";
9706
+ try {
9707
+ postContent = await readTextFile(postPath);
9708
+ } catch {
9709
+ logger_default.debug(`No post.md found for ${String(id).replace(/[\r\n]/g, "")}`);
9710
+ }
9711
+ const videoPath = join(folderPath, "media.mp4");
9712
+ const imagePath = join(folderPath, "media.png");
9713
+ let mediaPath = null;
9714
+ let hasMedia = false;
9715
+ if (await fileExists(videoPath)) {
9716
+ mediaPath = videoPath;
9717
+ hasMedia = true;
9718
+ } else if (await fileExists(imagePath)) {
9719
+ mediaPath = imagePath;
9720
+ hasMedia = true;
9721
+ }
9722
+ const thumbnailPath = join(folderPath, "thumbnail.png");
9723
+ const hasThumbnail = await fileExists(thumbnailPath);
9724
+ return {
9725
+ id,
9726
+ metadata,
9727
+ postContent,
9728
+ hasMedia,
9729
+ mediaPath,
9730
+ thumbnailPath: hasThumbnail ? thumbnailPath : metadata.thumbnailPath ?? null,
9731
+ folderPath
9732
+ };
9733
+ } catch (err) {
9734
+ logger_default.debug(`Failed to read queue item ${String(id).replace(/[\r\n]/g, "")}: ${String(err).replace(/[\r\n]/g, "")}`);
9735
+ return null;
9736
+ }
9737
+ }
9738
+ async function getPendingItems() {
9739
+ const queueDir = getQueueDir();
9740
+ await ensureDirectory(queueDir);
9741
+ let entries;
9742
+ try {
9743
+ const dirents = await listDirectoryWithTypes(queueDir);
9744
+ entries = dirents.filter((d) => d.isDirectory()).map((d) => d.name);
9745
+ } catch {
9746
+ return [];
9747
+ }
9748
+ const items = [];
9749
+ for (const name of entries) {
9750
+ const item = await readQueueItem(join(queueDir, name), name);
9751
+ if (item) items.push(item);
9752
+ }
9753
+ items.sort((a, b) => {
9754
+ if (a.hasMedia !== b.hasMedia) return a.hasMedia ? -1 : 1;
9755
+ return a.metadata.createdAt.localeCompare(b.metadata.createdAt);
9756
+ });
9757
+ return items;
9758
+ }
9759
+ async function getItem(id) {
9760
+ if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
9761
+ throw new Error(`Invalid ID format: ${id}`);
9762
+ }
9763
+ const folderPath = join(getQueueDir(), basename(id));
9764
+ return readQueueItem(folderPath, id);
9765
+ }
9766
+ async function createItem(id, metadata, postContent, mediaSourcePath, thumbnailSourcePath) {
9767
+ if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
9768
+ throw new Error(`Invalid ID format: ${id}`);
9769
+ }
9770
+ const folderPath = join(getQueueDir(), basename(id));
9771
+ await ensureDirectory(folderPath);
9772
+ await writeJsonFile(join(folderPath, "metadata.json"), metadata);
9773
+ await writeTextFile(join(folderPath, "post.md"), postContent);
9774
+ let hasMedia = false;
9775
+ const ext = mediaSourcePath ? extname(mediaSourcePath) : ".mp4";
9776
+ const mediaFilename = `media${ext}`;
9777
+ const mediaPath = join(folderPath, mediaFilename);
9778
+ if (mediaSourcePath) {
9779
+ await copyFile(mediaSourcePath, mediaPath);
9780
+ hasMedia = true;
9781
+ }
9782
+ let thumbnailPath = null;
9783
+ if (thumbnailSourcePath) {
9784
+ const thumbDest = join(folderPath, "thumbnail.png");
9785
+ await copyFile(thumbnailSourcePath, thumbDest);
9786
+ thumbnailPath = thumbDest;
9787
+ }
9788
+ logger_default.debug(`Created queue item: ${String(id).replace(/[\r\n]/g, "")}`);
9789
+ return {
9790
+ id,
9791
+ metadata,
9792
+ postContent,
9793
+ hasMedia,
9794
+ mediaPath: hasMedia ? mediaPath : null,
9795
+ thumbnailPath,
9796
+ folderPath
9797
+ };
9798
+ }
9799
+ async function updateItem(id, updates) {
9800
+ if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
9801
+ throw new Error(`Invalid ID format: ${id}`);
9802
+ }
9803
+ const existing = await getItem(id);
9804
+ if (!existing) return null;
9805
+ if (updates.metadata) {
9806
+ const sanitized = {
9807
+ id: String(existing.metadata.id),
9808
+ platform: String(updates.metadata.platform ?? existing.metadata.platform),
9809
+ accountId: String(updates.metadata.accountId ?? existing.metadata.accountId),
9810
+ sourceVideo: String(existing.metadata.sourceVideo),
9811
+ sourceClip: existing.metadata.sourceClip !== null ? String(existing.metadata.sourceClip) : null,
9812
+ clipType: existing.metadata.clipType,
9813
+ sourceMediaPath: existing.metadata.sourceMediaPath !== null ? String(existing.metadata.sourceMediaPath) : null,
9814
+ hashtags: Array.isArray(updates.metadata.hashtags) ? updates.metadata.hashtags.map(String) : Array.isArray(existing.metadata.hashtags) ? existing.metadata.hashtags.map(String) : [],
9815
+ links: Array.isArray(updates.metadata.links) ? updates.metadata.links : Array.isArray(existing.metadata.links) ? existing.metadata.links : [],
9816
+ characterCount: updates.metadata.characterCount !== void 0 ? Number(updates.metadata.characterCount) || 0 : Number(existing.metadata.characterCount) || 0,
9817
+ platformCharLimit: updates.metadata.platformCharLimit !== void 0 ? Number(updates.metadata.platformCharLimit) || 0 : Number(existing.metadata.platformCharLimit) || 0,
9818
+ suggestedSlot: updates.metadata.suggestedSlot !== void 0 ? updates.metadata.suggestedSlot !== null ? String(updates.metadata.suggestedSlot) : null : existing.metadata.suggestedSlot !== null ? String(existing.metadata.suggestedSlot) : null,
9819
+ scheduledFor: updates.metadata.scheduledFor !== void 0 ? updates.metadata.scheduledFor !== null ? String(updates.metadata.scheduledFor) : null : existing.metadata.scheduledFor !== null ? String(existing.metadata.scheduledFor) : null,
9820
+ status: updates.metadata.status ?? existing.metadata.status,
9821
+ latePostId: updates.metadata.latePostId !== void 0 ? updates.metadata.latePostId !== null ? String(updates.metadata.latePostId) : null : existing.metadata.latePostId !== null ? String(existing.metadata.latePostId) : null,
9822
+ publishedUrl: updates.metadata.publishedUrl !== void 0 ? updates.metadata.publishedUrl !== null ? String(updates.metadata.publishedUrl) : null : existing.metadata.publishedUrl !== null ? String(existing.metadata.publishedUrl) : null,
9823
+ createdAt: String(existing.metadata.createdAt),
9824
+ reviewedAt: updates.metadata.reviewedAt !== void 0 ? updates.metadata.reviewedAt !== null ? String(updates.metadata.reviewedAt) : null : existing.metadata.reviewedAt !== null ? String(existing.metadata.reviewedAt) : null,
9825
+ publishedAt: updates.metadata.publishedAt !== void 0 ? updates.metadata.publishedAt !== null ? String(updates.metadata.publishedAt) : null : existing.metadata.publishedAt !== null ? String(existing.metadata.publishedAt) : null,
9826
+ textOnly: updates.metadata.textOnly ?? existing.metadata.textOnly,
9827
+ mediaType: updates.metadata.mediaType ?? existing.metadata.mediaType,
9828
+ ideaIds: Array.isArray(updates.metadata.ideaIds) ? updates.metadata.ideaIds.map(String) : Array.isArray(existing.metadata.ideaIds) ? existing.metadata.ideaIds.map(String) : void 0,
9829
+ platformSpecificData: updates.metadata.platformSpecificData ?? existing.metadata.platformSpecificData
9830
+ };
9831
+ existing.metadata = sanitized;
9832
+ const metadataWritePath = resolve(join(existing.folderPath, "metadata.json"));
9833
+ if (!metadataWritePath.startsWith(resolve(getQueueDir()) + sep)) {
9834
+ throw new Error("Write target outside queue directory");
9835
+ }
9836
+ await writeTextFile(
9837
+ metadataWritePath,
9838
+ JSON.stringify(existing.metadata, null, 2)
9839
+ );
9840
+ }
9841
+ if (updates.postContent !== void 0) {
9842
+ const sanitizedContent = String(updates.postContent);
9843
+ existing.postContent = sanitizedContent;
9844
+ const postWritePath = resolve(join(existing.folderPath, "post.md"));
9845
+ if (!postWritePath.startsWith(resolve(getQueueDir()) + sep)) {
9846
+ throw new Error("Write target outside queue directory");
9847
+ }
9848
+ await writeTextFile(postWritePath, sanitizedContent);
9849
+ }
9850
+ logger_default.debug(`Updated queue item: ${String(id).replace(/[\r\n]/g, "")}`);
9851
+ return existing;
9852
+ }
9853
+ async function getPublishedItems() {
9854
+ const publishedDir = getPublishedDir();
9855
+ await ensureDirectory(publishedDir);
9856
+ let entries;
9857
+ try {
9858
+ const dirents = await listDirectoryWithTypes(publishedDir);
9859
+ entries = dirents.filter((d) => d.isDirectory()).map((d) => d.name);
9860
+ } catch {
9861
+ return [];
9862
+ }
9863
+ const items = [];
9864
+ for (const name of entries) {
9865
+ const item = await readQueueItem(join(publishedDir, name), name);
9866
+ if (item) items.push(item);
9867
+ }
9868
+ items.sort((a, b) => a.metadata.createdAt.localeCompare(b.metadata.createdAt));
9869
+ return items;
9870
+ }
9871
+ async function getScheduledItemsByIdeaIds(ideaIds) {
9872
+ if (ideaIds.length === 0) return [];
9873
+ const ideaIdSet = new Set(ideaIds);
9874
+ const [pendingItems, publishedItems] = await Promise.all([
9875
+ getPendingItems(),
9876
+ getPublishedItems()
9877
+ ]);
9878
+ return [...pendingItems, ...publishedItems].filter(
9879
+ (item) => item.metadata.ideaIds?.some((id) => ideaIdSet.has(id)) ?? false
9880
+ );
9881
+ }
9882
+ async function updatePublishedItemSchedule(id, scheduledFor) {
9883
+ if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
9884
+ throw new Error(`Invalid ID format: ${id}`);
9885
+ }
9886
+ const publishedDir = getPublishedDir();
9887
+ const folderPath = join(publishedDir, basename(id));
9888
+ const metadataPath = join(folderPath, "metadata.json");
9889
+ if (!resolve(metadataPath).startsWith(resolve(publishedDir) + sep)) {
9890
+ throw new Error("Write target outside published directory");
9891
+ }
9892
+ const raw = await readTextFile(metadataPath);
9893
+ const metadata = JSON.parse(raw);
9894
+ metadata.scheduledFor = String(scheduledFor);
9895
+ await writeTextFile(metadataPath, JSON.stringify(metadata, null, 2));
9896
+ }
9897
+ async function itemExists(id) {
9898
+ if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
9899
+ throw new Error(`Invalid ID format: ${id}`);
9900
+ }
9901
+ if (await fileExists(join(getQueueDir(), basename(id)))) {
9902
+ return "pending";
9903
+ }
9904
+ if (await fileExists(join(getPublishedDir(), basename(id)))) {
9905
+ return "published";
9906
+ }
9907
+ return null;
9908
+ }
9909
+ var init_postStore = __esm({
9910
+ "src/L3-services/postStore/postStore.ts"() {
9911
+ "use strict";
9912
+ init_types2();
9913
+ init_environment();
9914
+ init_configLogger();
9915
+ init_fileSystem();
9916
+ init_paths();
9917
+ }
9918
+ });
9919
+
9672
9920
  // src/L2-clients/github/githubClient.ts
9673
9921
  import { Octokit } from "octokit";
9674
9922
  function getErrorStatus(error) {
@@ -9678,9 +9926,7 @@ function getErrorStatus(error) {
9678
9926
  return void 0;
9679
9927
  }
9680
9928
  function getErrorMessage(error) {
9681
- if (error instanceof Error) {
9682
- return error.message;
9683
- }
9929
+ if (error instanceof Error) return error.message;
9684
9930
  if (typeof error === "object" && error !== null && "message" in error && typeof error.message === "string") {
9685
9931
  return error.message ?? "Unknown GitHub API error";
9686
9932
  }
@@ -9689,8 +9935,8 @@ function getErrorMessage(error) {
9689
9935
  function normalizeLabels(labels) {
9690
9936
  return Array.from(new Set(labels.map((label) => label.trim()).filter((label) => label.length > 0)));
9691
9937
  }
9692
- function isIssueResponse(value) {
9693
- return !("pull_request" in value);
9938
+ function sleep(ms) {
9939
+ return new Promise((resolve4) => setTimeout(resolve4, ms));
9694
9940
  }
9695
9941
  function getGitHubClient() {
9696
9942
  const config2 = getConfig();
@@ -9701,13 +9947,32 @@ function getGitHubClient() {
9701
9947
  }
9702
9948
  return clientInstance;
9703
9949
  }
9704
- var DEFAULT_PER_PAGE, GitHubClientError, GitHubClient, clientInstance, clientKey;
9950
+ var CACHE_TTL_MS, MAX_CONCURRENT, THROTTLE_WARN_REMAINING, THROTTLE_SLOW_REMAINING, THROTTLE_CRITICAL_REMAINING, MAX_RETRIES2, ISSUE_WITH_COMMENTS_FRAGMENT, GitHubClientError, Semaphore, GitHubClient, clientInstance, clientKey;
9705
9951
  var init_githubClient = __esm({
9706
9952
  "src/L2-clients/github/githubClient.ts"() {
9707
9953
  "use strict";
9708
9954
  init_environment();
9709
9955
  init_configLogger();
9710
- DEFAULT_PER_PAGE = 100;
9956
+ CACHE_TTL_MS = 5 * 60 * 1e3;
9957
+ MAX_CONCURRENT = 4;
9958
+ THROTTLE_WARN_REMAINING = 200;
9959
+ THROTTLE_SLOW_REMAINING = 100;
9960
+ THROTTLE_CRITICAL_REMAINING = 50;
9961
+ MAX_RETRIES2 = 3;
9962
+ ISSUE_WITH_COMMENTS_FRAGMENT = `
9963
+ number
9964
+ title
9965
+ body
9966
+ state
9967
+ labels(first: 50) { nodes { name } }
9968
+ comments(first: 100) {
9969
+ nodes { databaseId body createdAt updatedAt url }
9970
+ pageInfo { hasNextPage endCursor }
9971
+ }
9972
+ createdAt
9973
+ updatedAt
9974
+ url
9975
+ `;
9711
9976
  GitHubClientError = class extends Error {
9712
9977
  constructor(message, status) {
9713
9978
  super(message);
@@ -9715,10 +9980,40 @@ var init_githubClient = __esm({
9715
9980
  this.name = "GitHubClientError";
9716
9981
  }
9717
9982
  };
9983
+ Semaphore = class {
9984
+ constructor(limit) {
9985
+ this.limit = limit;
9986
+ }
9987
+ queue = [];
9988
+ running = 0;
9989
+ async acquire() {
9990
+ if (this.running < this.limit) {
9991
+ this.running++;
9992
+ return;
9993
+ }
9994
+ return new Promise((resolve4) => {
9995
+ this.queue.push(() => {
9996
+ this.running++;
9997
+ resolve4();
9998
+ });
9999
+ });
10000
+ }
10001
+ release() {
10002
+ this.running--;
10003
+ const next = this.queue.shift();
10004
+ if (next) next();
10005
+ }
10006
+ };
9718
10007
  GitHubClient = class {
9719
10008
  octokit;
9720
10009
  owner;
9721
10010
  repo;
10011
+ // Rate limiting
10012
+ rateLimitRemaining = 5e3;
10013
+ rateLimitReset = 0;
10014
+ semaphore = new Semaphore(MAX_CONCURRENT);
10015
+ // Response cache — keyed by "type:identifier"
10016
+ cache = /* @__PURE__ */ new Map();
9722
10017
  constructor(token, repoFullName) {
9723
10018
  const config2 = getConfig();
9724
10019
  const authToken = token || config2.GITHUB_TOKEN;
@@ -9734,87 +10029,184 @@ var init_githubClient = __esm({
9734
10029
  this.repo = repo;
9735
10030
  this.octokit = new Octokit({ auth: authToken });
9736
10031
  }
9737
- async createIssue(input) {
9738
- logger_default.debug(`[GitHubClient] Creating issue in ${this.owner}/${this.repo}: ${input.title}`);
9739
- try {
9740
- const response = await this.octokit.rest.issues.create({
9741
- owner: this.owner,
9742
- repo: this.repo,
9743
- title: input.title,
9744
- body: input.body,
9745
- labels: input.labels ? normalizeLabels(input.labels) : void 0
9746
- });
9747
- const issue = this.mapIssue(response.data);
9748
- logger_default.info(`[GitHubClient] Created issue #${issue.number}: ${input.title}`);
9749
- return issue;
9750
- } catch (error) {
9751
- this.logError("create issue", error);
9752
- throw new GitHubClientError(`Failed to create GitHub issue: ${getErrorMessage(error)}`, getErrorStatus(error));
10032
+ // ── Cache helpers ────────────────────────────────────────────────────
10033
+ getCached(key) {
10034
+ const entry = this.cache.get(key);
10035
+ if (!entry) return void 0;
10036
+ if (Date.now() - entry.cachedAt > CACHE_TTL_MS) {
10037
+ this.cache.delete(key);
10038
+ return void 0;
9753
10039
  }
10040
+ return entry.data;
9754
10041
  }
9755
- async updateIssue(issueNumber, input) {
9756
- logger_default.debug(`[GitHubClient] Updating issue #${issueNumber} in ${this.owner}/${this.repo}`);
10042
+ setCache(key, data) {
10043
+ this.cache.set(key, { data, cachedAt: Date.now() });
10044
+ }
10045
+ invalidateIssue(issueNumber) {
10046
+ this.cache.delete(`issue:${issueNumber}`);
10047
+ this.cache.delete(`comments:${issueNumber}`);
10048
+ for (const key of this.cache.keys()) {
10049
+ if (key.startsWith("issues:") || key.startsWith("search:")) {
10050
+ this.cache.delete(key);
10051
+ }
10052
+ }
10053
+ }
10054
+ /** Clear all cached data. Useful after bulk writes. */
10055
+ clearCache() {
10056
+ this.cache.clear();
10057
+ }
10058
+ // ── Throttle / rate limit ────────────────────────────────────────────
10059
+ async throttle() {
10060
+ if (this.rateLimitRemaining < THROTTLE_CRITICAL_REMAINING) {
10061
+ const waitMs = Math.max(0, this.rateLimitReset * 1e3 - Date.now()) + 1e3;
10062
+ logger_default.warn(`[GitHubClient] Rate limit critical (${this.rateLimitRemaining} remaining) \u2014 waiting ${Math.round(waitMs / 1e3)}s`);
10063
+ await sleep(Math.min(waitMs, 6e4));
10064
+ } else if (this.rateLimitRemaining < THROTTLE_SLOW_REMAINING) {
10065
+ await sleep(500);
10066
+ } else if (this.rateLimitRemaining < THROTTLE_WARN_REMAINING) {
10067
+ await sleep(100);
10068
+ }
10069
+ }
10070
+ updateRateLimit(headers) {
10071
+ const remaining = headers["x-ratelimit-remaining"];
10072
+ const reset = headers["x-ratelimit-reset"];
10073
+ if (remaining !== void 0) this.rateLimitRemaining = parseInt(remaining, 10) || 0;
10074
+ if (reset !== void 0) this.rateLimitReset = parseInt(reset, 10) || 0;
10075
+ }
10076
+ // ── GraphQL transport ────────────────────────────────────────────────
10077
+ async graphql(query, variables = {}) {
10078
+ await this.semaphore.acquire();
9757
10079
  try {
9758
- const response = await this.octokit.rest.issues.update({
9759
- owner: this.owner,
9760
- repo: this.repo,
9761
- issue_number: issueNumber,
9762
- title: input.title,
9763
- body: input.body,
9764
- state: input.state,
9765
- labels: input.labels ? normalizeLabels(input.labels) : void 0
9766
- });
9767
- return this.mapIssue(response.data);
9768
- } catch (error) {
9769
- this.logError(`update issue #${issueNumber}`, error);
9770
- throw new GitHubClientError(
9771
- `Failed to update GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9772
- getErrorStatus(error)
9773
- );
10080
+ await this.throttle();
10081
+ let lastError;
10082
+ for (let attempt = 0; attempt < MAX_RETRIES2; attempt++) {
10083
+ try {
10084
+ const response = await this.octokit.request("POST /graphql", { query, variables });
10085
+ this.updateRateLimit(response.headers);
10086
+ const body = response.data;
10087
+ if (body.errors?.length) {
10088
+ const rateLimited = body.errors.some((e) => e.type === "RATE_LIMITED");
10089
+ if (rateLimited && attempt < MAX_RETRIES2 - 1) {
10090
+ const waitMs = Math.max(0, this.rateLimitReset * 1e3 - Date.now()) + 1e3;
10091
+ logger_default.warn(`[GitHubClient] GraphQL rate limited \u2014 retrying in ${Math.round(waitMs / 1e3)}s`);
10092
+ await sleep(Math.min(waitMs, 6e4));
10093
+ continue;
10094
+ }
10095
+ throw new GitHubClientError(`GraphQL error: ${body.errors.map((e) => e.message).join("; ")}`);
10096
+ }
10097
+ if (!body.data) throw new GitHubClientError("GraphQL returned no data");
10098
+ return body.data;
10099
+ } catch (error) {
10100
+ lastError = error;
10101
+ if (error instanceof GitHubClientError) throw error;
10102
+ const status = getErrorStatus(error);
10103
+ if (status === 403 && attempt < MAX_RETRIES2 - 1) {
10104
+ const backoff = Math.pow(2, attempt) * 1e3;
10105
+ logger_default.warn(`[GitHubClient] 403 \u2014 retrying in ${backoff}ms (attempt ${attempt + 1}/${MAX_RETRIES2})`);
10106
+ await sleep(backoff);
10107
+ continue;
10108
+ }
10109
+ throw error;
10110
+ }
10111
+ }
10112
+ throw lastError;
10113
+ } finally {
10114
+ this.semaphore.release();
9774
10115
  }
9775
10116
  }
10117
+ // ── GraphQL mappers ──────────────────────────────────────────────────
10118
+ mapGqlIssue(node) {
10119
+ return {
10120
+ number: node.number,
10121
+ title: node.title,
10122
+ body: node.body ?? "",
10123
+ state: node.state === "OPEN" ? "open" : "closed",
10124
+ labels: node.labels.nodes.map((l) => l.name).filter(Boolean),
10125
+ created_at: node.createdAt,
10126
+ updated_at: node.updatedAt,
10127
+ html_url: node.url
10128
+ };
10129
+ }
10130
+ mapGqlComment(node) {
10131
+ return {
10132
+ id: node.databaseId,
10133
+ body: node.body ?? "",
10134
+ created_at: node.createdAt,
10135
+ updated_at: node.updatedAt,
10136
+ html_url: node.url
10137
+ };
10138
+ }
10139
+ mapGqlComments(node) {
10140
+ return node.comments.nodes.map((c) => this.mapGqlComment(c));
10141
+ }
10142
+ // ── Read operations (GraphQL) ────────────────────────────────────────
9776
10143
  async getIssue(issueNumber) {
10144
+ const cached = this.getCached(`issue:${issueNumber}`);
10145
+ if (cached) return cached;
9777
10146
  logger_default.debug(`[GitHubClient] Fetching issue #${issueNumber} from ${this.owner}/${this.repo}`);
9778
10147
  try {
9779
- const response = await this.octokit.rest.issues.get({
9780
- owner: this.owner,
9781
- repo: this.repo,
9782
- issue_number: issueNumber
9783
- });
9784
- return this.mapIssue(response.data);
10148
+ const data = await this.graphql(
10149
+ `query($owner: String!, $repo: String!, $number: Int!) {
10150
+ repository(owner: $owner, name: $repo) {
10151
+ issue(number: $number) { ${ISSUE_WITH_COMMENTS_FRAGMENT} }
10152
+ }
10153
+ }`,
10154
+ { owner: this.owner, repo: this.repo, number: issueNumber }
10155
+ );
10156
+ if (!data.repository.issue) {
10157
+ throw new GitHubClientError(`Issue #${issueNumber} not found`, 404);
10158
+ }
10159
+ const issue = this.mapGqlIssue(data.repository.issue);
10160
+ const comments = this.mapGqlComments(data.repository.issue);
10161
+ this.setCache(`issue:${issueNumber}`, issue);
10162
+ this.setCache(`comments:${issueNumber}`, comments);
10163
+ return issue;
9785
10164
  } catch (error) {
9786
10165
  this.logError(`get issue #${issueNumber}`, error);
9787
- throw new GitHubClientError(
9788
- `Failed to fetch GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9789
- getErrorStatus(error)
9790
- );
10166
+ throw error instanceof GitHubClientError ? error : new GitHubClientError(`Failed to fetch GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9791
10167
  }
9792
10168
  }
9793
10169
  async listIssues(options = {}) {
10170
+ const labels = options.labels && options.labels.length > 0 ? normalizeLabels(options.labels) : [];
10171
+ const stateFilter = options.state ?? "all";
10172
+ const cacheKey = `issues:${labels.sort().join(",")}:${stateFilter}`;
10173
+ const cached = this.getCached(cacheKey);
10174
+ if (cached) return options.maxResults ? cached.slice(0, options.maxResults) : cached;
9794
10175
  logger_default.debug(`[GitHubClient] Listing issues for ${this.owner}/${this.repo}`);
9795
- const issues = [];
9796
- let page;
9797
10176
  const maxResults = options.maxResults ?? Number.POSITIVE_INFINITY;
10177
+ const gqlStates = stateFilter === "all" ? "[OPEN, CLOSED]" : stateFilter === "open" ? "[OPEN]" : "[CLOSED]";
10178
+ const labelsArg = labels.length > 0 ? `, labels: ${JSON.stringify(labels)}` : "";
9798
10179
  try {
9799
- while (issues.length < maxResults) {
9800
- const response = await this.octokit.rest.issues.listForRepo({
9801
- owner: this.owner,
9802
- repo: this.repo,
9803
- state: options.state ?? "all",
9804
- labels: options.labels && options.labels.length > 0 ? normalizeLabels(options.labels).join(",") : void 0,
9805
- sort: void 0,
9806
- direction: void 0,
9807
- per_page: DEFAULT_PER_PAGE,
9808
- page
9809
- });
9810
- const pageItems = response.data.filter(isIssueResponse).map((issue) => this.mapIssue(issue));
9811
- issues.push(...pageItems);
9812
- if (pageItems.length < DEFAULT_PER_PAGE) {
9813
- break;
10180
+ const allIssues = [];
10181
+ let cursor = null;
10182
+ let hasNext = true;
10183
+ while (hasNext && allIssues.length < maxResults) {
10184
+ const afterArg = cursor ? `, after: "${cursor}"` : "";
10185
+ const data = await this.graphql(
10186
+ `query($owner: String!, $repo: String!) {
10187
+ repository(owner: $owner, name: $repo) {
10188
+ issues(first: 100, states: ${gqlStates}${labelsArg}${afterArg}, orderBy: {field: UPDATED_AT, direction: DESC}) {
10189
+ nodes { ${ISSUE_WITH_COMMENTS_FRAGMENT} }
10190
+ pageInfo { hasNextPage endCursor }
10191
+ }
10192
+ }
10193
+ }`,
10194
+ { owner: this.owner, repo: this.repo }
10195
+ );
10196
+ const pageData = data.repository.issues;
10197
+ for (const node of pageData.nodes) {
10198
+ const issue = this.mapGqlIssue(node);
10199
+ const comments = this.mapGqlComments(node);
10200
+ allIssues.push(issue);
10201
+ this.setCache(`issue:${issue.number}`, issue);
10202
+ this.setCache(`comments:${issue.number}`, comments);
9814
10203
  }
9815
- page = (page ?? 1) + 1;
10204
+ hasNext = pageData.pageInfo.hasNextPage;
10205
+ cursor = pageData.pageInfo.endCursor;
9816
10206
  }
9817
- return issues.slice(0, maxResults);
10207
+ const result = allIssues.slice(0, maxResults);
10208
+ this.setCache(cacheKey, result);
10209
+ return result;
9818
10210
  } catch (error) {
9819
10211
  this.logError("list issues", error);
9820
10212
  throw new GitHubClientError(`Failed to list GitHub issues: ${getErrorMessage(error)}`, getErrorStatus(error));
@@ -9822,22 +10214,89 @@ var init_githubClient = __esm({
9822
10214
  }
9823
10215
  async searchIssues(query, options = {}) {
9824
10216
  const searchQuery = `repo:${this.owner}/${this.repo} is:issue ${query}`.trim();
10217
+ const cacheKey = `search:${searchQuery}`;
10218
+ const cached = this.getCached(cacheKey);
10219
+ if (cached) return options.maxResults ? cached.slice(0, options.maxResults) : cached;
9825
10220
  logger_default.debug(`[GitHubClient] Searching issues in ${this.owner}/${this.repo}: ${query}`);
9826
10221
  try {
9827
- const items = await this.octokit.paginate(this.octokit.rest.search.issuesAndPullRequests, {
9828
- q: searchQuery,
9829
- per_page: DEFAULT_PER_PAGE
10222
+ const data = await this.graphql(
10223
+ `query($q: String!) {
10224
+ search(query: $q, type: ISSUE, first: 100) {
10225
+ nodes {
10226
+ ... on Issue { __typename ${ISSUE_WITH_COMMENTS_FRAGMENT} }
10227
+ }
10228
+ }
10229
+ }`,
10230
+ { q: searchQuery }
10231
+ );
10232
+ const issues = data.search.nodes.filter((n) => n.__typename === "Issue").map((node) => {
10233
+ const issue = this.mapGqlIssue(node);
10234
+ const comments = this.mapGqlComments(node);
10235
+ this.setCache(`issue:${issue.number}`, issue);
10236
+ this.setCache(`comments:${issue.number}`, comments);
10237
+ return issue;
9830
10238
  });
9831
- return items.filter(isIssueResponse).map((issue) => this.mapIssue(issue)).slice(0, options.maxResults ?? Number.POSITIVE_INFINITY);
10239
+ const result = issues.slice(0, options.maxResults ?? Number.POSITIVE_INFINITY);
10240
+ this.setCache(cacheKey, result);
10241
+ return result;
9832
10242
  } catch (error) {
9833
10243
  this.logError("search issues", error);
9834
10244
  throw new GitHubClientError(`Failed to search GitHub issues: ${getErrorMessage(error)}`, getErrorStatus(error));
9835
10245
  }
9836
10246
  }
9837
- async addLabels(issueNumber, labels) {
9838
- if (labels.length === 0) {
9839
- return;
10247
+ async listComments(issueNumber) {
10248
+ const cached = this.getCached(`comments:${issueNumber}`);
10249
+ if (cached) return cached;
10250
+ logger_default.debug(`[GitHubClient] Listing comments for issue #${issueNumber} in ${this.owner}/${this.repo}`);
10251
+ try {
10252
+ await this.getIssue(issueNumber);
10253
+ return this.getCached(`comments:${issueNumber}`) ?? [];
10254
+ } catch (error) {
10255
+ this.logError(`list comments for issue #${issueNumber}`, error);
10256
+ throw error instanceof GitHubClientError ? error : new GitHubClientError(`Failed to list comments for GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
10257
+ }
10258
+ }
10259
+ // ── Write operations (REST — infrequent, keep on separate quota) ─────
10260
+ async createIssue(input) {
10261
+ logger_default.debug(`[GitHubClient] Creating issue in ${this.owner}/${this.repo}: ${input.title}`);
10262
+ try {
10263
+ const response = await this.octokit.rest.issues.create({
10264
+ owner: this.owner,
10265
+ repo: this.repo,
10266
+ title: input.title,
10267
+ body: input.body,
10268
+ labels: input.labels ? normalizeLabels(input.labels) : void 0
10269
+ });
10270
+ const issue = this.mapRestIssue(response.data);
10271
+ logger_default.info(`[GitHubClient] Created issue #${issue.number}: ${input.title}`);
10272
+ return issue;
10273
+ } catch (error) {
10274
+ this.logError("create issue", error);
10275
+ throw new GitHubClientError(`Failed to create GitHub issue: ${getErrorMessage(error)}`, getErrorStatus(error));
10276
+ }
10277
+ }
10278
+ async updateIssue(issueNumber, input) {
10279
+ logger_default.debug(`[GitHubClient] Updating issue #${issueNumber} in ${this.owner}/${this.repo}`);
10280
+ try {
10281
+ const response = await this.octokit.rest.issues.update({
10282
+ owner: this.owner,
10283
+ repo: this.repo,
10284
+ issue_number: issueNumber,
10285
+ title: input.title,
10286
+ body: input.body,
10287
+ state: input.state,
10288
+ labels: input.labels ? normalizeLabels(input.labels) : void 0
10289
+ });
10290
+ const issue = this.mapRestIssue(response.data);
10291
+ this.invalidateIssue(issueNumber);
10292
+ return issue;
10293
+ } catch (error) {
10294
+ this.logError(`update issue #${issueNumber}`, error);
10295
+ throw new GitHubClientError(`Failed to update GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9840
10296
  }
10297
+ }
10298
+ async addLabels(issueNumber, labels) {
10299
+ if (labels.length === 0) return;
9841
10300
  logger_default.debug(`[GitHubClient] Adding labels to issue #${issueNumber} in ${this.owner}/${this.repo}`);
9842
10301
  try {
9843
10302
  await this.octokit.rest.issues.addLabels({
@@ -9846,12 +10305,10 @@ var init_githubClient = __esm({
9846
10305
  issue_number: issueNumber,
9847
10306
  labels
9848
10307
  });
10308
+ this.invalidateIssue(issueNumber);
9849
10309
  } catch (error) {
9850
10310
  this.logError(`add labels to issue #${issueNumber}`, error);
9851
- throw new GitHubClientError(
9852
- `Failed to add labels to GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9853
- getErrorStatus(error)
9854
- );
10311
+ throw new GitHubClientError(`Failed to add labels to GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9855
10312
  }
9856
10313
  }
9857
10314
  async removeLabel(issueNumber, label) {
@@ -9863,15 +10320,11 @@ var init_githubClient = __esm({
9863
10320
  issue_number: issueNumber,
9864
10321
  name: label
9865
10322
  });
10323
+ this.invalidateIssue(issueNumber);
9866
10324
  } catch (error) {
9867
- if (getErrorStatus(error) === 404) {
9868
- return;
9869
- }
10325
+ if (getErrorStatus(error) === 404) return;
9870
10326
  this.logError(`remove label from issue #${issueNumber}`, error);
9871
- throw new GitHubClientError(
9872
- `Failed to remove label from GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9873
- getErrorStatus(error)
9874
- );
10327
+ throw new GitHubClientError(`Failed to remove label from GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9875
10328
  }
9876
10329
  }
9877
10330
  async setLabels(issueNumber, labels) {
@@ -9883,12 +10336,10 @@ var init_githubClient = __esm({
9883
10336
  issue_number: issueNumber,
9884
10337
  labels
9885
10338
  });
10339
+ this.invalidateIssue(issueNumber);
9886
10340
  } catch (error) {
9887
10341
  this.logError(`set labels on issue #${issueNumber}`, error);
9888
- throw new GitHubClientError(
9889
- `Failed to set labels on GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9890
- getErrorStatus(error)
9891
- );
10342
+ throw new GitHubClientError(`Failed to set labels on GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9892
10343
  }
9893
10344
  }
9894
10345
  async addComment(issueNumber, body) {
@@ -9900,52 +10351,34 @@ var init_githubClient = __esm({
9900
10351
  issue_number: issueNumber,
9901
10352
  body
9902
10353
  });
9903
- return this.mapComment(response.data);
10354
+ this.invalidateIssue(issueNumber);
10355
+ return this.mapRestComment(response.data);
9904
10356
  } catch (error) {
9905
10357
  this.logError(`add comment to issue #${issueNumber}`, error);
9906
- throw new GitHubClientError(
9907
- `Failed to add comment to GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9908
- getErrorStatus(error)
9909
- );
9910
- }
9911
- }
9912
- async listComments(issueNumber) {
9913
- logger_default.debug(`[GitHubClient] Listing comments for issue #${issueNumber} in ${this.owner}/${this.repo}`);
9914
- try {
9915
- const comments = await this.octokit.paginate(this.octokit.rest.issues.listComments, {
9916
- owner: this.owner,
9917
- repo: this.repo,
9918
- issue_number: issueNumber,
9919
- per_page: DEFAULT_PER_PAGE
9920
- });
9921
- return comments.map((comment) => this.mapComment(comment));
9922
- } catch (error) {
9923
- this.logError(`list comments for issue #${issueNumber}`, error);
9924
- throw new GitHubClientError(
9925
- `Failed to list comments for GitHub issue #${issueNumber}: ${getErrorMessage(error)}`,
9926
- getErrorStatus(error)
9927
- );
10358
+ throw new GitHubClientError(`Failed to add comment to GitHub issue #${issueNumber}: ${getErrorMessage(error)}`, getErrorStatus(error));
9928
10359
  }
9929
10360
  }
9930
- mapIssue(issue) {
10361
+ // ── REST mappers (for write operations) ──────────────────────────────
10362
+ mapRestIssue(data) {
10363
+ const labels = Array.isArray(data.labels) ? data.labels : [];
9931
10364
  return {
9932
- number: issue.number,
9933
- title: issue.title,
9934
- body: issue.body ?? "",
9935
- state: issue.state,
9936
- labels: issue.labels.map((label) => typeof label === "string" ? label : label.name ?? "").map((label) => label.trim()).filter((label) => label.length > 0),
9937
- created_at: issue.created_at,
9938
- updated_at: issue.updated_at,
9939
- html_url: issue.html_url
10365
+ number: data.number,
10366
+ title: data.title,
10367
+ body: data.body ?? "",
10368
+ state: data.state,
10369
+ labels: labels.map((l) => typeof l === "string" ? l : l?.name ?? "").map((l) => l.trim()).filter((l) => l.length > 0),
10370
+ created_at: data.created_at,
10371
+ updated_at: data.updated_at,
10372
+ html_url: data.html_url
9940
10373
  };
9941
10374
  }
9942
- mapComment(comment) {
10375
+ mapRestComment(data) {
9943
10376
  return {
9944
- id: comment.id,
9945
- body: comment.body ?? "",
9946
- created_at: comment.created_at,
9947
- updated_at: comment.updated_at,
9948
- html_url: comment.html_url
10377
+ id: data.id,
10378
+ body: data.body ?? "",
10379
+ created_at: data.created_at,
10380
+ updated_at: data.updated_at,
10381
+ html_url: data.html_url
9949
10382
  };
9950
10383
  }
9951
10384
  logError(action, error) {
@@ -10466,430 +10899,11 @@ var init_ideaService = __esm({
10466
10899
  }
10467
10900
  });
10468
10901
 
10469
- // src/L3-services/postStore/postStore.ts
10470
- function getQueueDir() {
10471
- const { OUTPUT_DIR } = getConfig();
10472
- return join(OUTPUT_DIR, "publish-queue");
10473
- }
10474
- function getPublishedDir() {
10475
- const { OUTPUT_DIR } = getConfig();
10476
- return join(OUTPUT_DIR, "published");
10477
- }
10478
- async function readQueueItem(folderPath, id) {
10479
- const metadataPath = join(folderPath, "metadata.json");
10480
- const postPath = join(folderPath, "post.md");
10481
- try {
10482
- const metadataRaw = await readTextFile(metadataPath);
10483
- const metadata = JSON.parse(metadataRaw);
10484
- let postContent = "";
10485
- try {
10486
- postContent = await readTextFile(postPath);
10487
- } catch {
10488
- logger_default.debug(`No post.md found for ${String(id).replace(/[\r\n]/g, "")}`);
10489
- }
10490
- const videoPath = join(folderPath, "media.mp4");
10491
- const imagePath = join(folderPath, "media.png");
10492
- let mediaPath = null;
10493
- let hasMedia = false;
10494
- if (await fileExists(videoPath)) {
10495
- mediaPath = videoPath;
10496
- hasMedia = true;
10497
- } else if (await fileExists(imagePath)) {
10498
- mediaPath = imagePath;
10499
- hasMedia = true;
10500
- }
10501
- const thumbnailPath = join(folderPath, "thumbnail.png");
10502
- const hasThumbnail = await fileExists(thumbnailPath);
10503
- return {
10504
- id,
10505
- metadata,
10506
- postContent,
10507
- hasMedia,
10508
- mediaPath,
10509
- thumbnailPath: hasThumbnail ? thumbnailPath : metadata.thumbnailPath ?? null,
10510
- folderPath
10511
- };
10512
- } catch (err) {
10513
- logger_default.debug(`Failed to read queue item ${String(id).replace(/[\r\n]/g, "")}: ${String(err).replace(/[\r\n]/g, "")}`);
10514
- return null;
10515
- }
10516
- }
10517
- async function getPendingItems() {
10518
- const queueDir = getQueueDir();
10519
- await ensureDirectory(queueDir);
10520
- let entries;
10521
- try {
10522
- const dirents = await listDirectoryWithTypes(queueDir);
10523
- entries = dirents.filter((d) => d.isDirectory()).map((d) => d.name);
10524
- } catch {
10525
- return [];
10526
- }
10527
- const items = [];
10528
- for (const name of entries) {
10529
- const item = await readQueueItem(join(queueDir, name), name);
10530
- if (item) items.push(item);
10531
- }
10532
- items.sort((a, b) => {
10533
- if (a.hasMedia !== b.hasMedia) return a.hasMedia ? -1 : 1;
10534
- return a.metadata.createdAt.localeCompare(b.metadata.createdAt);
10535
- });
10536
- return items;
10537
- }
10538
- async function getGroupedPendingItems() {
10539
- const items = await getPendingItems();
10540
- const groups = /* @__PURE__ */ new Map();
10541
- for (const item of items) {
10542
- const platform = item.metadata.platform.toLowerCase();
10543
- const clipSlug = item.id.endsWith(`-${platform}`) ? item.id.slice(0, -(platform.length + 1)) : item.id;
10544
- const groupKey = `${item.metadata.sourceVideo}::${clipSlug}`;
10545
- if (!groups.has(groupKey)) {
10546
- groups.set(groupKey, []);
10547
- }
10548
- groups.get(groupKey).push(item);
10549
- }
10550
- const result = [];
10551
- for (const [groupKey, groupItems] of groups) {
10552
- if (groupItems.length === 0) continue;
10553
- const first = groupItems[0];
10554
- result.push({
10555
- groupKey,
10556
- sourceVideo: first.metadata.sourceVideo,
10557
- sourceClip: first.metadata.sourceClip,
10558
- clipType: first.metadata.clipType,
10559
- hasMedia: first.hasMedia,
10560
- mediaType: first.metadata.mediaType,
10561
- items: groupItems
10562
- });
10563
- }
10564
- result.sort((a, b) => {
10565
- if (a.hasMedia !== b.hasMedia) return a.hasMedia ? -1 : 1;
10566
- const aDate = Math.min(...a.items.map((i) => new Date(i.metadata.createdAt).getTime()));
10567
- const bDate = Math.min(...b.items.map((i) => new Date(i.metadata.createdAt).getTime()));
10568
- return aDate - bDate;
10569
- });
10570
- return result;
10571
- }
10572
- async function getItem(id) {
10573
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10574
- throw new Error(`Invalid ID format: ${id}`);
10575
- }
10576
- const folderPath = join(getQueueDir(), basename(id));
10577
- return readQueueItem(folderPath, id);
10578
- }
10579
- async function createItem(id, metadata, postContent, mediaSourcePath, thumbnailSourcePath) {
10580
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10581
- throw new Error(`Invalid ID format: ${id}`);
10582
- }
10583
- const folderPath = join(getQueueDir(), basename(id));
10584
- await ensureDirectory(folderPath);
10585
- await writeJsonFile(join(folderPath, "metadata.json"), metadata);
10586
- await writeTextFile(join(folderPath, "post.md"), postContent);
10587
- let hasMedia = false;
10588
- const ext = mediaSourcePath ? extname(mediaSourcePath) : ".mp4";
10589
- const mediaFilename = `media${ext}`;
10590
- const mediaPath = join(folderPath, mediaFilename);
10591
- if (mediaSourcePath) {
10592
- await copyFile(mediaSourcePath, mediaPath);
10593
- hasMedia = true;
10594
- }
10595
- let thumbnailPath = null;
10596
- if (thumbnailSourcePath) {
10597
- const thumbDest = join(folderPath, "thumbnail.png");
10598
- await copyFile(thumbnailSourcePath, thumbDest);
10599
- thumbnailPath = thumbDest;
10600
- }
10601
- logger_default.debug(`Created queue item: ${String(id).replace(/[\r\n]/g, "")}`);
10602
- return {
10603
- id,
10604
- metadata,
10605
- postContent,
10606
- hasMedia,
10607
- mediaPath: hasMedia ? mediaPath : null,
10608
- thumbnailPath,
10609
- folderPath
10610
- };
10611
- }
10612
- async function updateItem(id, updates) {
10613
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10614
- throw new Error(`Invalid ID format: ${id}`);
10615
- }
10616
- const existing = await getItem(id);
10617
- if (!existing) return null;
10618
- if (updates.metadata) {
10619
- const sanitized = {
10620
- id: String(existing.metadata.id),
10621
- platform: String(updates.metadata.platform ?? existing.metadata.platform),
10622
- accountId: String(updates.metadata.accountId ?? existing.metadata.accountId),
10623
- sourceVideo: String(existing.metadata.sourceVideo),
10624
- sourceClip: existing.metadata.sourceClip !== null ? String(existing.metadata.sourceClip) : null,
10625
- clipType: existing.metadata.clipType,
10626
- sourceMediaPath: existing.metadata.sourceMediaPath !== null ? String(existing.metadata.sourceMediaPath) : null,
10627
- hashtags: Array.isArray(updates.metadata.hashtags) ? updates.metadata.hashtags.map(String) : Array.isArray(existing.metadata.hashtags) ? existing.metadata.hashtags.map(String) : [],
10628
- links: Array.isArray(updates.metadata.links) ? updates.metadata.links : Array.isArray(existing.metadata.links) ? existing.metadata.links : [],
10629
- characterCount: updates.metadata.characterCount !== void 0 ? Number(updates.metadata.characterCount) || 0 : Number(existing.metadata.characterCount) || 0,
10630
- platformCharLimit: updates.metadata.platformCharLimit !== void 0 ? Number(updates.metadata.platformCharLimit) || 0 : Number(existing.metadata.platformCharLimit) || 0,
10631
- suggestedSlot: updates.metadata.suggestedSlot !== void 0 ? updates.metadata.suggestedSlot !== null ? String(updates.metadata.suggestedSlot) : null : existing.metadata.suggestedSlot !== null ? String(existing.metadata.suggestedSlot) : null,
10632
- scheduledFor: updates.metadata.scheduledFor !== void 0 ? updates.metadata.scheduledFor !== null ? String(updates.metadata.scheduledFor) : null : existing.metadata.scheduledFor !== null ? String(existing.metadata.scheduledFor) : null,
10633
- status: updates.metadata.status ?? existing.metadata.status,
10634
- latePostId: updates.metadata.latePostId !== void 0 ? updates.metadata.latePostId !== null ? String(updates.metadata.latePostId) : null : existing.metadata.latePostId !== null ? String(existing.metadata.latePostId) : null,
10635
- publishedUrl: updates.metadata.publishedUrl !== void 0 ? updates.metadata.publishedUrl !== null ? String(updates.metadata.publishedUrl) : null : existing.metadata.publishedUrl !== null ? String(existing.metadata.publishedUrl) : null,
10636
- createdAt: String(existing.metadata.createdAt),
10637
- reviewedAt: updates.metadata.reviewedAt !== void 0 ? updates.metadata.reviewedAt !== null ? String(updates.metadata.reviewedAt) : null : existing.metadata.reviewedAt !== null ? String(existing.metadata.reviewedAt) : null,
10638
- publishedAt: updates.metadata.publishedAt !== void 0 ? updates.metadata.publishedAt !== null ? String(updates.metadata.publishedAt) : null : existing.metadata.publishedAt !== null ? String(existing.metadata.publishedAt) : null,
10639
- textOnly: updates.metadata.textOnly ?? existing.metadata.textOnly,
10640
- mediaType: updates.metadata.mediaType ?? existing.metadata.mediaType,
10641
- ideaIds: Array.isArray(updates.metadata.ideaIds) ? updates.metadata.ideaIds.map(String) : Array.isArray(existing.metadata.ideaIds) ? existing.metadata.ideaIds.map(String) : void 0,
10642
- platformSpecificData: updates.metadata.platformSpecificData ?? existing.metadata.platformSpecificData
10643
- };
10644
- existing.metadata = sanitized;
10645
- const metadataWritePath = resolve(join(existing.folderPath, "metadata.json"));
10646
- if (!metadataWritePath.startsWith(resolve(getQueueDir()) + sep)) {
10647
- throw new Error("Write target outside queue directory");
10648
- }
10649
- await writeTextFile(
10650
- metadataWritePath,
10651
- JSON.stringify(existing.metadata, null, 2)
10652
- );
10653
- }
10654
- if (updates.postContent !== void 0) {
10655
- const sanitizedContent = String(updates.postContent);
10656
- existing.postContent = sanitizedContent;
10657
- const postWritePath = resolve(join(existing.folderPath, "post.md"));
10658
- if (!postWritePath.startsWith(resolve(getQueueDir()) + sep)) {
10659
- throw new Error("Write target outside queue directory");
10660
- }
10661
- await writeTextFile(postWritePath, sanitizedContent);
10662
- }
10663
- logger_default.debug(`Updated queue item: ${String(id).replace(/[\r\n]/g, "")}`);
10664
- return existing;
10665
- }
10666
- async function approveItem(id, publishData) {
10667
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10668
- throw new Error(`Invalid ID format: ${id}`);
10669
- }
10670
- const item = await getItem(id);
10671
- if (!item) return;
10672
- const now = (/* @__PURE__ */ new Date()).toISOString();
10673
- if (publishData.accountId) {
10674
- item.metadata.accountId = String(publishData.accountId);
10675
- }
10676
- item.metadata.status = "published";
10677
- item.metadata.latePostId = String(publishData.latePostId);
10678
- item.metadata.scheduledFor = String(publishData.scheduledFor);
10679
- item.metadata.publishedUrl = publishData.publishedUrl ? String(publishData.publishedUrl) : null;
10680
- item.metadata.publishedAt = now;
10681
- item.metadata.reviewedAt = now;
10682
- if (item.metadata.ideaIds && item.metadata.ideaIds.length > 0) {
10683
- try {
10684
- const { getIdea: getIdea2, listIdeas: listIdeas2, markPublished: markPublished3 } = await Promise.resolve().then(() => (init_ideaService(), ideaService_exports));
10685
- let cachedIdeas;
10686
- for (const rawIdeaId of item.metadata.ideaIds) {
10687
- const normalizedIdeaId = String(rawIdeaId).trim();
10688
- if (!normalizedIdeaId) {
10689
- continue;
10690
- }
10691
- const parsedIssueNumber = Number.parseInt(normalizedIdeaId, 10);
10692
- let issueNumber;
10693
- if (Number.isInteger(parsedIssueNumber)) {
10694
- issueNumber = parsedIssueNumber;
10695
- } else {
10696
- if (!cachedIdeas) {
10697
- const ideas = await listIdeas2();
10698
- cachedIdeas = new Map(ideas.flatMap((idea2) => [[idea2.id, idea2.issueNumber], [String(idea2.issueNumber), idea2.issueNumber]]));
10699
- }
10700
- issueNumber = cachedIdeas.get(normalizedIdeaId);
10701
- }
10702
- if (!issueNumber) {
10703
- logger_default.warn(`Skipping publish record for unknown idea identifier: ${normalizedIdeaId}`);
10704
- continue;
10705
- }
10706
- const idea = await getIdea2(issueNumber);
10707
- if (!idea) {
10708
- logger_default.warn(`Skipping publish record for missing idea #${issueNumber}`);
10709
- continue;
10710
- }
10711
- await markPublished3(issueNumber, {
10712
- clipType: item.metadata.clipType,
10713
- platform: fromLatePlatform(item.metadata.platform),
10714
- queueItemId: id,
10715
- publishedAt: now,
10716
- latePostId: item.metadata.latePostId ?? "",
10717
- lateUrl: item.metadata.publishedUrl || (item.metadata.latePostId ? `https://app.late.co/dashboard/post/${item.metadata.latePostId}` : "")
10718
- });
10719
- }
10720
- } catch (err) {
10721
- const msg = err instanceof Error ? err.message : String(err);
10722
- logger_default.warn(`Failed to update idea status for ${id}: ${msg}`);
10723
- }
10724
- }
10725
- const sanitizedMetadata = {
10726
- id: String(item.metadata.id),
10727
- platform: String(item.metadata.platform),
10728
- accountId: String(item.metadata.accountId),
10729
- sourceVideo: String(item.metadata.sourceVideo),
10730
- sourceClip: item.metadata.sourceClip !== null ? String(item.metadata.sourceClip) : null,
10731
- clipType: item.metadata.clipType,
10732
- sourceMediaPath: item.metadata.sourceMediaPath !== null ? String(item.metadata.sourceMediaPath) : null,
10733
- hashtags: Array.isArray(item.metadata.hashtags) ? item.metadata.hashtags.map(String) : [],
10734
- links: Array.isArray(item.metadata.links) ? item.metadata.links : [],
10735
- characterCount: Number(item.metadata.characterCount) || 0,
10736
- platformCharLimit: Number(item.metadata.platformCharLimit) || 0,
10737
- suggestedSlot: item.metadata.suggestedSlot !== null ? String(item.metadata.suggestedSlot) : null,
10738
- scheduledFor: item.metadata.scheduledFor !== null ? String(item.metadata.scheduledFor) : null,
10739
- status: item.metadata.status,
10740
- latePostId: item.metadata.latePostId !== null ? String(item.metadata.latePostId) : null,
10741
- publishedUrl: item.metadata.publishedUrl !== null ? String(item.metadata.publishedUrl) : null,
10742
- createdAt: String(item.metadata.createdAt),
10743
- reviewedAt: item.metadata.reviewedAt !== null ? String(item.metadata.reviewedAt) : null,
10744
- publishedAt: item.metadata.publishedAt !== null ? String(item.metadata.publishedAt) : null,
10745
- textOnly: item.metadata.textOnly,
10746
- mediaType: item.metadata.mediaType,
10747
- ideaIds: Array.isArray(item.metadata.ideaIds) ? item.metadata.ideaIds.map(String) : void 0,
10748
- platformSpecificData: item.metadata.platformSpecificData
10749
- };
10750
- const approveMetadataPath = resolve(join(item.folderPath, "metadata.json"));
10751
- if (!approveMetadataPath.startsWith(resolve(getQueueDir()) + sep)) {
10752
- throw new Error("Write target outside queue directory");
10753
- }
10754
- await writeTextFile(
10755
- approveMetadataPath,
10756
- JSON.stringify(sanitizedMetadata, null, 2)
10757
- );
10758
- const publishedDir = getPublishedDir();
10759
- await ensureDirectory(publishedDir);
10760
- const destPath = join(publishedDir, basename(id));
10761
- const resolvedDest = resolve(destPath);
10762
- const resolvedPublishedDir = resolve(publishedDir);
10763
- if (!resolvedDest.startsWith(resolvedPublishedDir + sep) && resolvedDest !== resolvedPublishedDir) {
10764
- throw new Error(`Invalid destination path for item ${id}`);
10765
- }
10766
- try {
10767
- await renameFile(item.folderPath, destPath);
10768
- } catch (renameErr) {
10769
- const errCode = renameErr?.code;
10770
- if (errCode === "EPERM") {
10771
- logger_default.warn(`rename failed (EPERM) for ${String(id).replace(/[\r\n]/g, "")}, falling back to copy+delete`);
10772
- await copyDirectory(item.folderPath, destPath);
10773
- await removeDirectory(item.folderPath, { recursive: true, force: true });
10774
- } else {
10775
- throw renameErr;
10776
- }
10777
- }
10778
- logger_default.debug(`Approved and moved queue item: ${String(id).replace(/[\r\n]/g, "")}`);
10779
- }
10780
- async function approveBulk(itemIds, publishDataMap) {
10781
- const results = [];
10782
- const errors = [];
10783
- for (const id of itemIds) {
10784
- try {
10785
- const publishData = publishDataMap.get(id);
10786
- if (!publishData) {
10787
- errors.push({ itemId: id, error: "No publish data provided" });
10788
- continue;
10789
- }
10790
- await approveItem(id, publishData);
10791
- results.push({
10792
- itemId: id,
10793
- platform: id.split("-").pop() || "unknown",
10794
- latePostId: publishData.latePostId,
10795
- scheduledFor: publishData.scheduledFor,
10796
- publishedUrl: publishData.publishedUrl
10797
- });
10798
- } catch (err) {
10799
- const msg = err instanceof Error ? err.message : String(err);
10800
- errors.push({ itemId: id, error: msg });
10801
- logger_default.error(`Bulk approve failed for ${String(id).replace(/[\r\n]/g, "")}: ${msg}`);
10802
- }
10803
- }
10804
- if (errors.length > 0) {
10805
- logger_default.warn(`Bulk approval completed with ${errors.length} errors`);
10806
- }
10807
- return results;
10808
- }
10809
- async function rejectItem(id) {
10810
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10811
- throw new Error(`Invalid ID format: ${id}`);
10812
- }
10813
- const folderPath = join(getQueueDir(), basename(id));
10814
- try {
10815
- await removeDirectory(folderPath, { recursive: true });
10816
- logger_default.debug(`Rejected and deleted queue item: ${String(id).replace(/[\r\n]/g, "")}`);
10817
- } catch (err) {
10818
- logger_default.debug(`Failed to reject queue item ${String(id).replace(/[\r\n]/g, "")}: ${String(err).replace(/[\r\n]/g, "")}`);
10819
- }
10820
- }
10821
- async function getPublishedItems() {
10822
- const publishedDir = getPublishedDir();
10823
- await ensureDirectory(publishedDir);
10824
- let entries;
10825
- try {
10826
- const dirents = await listDirectoryWithTypes(publishedDir);
10827
- entries = dirents.filter((d) => d.isDirectory()).map((d) => d.name);
10828
- } catch {
10829
- return [];
10830
- }
10831
- const items = [];
10832
- for (const name of entries) {
10833
- const item = await readQueueItem(join(publishedDir, name), name);
10834
- if (item) items.push(item);
10835
- }
10836
- items.sort((a, b) => a.metadata.createdAt.localeCompare(b.metadata.createdAt));
10837
- return items;
10838
- }
10839
- async function getScheduledItemsByIdeaIds(ideaIds) {
10840
- if (ideaIds.length === 0) return [];
10841
- const ideaIdSet = new Set(ideaIds);
10842
- const [pendingItems, publishedItems] = await Promise.all([
10843
- getPendingItems(),
10844
- getPublishedItems()
10845
- ]);
10846
- return [...pendingItems, ...publishedItems].filter(
10847
- (item) => item.metadata.ideaIds?.some((id) => ideaIdSet.has(id)) ?? false
10848
- );
10849
- }
10850
- async function updatePublishedItemSchedule(id, scheduledFor) {
10851
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10852
- throw new Error(`Invalid ID format: ${id}`);
10853
- }
10854
- const publishedDir = getPublishedDir();
10855
- const folderPath = join(publishedDir, basename(id));
10856
- const metadataPath = join(folderPath, "metadata.json");
10857
- if (!resolve(metadataPath).startsWith(resolve(publishedDir) + sep)) {
10858
- throw new Error("Write target outside published directory");
10859
- }
10860
- const raw = await readTextFile(metadataPath);
10861
- const metadata = JSON.parse(raw);
10862
- metadata.scheduledFor = String(scheduledFor);
10863
- await writeTextFile(metadataPath, JSON.stringify(metadata, null, 2));
10864
- }
10865
- async function itemExists(id) {
10866
- if (!id || !/^[a-zA-Z0-9_-]+$/.test(id)) {
10867
- throw new Error(`Invalid ID format: ${id}`);
10868
- }
10869
- if (await fileExists(join(getQueueDir(), basename(id)))) {
10870
- return "pending";
10871
- }
10872
- if (await fileExists(join(getPublishedDir(), basename(id)))) {
10873
- return "published";
10874
- }
10875
- return null;
10876
- }
10877
- var init_postStore = __esm({
10878
- "src/L3-services/postStore/postStore.ts"() {
10879
- "use strict";
10880
- init_types2();
10881
- init_environment();
10882
- init_configLogger();
10883
- init_fileSystem();
10884
- init_paths();
10885
- }
10886
- });
10887
-
10888
- // src/L1-infra/http/network.ts
10889
- import { Readable } from "stream";
10890
- var init_network = __esm({
10891
- "src/L1-infra/http/network.ts"() {
10892
- "use strict";
10902
+ // src/L1-infra/http/network.ts
10903
+ import { Readable } from "stream";
10904
+ var init_network = __esm({
10905
+ "src/L1-infra/http/network.ts"() {
10906
+ "use strict";
10893
10907
  }
10894
10908
  });
10895
10909
 
@@ -11086,6 +11100,333 @@ var init_lateApi = __esm({
11086
11100
  }
11087
11101
  });
11088
11102
 
11103
+ // src/L3-services/queueMapping/queueMapping.ts
11104
+ var queueMapping_exports = {};
11105
+ __export(queueMapping_exports, {
11106
+ clearQueueCache: () => clearQueueCache,
11107
+ getAllQueueMappings: () => getAllQueueMappings,
11108
+ getProfileId: () => getProfileId,
11109
+ getQueueId: () => getQueueId,
11110
+ refreshQueueMappings: () => refreshQueueMappings
11111
+ });
11112
+ function cachePath() {
11113
+ return join(process.cwd(), CACHE_FILE);
11114
+ }
11115
+ function isCacheValid(cache2) {
11116
+ const fetchedAtTime = new Date(cache2.fetchedAt).getTime();
11117
+ if (Number.isNaN(fetchedAtTime)) {
11118
+ logger_default.warn("Invalid fetchedAt in queue cache; treating as stale", {
11119
+ fetchedAt: cache2.fetchedAt
11120
+ });
11121
+ return false;
11122
+ }
11123
+ const age = Date.now() - fetchedAtTime;
11124
+ return age < CACHE_TTL_MS2;
11125
+ }
11126
+ async function readFileCache() {
11127
+ try {
11128
+ const raw = await readTextFile(cachePath());
11129
+ const cache2 = JSON.parse(raw);
11130
+ if (cache2.mappings && cache2.profileId && cache2.fetchedAt && isCacheValid(cache2)) {
11131
+ return cache2;
11132
+ }
11133
+ return null;
11134
+ } catch {
11135
+ return null;
11136
+ }
11137
+ }
11138
+ async function writeFileCache(cache2) {
11139
+ try {
11140
+ if (!cache2 || typeof cache2 !== "object" || !cache2.mappings || !cache2.profileId || !cache2.fetchedAt) {
11141
+ logger_default.warn("Invalid queue cache structure, skipping write");
11142
+ return;
11143
+ }
11144
+ const sanitized = {
11145
+ mappings: typeof cache2.mappings === "object" ? { ...cache2.mappings } : {},
11146
+ profileId: String(cache2.profileId),
11147
+ fetchedAt: String(cache2.fetchedAt)
11148
+ };
11149
+ for (const [name, id] of Object.entries(sanitized.mappings)) {
11150
+ if (typeof name !== "string" || typeof id !== "string" || /[\x00-\x1f]/.test(name) || /[\x00-\x1f]/.test(id)) {
11151
+ logger_default.warn("Invalid queue mapping data from API, skipping cache write");
11152
+ return;
11153
+ }
11154
+ }
11155
+ const resolvedCachePath = resolve(cachePath());
11156
+ if (!resolvedCachePath.startsWith(resolve(process.cwd()) + sep)) {
11157
+ throw new Error("Cache path outside working directory");
11158
+ }
11159
+ await writeTextFile(resolvedCachePath, JSON.stringify(sanitized, null, 2));
11160
+ } catch (err) {
11161
+ logger_default.warn("Failed to write queue cache file", { error: err });
11162
+ }
11163
+ }
11164
+ async function fetchAndCache() {
11165
+ const client = new LateApiClient();
11166
+ const profiles = await client.listProfiles();
11167
+ if (profiles.length === 0) {
11168
+ logger_default.warn("No Late API profiles found \u2014 queue mappings will be empty");
11169
+ const emptyCache = {
11170
+ mappings: {},
11171
+ profileId: "",
11172
+ fetchedAt: (/* @__PURE__ */ new Date()).toISOString()
11173
+ };
11174
+ memoryCache = emptyCache;
11175
+ return emptyCache;
11176
+ }
11177
+ const profileId = profiles[0]._id;
11178
+ const { queues } = await client.listQueues(profileId, true);
11179
+ if (queues.length === 0) {
11180
+ logger_default.warn(
11181
+ "No queues found in Late API \u2014 run `vidpipe sync-queues` to create platform queues"
11182
+ );
11183
+ }
11184
+ const mappings = {};
11185
+ for (const queue2 of queues) {
11186
+ mappings[queue2.name] = queue2._id;
11187
+ }
11188
+ const cache2 = {
11189
+ mappings,
11190
+ profileId,
11191
+ fetchedAt: (/* @__PURE__ */ new Date()).toISOString()
11192
+ };
11193
+ memoryCache = cache2;
11194
+ await writeFileCache(cache2);
11195
+ logger_default.info("Refreshed Late queue mappings", {
11196
+ queueCount: queues.length,
11197
+ queues: Object.keys(mappings)
11198
+ });
11199
+ return cache2;
11200
+ }
11201
+ async function ensureMappings() {
11202
+ if (memoryCache && isCacheValid(memoryCache)) {
11203
+ return memoryCache;
11204
+ }
11205
+ const fileCache = await readFileCache();
11206
+ if (fileCache) {
11207
+ memoryCache = fileCache;
11208
+ return fileCache;
11209
+ }
11210
+ try {
11211
+ return await fetchAndCache();
11212
+ } catch (err) {
11213
+ logger_default.error("Failed to fetch Late queue mappings", { error: err });
11214
+ return { mappings: {}, profileId: "", fetchedAt: (/* @__PURE__ */ new Date()).toISOString() };
11215
+ }
11216
+ }
11217
+ async function getQueueId(platform, clipType) {
11218
+ const cache2 = await ensureMappings();
11219
+ const normalizedPlatform = platform === "twitter" ? "x" : platform;
11220
+ const normalizedClipType = clipType === "medium-clip" ? "medium" : clipType;
11221
+ const queueName = `${normalizedPlatform}-${normalizedClipType}`;
11222
+ return cache2.mappings[queueName] ?? null;
11223
+ }
11224
+ async function getProfileId() {
11225
+ const cache2 = await ensureMappings();
11226
+ return cache2.profileId;
11227
+ }
11228
+ async function getAllQueueMappings() {
11229
+ const cache2 = await ensureMappings();
11230
+ return { ...cache2.mappings };
11231
+ }
11232
+ async function refreshQueueMappings() {
11233
+ memoryCache = null;
11234
+ const cache2 = await fetchAndCache();
11235
+ return { ...cache2.mappings };
11236
+ }
11237
+ async function clearQueueCache() {
11238
+ memoryCache = null;
11239
+ try {
11240
+ await removeFile(cachePath());
11241
+ } catch {
11242
+ }
11243
+ }
11244
+ var CACHE_FILE, CACHE_TTL_MS2, memoryCache;
11245
+ var init_queueMapping = __esm({
11246
+ "src/L3-services/queueMapping/queueMapping.ts"() {
11247
+ "use strict";
11248
+ init_lateApi();
11249
+ init_configLogger();
11250
+ init_fileSystem();
11251
+ init_paths();
11252
+ CACHE_FILE = ".vidpipe-queue-cache.json";
11253
+ CACHE_TTL_MS2 = 24 * 60 * 60 * 1e3;
11254
+ memoryCache = null;
11255
+ }
11256
+ });
11257
+
11258
+ // src/L3-services/lateApi/lateApiService.ts
11259
+ var lateApiService_exports = {};
11260
+ __export(lateApiService_exports, {
11261
+ createLateApiClient: () => createLateApiClient,
11262
+ priorityShiftQueue: () => priorityShiftQueue,
11263
+ reorderAllQueues: () => reorderAllQueues,
11264
+ reorderQueue: () => reorderQueue
11265
+ });
11266
+ function createLateApiClient(...args) {
11267
+ return new LateApiClient(...args);
11268
+ }
11269
+ async function reorderQueue(platform, clipType, options) {
11270
+ const client = createLateApiClient();
11271
+ const queueId = await getQueueId(platform, clipType);
11272
+ const profileId = await getProfileId();
11273
+ if (!queueId) {
11274
+ logger_default.warn(`No queue found for ${platform}/${clipType}`);
11275
+ return { moved: 0, errors: 0, order: [] };
11276
+ }
11277
+ const allPosts = await client.getScheduledPosts(platform);
11278
+ const queuePosts = allPosts.filter((p) => p.queueId === queueId && p.scheduledFor);
11279
+ if (queuePosts.length === 0) {
11280
+ logger_default.info(`No posts in ${platform}/${clipType} queue \u2014 nothing to reorder`);
11281
+ return { moved: 0, errors: 0, order: [] };
11282
+ }
11283
+ const sorted = [...queuePosts].sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
11284
+ logger_default.info(`Reordering ${platform}/${clipType}: ${sorted.length} posts (newest-first)`);
11285
+ if (options?.dryRun) {
11286
+ const slots = queuePosts.map((p) => p.scheduledFor).sort();
11287
+ const order2 = sorted.map((p, i) => ({
11288
+ id: p._id,
11289
+ createdAt: p.createdAt,
11290
+ newSlot: slots[i]
11291
+ }));
11292
+ for (const entry of order2) {
11293
+ logger_default.info(` ${entry.createdAt.slice(0, 10)} \u2192 ${entry.newSlot?.slice(0, 10)} [${entry.id.slice(-8)}]`);
11294
+ }
11295
+ return { moved: sorted.length, errors: 0, order: order2 };
11296
+ }
11297
+ const tempResult = await client.createQueue({
11298
+ profileId,
11299
+ name: `temp-${platform}-${clipType}`,
11300
+ timezone: "America/Chicago",
11301
+ slots: [{ dayOfWeek: 0, time: "04:00" }],
11302
+ active: true
11303
+ });
11304
+ const tempQueueId = tempResult.schedule?._id;
11305
+ if (!tempQueueId) {
11306
+ logger_default.error("Failed to create temp queue");
11307
+ return { moved: 0, errors: queuePosts.length, order: [] };
11308
+ }
11309
+ let errors = 0;
11310
+ logger_default.info(` Moving ${sorted.length} posts to temp queue...`);
11311
+ for (const p of queuePosts) {
11312
+ try {
11313
+ await client.updatePost(p._id, { queuedFromProfile: profileId, queueId: tempQueueId });
11314
+ } catch {
11315
+ errors++;
11316
+ }
11317
+ await new Promise((r) => setTimeout(r, 200));
11318
+ }
11319
+ await new Promise((r) => setTimeout(r, 500));
11320
+ logger_default.info(` Re-queuing newest-first...`);
11321
+ const order = [];
11322
+ for (const p of sorted) {
11323
+ try {
11324
+ const result = await client.updatePost(p._id, {
11325
+ queuedFromProfile: profileId,
11326
+ queueId,
11327
+ isDraft: false
11328
+ });
11329
+ order.push({ id: p._id, createdAt: p.createdAt, newSlot: result.scheduledFor });
11330
+ } catch {
11331
+ errors++;
11332
+ order.push({ id: p._id, createdAt: p.createdAt });
11333
+ }
11334
+ await new Promise((r) => setTimeout(r, 200));
11335
+ }
11336
+ try {
11337
+ await client.deleteQueue(profileId, tempQueueId);
11338
+ } catch {
11339
+ logger_default.warn("Failed to delete temp queue \u2014 clean up manually");
11340
+ }
11341
+ logger_default.info(`\u2705 Reordered ${platform}/${clipType}: ${sorted.length} posts, ${errors} errors`);
11342
+ return { moved: sorted.length, errors, order };
11343
+ }
11344
+ async function reorderAllQueues(options) {
11345
+ const { getAllQueueMappings: getAllQueueMappings2 } = await Promise.resolve().then(() => (init_queueMapping(), queueMapping_exports));
11346
+ const mappings = await getAllQueueMappings2();
11347
+ let total = 0;
11348
+ let errors = 0;
11349
+ for (const queueName of Object.keys(mappings)) {
11350
+ const parts = queueName.split("-");
11351
+ const platform = parts[0];
11352
+ const clipType = parts.slice(1).join("-") || "short";
11353
+ const fullClipType = clipType === "medium" ? "medium-clip" : clipType;
11354
+ logger_default.info(`
11355
+ \u2500\u2500 ${queueName} \u2500\u2500`);
11356
+ const result = await reorderQueue(platform, fullClipType, options);
11357
+ total += result.moved;
11358
+ errors += result.errors;
11359
+ }
11360
+ logger_default.info(`
11361
+ \u2705 All queues reordered: ${total} posts, ${errors} errors`);
11362
+ return { total, errors };
11363
+ }
11364
+ async function priorityShiftQueue(platform, clipType) {
11365
+ const client = createLateApiClient();
11366
+ const queueId = await getQueueId(platform, clipType);
11367
+ const profileId = await getProfileId();
11368
+ if (!queueId) {
11369
+ logger_default.warn(`No queue found for ${platform}/${clipType} \u2014 cannot priority shift`);
11370
+ return null;
11371
+ }
11372
+ const allPosts = await client.getScheduledPosts(platform);
11373
+ const preview = await client.previewQueue(profileId, queueId, 100);
11374
+ const utcTimePatterns = new Set(
11375
+ (preview.slots ?? []).map((s) => s.slice(11, 16))
11376
+ // Extract HH:MM from UTC ISO strings
11377
+ );
11378
+ const sorted = allPosts.filter((p) => {
11379
+ if (!p.scheduledFor) return false;
11380
+ const postTime = p.scheduledFor.slice(11, 16);
11381
+ return utcTimePatterns.has(postTime);
11382
+ }).sort((a, b) => a.scheduledFor.localeCompare(b.scheduledFor));
11383
+ if (sorted.length === 0) {
11384
+ logger_default.info(`No scheduled posts in ${platform}/${clipType} queue \u2014 no shift needed`);
11385
+ return null;
11386
+ }
11387
+ const freedSlot = sorted[0].scheduledFor;
11388
+ const lastPostDate = sorted[sorted.length - 1].scheduledFor;
11389
+ const nextSlotForLast = preview.slots?.find((s) => s > lastPostDate);
11390
+ if (!nextSlotForLast) {
11391
+ logger_default.warn(`No queue slot found after ${lastPostDate} \u2014 shifting all except last post`);
11392
+ if (sorted.length < 2) {
11393
+ logger_default.info(`Only 1 post in queue \u2014 cannot shift`);
11394
+ return null;
11395
+ }
11396
+ logger_default.info(`Priority shift: freeing ${freedSlot} for ${platform}/${clipType}`);
11397
+ logger_default.info(` Shifting ${sorted.length - 1} posts (last post stays at ${lastPostDate})`);
11398
+ for (let i = sorted.length - 2; i >= 0; i--) {
11399
+ const post = sorted[i];
11400
+ const newDate = sorted[i + 1].scheduledFor;
11401
+ if (post.scheduledFor === newDate) continue;
11402
+ logger_default.info(` Rescheduling [${post._id.slice(-8)}]: ${post.scheduledFor} \u2192 ${newDate}`);
11403
+ await client.schedulePost(post._id, newDate);
11404
+ }
11405
+ logger_default.info(`\u2705 Priority shift complete: freed ${freedSlot}, shifted ${sorted.length - 1} posts`);
11406
+ return { freedSlot, shiftedCount: sorted.length - 1 };
11407
+ }
11408
+ logger_default.info(`Priority shift: freeing ${freedSlot} for ${platform}/${clipType}`);
11409
+ logger_default.info(` Shifting ${sorted.length} posts, last post \u2192 ${nextSlotForLast}`);
11410
+ for (let i = sorted.length - 1; i >= 0; i--) {
11411
+ const post = sorted[i];
11412
+ const newDate = i < sorted.length - 1 ? sorted[i + 1].scheduledFor : nextSlotForLast;
11413
+ const oldDate = post.scheduledFor;
11414
+ if (oldDate === newDate) continue;
11415
+ logger_default.info(` Rescheduling [${post._id.slice(-8)}]: ${oldDate} \u2192 ${newDate}`);
11416
+ await client.schedulePost(post._id, newDate);
11417
+ }
11418
+ logger_default.info(`\u2705 Priority shift complete: freed ${freedSlot}, shifted ${sorted.length} posts`);
11419
+ return { freedSlot, shiftedCount: sorted.length };
11420
+ }
11421
+ var init_lateApiService = __esm({
11422
+ "src/L3-services/lateApi/lateApiService.ts"() {
11423
+ "use strict";
11424
+ init_lateApi();
11425
+ init_configLogger();
11426
+ init_queueMapping();
11427
+ }
11428
+ });
11429
+
11089
11430
  // src/L2-clients/scheduleStore/scheduleStore.ts
11090
11431
  async function readScheduleFile(filePath) {
11091
11432
  return readTextFile(filePath);
@@ -12005,138 +12346,561 @@ var init_scheduler = __esm({
12005
12346
  }
12006
12347
  });
12007
12348
 
12008
- // src/L3-services/queueMapping/queueMapping.ts
12009
- function cachePath() {
12010
- return join(process.cwd(), CACHE_FILE);
12349
+ // src/L2-clients/azure/blobClient.ts
12350
+ import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";
12351
+ import { Readable as Readable2 } from "stream";
12352
+ import { createReadStream as createReadStream2 } from "fs";
12353
+ import { stat } from "fs/promises";
12354
+ function getClient() {
12355
+ const config2 = getConfig();
12356
+ const accountName = config2.AZURE_STORAGE_ACCOUNT_NAME;
12357
+ const accountKey = config2.AZURE_STORAGE_ACCOUNT_KEY;
12358
+ const containerName = config2.AZURE_CONTAINER_NAME;
12359
+ if (!accountName || !accountKey) {
12360
+ throw new Error("Azure Storage credentials not configured. Set AZURE_STORAGE_ACCOUNT_NAME and AZURE_STORAGE_ACCOUNT_KEY.");
12361
+ }
12362
+ const credential = new StorageSharedKeyCredential(accountName, accountKey);
12363
+ const blobService = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, credential);
12364
+ const container = blobService.getContainerClient(containerName);
12365
+ return { blobService, container };
12366
+ }
12367
+ async function uploadBuffer(blobPath, data, contentType) {
12368
+ const { container } = getClient();
12369
+ const blockBlob = container.getBlockBlobClient(blobPath);
12370
+ await blockBlob.upload(data, data.length, {
12371
+ blobHTTPHeaders: contentType ? { blobContentType: contentType } : void 0
12372
+ });
12373
+ logger_default.debug(`Uploaded blob: ${blobPath} (${data.length} bytes)`);
12374
+ return blockBlob.url;
12375
+ }
12376
+ async function uploadFile(blobPath, localFilePath, contentType) {
12377
+ const { container } = getClient();
12378
+ const blockBlob = container.getBlockBlobClient(blobPath);
12379
+ const fileStat2 = await stat(localFilePath);
12380
+ await blockBlob.uploadStream(
12381
+ createReadStream2(localFilePath),
12382
+ 4 * 1024 * 1024,
12383
+ // 4MB buffer size
12384
+ 5,
12385
+ // max concurrency
12386
+ {
12387
+ blobHTTPHeaders: contentType ? { blobContentType: contentType } : void 0
12388
+ }
12389
+ );
12390
+ logger_default.debug(`Uploaded file to blob: ${blobPath} (${fileStat2.size} bytes)`);
12391
+ return blockBlob.url;
12011
12392
  }
12012
- function isCacheValid(cache2) {
12013
- const fetchedAtTime = new Date(cache2.fetchedAt).getTime();
12014
- if (Number.isNaN(fetchedAtTime)) {
12015
- logger_default.warn("Invalid fetchedAt in queue cache; treating as stale", {
12016
- fetchedAt: cache2.fetchedAt
12017
- });
12018
- return false;
12393
+ async function downloadToFile(blobPath, localPath) {
12394
+ const { container } = getClient();
12395
+ const blockBlob = container.getBlockBlobClient(blobPath);
12396
+ await blockBlob.downloadToFile(localPath);
12397
+ logger_default.debug(`Downloaded blob to file: ${blobPath} \u2192 ${localPath}`);
12398
+ }
12399
+ async function downloadStream(blobPath) {
12400
+ const { container } = getClient();
12401
+ const blockBlob = container.getBlockBlobClient(blobPath);
12402
+ const response = await blockBlob.download(0);
12403
+ if (!response.readableStreamBody) {
12404
+ throw new Error(`Failed to get readable stream for blob: ${blobPath}`);
12019
12405
  }
12020
- const age = Date.now() - fetchedAtTime;
12021
- return age < CACHE_TTL_MS;
12406
+ return Readable2.from(response.readableStreamBody);
12022
12407
  }
12023
- async function readFileCache() {
12408
+ async function listBlobs(prefix) {
12409
+ const { container } = getClient();
12410
+ const blobs = [];
12411
+ for await (const blob of container.listBlobsFlat({ prefix })) {
12412
+ blobs.push(blob.name);
12413
+ }
12414
+ return blobs;
12415
+ }
12416
+ function isAzureConfigured() {
12417
+ const config2 = getConfig();
12418
+ return Boolean(config2.AZURE_STORAGE_ACCOUNT_NAME && config2.AZURE_STORAGE_ACCOUNT_KEY);
12419
+ }
12420
+ var init_blobClient = __esm({
12421
+ "src/L2-clients/azure/blobClient.ts"() {
12422
+ "use strict";
12423
+ init_configLogger();
12424
+ init_environment();
12425
+ }
12426
+ });
12427
+
12428
+ // src/L2-clients/azure/tableClient.ts
12429
+ import { TableClient as AzureTableClient, AzureNamedKeyCredential } from "@azure/data-tables";
12430
+ function getTableClient(tableName) {
12431
+ const config2 = getConfig();
12432
+ const accountName = config2.AZURE_STORAGE_ACCOUNT_NAME;
12433
+ const accountKey = config2.AZURE_STORAGE_ACCOUNT_KEY;
12434
+ if (!accountName || !accountKey) {
12435
+ throw new Error("Azure Storage credentials not configured. Set AZURE_STORAGE_ACCOUNT_NAME and AZURE_STORAGE_ACCOUNT_KEY.");
12436
+ }
12437
+ const credential = new AzureNamedKeyCredential(accountName, accountKey);
12438
+ const url = `https://${accountName}.table.core.windows.net`;
12439
+ return new AzureTableClient(url, tableName, credential);
12440
+ }
12441
+ async function upsertEntity(tableName, partitionKey, rowKey, properties) {
12442
+ const client = getTableClient(tableName);
12443
+ const entity = { partitionKey, rowKey, ...properties };
12444
+ await client.upsertEntity(entity, "Merge");
12445
+ logger_default.debug(`Upserted entity: ${tableName}/${partitionKey}/${rowKey}`);
12446
+ }
12447
+ async function getEntity(tableName, partitionKey, rowKey) {
12448
+ const client = getTableClient(tableName);
12024
12449
  try {
12025
- const raw = await readTextFile(cachePath());
12026
- const cache2 = JSON.parse(raw);
12027
- if (cache2.mappings && cache2.profileId && cache2.fetchedAt && isCacheValid(cache2)) {
12028
- return cache2;
12450
+ return await client.getEntity(partitionKey, rowKey);
12451
+ } catch (error) {
12452
+ if (error instanceof Error && "statusCode" in error && error.statusCode === 404) {
12453
+ return null;
12029
12454
  }
12030
- return null;
12455
+ throw error;
12456
+ }
12457
+ }
12458
+ async function queryEntities(tableName, filter) {
12459
+ const client = getTableClient(tableName);
12460
+ const entities = [];
12461
+ for await (const entity of client.listEntities({ queryOptions: { filter } })) {
12462
+ entities.push(entity);
12463
+ }
12464
+ return entities;
12465
+ }
12466
+ async function updateEntity(tableName, partitionKey, rowKey, properties) {
12467
+ const client = getTableClient(tableName);
12468
+ const entity = { partitionKey, rowKey, ...properties };
12469
+ await client.updateEntity(entity, "Merge");
12470
+ logger_default.debug(`Updated entity: ${tableName}/${partitionKey}/${rowKey}`);
12471
+ }
12472
+ var init_tableClient = __esm({
12473
+ "src/L2-clients/azure/tableClient.ts"() {
12474
+ "use strict";
12475
+ init_configLogger();
12476
+ init_environment();
12477
+ }
12478
+ });
12479
+
12480
+ // src/L3-services/azureStorage/azureStorageService.ts
12481
+ var azureStorageService_exports = {};
12482
+ __export(azureStorageService_exports, {
12483
+ downloadBlobToFile: () => downloadBlobToFile,
12484
+ downloadContentMedia: () => downloadContentMedia,
12485
+ findContentItemByRowKey: () => findContentItemByRowKey,
12486
+ getContentItem: () => getContentItem,
12487
+ getContentItems: () => getContentItems,
12488
+ getRunId: () => getRunId,
12489
+ getVideoRecord: () => getVideoRecord,
12490
+ isAzureConfigured: () => isAzureConfigured2,
12491
+ listVideos: () => listVideos,
12492
+ migrateLocalContent: () => migrateLocalContent,
12493
+ updateContentStatus: () => updateContentStatus,
12494
+ uploadContentItem: () => uploadContentItem,
12495
+ uploadPublishQueue: () => uploadPublishQueue,
12496
+ uploadRawVideo: () => uploadRawVideo,
12497
+ uploadVideoFile: () => uploadVideoFile
12498
+ });
12499
+ import { readdir, readFile } from "fs/promises";
12500
+ import { join as join8 } from "path";
12501
+ import { randomUUID as randomUUID2 } from "crypto";
12502
+ async function uploadVideoFile(localPath, blobPath) {
12503
+ logger_default.info(`Uploading video to Azure blob: ${blobPath}`);
12504
+ return uploadFile(blobPath, localPath, "video/mp4");
12505
+ }
12506
+ async function downloadBlobToFile(blobPath, localPath) {
12507
+ return downloadToFile(blobPath, localPath);
12508
+ }
12509
+ function isAzureConfigured2() {
12510
+ return isAzureConfigured();
12511
+ }
12512
+ function getRunId() {
12513
+ return process.env.GITHUB_RUN_ID || randomUUID2();
12514
+ }
12515
+ async function uploadRawVideo(localPath, runId, metadata) {
12516
+ const blobPath = `raw/${runId}-${metadata.originalFilename}`;
12517
+ logger_default.info(`Uploading raw video to Azure: ${blobPath}`);
12518
+ await uploadFile(blobPath, localPath, "video/mp4");
12519
+ await upsertEntity(VIDEOS_TABLE, "video", runId, {
12520
+ originalFilename: metadata.originalFilename,
12521
+ slug: metadata.slug,
12522
+ blobPath,
12523
+ sourceUrl: metadata.sourceUrl || "",
12524
+ duration: metadata.duration || 0,
12525
+ size: metadata.size,
12526
+ status: "completed",
12527
+ contentCount: 0,
12528
+ processedAt: (/* @__PURE__ */ new Date()).toISOString(),
12529
+ createdAt: (/* @__PURE__ */ new Date()).toISOString()
12530
+ });
12531
+ logger_default.info(`Created video record: ${runId}`);
12532
+ return blobPath;
12533
+ }
12534
+ async function uploadContentItem(localItemDir, itemId, videoSlug, runId, metadata) {
12535
+ const blobBasePath = `content/${itemId}/`;
12536
+ const files = await readdir(localItemDir);
12537
+ for (const file of files) {
12538
+ const localFilePath = join8(localItemDir, file);
12539
+ const blobPath = `${blobBasePath}${file}`;
12540
+ const contentType = getContentType(file);
12541
+ await uploadFile(blobPath, localFilePath, contentType);
12542
+ }
12543
+ let itemMetadata = {};
12544
+ const metadataPath = join8(localItemDir, "metadata.json");
12545
+ try {
12546
+ const metadataContent = await readFile(metadataPath, "utf8");
12547
+ itemMetadata = JSON.parse(metadataContent);
12548
+ } catch {
12549
+ }
12550
+ let postContent = "";
12551
+ const postPath = join8(localItemDir, "post.md");
12552
+ try {
12553
+ postContent = await readFile(postPath, "utf8");
12031
12554
  } catch {
12032
- return null;
12033
12555
  }
12556
+ const mediaFilename = files.find((f) => f.startsWith("media.")) || "";
12557
+ const thumbnailFilename = files.find((f) => f.startsWith("thumbnail.")) || "";
12558
+ const record = {
12559
+ platform: String(itemMetadata.platform || metadata?.platform || ""),
12560
+ clipType: String(itemMetadata.clipType || metadata?.clipType || ""),
12561
+ status: metadata?.status || "pending_review",
12562
+ blobBasePath,
12563
+ mediaType: String(itemMetadata.mediaType || metadata?.mediaType || "video"),
12564
+ mediaFilename,
12565
+ postContent,
12566
+ hashtags: Array.isArray(itemMetadata.hashtags) ? itemMetadata.hashtags.join(",") : metadata?.hashtags || "",
12567
+ characterCount: Number(itemMetadata.characterCount || metadata?.characterCount || postContent.length),
12568
+ scheduledFor: String(itemMetadata.scheduledFor || metadata?.scheduledFor || ""),
12569
+ latePostId: String(itemMetadata.latePostId || metadata?.latePostId || ""),
12570
+ publishedUrl: String(itemMetadata.publishedUrl || metadata?.publishedUrl || ""),
12571
+ sourceVideoRunId: runId,
12572
+ thumbnailFilename,
12573
+ ideaIds: Array.isArray(itemMetadata.ideaIds) ? itemMetadata.ideaIds.join(",") : metadata?.ideaIds || "",
12574
+ createdAt: String(itemMetadata.createdAt || (/* @__PURE__ */ new Date()).toISOString()),
12575
+ reviewedAt: String(itemMetadata.reviewedAt || metadata?.reviewedAt || ""),
12576
+ publishedAt: String(itemMetadata.publishedAt || metadata?.publishedAt || "")
12577
+ };
12578
+ await upsertEntity(CONTENT_TABLE, videoSlug, itemId, record);
12579
+ logger_default.info(`Uploaded content item: ${itemId} (${record.platform}/${record.clipType}) \u2014 blob + table record created`);
12580
+ return blobBasePath;
12034
12581
  }
12035
- async function writeFileCache(cache2) {
12582
+ async function uploadPublishQueue(publishQueueDir, videoSlug, runId) {
12583
+ const errors = [];
12584
+ let uploaded = 0;
12585
+ let items;
12036
12586
  try {
12037
- if (!cache2 || typeof cache2 !== "object" || !cache2.mappings || !cache2.profileId || !cache2.fetchedAt) {
12038
- logger_default.warn("Invalid queue cache structure, skipping write");
12039
- return;
12040
- }
12041
- const sanitized = {
12042
- mappings: typeof cache2.mappings === "object" ? { ...cache2.mappings } : {},
12043
- profileId: String(cache2.profileId),
12044
- fetchedAt: String(cache2.fetchedAt)
12045
- };
12046
- for (const [name, id] of Object.entries(sanitized.mappings)) {
12047
- if (typeof name !== "string" || typeof id !== "string" || /[\x00-\x1f]/.test(name) || /[\x00-\x1f]/.test(id)) {
12048
- logger_default.warn("Invalid queue mapping data from API, skipping cache write");
12049
- return;
12587
+ items = await readdir(publishQueueDir);
12588
+ } catch {
12589
+ logger_default.warn(`Publish queue directory not found: ${publishQueueDir}`);
12590
+ return { uploaded: 0, errors: ["Publish queue directory not found"] };
12591
+ }
12592
+ for (const itemId of items) {
12593
+ const itemDir = join8(publishQueueDir, itemId);
12594
+ try {
12595
+ const metaPath = join8(itemDir, "metadata.json");
12596
+ const metaContent = await readFile(metaPath, "utf8");
12597
+ const meta = JSON.parse(metaContent);
12598
+ const sourceVideo = String(meta.sourceVideo || "");
12599
+ if (sourceVideo && !sourceVideo.endsWith(videoSlug)) {
12600
+ continue;
12050
12601
  }
12602
+ } catch {
12603
+ continue;
12051
12604
  }
12052
- const resolvedCachePath = resolve(cachePath());
12053
- if (!resolvedCachePath.startsWith(resolve(process.cwd()) + sep)) {
12054
- throw new Error("Cache path outside working directory");
12605
+ try {
12606
+ await uploadContentItem(itemDir, itemId, videoSlug, runId);
12607
+ uploaded++;
12608
+ } catch (error) {
12609
+ const msg = error instanceof Error ? error.message : String(error);
12610
+ errors.push(`${itemId}: ${msg}`);
12611
+ logger_default.error(`Failed to upload content item ${itemId}: ${msg}`);
12055
12612
  }
12056
- await writeTextFile(resolvedCachePath, JSON.stringify(sanitized, null, 2));
12057
- } catch (err) {
12058
- logger_default.warn("Failed to write queue cache file", { error: err });
12059
12613
  }
12614
+ await updateEntity(VIDEOS_TABLE, "video", runId, {
12615
+ contentCount: uploaded
12616
+ });
12617
+ logger_default.info(`Uploaded ${uploaded} content items to Azure (${errors.length} errors)`);
12618
+ return { uploaded, errors };
12060
12619
  }
12061
- async function fetchAndCache() {
12062
- const client = new LateApiClient();
12063
- const profiles = await client.listProfiles();
12064
- if (profiles.length === 0) {
12065
- logger_default.warn("No Late API profiles found \u2014 queue mappings will be empty");
12066
- const emptyCache = {
12067
- mappings: {},
12068
- profileId: "",
12069
- fetchedAt: (/* @__PURE__ */ new Date()).toISOString()
12070
- };
12071
- memoryCache = emptyCache;
12072
- return emptyCache;
12620
+ async function migrateLocalContent(outputDir) {
12621
+ const errors = [];
12622
+ let uploaded = 0;
12623
+ const runId = `migration-${Date.now()}`;
12624
+ const publishQueueDir = join8(outputDir, "publish-queue");
12625
+ try {
12626
+ const items = await readdir(publishQueueDir);
12627
+ for (const itemId of items) {
12628
+ try {
12629
+ const videoSlug = extractVideoSlug(itemId);
12630
+ await uploadContentItem(
12631
+ join8(publishQueueDir, itemId),
12632
+ itemId,
12633
+ videoSlug,
12634
+ runId,
12635
+ { status: "pending_review" }
12636
+ );
12637
+ uploaded++;
12638
+ } catch (error) {
12639
+ const msg = error instanceof Error ? error.message : String(error);
12640
+ errors.push(`publish-queue/${itemId}: ${msg}`);
12641
+ }
12642
+ }
12643
+ } catch {
12644
+ logger_default.info("No publish-queue directory found for migration");
12073
12645
  }
12074
- const profileId = profiles[0]._id;
12075
- const { queues } = await client.listQueues(profileId, true);
12076
- if (queues.length === 0) {
12077
- logger_default.warn(
12078
- "No queues found in Late API \u2014 run `vidpipe sync-queues` to create platform queues"
12079
- );
12646
+ const publishedDir = join8(outputDir, "published");
12647
+ try {
12648
+ const items = await readdir(publishedDir);
12649
+ for (const itemId of items) {
12650
+ try {
12651
+ const videoSlug = extractVideoSlug(itemId);
12652
+ await uploadContentItem(
12653
+ join8(publishedDir, itemId),
12654
+ itemId,
12655
+ videoSlug,
12656
+ runId,
12657
+ { status: "published" }
12658
+ );
12659
+ uploaded++;
12660
+ } catch (error) {
12661
+ const msg = error instanceof Error ? error.message : String(error);
12662
+ errors.push(`published/${itemId}: ${msg}`);
12663
+ }
12664
+ }
12665
+ } catch {
12666
+ logger_default.info("No published directory found for migration");
12080
12667
  }
12081
- const mappings = {};
12082
- for (const queue2 of queues) {
12083
- mappings[queue2.name] = queue2._id;
12668
+ logger_default.info(`Migration complete: ${uploaded} items uploaded, ${errors.length} errors`);
12669
+ return { uploaded, errors };
12670
+ }
12671
+ async function getContentItems(filters) {
12672
+ const parts = [];
12673
+ if (filters?.videoSlug) {
12674
+ parts.push(`PartitionKey eq '${filters.videoSlug}'`);
12084
12675
  }
12085
- const cache2 = {
12086
- mappings,
12087
- profileId,
12088
- fetchedAt: (/* @__PURE__ */ new Date()).toISOString()
12089
- };
12090
- memoryCache = cache2;
12091
- await writeFileCache(cache2);
12092
- logger_default.info("Refreshed Late queue mappings", {
12093
- queueCount: queues.length,
12094
- queues: Object.keys(mappings)
12676
+ if (filters?.status) {
12677
+ parts.push(`status eq '${filters.status}'`);
12678
+ }
12679
+ const filter = parts.length > 0 ? parts.join(" and ") : "";
12680
+ const entities = await queryEntities(
12681
+ CONTENT_TABLE,
12682
+ filter
12683
+ );
12684
+ return entities;
12685
+ }
12686
+ async function getContentItem(videoSlug, itemId) {
12687
+ const entity = await getEntity(
12688
+ CONTENT_TABLE,
12689
+ videoSlug,
12690
+ itemId
12691
+ );
12692
+ return entity;
12693
+ }
12694
+ async function findContentItemByRowKey(itemId) {
12695
+ const results = await queryEntities(
12696
+ CONTENT_TABLE,
12697
+ `RowKey eq '${itemId}'`
12698
+ );
12699
+ return results[0] ?? null;
12700
+ }
12701
+ async function updateContentStatus(videoSlug, itemId, status, extraFields) {
12702
+ await updateEntity(CONTENT_TABLE, videoSlug, itemId, {
12703
+ status,
12704
+ ...extraFields
12095
12705
  });
12096
- return cache2;
12706
+ logger_default.info(`Updated content status: ${itemId} \u2192 ${status}`);
12097
12707
  }
12098
- async function ensureMappings() {
12099
- if (memoryCache && isCacheValid(memoryCache)) {
12100
- return memoryCache;
12708
+ async function getVideoRecord(runId) {
12709
+ return getEntity(
12710
+ VIDEOS_TABLE,
12711
+ "video",
12712
+ runId
12713
+ );
12714
+ }
12715
+ async function listVideos(status) {
12716
+ const filter = status ? `PartitionKey eq 'video' and status eq '${status}'` : "PartitionKey eq 'video'";
12717
+ return queryEntities(
12718
+ VIDEOS_TABLE,
12719
+ filter
12720
+ );
12721
+ }
12722
+ async function downloadContentMedia(blobPath) {
12723
+ return downloadStream(blobPath);
12724
+ }
12725
+ function getContentType(filename) {
12726
+ const ext = filename.split(".").pop()?.toLowerCase();
12727
+ switch (ext) {
12728
+ case "mp4":
12729
+ return "video/mp4";
12730
+ case "png":
12731
+ return "image/png";
12732
+ case "jpg":
12733
+ case "jpeg":
12734
+ return "image/jpeg";
12735
+ case "json":
12736
+ return "application/json";
12737
+ case "md":
12738
+ return "text/markdown";
12739
+ case "srt":
12740
+ case "vtt":
12741
+ case "ass":
12742
+ return "text/plain";
12743
+ default:
12744
+ return "application/octet-stream";
12101
12745
  }
12102
- const fileCache = await readFileCache();
12103
- if (fileCache) {
12104
- memoryCache = fileCache;
12105
- return fileCache;
12746
+ }
12747
+ function extractVideoSlug(itemId) {
12748
+ const platforms = ["youtube-shorts", "instagram-reels", "instagram-feed", "twitter", "youtube", "tiktok", "instagram", "linkedin", "x"];
12749
+ for (const platform of platforms) {
12750
+ if (itemId.endsWith(`-${platform}`)) {
12751
+ return itemId.slice(0, -(platform.length + 1));
12752
+ }
12753
+ }
12754
+ return itemId;
12755
+ }
12756
+ var VIDEOS_TABLE, CONTENT_TABLE;
12757
+ var init_azureStorageService = __esm({
12758
+ "src/L3-services/azureStorage/azureStorageService.ts"() {
12759
+ "use strict";
12760
+ init_configLogger();
12761
+ init_blobClient();
12762
+ init_tableClient();
12763
+ VIDEOS_TABLE = "Videos";
12764
+ CONTENT_TABLE = "Content";
12765
+ }
12766
+ });
12767
+
12768
+ // src/L3-services/azureStorage/azureConfigService.ts
12769
+ var azureConfigService_exports = {};
12770
+ __export(azureConfigService_exports, {
12771
+ listConfigFiles: () => listConfigFiles,
12772
+ pullConfig: () => pullConfig,
12773
+ pushConfig: () => pushConfig
12774
+ });
12775
+ import { readdir as readdir2, stat as stat2, mkdir } from "fs/promises";
12776
+ import { join as join9 } from "path";
12777
+ async function pushConfig(vidpipeDir) {
12778
+ let uploaded = 0;
12779
+ for (const file of CONFIG_FILES) {
12780
+ const fullPath = join9(vidpipeDir, file);
12781
+ try {
12782
+ await stat2(fullPath);
12783
+ const blobPath = `${CONFIG_PREFIX}${file}`;
12784
+ logger_default.info(`Uploading ${file}...`);
12785
+ await uploadFile(blobPath, fullPath);
12786
+ uploaded++;
12787
+ logger_default.info(` \u2705 ${blobPath}`);
12788
+ } catch {
12789
+ logger_default.debug(`Config file not found, skipping: ${file}`);
12790
+ }
12791
+ }
12792
+ for (const dir of CONFIG_DIRS) {
12793
+ const fullPath = join9(vidpipeDir, dir);
12794
+ try {
12795
+ await stat2(fullPath);
12796
+ logger_default.info(`Uploading ${dir}/...`);
12797
+ const count = await uploadDirectory(fullPath, `${CONFIG_PREFIX}${dir}`);
12798
+ uploaded += count;
12799
+ logger_default.info(` \u2705 ${dir}/ (${count} files)`);
12800
+ } catch {
12801
+ logger_default.debug(`Config directory not found, skipping: ${dir}/`);
12802
+ }
12803
+ }
12804
+ logger_default.info(`Pushed ${uploaded} config files to Azure`);
12805
+ return { uploaded };
12806
+ }
12807
+ async function uploadDirectory(localDir, blobPrefix) {
12808
+ let count = 0;
12809
+ const entries = await readdir2(localDir);
12810
+ for (const entry of entries) {
12811
+ const fullPath = join9(localDir, entry);
12812
+ const entryStat = await stat2(fullPath);
12813
+ if (entryStat.isDirectory()) {
12814
+ count += await uploadDirectory(fullPath, `${blobPrefix}/${entry}`);
12815
+ } else if (entryStat.isFile()) {
12816
+ const blobPath = `${blobPrefix}/${entry}`;
12817
+ await uploadFile(blobPath, fullPath);
12818
+ count++;
12819
+ }
12820
+ }
12821
+ return count;
12822
+ }
12823
+ async function pullConfig(targetDir) {
12824
+ let downloaded = 0;
12825
+ const blobs = await listBlobs(CONFIG_PREFIX);
12826
+ for (const blobPath of blobs) {
12827
+ const relativePath = blobPath.slice(CONFIG_PREFIX.length);
12828
+ const localPath = join9(targetDir, relativePath);
12829
+ const parentDir = join9(localPath, "..");
12830
+ await mkdir(parentDir, { recursive: true });
12831
+ await downloadToFile(blobPath, localPath);
12832
+ downloaded++;
12833
+ logger_default.debug(`Downloaded config: ${blobPath} \u2192 ${localPath}`);
12834
+ }
12835
+ logger_default.info(`Pulled ${downloaded} config files from Azure`);
12836
+ return { downloaded };
12837
+ }
12838
+ async function listConfigFiles() {
12839
+ const blobs = await listBlobs(CONFIG_PREFIX);
12840
+ return blobs.map((b) => b.slice(CONFIG_PREFIX.length));
12841
+ }
12842
+ var CONFIG_PREFIX, CONFIG_FILES, CONFIG_DIRS;
12843
+ var init_azureConfigService = __esm({
12844
+ "src/L3-services/azureStorage/azureConfigService.ts"() {
12845
+ "use strict";
12846
+ init_configLogger();
12847
+ init_blobClient();
12848
+ CONFIG_PREFIX = "config/";
12849
+ CONFIG_FILES = ["schedule.json", "brand.json"];
12850
+ CONFIG_DIRS = ["assets"];
12106
12851
  }
12852
+ });
12853
+
12854
+ // src/L4-agents/cloudStorage/cloudStorageOperations.ts
12855
+ var cloudStorageOperations_exports = {};
12856
+ __export(cloudStorageOperations_exports, {
12857
+ isCloudEnabled: () => isCloudEnabled,
12858
+ migrateLocalContent: () => migrateLocalContent2,
12859
+ pullConfig: () => pullConfig2,
12860
+ pushConfig: () => pushConfig2,
12861
+ uploadPipelineResults: () => uploadPipelineResults
12862
+ });
12863
+ function isCloudEnabled() {
12864
+ return isAzureConfigured2();
12865
+ }
12866
+ async function uploadPipelineResults(inputVideoPath, publishQueueDir, videoSlug, metadata) {
12867
+ const runId = getRunId();
12868
+ logger_default.info(`Cloud upload starting (runId: ${runId})`);
12869
+ let videoUploaded = false;
12107
12870
  try {
12108
- return await fetchAndCache();
12109
- } catch (err) {
12110
- logger_default.error("Failed to fetch Late queue mappings", { error: err });
12111
- return { mappings: {}, profileId: "", fetchedAt: (/* @__PURE__ */ new Date()).toISOString() };
12871
+ await uploadRawVideo(inputVideoPath, runId, {
12872
+ ...metadata,
12873
+ slug: videoSlug
12874
+ });
12875
+ videoUploaded = true;
12876
+ } catch (error) {
12877
+ const msg = error instanceof Error ? error.message : String(error);
12878
+ logger_default.error(`Failed to upload raw video: ${msg}`);
12112
12879
  }
12880
+ const result = await uploadPublishQueue(publishQueueDir, videoSlug, runId);
12881
+ logger_default.info(`Cloud upload complete: video=${videoUploaded}, content=${result.uploaded}, errors=${result.errors.length}`);
12882
+ return {
12883
+ runId,
12884
+ videoUploaded,
12885
+ contentUploaded: result.uploaded,
12886
+ errors: result.errors
12887
+ };
12113
12888
  }
12114
- async function getQueueId(platform, clipType) {
12115
- const cache2 = await ensureMappings();
12116
- const normalizedPlatform = platform === "twitter" ? "x" : platform;
12117
- const queueName = `${normalizedPlatform}-${clipType}`;
12118
- return cache2.mappings[queueName] ?? null;
12889
+ async function pullConfig2(targetDir) {
12890
+ return pullConfig(targetDir);
12119
12891
  }
12120
- async function getProfileId() {
12121
- const cache2 = await ensureMappings();
12122
- return cache2.profileId;
12892
+ async function pushConfig2(sourceDir) {
12893
+ return pushConfig(sourceDir);
12123
12894
  }
12124
- async function refreshQueueMappings() {
12125
- memoryCache = null;
12126
- const cache2 = await fetchAndCache();
12127
- return { ...cache2.mappings };
12895
+ async function migrateLocalContent2(outputDir) {
12896
+ return migrateLocalContent(outputDir);
12128
12897
  }
12129
- var CACHE_FILE, CACHE_TTL_MS, memoryCache;
12130
- var init_queueMapping = __esm({
12131
- "src/L3-services/queueMapping/queueMapping.ts"() {
12898
+ var init_cloudStorageOperations = __esm({
12899
+ "src/L4-agents/cloudStorage/cloudStorageOperations.ts"() {
12132
12900
  "use strict";
12133
- init_lateApi();
12134
12901
  init_configLogger();
12135
- init_fileSystem();
12136
- init_paths();
12137
- CACHE_FILE = ".vidpipe-queue-cache.json";
12138
- CACHE_TTL_MS = 24 * 60 * 60 * 1e3;
12139
- memoryCache = null;
12902
+ init_azureStorageService();
12903
+ init_azureConfigService();
12140
12904
  }
12141
12905
  });
12142
12906
 
@@ -12163,7 +12927,8 @@ function buildDesiredQueues(config2) {
12163
12927
  const normalizedPlatform = normalizePlatformName(platformKey);
12164
12928
  if (!platformSchedule.byClipType) continue;
12165
12929
  for (const [clipType, clipTypeSchedule] of Object.entries(platformSchedule.byClipType)) {
12166
- const queueName = `${normalizedPlatform}-${clipType}`;
12930
+ const normalizedClipType = clipType === "medium-clip" ? "medium" : clipType;
12931
+ const queueName = `${normalizedPlatform}-${normalizedClipType}`;
12167
12932
  const slots = [];
12168
12933
  for (const timeSlot of clipTypeSchedule.slots) {
12169
12934
  for (const day of timeSlot.days) {
@@ -12598,8 +13363,8 @@ var specLoader_exports = {};
12598
13363
  __export(specLoader_exports, {
12599
13364
  loadSpec: () => loadSpec
12600
13365
  });
12601
- import { readFile, readdir } from "fs/promises";
12602
- import { join as join8, extname as extname2 } from "path";
13366
+ import { readFile as readFile2, readdir as readdir3 } from "fs/promises";
13367
+ import { join as join11, extname as extname2 } from "path";
12603
13368
  import { parse as parseYaml } from "yaml";
12604
13369
  function isFilePath(nameOrPath) {
12605
13370
  return nameOrPath.includes("/") || nameOrPath.includes("\\") || nameOrPath.endsWith(".yaml") || nameOrPath.endsWith(".yml") || nameOrPath.endsWith(".json");
@@ -12613,7 +13378,7 @@ function parseFileContent(raw, filePath) {
12613
13378
  }
12614
13379
  async function fileExists4(filePath) {
12615
13380
  try {
12616
- await readFile(filePath);
13381
+ await readFile2(filePath);
12617
13382
  return true;
12618
13383
  } catch {
12619
13384
  return false;
@@ -12621,7 +13386,7 @@ async function fileExists4(filePath) {
12621
13386
  }
12622
13387
  async function listSpecFiles(specsDir) {
12623
13388
  try {
12624
- const entries = await readdir(specsDir);
13389
+ const entries = await readdir3(specsDir);
12625
13390
  return entries.filter((e) => e.endsWith(".yaml") || e.endsWith(".yml") || e.endsWith(".json"));
12626
13391
  } catch {
12627
13392
  return [];
@@ -12640,7 +13405,7 @@ async function loadSpec(nameOrPath, repoRoot) {
12640
13405
  if (isFilePath(nameOrPath)) {
12641
13406
  let raw;
12642
13407
  try {
12643
- raw = await readFile(nameOrPath, "utf-8");
13408
+ raw = await readFile2(nameOrPath, "utf-8");
12644
13409
  } catch (err) {
12645
13410
  throw new Error(`Failed to read spec file '${nameOrPath}': ${err.message}`);
12646
13411
  }
@@ -12650,10 +13415,10 @@ async function loadSpec(nameOrPath, repoRoot) {
12650
13415
  if (isPresetName(nameOrPath)) {
12651
13416
  return getPreset(nameOrPath);
12652
13417
  }
12653
- const specsDir = join8(repoRoot, "pipeline-specs");
12654
- const conventionPath = join8(specsDir, `${nameOrPath}.yaml`);
13418
+ const specsDir = join11(repoRoot, "pipeline-specs");
13419
+ const conventionPath = join11(specsDir, `${nameOrPath}.yaml`);
12655
13420
  if (await fileExists4(conventionPath)) {
12656
- const raw = await readFile(conventionPath, "utf-8");
13421
+ const raw = await readFile2(conventionPath, "utf-8");
12657
13422
  const parsed = parseFileContent(raw, conventionPath);
12658
13423
  return validateAndMerge(parsed, conventionPath);
12659
13424
  }
@@ -13307,18 +14072,33 @@ ${cues}
13307
14072
  var ASS_HEADER = `[Script Info]
13308
14073
  Title: Auto-generated captions
13309
14074
  ScriptType: v4.00+
13310
- PlayResX: 1920
13311
- PlayResY: 1080
14075
+ PlayResX: 1920
14076
+ PlayResY: 1080
14077
+ WrapStyle: 0
14078
+
14079
+ [V4+ Styles]
14080
+ Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
14081
+ Style: Default,Montserrat,58,&H00FFFFFF,&H0000FFFF,&H00000000,&H80000000,1,0,0,0,100,100,0,0,1,3,1,2,20,20,40,1
14082
+
14083
+ [Events]
14084
+ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
14085
+ `;
14086
+ var ASS_HEADER_PORTRAIT = `[Script Info]
14087
+ Title: Auto-generated captions
14088
+ ScriptType: v4.00+
14089
+ PlayResX: 1080
14090
+ PlayResY: 1920
13312
14091
  WrapStyle: 0
13313
14092
 
13314
14093
  [V4+ Styles]
13315
14094
  Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
13316
- Style: Default,Montserrat,58,&H00FFFFFF,&H0000FFFF,&H00000000,&H80000000,1,0,0,0,100,100,0,0,1,3,1,2,20,20,40,1
14095
+ Style: Default,Montserrat,120,&H00FFFFFF,&H0000FFFF,&H00000000,&H80000000,1,0,0,0,100,100,0,0,1,3,1,2,30,30,770,1
14096
+ Style: Hook,Montserrat,56,&H00333333,&H00333333,&H60D0D0D0,&H60E0E0E0,1,0,0,0,100,100,2,0,3,18,2,8,80,80,250,1
13317
14097
 
13318
14098
  [Events]
13319
14099
  Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
13320
14100
  `;
13321
- var ASS_HEADER_PORTRAIT = `[Script Info]
14101
+ var ASS_HEADER_PORTRAIT_LOWER = `[Script Info]
13322
14102
  Title: Auto-generated captions
13323
14103
  ScriptType: v4.00+
13324
14104
  PlayResX: 1080
@@ -13327,7 +14107,7 @@ WrapStyle: 0
13327
14107
 
13328
14108
  [V4+ Styles]
13329
14109
  Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
13330
- Style: Default,Montserrat,120,&H00FFFFFF,&H0000FFFF,&H00000000,&H80000000,1,0,0,0,100,100,0,0,1,3,1,2,30,30,770,1
14110
+ Style: Default,Montserrat,120,&H00FFFFFF,&H0000FFFF,&H00000000,&H80000000,1,0,0,0,100,100,0,0,1,3,1,2,30,30,280,1
13331
14111
  Style: Hook,Montserrat,56,&H00333333,&H00333333,&H60D0D0D0,&H60E0E0E0,1,0,0,0,100,100,2,0,3,18,2,8,80,80,250,1
13332
14112
 
13333
14113
  [Events]
@@ -13406,14 +14186,26 @@ function buildPremiumDialogueLines(words, style = "shorts") {
13406
14186
  }
13407
14187
  return dialogues;
13408
14188
  }
14189
+ function getASSHeader(style) {
14190
+ switch (style) {
14191
+ case "portrait":
14192
+ return ASS_HEADER_PORTRAIT;
14193
+ case "portrait-lower":
14194
+ return ASS_HEADER_PORTRAIT_LOWER;
14195
+ case "medium":
14196
+ return ASS_HEADER_MEDIUM;
14197
+ default:
14198
+ return ASS_HEADER;
14199
+ }
14200
+ }
13409
14201
  function generateStyledASS(transcript, style = "shorts") {
13410
- const header = style === "portrait" ? ASS_HEADER_PORTRAIT : style === "medium" ? ASS_HEADER_MEDIUM : ASS_HEADER;
14202
+ const header = getASSHeader(style);
13411
14203
  const allWords = transcript.words;
13412
14204
  if (allWords.length === 0) return header;
13413
14205
  return header + buildPremiumDialogueLines(allWords, style).join("\n") + "\n";
13414
14206
  }
13415
14207
  function generateStyledASSForSegment(transcript, startTime, endTime, buffer = 1, style = "shorts") {
13416
- const header = style === "portrait" ? ASS_HEADER_PORTRAIT : style === "medium" ? ASS_HEADER_MEDIUM : ASS_HEADER;
14208
+ const header = getASSHeader(style);
13417
14209
  const bufferedStart = Math.max(0, startTime - buffer);
13418
14210
  const bufferedEnd = endTime + buffer;
13419
14211
  const words = transcript.words.filter(
@@ -13428,7 +14220,7 @@ function generateStyledASSForSegment(transcript, startTime, endTime, buffer = 1,
13428
14220
  return header + buildPremiumDialogueLines(adjusted, style).join("\n") + "\n";
13429
14221
  }
13430
14222
  function generateStyledASSForComposite(transcript, segments, buffer = 1, style = "shorts") {
13431
- const header = style === "portrait" ? ASS_HEADER_PORTRAIT : style === "medium" ? ASS_HEADER_MEDIUM : ASS_HEADER;
14223
+ const header = getASSHeader(style);
13432
14224
  const allAdjusted = [];
13433
14225
  let runningOffset = 0;
13434
14226
  for (const seg of segments) {
@@ -13455,14 +14247,16 @@ function generateHookOverlay(hookText, displayDuration = 4, _style = "portrait")
13455
14247
  const text = hookText.length > HOOK_TEXT_MAX_LENGTH ? hookText.slice(0, HOOK_TEXT_MAX_LENGTH - 3) + "..." : hookText;
13456
14248
  return `Dialogue: 1,${toASS(0)},${toASS(displayDuration)},Hook,,0,0,0,,{\\fad(300,500)}${text}`;
13457
14249
  }
13458
- function generatePortraitASSWithHook(transcript, hookText, startTime, endTime, buffer) {
13459
- const baseASS = generateStyledASSForSegment(transcript, startTime, endTime, buffer, "portrait");
13460
- const hookLine = generateHookOverlay(hookText, 4, "portrait");
14250
+ function generatePortraitASSWithHook(transcript, hookText, startTime, endTime, buffer, isSplitScreen = true) {
14251
+ const style = isSplitScreen ? "portrait" : "portrait-lower";
14252
+ const baseASS = generateStyledASSForSegment(transcript, startTime, endTime, buffer, style);
14253
+ const hookLine = generateHookOverlay(hookText, 4, style);
13461
14254
  return baseASS + hookLine + "\n";
13462
14255
  }
13463
- function generatePortraitASSWithHookComposite(transcript, segments, hookText, buffer) {
13464
- const baseASS = generateStyledASSForComposite(transcript, segments, buffer, "portrait");
13465
- const hookLine = generateHookOverlay(hookText, 4, "portrait");
14256
+ function generatePortraitASSWithHookComposite(transcript, segments, hookText, buffer, isSplitScreen = true) {
14257
+ const style = isSplitScreen ? "portrait" : "portrait-lower";
14258
+ const baseASS = generateStyledASSForComposite(transcript, segments, buffer, style);
14259
+ const hookLine = generateHookOverlay(hookText, 4, style);
13466
14260
  return baseASS + hookLine + "\n";
13467
14261
  }
13468
14262
 
@@ -15052,6 +15846,19 @@ init_videoOperations();
15052
15846
  init_fileSystem();
15053
15847
  init_paths();
15054
15848
  init_configLogger();
15849
+ function mapVariantResults(results) {
15850
+ return results.map((v) => ({
15851
+ path: v.path,
15852
+ aspectRatio: v.aspectRatio,
15853
+ platform: v.platform,
15854
+ width: v.width,
15855
+ height: v.height,
15856
+ isSplitScreen: v.isSplitScreen
15857
+ }));
15858
+ }
15859
+ function buildPortraitCaptionASS(transcript, segments, hookText, isSplitScreen) {
15860
+ return segments.length === 1 ? generatePortraitASSWithHook(transcript, hookText, segments[0].start, segments[0].end, void 0, isSplitScreen) : generatePortraitASSWithHookComposite(transcript, segments, hookText, void 0, isSplitScreen);
15861
+ }
15055
15862
  function buildShortsSystemPrompt(clipConfig) {
15056
15863
  const minDuration = clipConfig?.duration?.min ?? 15;
15057
15864
  const maxDuration = clipConfig?.duration?.max ?? 60;
@@ -15449,13 +16256,7 @@ Words: ${words}`;
15449
16256
  const defaultPlatforms = ["tiktok", "youtube-shorts", "instagram-reels", "instagram-feed", "linkedin"];
15450
16257
  const results = await generatePlatformVariants2(outputPath, shortsDir, shortSlug, defaultPlatforms, { webcamOverride });
15451
16258
  if (results.length > 0) {
15452
- clipVariants = results.map((v) => ({
15453
- path: v.path,
15454
- aspectRatio: v.aspectRatio,
15455
- platform: v.platform,
15456
- width: v.width,
15457
- height: v.height
15458
- }));
16259
+ clipVariants = mapVariantResults(results);
15459
16260
  logger_default.info(`[ShortsAgent] Generated ${clipVariants.length} platform variants for: ${plan.title}`);
15460
16261
  }
15461
16262
  } catch (err) {
@@ -15483,7 +16284,8 @@ Words: ${words}`;
15483
16284
  if (portraitVariants.length > 0) {
15484
16285
  try {
15485
16286
  const hookText = plan.hook ?? plan.title;
15486
- const portraitAssContent = segments.length === 1 ? generatePortraitASSWithHook(transcript, hookText, segments[0].start, segments[0].end) : generatePortraitASSWithHookComposite(transcript, segments, hookText);
16287
+ const isSplitScreen = portraitVariants[0].isSplitScreen ?? false;
16288
+ const portraitAssContent = buildPortraitCaptionASS(transcript, segments, hookText, isSplitScreen);
15487
16289
  const portraitAssPath = join(shortsDir, `${shortSlug}-portrait.ass`);
15488
16290
  await writeTextFile(portraitAssPath, portraitAssContent);
15489
16291
  const portraitCaptionedPath = portraitVariants[0].path.replace(".mp4", "-captioned.mp4");
@@ -19206,14 +20008,7 @@ var MainVideoAsset = class _MainVideoAsset extends VideoAsset {
19206
20008
 
19207
20009
  // src/L4-agents/ScheduleAgent.ts
19208
20010
  init_BaseAgent();
19209
-
19210
- // src/L3-services/lateApi/lateApiService.ts
19211
- init_lateApi();
19212
- function createLateApiClient(...args) {
19213
- return new LateApiClient(...args);
19214
- }
19215
-
19216
- // src/L4-agents/ScheduleAgent.ts
20011
+ init_lateApiService();
19217
20012
  init_scheduler();
19218
20013
  init_scheduleConfig();
19219
20014
 
@@ -21316,6 +22111,16 @@ function createIdeaDiscoveryAgent(...args) {
21316
22111
  return new IdeaDiscoveryAgent(...args);
21317
22112
  }
21318
22113
 
22114
+ // src/L5-assets/bridges/cloudStorageBridge.ts
22115
+ async function uploadToCloud(inputVideoPath, publishQueueDir, videoSlug, metadata) {
22116
+ const { uploadPipelineResults: uploadPipelineResults2 } = await Promise.resolve().then(() => (init_cloudStorageOperations(), cloudStorageOperations_exports));
22117
+ return uploadPipelineResults2(inputVideoPath, publishQueueDir, videoSlug, metadata);
22118
+ }
22119
+ async function isCloudEnabled2() {
22120
+ const { isCloudEnabled: check } = await Promise.resolve().then(() => (init_cloudStorageOperations(), cloudStorageOperations_exports));
22121
+ return check();
22122
+ }
22123
+
21319
22124
  // src/L6-pipeline/pipeline.ts
21320
22125
  init_types2();
21321
22126
  async function runStage(stageName, fn, stageResults) {
@@ -21596,6 +22401,23 @@ async function processVideo(videoPath, ideas, publishBy, spec) {
21596
22401
  skipStage("queue-build" /* QueueBuild */, "NO_SOCIAL_POSTS");
21597
22402
  }
21598
22403
  const blogPost = await trackStage("blog" /* Blog */, () => asset.getBlog());
22404
+ await trackStage("cloud-upload" /* CloudUpload */, async () => {
22405
+ const cloudEnabled = await isCloudEnabled2();
22406
+ if (!cloudEnabled) {
22407
+ logger_default.info("Cloud upload skipped \u2014 Azure storage not configured");
22408
+ return;
22409
+ }
22410
+ const publishQueueDir = join(cfg.OUTPUT_DIR, "publish-queue");
22411
+ const result = await uploadToCloud(videoPath, publishQueueDir, video.slug, {
22412
+ originalFilename: video.filename,
22413
+ duration: video.duration,
22414
+ size: video.size
22415
+ });
22416
+ logger_default.info(`Cloud upload complete \u2014 runId: ${result.runId}, items uploaded: ${result.contentUploaded}, video: ${result.videoUploaded}`);
22417
+ if (result.errors.length > 0) {
22418
+ logger_default.warn(`Cloud upload had ${result.errors.length} error(s): ${result.errors.join("; ")}`);
22419
+ }
22420
+ });
21599
22421
  const totalDuration = Date.now() - pipelineStart;
21600
22422
  const report = costTracker3.getReport();
21601
22423
  if (report.records.length > 0) {
@@ -21708,6 +22530,7 @@ init_process();
21708
22530
  init_fileSystem();
21709
22531
  init_paths();
21710
22532
  init_environment();
22533
+ init_lateApiService();
21711
22534
  init_scheduleConfig();
21712
22535
  var require3 = createModuleRequire(import.meta.url);
21713
22536
  function normalizeProviderName(raw) {
@@ -21992,6 +22815,7 @@ function getFFprobePath3(...args) {
21992
22815
  }
21993
22816
 
21994
22817
  // src/L7-app/commands/init.ts
22818
+ init_lateApiService();
21995
22819
  init_scheduleConfig();
21996
22820
  var rl = createReadlineInterface({ input: process.stdin, output: process.stdout });
21997
22821
  function ask(question) {
@@ -23160,7 +23984,9 @@ var credentialKeys = [
23160
23984
  "perplexityApiKey",
23161
23985
  "lateApiKey",
23162
23986
  "githubToken",
23163
- "geminiApiKey"
23987
+ "geminiApiKey",
23988
+ "azureStorageAccountName",
23989
+ "azureStorageAccountKey"
23164
23990
  ];
23165
23991
  var defaultKeys = [
23166
23992
  "llmProvider",
@@ -24152,6 +24978,248 @@ async function runIdeaSearch(query, options) {
24152
24978
  }
24153
24979
  }
24154
24980
 
24981
+ // src/L7-app/commands/cloud.ts
24982
+ init_environment();
24983
+ init_configLogger();
24984
+ import { basename as basename3, dirname as dirname6, join as join10 } from "path";
24985
+ import { stat as fileStat } from "fs/promises";
24986
+ import { execFile } from "child_process";
24987
+ var BLOB_PREFIX = "blob://";
24988
// Root vidpipe directory: OUTPUT_DIR lives one level inside it, so its
// parent is the vidpipe home (where schedule.json/brand.json/assets live).
function getVidpipeDir() {
  const { OUTPUT_DIR } = getConfig();
  return dirname6(OUTPUT_DIR);
}
24992
/**
 * Build the `vidpipe cloud` command group.
 *
 * Subcommands: push-config / pull-config (sync config files with Azure),
 * upload (raw recording + matching publish-queue items), migrate (bulk
 * upload of existing local content), status (connection + counts),
 * process (upload video then trigger the GitHub Actions pipeline via `gh`),
 * and download (fetch a blob:// or HTTP video).
 *
 * Every action calls process.exit() when done, so this command group is
 * terminal — no code after the action runs.
 *
 * @returns {Command} the configured commander sub-command.
 */
function createCloudCommand() {
  const cloud = new Command("cloud").description("Manage cloud storage \u2014 sync config, migrate content, check status");
  // --- push-config: local vidpipe dir -> Azure ---
  cloud.command("push-config").description("Upload config files (schedule.json, brand.json, assets/) to Azure Storage").action(async () => {
    initConfig({});
    const sourceDir = getVidpipeDir();
    logger_default.info(`Pushing config from ${sourceDir} to Azure Storage...`);
    try {
      // Lazy-load the Azure config service (bundler chunk init pattern).
      const { pushConfig: pushConfig3 } = await Promise.resolve().then(() => (init_azureConfigService(), azureConfigService_exports));
      const result = await pushConfig3(sourceDir);
      logger_default.info(`\u2705 Pushed ${result.uploaded} config file(s) to Azure Storage`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed to push config: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- pull-config: Azure -> local vidpipe dir ---
  cloud.command("pull-config").description("Download config files from Azure Storage to local directory").action(async () => {
    initConfig({});
    const targetDir = getVidpipeDir();
    logger_default.info(`Pulling config from Azure Storage to ${targetDir}...`);
    try {
      const { pullConfig: pullConfig3 } = await Promise.resolve().then(() => (init_azureConfigService(), azureConfigService_exports));
      const result = await pullConfig3(targetDir);
      logger_default.info(`\u2705 Pulled ${result.downloaded} config file(s) from Azure Storage`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed to pull config: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- upload: raw recording + its publish-queue items ---
  cloud.command("upload <recording-folder>").description("Upload a recording folder (video + publish-queue) to Azure Storage").option("--slug <slug>", "Video slug (auto-derived from folder name if omitted)").action(async (recordingFolder, opts) => {
    initConfig({});
    const config2 = getConfig();
    try {
      const { uploadVideoFile: uploadVideoFile2, isAzureConfigured: isAzureConfigured3, getRunId: getRunId2 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
      const { readdir: readdir4, stat: getFileStat } = await import("fs/promises");
      if (!isAzureConfigured3()) {
        logger_default.error("Azure Storage not configured");
        process.exit(1);
        return;
      }
      // Find the first .mp4 in the recording folder — that is the raw video.
      const files = await readdir4(recordingFolder);
      const mp4File = files.find((f) => f.endsWith(".mp4"));
      if (!mp4File) {
        logger_default.error(`No .mp4 file found in ${recordingFolder}`);
        process.exit(1);
        return;
      }
      const videoPath = join10(recordingFolder, mp4File);
      const folderName = basename3(recordingFolder);
      // Derive a URL-safe slug from the folder name unless one was given.
      const slug = opts.slug || folderName.replace(/[^a-z0-9-]/gi, "-").toLowerCase();
      const runId = getRunId2();
      const blobPath = `raw/${runId}-${mp4File}`;
      const stats = await getFileStat(videoPath);
      logger_default.info(`Uploading ${mp4File} (${(stats.size / 1024 / 1024).toFixed(1)} MB) to Azure...`);
      await uploadVideoFile2(videoPath, blobPath);
      logger_default.info(`\u2705 Raw video uploaded: ${blobPath}`);
      const publishQueueDir = join10(config2.OUTPUT_DIR, "publish-queue");
      logger_default.info(`Scanning publish-queue for items from ${folderName}...`);
      let uploaded = 0;
      const errors = [];
      try {
        const queueItems = await readdir4(publishQueueDir);
        const { readFile: readFile3 } = await import("fs/promises");
        const { uploadContentItem: uploadContentItem2 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
        for (const itemId of queueItems) {
          const itemDir = join10(publishQueueDir, itemId);
          const metadataPath = join10(itemDir, "metadata.json");
          try {
            const meta = JSON.parse(await readFile3(metadataPath, "utf8"));
            const sourceVideo = String(meta.sourceVideo || "");
            // Only upload items produced from this recording folder.
            if (!sourceVideo.endsWith(folderName)) continue;
            await uploadContentItem2(itemDir, itemId, slug, runId);
            uploaded++;
            logger_default.info(`  \u2705 ${itemId}`);
          } catch (err) {
            // Per-item failure is collected, not fatal — keep uploading the rest.
            const msg = err instanceof Error ? err.message : String(err);
            errors.push(`${itemId}: ${msg}`);
          }
        }
      } catch {
        logger_default.warn("No publish-queue directory found");
      }
      logger_default.info(`\u2705 Content uploaded: ${uploaded} item(s)`);
      if (errors.length > 0) {
        logger_default.warn(`  ${errors.length} error(s):`);
        for (const err of errors) {
          logger_default.warn(`    \u26A0 ${err}`);
        }
      }
      logger_default.info(`Cloud upload complete (runId: ${runId})`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed to upload: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- migrate: bulk-upload existing local content ---
  cloud.command("migrate").description("Upload existing local publish-queue/ and published/ content to Azure Storage").action(async () => {
    initConfig({});
    const config2 = getConfig();
    const outputDir = config2.OUTPUT_DIR;
    logger_default.info(`Migrating local content from ${outputDir} to Azure Storage...`);
    try {
      const { migrateLocalContent: migrateLocalContent3 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
      const result = await migrateLocalContent3(outputDir);
      logger_default.info(`\u2705 Migration complete: ${result.uploaded} file(s) uploaded`);
      if (result.errors.length > 0) {
        logger_default.warn(`  ${result.errors.length} error(s):`);
        for (const err of result.errors) {
          logger_default.warn(`    \u26A0 ${err}`);
        }
      }
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed to migrate content: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- status: connection check + stored-content counts ---
  cloud.command("status").description("Show Azure connection status, stored config files, and content counts").action(async () => {
    initConfig({});
    try {
      const { isAzureConfigured: isAzureConfigured3, getContentItems: getContentItems2, listVideos: listVideos2 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
      const { listConfigFiles: listConfigFiles2 } = await Promise.resolve().then(() => (init_azureConfigService(), azureConfigService_exports));
      const configured = isAzureConfigured3();
      console.log(`
  \u2601\uFE0F Azure Storage Status
`);
      console.log(`  Connection: ${configured ? "\u2705 Configured" : "\u274C Not configured"}`);
      if (!configured) {
        console.log("\n  Run `vidpipe configure set credentials.azureStorageAccountName <name>` and\n  `vidpipe configure set credentials.azureStorageAccountKey <key>` to configure.\n");
        process.exit(0);
        return;
      }
      // Fetch all three listings in parallel — they are independent.
      const [configFiles, contentItems, videos] = await Promise.all([
        listConfigFiles2(),
        getContentItems2(),
        listVideos2()
      ]);
      console.log(`
  Config files: ${configFiles.length}`);
      for (const file of configFiles) {
        console.log(`    \u2022 ${file}`);
      }
      console.log(`
  Content items: ${contentItems.length}`);
      console.log(`  Videos stored: ${videos.length}`);
      console.log();
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed to check Azure status: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- process: upload video then trigger the GitHub Actions pipeline ---
  cloud.command("process <video>").description("Upload video to Azure Storage and trigger GitHub Actions pipeline").option("--spec <spec>", "Pipeline spec preset").option("--ideas <ids>", "Comma-separated idea IDs").option("--publish-by <date>", "Publish-by date (ISO or +Nd)").option("--repo <owner/repo>", "GitHub repository", "htekdev/vidpipe").action(async (videoPath, opts) => {
    initConfig({});
    try {
      const { uploadVideoFile: uploadVideoFile2, isAzureConfigured: isAzureConfigured3, getRunId: getRunId2 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
      if (!isAzureConfigured3()) {
        logger_default.error("Azure Storage not configured. Run `vidpipe configure set credentials.azureStorageAccountName <name>`");
        process.exit(1);
        return;
      }
      const filename = basename3(videoPath);
      const runId = getRunId2();
      // FIX: interpolate the actual filename; this previously contained a
      // corrupted literal placeholder instead of ${filename} (cf. the
      // `upload` subcommand which uses `raw/${runId}-${mp4File}`).
      const blobPath = `raw/${runId}-${filename}`;
      const stats = await fileStat(videoPath);
      // FIX: same corruption here — log the real filename being uploaded.
      logger_default.info(`Uploading ${filename} (${(stats.size / 1024 / 1024).toFixed(1)} MB) to Azure...`);
      await uploadVideoFile2(videoPath, blobPath);
      logger_default.info(`\u2705 Uploaded to ${blobPath}`);
      const videoUrl = `${BLOB_PREFIX}${blobPath}`;
      // Build `gh workflow run` args; optional inputs only when provided.
      const args = ["workflow", "run", "process-video.yml", "--repo", opts.repo, "-f", `video_url=${videoUrl}`];
      if (opts.spec) args.push("-f", `spec=${opts.spec}`);
      if (opts.ideas) args.push("-f", `ideas=${opts.ideas}`);
      if (opts.publishBy) args.push("-f", `publish_by=${opts.publishBy}`);
      logger_default.info("Triggering GitHub Actions workflow...");
      await new Promise((resolve4, reject) => {
        execFile("gh", args, (err, stdout, stderr) => {
          if (err) {
            reject(new Error(stderr || err.message));
            return;
          }
          if (stdout.trim()) logger_default.info(stdout.trim());
          resolve4();
        });
      });
      logger_default.info(`\u2705 Workflow triggered. Video: ${videoUrl}`);
      logger_default.info(`  Monitor at: https://github.com/${opts.repo}/actions`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Failed: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  // --- download: blob:// via Azure SDK, anything else via curl ---
  cloud.command("download <video-url> <output-path>").description("Download a video from Azure blob (blob://) or HTTP URL").action(async (videoUrl, outputPath) => {
    initConfig({});
    try {
      if (videoUrl.startsWith(BLOB_PREFIX)) {
        const blobPath = videoUrl.slice(BLOB_PREFIX.length);
        logger_default.info(`Downloading from Azure blob: ${blobPath}`);
        const { downloadBlobToFile: downloadBlobToFile2 } = await Promise.resolve().then(() => (init_azureStorageService(), azureStorageService_exports));
        await downloadBlobToFile2(blobPath, outputPath);
      } else {
        logger_default.info(`Downloading from URL: ${videoUrl}`);
        await new Promise((resolve4, reject) => {
          execFile("curl", ["-L", "--fail", "-o", outputPath, videoUrl], (err, _stdout, stderr) => {
            if (err) {
              reject(new Error(stderr || err.message));
              return;
            }
            resolve4();
          });
        });
      }
      const stats = await fileStat(outputPath);
      logger_default.info(`\u2705 Downloaded: ${outputPath} (${(stats.size / 1024 / 1024).toFixed(1)} MB)`);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      logger_default.error(`Download failed: ${msg}`);
      process.exitCode = 1;
    }
    process.exit(process.exitCode ?? 0);
  });
  return cloud;
}
25222
+
24155
25223
  // src/L1-infra/http/http.ts
24156
25224
  import { default as default8 } from "express";
24157
25225
  import { Router } from "express";
@@ -24159,17 +25227,182 @@ import { Router } from "express";
24159
25227
  // src/L7-app/review/server.ts
24160
25228
  init_paths();
24161
25229
 
25230
+ // src/L3-services/azureStorage/azureReviewDataSource.ts
25231
+ init_configLogger();
25232
+ init_azureStorageService();
25233
+ init_blobClient();
25234
// Convert an Azure Table content record into the shape the review UI consumes.
// CSV-encoded fields (hashtags, ideaIds) are split into arrays; empty/missing
// optional fields normalize to null, and media URLs are built from filenames.
function mapContentRecordToReviewItem(record) {
  const splitCsv = (value) => (value ? value.split(",").filter(Boolean) : []);
  const id = record.rowKey;
  const media = record.mediaFilename || "";
  const thumb = record.thumbnailFilename || "";
  const mediaRoute = (name) => `/api/media/${id}/${name}`;
  return {
    id,
    videoSlug: record.partitionKey,
    platform: record.platform,
    clipType: record.clipType,
    status: record.status,
    mediaType: record.mediaType || "video",
    mediaUrl: media === "" ? "" : mediaRoute(media),
    postContent: record.postContent || "",
    hashtags: splitCsv(record.hashtags),
    scheduledFor: record.scheduledFor || null,
    latePostId: record.latePostId || null,
    publishedUrl: record.publishedUrl || null,
    createdAt: record.createdAt || new Date().toISOString(),
    thumbnailUrl: thumb === "" ? null : mediaRoute(thumb),
    ideaIds: splitCsv(record.ideaIds),
    mediaFilename: media,
    thumbnailFilename: thumb,
    blobBasePath: record.blobBasePath || `content/${id}/`
  };
}
25259
// Map a filename's extension (case-insensitive) to an HTTP Content-Type.
// Unrecognized or missing extensions fall back to application/octet-stream.
function getContentTypeForFilename(filename) {
  const mimeByExtension = new Map([
    ["mp4", "video/mp4"],
    ["webm", "video/webm"],
    ["png", "image/png"],
    ["jpg", "image/jpeg"],
    ["jpeg", "image/jpeg"],
    ["gif", "image/gif"],
    ["webp", "image/webp"],
    ["md", "text/markdown"],
    ["json", "application/json"]
  ]);
  const extension = filename.split(".").pop()?.toLowerCase();
  return mimeByExtension.get(extension) ?? "application/octet-stream";
}
25283
// Fetch all content records awaiting review and map them to review items.
async function listPendingItems() {
  const pendingRecords = await getContentItems({ status: "pending_review" });
  return pendingRecords.map((record) => mapContentRecordToReviewItem(record));
}
25287
// Group pending review items by clip: dedupe per item id (newest wins),
// strip the platform suffix off each id to recover the clip slug, bucket
// items by slug, then sort groups — ones with media first, oldest first.
async function getGroupedItems() {
  const pending = await listPendingItems();
  // Suffix order matters: more specific suffixes must match before their
  // prefixes (e.g. "youtube-shorts" before "youtube").
  const platformSuffixes = [
    "youtube-shorts",
    "instagram-reels",
    "instagram-feed",
    "twitter",
    "youtube",
    "tiktok",
    "instagram",
    "linkedin",
    "x"
  ];
  // Keep only the newest record for each item id.
  const latestById = new Map();
  for (const candidate of pending) {
    const current = latestById.get(candidate.id);
    if (current === undefined || candidate.createdAt > current.createdAt) {
      latestById.set(candidate.id, candidate);
    }
  }
  // Bucket items under their clip slug (id minus platform suffix).
  const buckets = new Map();
  for (const item of latestById.values()) {
    const suffix = platformSuffixes.find((s) => item.id.endsWith(`-${s}`));
    const clipSlug = suffix ? item.id.slice(0, -(suffix.length + 1)) : item.id;
    const bucket = buckets.get(clipSlug);
    if (bucket) {
      bucket.push(item);
    } else {
      buckets.set(clipSlug, [item]);
    }
  }
  const groups = [...buckets].map(([clipSlug, groupItems]) => ({
    videoSlug: clipSlug,
    clipType: groupItems[0].clipType,
    items: groupItems
  }));
  groups.sort((left, right) => {
    const leftHasMedia = left.items.some((i) => Boolean(i.mediaFilename));
    const rightHasMedia = right.items.some((i) => Boolean(i.mediaFilename));
    if (leftHasMedia !== rightHasMedia) return leftHasMedia ? -1 : 1;
    return left.items[0].createdAt.localeCompare(right.items[0].createdAt);
  });
  return groups;
}
25338
// Look up a single content record and map it to a review item; null if absent.
async function getItemById(videoSlug, itemId) {
  const record = await getContentItem(videoSlug, itemId);
  return record ? mapContentRecordToReviewItem(record) : null;
}
25343
/**
 * Open a readable stream for a content item's media blob.
 *
 * FIX: the blob path previously contained a corrupted literal placeholder
 * instead of interpolating the `filename` parameter (cf. updateItem2's
 * `content/${itemId}/post.md` path convention).
 *
 * @param itemId   content item row key (blob folder under content/)
 * @param filename blob file name within the item folder
 * @returns {{ stream, contentType }} stream plus a Content-Type derived
 *          from the filename extension.
 */
async function getMediaStream(itemId, filename) {
  const blobPath = `content/${itemId}/${filename}`;
  const stream = await downloadStream(blobPath);
  const contentType = getContentTypeForFilename(filename);
  return { stream, contentType };
}
25349
// Mark a content item approved, stamping the review time.
async function approveItem(videoSlug, itemId) {
  const reviewedAt = new Date().toISOString();
  await updateContentStatus(videoSlug, itemId, "approved", { reviewedAt });
  logger_default.info(`Approved content item: ${itemId}`);
}
25355
// Record that a content item was published via Late: store the Late post id,
// schedule, optional published URL, and a publish timestamp.
async function markPublished3(videoSlug, itemId, publishData) {
  const fields = {
    latePostId: publishData.latePostId,
    scheduledFor: publishData.scheduledFor,
    publishedUrl: publishData.publishedUrl || "",
    publishedAt: new Date().toISOString()
  };
  await updateContentStatus(videoSlug, itemId, "published", fields);
  logger_default.info(`Marked content item as published: ${itemId} \u2192 ${publishData.latePostId}`);
}
25364
// Mark a content item rejected (no extra fields recorded).
async function rejectItem(videoSlug, itemId) {
  await updateContentStatus(videoSlug, itemId, "rejected");
  logger_default.info(`Rejected content item: ${itemId}`);
}
25368
// Apply edits to a content item. Currently only postContent is editable:
// the new text is written back to the item's post.md blob and the table
// record's postContent/characterCount fields are updated (status unchanged).
// Returns the refreshed review item, or null if the record does not exist.
async function updateItem2(videoSlug, itemId, changes) {
  const existing = await getContentItem(videoSlug, itemId);
  if (!existing) return null;
  const fieldUpdates = {};
  if (changes.postContent !== void 0) {
    fieldUpdates.postContent = changes.postContent;
    fieldUpdates.characterCount = changes.postContent.length;
    // Keep the blob copy of the post body in sync with the table record.
    const postBlobPath = `content/${itemId}/post.md`;
    await uploadBuffer(postBlobPath, Buffer.from(changes.postContent, "utf8"), "text/markdown");
    logger_default.debug(`Updated post.md blob for ${itemId}`);
  }
  if (Object.keys(fieldUpdates).length > 0) {
    await updateContentStatus(videoSlug, itemId, existing.status, fieldUpdates);
  }
  const refreshed = await getContentItem(videoSlug, itemId);
  return refreshed ? mapContentRecordToReviewItem(refreshed) : null;
}
25387
/**
 * Download a content item's media blob to a local file path.
 *
 * FIX: the blob path previously contained a corrupted literal placeholder
 * instead of interpolating the `filename` parameter, so every download
 * targeted a nonexistent blob.
 *
 * @param itemId    content item row key (blob folder under content/)
 * @param filename  blob file name within the item folder
 * @param localPath destination path on the local filesystem
 */
async function downloadMediaToFile(itemId, filename, localPath) {
  const blobPath = `content/${itemId}/${filename}`;
  await downloadToFile(blobPath, localPath);
}
25391
+
24162
25392
  // src/L7-app/review/routes.ts
24163
- init_postStore();
25393
+ init_azureStorageService();
24164
25394
  init_ideaService2();
24165
25395
  init_scheduler();
25396
+ init_lateApiService();
24166
25397
  init_types2();
24167
25398
  init_configLogger();
24168
25399
  init_queueMapping();
24169
25400
 
24170
25401
  // src/L7-app/review/approvalQueue.ts
25402
+ init_paths();
24171
25403
  init_fileSystem();
24172
- init_postStore();
25404
+ init_environment();
25405
+ init_azureStorageService();
24173
25406
  init_ideaService2();
24174
25407
  init_scheduler();
24175
25408
  init_scheduleConfig();
@@ -24181,7 +25414,7 @@ init_configLogger();
24181
25414
  init_fileSystem();
24182
25415
  init_paths();
24183
25416
  var CACHE_FILE2 = ".vidpipe-cache.json";
24184
- var CACHE_TTL_MS2 = 24 * 60 * 60 * 1e3;
25417
+ var CACHE_TTL_MS3 = 24 * 60 * 60 * 1e3;
24185
25418
  var memoryCache2 = null;
24186
25419
  function toLatePlatform2(platform) {
24187
25420
  return platform === "x" /* X */ ? "twitter" : platform;
@@ -24198,7 +25431,7 @@ function isCacheValid2(cache2) {
24198
25431
  return false;
24199
25432
  }
24200
25433
  const age = Date.now() - fetchedAtTime;
24201
- return age < CACHE_TTL_MS2;
25434
+ return age < CACHE_TTL_MS3;
24202
25435
  }
24203
25436
  async function readFileCache2() {
24204
25437
  try {
@@ -24280,14 +25513,15 @@ async function getAccountId(platform) {
24280
25513
  }
24281
25514
 
24282
25515
  // src/L7-app/review/approvalQueue.ts
25516
+ init_lateApiService();
24283
25517
  init_queueMapping();
24284
25518
  init_types2();
24285
25519
  init_configLogger();
24286
25520
  var queue = [];
24287
25521
  var processing = false;
24288
- function enqueueApproval(itemIds) {
25522
+ function enqueueApproval(itemIds, options) {
24289
25523
  return new Promise((resolve4) => {
24290
- queue.push({ itemIds, resolve: resolve4 });
25524
+ queue.push({ itemIds, priority: options?.priority ?? false, resolve: resolve4 });
24291
25525
  if (!processing) drain();
24292
25526
  });
24293
25527
  }
@@ -24296,7 +25530,7 @@ async function drain() {
24296
25530
  while (queue.length > 0) {
24297
25531
  const job = queue.shift();
24298
25532
  try {
24299
- const result = await processApprovalBatch(job.itemIds);
25533
+ const result = await processApprovalBatch(job.itemIds, job.priority);
24300
25534
  job.resolve(result);
24301
25535
  } catch (err) {
24302
25536
  const msg = err instanceof Error ? err.message : String(err);
@@ -24311,24 +25545,35 @@ async function drain() {
24311
25545
  }
24312
25546
  processing = false;
24313
25547
  }
24314
- async function processApprovalBatch(itemIds) {
25548
+ async function processApprovalBatch(itemIds, priority) {
24315
25549
  const client = createLateApiClient();
24316
25550
  const schedConfig = await loadScheduleConfig();
24317
- const publishDataMap = /* @__PURE__ */ new Map();
24318
25551
  const results = [];
24319
25552
  const rateLimitedPlatforms = /* @__PURE__ */ new Set();
24320
- const loadedItems = await Promise.all(
24321
- itemIds.map(async (id) => ({ id, item: await getItem(id) }))
24322
- );
24323
- const itemMap = new Map(loadedItems.map(({ id, item }) => [id, item]));
24324
- const allIdeaIds = /* @__PURE__ */ new Set();
24325
- for (const { item } of loadedItems) {
24326
- if (item?.metadata.ideaIds?.length) {
24327
- for (const ideaId of item.metadata.ideaIds) {
24328
- allIdeaIds.add(ideaId);
25553
+ const allPending = await getContentItems({ status: "pending_review" });
25554
+ const itemMap = /* @__PURE__ */ new Map();
25555
+ for (const record of allPending) {
25556
+ if (itemIds.includes(record.rowKey)) {
25557
+ itemMap.set(record.rowKey, { record, videoSlug: record.partitionKey });
25558
+ }
25559
+ }
25560
+ const missingIds = itemIds.filter((id) => !itemMap.has(id));
25561
+ if (missingIds.length > 0) {
25562
+ const allItems = await getContentItems();
25563
+ const missingIdSet = new Set(missingIds);
25564
+ for (const record of allItems) {
25565
+ if (missingIdSet.has(record.rowKey)) {
25566
+ itemMap.set(record.rowKey, { record, videoSlug: record.partitionKey });
24329
25567
  }
24330
25568
  }
24331
25569
  }
25570
+ const allIdeaIds = /* @__PURE__ */ new Set();
25571
+ for (const { record } of itemMap.values()) {
25572
+ const ideas = record.ideaIds ? record.ideaIds.split(",").filter(Boolean) : [];
25573
+ for (const ideaId of ideas) {
25574
+ allIdeaIds.add(ideaId);
25575
+ }
25576
+ }
24332
25577
  let ideaMap = /* @__PURE__ */ new Map();
24333
25578
  if (allIdeaIds.size > 0) {
24334
25579
  try {
@@ -24340,13 +25585,17 @@ async function processApprovalBatch(itemIds) {
24340
25585
  } catch {
24341
25586
  }
24342
25587
  }
24343
- const enriched = loadedItems.map(({ id, item }) => {
24344
- const createdAt = item?.metadata.createdAt ?? null;
24345
- if (!item?.metadata.ideaIds?.length) {
24346
- return { id, publishBy: null, hasIdeas: false, createdAt };
25588
+ const enriched = itemIds.map((id) => {
25589
+ const entry = itemMap.get(id);
25590
+ if (!entry) return { id, videoSlug: "", publishBy: null, hasIdeas: false, createdAt: null };
25591
+ const { record, videoSlug } = entry;
25592
+ const createdAt = record.createdAt || null;
25593
+ const ideas = record.ideaIds ? record.ideaIds.split(",").filter(Boolean) : [];
25594
+ if (ideas.length === 0) {
25595
+ return { id, videoSlug, publishBy: null, hasIdeas: false, createdAt };
24347
25596
  }
24348
- const dates = item.metadata.ideaIds.map((ideaId) => ideaMap.get(ideaId)?.publishBy).filter((publishBy) => Boolean(publishBy)).sort();
24349
- return { id, publishBy: dates[0] ?? null, hasIdeas: true, createdAt };
25597
+ const dates = ideas.map((ideaId) => ideaMap.get(ideaId)?.publishBy).filter((publishBy) => Boolean(publishBy)).sort();
25598
+ return { id, videoSlug, publishBy: dates[0] ?? null, hasIdeas: true, createdAt };
24350
25599
  });
24351
25600
  enriched.sort((a, b) => {
24352
25601
  if (a.hasIdeas && !b.hasIdeas) return -1;
@@ -24367,124 +25616,142 @@ async function processApprovalBatch(itemIds) {
24367
25616
  const publishByMap = new Map(
24368
25617
  enriched.flatMap((entry) => entry.publishBy ? [[entry.id, entry.publishBy]] : [])
24369
25618
  );
24370
- for (const itemId of sortedIds) {
24371
- const item = itemMap.get(itemId) ?? null;
24372
- try {
24373
- if (!item) {
24374
- results.push({ itemId, success: false, error: "Item not found" });
24375
- continue;
24376
- }
24377
- const latePlatform = normalizePlatformString(item.metadata.platform);
24378
- if (rateLimitedPlatforms.has(latePlatform)) {
24379
- results.push({ itemId, success: false, error: `${latePlatform} rate-limited` });
24380
- continue;
24381
- }
24382
- const ideaIds = item.metadata.ideaIds;
24383
- const publishBy = publishByMap.get(itemId);
24384
- const clipType = item.metadata.clipType || "short";
24385
- const queueId = await getQueueId(latePlatform, clipType);
24386
- let slot;
24387
- let useQueue = false;
24388
- if (queueId) {
24389
- useQueue = true;
24390
- logger_default.debug(`Using Late queue ${queueId} for ${latePlatform}/${clipType} (idea priority via batch order)`);
24391
- } else {
24392
- logger_default.debug(`No queue for ${latePlatform}/${clipType}, using local slot calculation`);
24393
- const foundSlot = ideaIds?.length ? await findNextSlot(latePlatform, clipType, { ideaIds, publishBy }) : await findNextSlot(latePlatform, clipType);
24394
- slot = foundSlot ?? void 0;
24395
- if (!slot) {
24396
- results.push({ itemId, success: false, error: `No available slot for ${latePlatform}` });
25619
+ const videoSlugMap = new Map(enriched.map((entry) => [entry.id, entry.videoSlug]));
25620
+ const config2 = getConfig();
25621
+ const tempDir = join(config2.OUTPUT_DIR, ".azure-media-temp");
25622
+ await ensureDirectory(tempDir);
25623
+ try {
25624
+ for (const itemId of sortedIds) {
25625
+ const entry = itemMap.get(itemId);
25626
+ try {
25627
+ if (!entry) {
25628
+ results.push({ itemId, success: false, error: "Item not found" });
24397
25629
  continue;
24398
25630
  }
24399
- }
24400
- const platform = fromLatePlatform(latePlatform);
24401
- const accountId = item.metadata.accountId || await getAccountId(platform);
24402
- if (!accountId) {
24403
- results.push({ itemId, success: false, error: `No account for ${latePlatform}` });
24404
- continue;
24405
- }
24406
- let mediaItems;
24407
- let platformSpecificData = item.metadata.platformSpecificData;
24408
- const effectiveMediaPath = item.mediaPath ?? item.metadata.sourceMediaPath;
24409
- if (effectiveMediaPath) {
24410
- const mediaExists = await fileExists(effectiveMediaPath);
24411
- if (mediaExists) {
24412
- if (!item.mediaPath && item.metadata.sourceMediaPath) {
24413
- logger_default.info(`Using source media fallback for ${String(item.id).replace(/[\r\n]/g, "")}: ${String(item.metadata.sourceMediaPath).replace(/[\r\n]/g, "")}`);
25631
+ const { record, videoSlug } = entry;
25632
+ const latePlatform = normalizePlatformString(record.platform);
25633
+ if (rateLimitedPlatforms.has(latePlatform)) {
25634
+ results.push({ itemId, success: false, error: `${latePlatform} rate-limited` });
25635
+ continue;
25636
+ }
25637
+ const ideaIds = record.ideaIds ? record.ideaIds.split(",").filter(Boolean) : [];
25638
+ const publishBy = publishByMap.get(itemId);
25639
+ const clipType = record.clipType || "short";
25640
+ const queueId = await getQueueId(latePlatform, clipType);
25641
+ let slot;
25642
+ let useQueue = false;
25643
+ if (priority && queueId) {
25644
+ logger_default.info(`\u26A1 Priority scheduling for ${latePlatform}/${clipType}`);
25645
+ const { priorityShiftQueue: priorityShiftQueue2 } = await Promise.resolve().then(() => (init_lateApiService(), lateApiService_exports));
25646
+ const shiftResult = await priorityShiftQueue2(latePlatform, clipType);
25647
+ if (shiftResult) {
25648
+ slot = shiftResult.freedSlot;
25649
+ useQueue = false;
25650
+ logger_default.info(`\u26A1 Freed slot: ${slot} (shifted ${shiftResult.shiftedCount} posts)`);
25651
+ } else {
25652
+ useQueue = true;
25653
+ logger_default.info(`\u26A1 No posts to shift \u2014 using queue normally`);
24414
25654
  }
24415
- const upload = await client.uploadMedia(effectiveMediaPath);
24416
- const mediaItem = { type: upload.type, url: upload.url };
24417
- const effectiveThumbnailPath = item.thumbnailPath ?? item.metadata.thumbnailPath;
24418
- if (effectiveThumbnailPath && await fileExists(effectiveThumbnailPath)) {
24419
- try {
24420
- const thumbUpload = await client.uploadMedia(effectiveThumbnailPath);
24421
- const thumbUrl = thumbUpload.url;
24422
- mediaItem.thumbnail = thumbUrl;
24423
- if (latePlatform === "instagram") {
24424
- platformSpecificData = { ...platformSpecificData, instagramThumbnail: thumbUrl };
25655
+ } else if (queueId) {
25656
+ useQueue = true;
25657
+ logger_default.debug(`Using Late queue ${queueId} for ${latePlatform}/${clipType} (idea priority via batch order)`);
25658
+ } else {
25659
+ logger_default.debug(`No queue for ${latePlatform}/${clipType}, using local slot calculation`);
25660
+ const foundSlot = ideaIds.length > 0 ? await findNextSlot(latePlatform, clipType, { ideaIds, publishBy }) : await findNextSlot(latePlatform, clipType);
25661
+ slot = foundSlot ?? void 0;
25662
+ if (!slot) {
25663
+ results.push({ itemId, success: false, error: `No available slot for ${latePlatform}` });
25664
+ continue;
25665
+ }
25666
+ }
25667
+ const platform = fromLatePlatform(latePlatform);
25668
+ const accountId = await getAccountId(platform);
25669
+ if (!accountId) {
25670
+ results.push({ itemId, success: false, error: `No account for ${latePlatform}` });
25671
+ continue;
25672
+ }
25673
+ let mediaItems;
25674
+ let platformSpecificData;
25675
+ if (record.mediaFilename) {
25676
+ try {
25677
+ const localMediaPath = join(tempDir, `${itemId}-${record.mediaFilename}`);
25678
+ await downloadMediaToFile(itemId, record.mediaFilename, localMediaPath);
25679
+ const upload = await client.uploadMedia(localMediaPath);
25680
+ const mediaItem = { type: upload.type, url: upload.url };
25681
+ if (record.thumbnailFilename) {
25682
+ try {
25683
+ const localThumbPath = join(tempDir, `${itemId}-${record.thumbnailFilename}`);
25684
+ await downloadMediaToFile(itemId, record.thumbnailFilename, localThumbPath);
25685
+ const thumbUpload = await client.uploadMedia(localThumbPath);
25686
+ const thumbUrl = thumbUpload.url;
25687
+ mediaItem.thumbnail = thumbUrl;
25688
+ if (latePlatform === "instagram") {
25689
+ platformSpecificData = { ...platformSpecificData, instagramThumbnail: thumbUrl };
25690
+ }
25691
+ logger_default.info(`Uploaded thumbnail for ${String(itemId).replace(/[\r\n]/g, "")}`);
25692
+ } catch (thumbErr) {
25693
+ logger_default.warn(`Failed to upload thumbnail for ${String(itemId).replace(/[\r\n]/g, "")}: ${thumbErr instanceof Error ? thumbErr.message : String(thumbErr)}`);
24425
25694
  }
24426
- logger_default.info(`Uploaded thumbnail for ${String(item.id).replace(/[\r\n]/g, "")}`);
24427
- } catch (thumbErr) {
24428
- logger_default.warn(`Failed to upload thumbnail for ${String(item.id).replace(/[\r\n]/g, "")}: ${thumbErr instanceof Error ? thumbErr.message : String(thumbErr)}`);
24429
25695
  }
25696
+ mediaItems = [mediaItem];
25697
+ } catch (mediaErr) {
25698
+ logger_default.warn(`Failed to download/upload media for ${String(itemId).replace(/[\r\n]/g, "")}: ${mediaErr instanceof Error ? mediaErr.message : String(mediaErr)}`);
24430
25699
  }
24431
- mediaItems = [mediaItem];
24432
- }
24433
- }
24434
- const isTikTok = latePlatform === "tiktok";
24435
- const tiktokSettings = isTikTok ? {
24436
- privacy_level: "PUBLIC_TO_EVERYONE",
24437
- allow_comment: true,
24438
- allow_duet: true,
24439
- allow_stitch: true,
24440
- content_preview_confirmed: true,
24441
- express_consent_given: true
24442
- } : void 0;
24443
- const profileId = useQueue ? await getProfileId() : void 0;
24444
- const createParams = {
24445
- content: item.postContent,
24446
- platforms: [{ platform: latePlatform, accountId }],
24447
- timezone: schedConfig.timezone,
24448
- isDraft: false,
24449
- mediaItems,
24450
- platformSpecificData,
24451
- tiktokSettings
24452
- };
24453
- if (useQueue) {
24454
- createParams.queuedFromProfile = profileId;
24455
- createParams.queueId = queueId ?? void 0;
24456
- } else {
24457
- createParams.scheduledFor = slot;
24458
- }
24459
- const latePost = await client.createPost(createParams);
24460
- publishDataMap.set(itemId, {
24461
- latePostId: latePost._id,
24462
- scheduledFor: latePost.scheduledFor ?? slot ?? "",
24463
- publishedUrl: void 0,
24464
- accountId
24465
- });
24466
- results.push({ itemId, success: true, scheduledFor: latePost.scheduledFor ?? slot, latePostId: latePost._id });
24467
- } catch (itemErr) {
24468
- const itemMsg = itemErr instanceof Error ? itemErr.message : String(itemErr);
24469
- if (itemMsg.includes("429") || itemMsg.includes("Daily post limit")) {
24470
- const latePlatform = normalizePlatformString(item?.metadata.platform ?? "");
24471
- rateLimitedPlatforms.add(latePlatform);
24472
- logger_default.warn(`Approval queue: ${latePlatform} hit daily post limit, skipping remaining ${latePlatform} items`);
24473
- results.push({ itemId, success: false, error: `${latePlatform} rate-limited` });
24474
- } else {
24475
- logger_default.error(`Approval queue: failed for ${String(itemId).replace(/[\r\n]/g, "")}: ${String(itemMsg).replace(/[\r\n]/g, "")}`);
24476
- results.push({ itemId, success: false, error: itemMsg });
25700
+ }
25701
+ const isTikTok = latePlatform === "tiktok";
25702
+ const tiktokSettings = isTikTok ? {
25703
+ privacy_level: "PUBLIC_TO_EVERYONE",
25704
+ allow_comment: true,
25705
+ allow_duet: true,
25706
+ allow_stitch: true,
25707
+ content_preview_confirmed: true,
25708
+ express_consent_given: true
25709
+ } : void 0;
25710
+ const profileId = useQueue ? await getProfileId() : void 0;
25711
+ const createParams = {
25712
+ content: record.postContent,
25713
+ platforms: [{ platform: latePlatform, accountId }],
25714
+ timezone: schedConfig.timezone,
25715
+ isDraft: false,
25716
+ mediaItems,
25717
+ platformSpecificData,
25718
+ tiktokSettings
25719
+ };
25720
+ if (useQueue) {
25721
+ createParams.queuedFromProfile = profileId;
25722
+ createParams.queueId = queueId ?? void 0;
25723
+ } else {
25724
+ createParams.scheduledFor = slot;
25725
+ }
25726
+ const latePost = await client.createPost(createParams);
25727
+ await approveItem(videoSlug, itemId);
25728
+ await markPublished3(videoSlug, itemId, {
25729
+ latePostId: latePost._id,
25730
+ scheduledFor: latePost.scheduledFor ?? slot ?? "",
25731
+ publishedUrl: void 0
25732
+ });
25733
+ results.push({ itemId, success: true, scheduledFor: latePost.scheduledFor ?? slot, latePostId: latePost._id });
25734
+ } catch (itemErr) {
25735
+ const itemMsg = itemErr instanceof Error ? itemErr.message : String(itemErr);
25736
+ if (itemMsg.includes("429") || itemMsg.includes("Daily post limit")) {
25737
+ const entry2 = itemMap.get(itemId);
25738
+ const latePlatform = normalizePlatformString(entry2?.record.platform ?? "");
25739
+ rateLimitedPlatforms.add(latePlatform);
25740
+ logger_default.warn(`Approval queue: ${latePlatform} hit daily post limit, skipping remaining ${latePlatform} items`);
25741
+ results.push({ itemId, success: false, error: `${latePlatform} rate-limited` });
25742
+ } else {
25743
+ logger_default.error(`Approval queue: failed for ${String(itemId).replace(/[\r\n]/g, "")}: ${String(itemMsg).replace(/[\r\n]/g, "")}`);
25744
+ results.push({ itemId, success: false, error: itemMsg });
25745
+ }
24477
25746
  }
24478
25747
  }
25748
+ } finally {
25749
+ try {
25750
+ await removeDirectory(tempDir);
25751
+ } catch {
25752
+ }
24479
25753
  }
24480
- const successIds = itemIds.filter((id) => publishDataMap.has(id));
24481
- if (successIds.length === 1) {
24482
- const id = successIds[0];
24483
- await approveItem(id, publishDataMap.get(id));
24484
- } else if (successIds.length > 1) {
24485
- await approveBulk(successIds, publishDataMap);
24486
- }
24487
- const scheduled = successIds.length;
25754
+ const scheduled = results.filter((r) => r.success).length;
24488
25755
  const failed = itemIds.length - scheduled;
24489
25756
  if (scheduled > 0) {
24490
25757
  logger_default.info(`Approval queue: ${scheduled} of ${itemIds.length} scheduled${rateLimitedPlatforms.size > 0 ? ` (rate-limited: ${[...rateLimitedPlatforms].join(", ")})` : ""}`);
@@ -24493,7 +25760,7 @@ async function processApprovalBatch(itemIds) {
24493
25760
  }
24494
25761
 
24495
25762
  // src/L7-app/review/routes.ts
24496
- var CACHE_TTL_MS3 = 5 * 60 * 1e3;
25763
+ var CACHE_TTL_MS4 = 5 * 60 * 1e3;
24497
25764
  var cache = /* @__PURE__ */ new Map();
24498
25765
  function getCached(key) {
24499
25766
  const entry = cache.get(key);
@@ -24501,7 +25768,7 @@ function getCached(key) {
24501
25768
  cache.delete(key);
24502
25769
  return void 0;
24503
25770
  }
24504
- function setCache(key, data, ttl = CACHE_TTL_MS3) {
25771
+ function setCache(key, data, ttl = CACHE_TTL_MS4) {
24505
25772
  cache.set(key, { data, expiry: Date.now() + ttl });
24506
25773
  }
24507
25774
  async function getEarliestPublishBy(ideaIds) {
@@ -24513,59 +25780,58 @@ async function getEarliestPublishBy(ideaIds) {
24513
25780
  return void 0;
24514
25781
  }
24515
25782
  }
24516
- async function enrichQueueItem(item) {
24517
- const ideaPublishBy = item.metadata.ideaIds?.length ? await getEarliestPublishBy(item.metadata.ideaIds) : void 0;
25783
+ async function enrichReviewItem(item) {
25784
+ const ideaPublishBy = item.ideaIds?.length ? await getEarliestPublishBy(item.ideaIds) : void 0;
24518
25785
  return {
24519
25786
  ...item,
24520
25787
  ...ideaPublishBy ? { ideaPublishBy } : {}
24521
25788
  };
24522
25789
  }
24523
- async function enrichQueueItems(items) {
24524
- const allIdeaIds = /* @__PURE__ */ new Set();
24525
- for (const item of items) {
24526
- if (item.metadata.ideaIds?.length) {
24527
- for (const ideaId of item.metadata.ideaIds) {
24528
- allIdeaIds.add(ideaId);
24529
- }
24530
- }
24531
- }
24532
- let publishByMap = /* @__PURE__ */ new Map();
24533
- if (allIdeaIds.size > 0) {
24534
- try {
24535
- const ideas = await getIdeasByIds([...allIdeaIds]);
24536
- for (const idea of ideas) {
24537
- publishByMap.set(idea.id, idea.publishBy);
24538
- publishByMap.set(String(idea.issueNumber), idea.publishBy);
24539
- }
24540
- } catch {
24541
- }
24542
- }
24543
- return items.map((item) => {
24544
- if (!item.metadata.ideaIds?.length) return { ...item };
24545
- const dates = item.metadata.ideaIds.map((id) => publishByMap.get(id)).filter((publishBy) => Boolean(publishBy)).sort();
24546
- const ideaPublishBy = dates[0];
24547
- return ideaPublishBy ? { ...item, ideaPublishBy } : { ...item };
25790
+ function enrichGroupedItems(groups) {
25791
+ return groups.map((group) => {
25792
+ const firstItem = group.items[0];
25793
+ const hasMedia = group.items.some((item) => Boolean(item.mediaFilename));
25794
+ return {
25795
+ ...group,
25796
+ groupKey: group.videoSlug,
25797
+ hasMedia,
25798
+ mediaType: firstItem?.mediaType || "video",
25799
+ items: group.items.map((item) => ({ ...item }))
25800
+ };
24548
25801
  });
24549
25802
  }
24550
- async function enrichGroupedQueueItems(groups) {
24551
- return Promise.all(groups.map(async (group) => ({
24552
- ...group,
24553
- items: await enrichQueueItems(group.items)
24554
- })));
24555
- }
24556
25803
  function createRouter() {
24557
25804
  const router = Router();
25805
+ router.get("/api/media/:itemId/:filename", async (req, res) => {
25806
+ try {
25807
+ const { itemId, filename } = req.params;
25808
+ const { stream, contentType } = await getMediaStream(itemId, filename);
25809
+ res.setHeader("Content-Type", contentType);
25810
+ res.setHeader("Cache-Control", "public, max-age=3600");
25811
+ stream.pipe(res);
25812
+ stream.on("error", () => {
25813
+ if (!res.headersSent) {
25814
+ res.status(500).json({ error: "Stream error" });
25815
+ }
25816
+ });
25817
+ } catch (err) {
25818
+ const msg = err instanceof Error ? err.message : String(err);
25819
+ if (!res.headersSent) {
25820
+ res.status(404).json({ error: msg });
25821
+ }
25822
+ }
25823
+ });
24558
25824
  router.get("/api/posts/pending", async (req, res) => {
24559
- const items = await enrichQueueItems(await getPendingItems());
25825
+ const items = await listPendingItems();
24560
25826
  res.json({ items, total: items.length });
24561
25827
  });
24562
25828
  router.get("/api/posts/grouped", async (req, res) => {
24563
- const groups = await enrichGroupedQueueItems(await getGroupedPendingItems());
25829
+ const groups = enrichGroupedItems(await getGroupedItems());
24564
25830
  res.json({ groups, total: groups.length });
24565
25831
  });
24566
25832
  router.get("/api/init", async (req, res) => {
24567
25833
  const [groupsResult, accountsResult, profileResult] = await Promise.allSettled([
24568
- (async () => enrichGroupedQueueItems(await getGroupedPendingItems()))(),
25834
+ (async () => enrichGroupedItems(await getGroupedItems()))(),
24569
25835
  (async () => {
24570
25836
  const cached = getCached("accounts");
24571
25837
  if (cached) return cached;
@@ -24590,14 +25856,24 @@ function createRouter() {
24590
25856
  res.json({ groups, total: groups.length, accounts, profile });
24591
25857
  });
24592
25858
  router.get("/api/posts/:id", async (req, res) => {
24593
- const item = await getItem(req.params.id);
25859
+ const match = await findContentItemByRowKey(req.params.id);
25860
+ if (!match) return res.status(404).json({ error: "Item not found" });
25861
+ const item = await getItemById(match.partitionKey, match.rowKey);
24594
25862
  if (!item) return res.status(404).json({ error: "Item not found" });
24595
- res.json(await enrichQueueItem(item));
25863
+ res.json(await enrichReviewItem(item));
24596
25864
  });
24597
- router.post("/api/posts/:id/approve", (req, res) => {
25865
+ router.post("/api/posts/:id/approve", async (req, res) => {
24598
25866
  const itemId = req.params.id;
25867
+ const priority = req.query.priority === "true" || req.body?.priority === true;
25868
+ try {
25869
+ const match = await findContentItemByRowKey(itemId);
25870
+ if (match) {
25871
+ await approveItem(match.partitionKey, match.rowKey);
25872
+ }
25873
+ } catch {
25874
+ }
24599
25875
  res.status(202).json({ accepted: true });
24600
- enqueueApproval([itemId]).then((result) => {
25876
+ enqueueApproval([itemId], { priority }).then((result) => {
24601
25877
  if (result.scheduled > 0) {
24602
25878
  logger_default.info(`Single approve completed: ${String(itemId).replace(/[\r\n]/g, "")} \u2192 ${result.results[0]?.scheduledFor}`);
24603
25879
  } else {
@@ -24606,20 +25882,31 @@ function createRouter() {
24606
25882
  }).catch(() => {
24607
25883
  });
24608
25884
  });
24609
- router.post("/api/posts/bulk-approve", (req, res) => {
24610
- const { itemIds } = req.body;
25885
+ router.post("/api/posts/bulk-approve", async (req, res) => {
25886
+ const { itemIds, priority } = req.body;
24611
25887
  if (!Array.isArray(itemIds) || itemIds.length === 0) {
24612
25888
  return res.status(400).json({ error: "itemIds must be a non-empty array" });
24613
25889
  }
25890
+ for (const itemId of itemIds) {
25891
+ try {
25892
+ const match = await findContentItemByRowKey(itemId);
25893
+ if (match) {
25894
+ await approveItem(match.partitionKey, match.rowKey);
25895
+ }
25896
+ } catch {
25897
+ }
25898
+ }
24614
25899
  res.status(202).json({ accepted: true, count: itemIds.length });
24615
- enqueueApproval(itemIds).catch((err) => {
25900
+ enqueueApproval(itemIds, { priority: priority === true }).catch((err) => {
24616
25901
  const msg = err instanceof Error ? err.message : String(err);
24617
25902
  logger_default.error(`Bulk approve background failed: ${String(msg).replace(/[\r\n]/g, "")}`);
24618
25903
  });
24619
25904
  });
24620
25905
  router.post("/api/posts/:id/reject", async (req, res) => {
24621
25906
  try {
24622
- await rejectItem(req.params.id);
25907
+ const match = await findContentItemByRowKey(req.params.id);
25908
+ if (!match) return res.status(404).json({ error: "Item not found" });
25909
+ await rejectItem(match.partitionKey, match.rowKey);
24623
25910
  res.json({ success: true });
24624
25911
  } catch (err) {
24625
25912
  const msg = err instanceof Error ? err.message : String(err);
@@ -24633,23 +25920,32 @@ function createRouter() {
24633
25920
  }
24634
25921
  res.status(202).json({ accepted: true, count: itemIds.length });
24635
25922
  (async () => {
25923
+ const allItems = await getContentItems();
25924
+ const itemLookup = new Map(allItems.map((r) => [r.rowKey, r.partitionKey]));
24636
25925
  let succeeded = 0;
24637
25926
  for (const itemId of itemIds) {
24638
25927
  try {
24639
- await rejectItem(itemId);
25928
+ const videoSlug = itemLookup.get(itemId);
25929
+ if (!videoSlug) {
25930
+ logger_default.error(`Bulk reject: item not found: ${String(itemId).replace(/[\r\n]/g, "")}`);
25931
+ continue;
25932
+ }
25933
+ await rejectItem(videoSlug, itemId);
24640
25934
  succeeded++;
24641
25935
  } catch (err) {
24642
25936
  const msg = err instanceof Error ? err.message : String(err);
24643
25937
  logger_default.error(`Bulk reject failed for ${String(itemId).replace(/[\r\n]/g, "")}: ${String(msg).replace(/[\r\n]/g, "")}`);
24644
25938
  }
24645
25939
  }
24646
- logger_default.info(`Bulk reject completed: ${succeeded} of ${itemIds.length} removed`);
25940
+ logger_default.info(`Bulk reject completed: ${succeeded} of ${itemIds.length} rejected`);
24647
25941
  })();
24648
25942
  });
24649
25943
  router.put("/api/posts/:id", async (req, res) => {
24650
25944
  try {
24651
- const { postContent, metadata } = req.body;
24652
- const updated = await updateItem(req.params.id, { postContent, metadata });
25945
+ const { postContent } = req.body;
25946
+ const match = await findContentItemByRowKey(req.params.id);
25947
+ if (!match) return res.status(404).json({ error: "Item not found" });
25948
+ const updated = await updateItem2(match.partitionKey, match.rowKey, { postContent });
24653
25949
  if (!updated) return res.status(404).json({ error: "Item not found" });
24654
25950
  res.json(updated);
24655
25951
  } catch (err) {
@@ -24718,23 +26014,23 @@ function createRouter() {
24718
26014
  }
24719
26015
 
24720
26016
  // src/L7-app/review/server.ts
24721
- init_environment();
26017
+ init_azureStorageService();
24722
26018
  init_configLogger();
24723
26019
  var __dirname3 = dirname(fileURLToPath(import.meta.url));
24724
26020
  async function startReviewServer(options = {}) {
26021
+ if (!isAzureConfigured2()) {
26022
+ const msg = "Review server requires Azure Storage. Set AZURE_STORAGE_ACCOUNT_NAME and AZURE_STORAGE_ACCOUNT_KEY environment variables.";
26023
+ logger_default.error(msg);
26024
+ throw new Error(msg);
26025
+ }
24725
26026
  const app = default8();
24726
26027
  const port = options.port || 3847;
24727
26028
  app.use(default8.json());
24728
26029
  app.use(createRouter());
24729
- const cfg = getConfig();
24730
- const queueDir = join(cfg.OUTPUT_DIR, "publish-queue");
24731
- const publishedDir = join(cfg.OUTPUT_DIR, "published");
24732
- app.use("/media/queue", default8.static(queueDir));
24733
- app.use("/media/published", default8.static(publishedDir));
24734
26030
  const publicDir = join(__dirname3, "public");
24735
26031
  app.use(default8.static(publicDir));
24736
26032
  app.get("/{*splat}", (req, res) => {
24737
- if (!req.path.startsWith("/api/") && !req.path.startsWith("/media/")) {
26033
+ if (!req.path.startsWith("/api/")) {
24738
26034
  res.sendFile(join(publicDir, "index.html"));
24739
26035
  }
24740
26036
  });
@@ -24857,6 +26153,23 @@ program.command("realign").description("Realign all Late scheduled, cancelled, a
24857
26153
  await runRealign({ platform: opts.platform, dryRun: opts.dryRun, queue: opts.queue });
24858
26154
  process.exit(0);
24859
26155
  });
26156
+ program.command("reorder-queue").description("Reorder Late queue so newest content gets earliest slots (temp queue swap)").option("--platform <name>", "Filter to a single platform (tiktok, youtube, instagram, linkedin, x)").option("--clip-type <type>", "Filter to a clip type (short, medium-clip, video)").option("--dry-run", "Preview the reorder plan without making changes").option("--all", "Reorder all queues").action(async (opts) => {
26157
+ initConfig({});
26158
+ const { reorderQueue: reorderQueue2, reorderAllQueues: reorderAllQueues2 } = await Promise.resolve().then(() => (init_lateApiService(), lateApiService_exports));
26159
+ if (opts.all) {
26160
+ logger_default.info("Reordering ALL queues (newest-first)...");
26161
+ const result = await reorderAllQueues2({ dryRun: opts.dryRun });
26162
+ logger_default.info(`
26163
+ Total: ${result.total} posts reordered, ${result.errors} errors`);
26164
+ } else if (opts.platform && opts.clipType) {
26165
+ const result = await reorderQueue2(opts.platform, opts.clipType, { dryRun: opts.dryRun });
26166
+ logger_default.info(`Reordered: ${result.moved} posts, ${result.errors} errors`);
26167
+ } else {
26168
+ logger_default.error("Specify --platform + --clip-type, or use --all to reorder everything");
26169
+ process.exit(1);
26170
+ }
26171
+ process.exit(0);
26172
+ });
24860
26173
  program.command("reschedule").description("Reschedule idea-linked posts for optimal slot placement, displacing non-idea content").option("--dry-run", "Preview changes without updating posts").option("--queue", "Use Late API queue reshuffle instead of per-post reschedule").action(async (opts) => {
24861
26174
  const { runReschedule: runReschedule2 } = await Promise.resolve().then(() => (init_reschedule(), reschedule_exports));
24862
26175
  await runReschedule2({ dryRun: opts.dryRun, queue: opts.queue });
@@ -24966,6 +26279,7 @@ program.command("specs").description("List available pipeline spec presets and c
24966
26279
  console.log("\nUsage: vidpipe process --spec <name-or-path> video.mp4\n");
24967
26280
  process.exit(0);
24968
26281
  });
26282
+ program.addCommand(createCloudCommand());
24969
26283
  var defaultCmd = program.command("process", { isDefault: true }).argument("[video-path]", "Path to a video file to process (implies --once)").option("--watch-dir <path>", "Folder to watch for new recordings (default: env WATCH_FOLDER)").option("--output-dir <path>", "Output directory for processed videos (default: ./recordings)").option("--openai-key <key>", "OpenAI API key (default: env OPENAI_API_KEY)").option("--exa-key <key>", "Exa AI API key for web search (default: env EXA_API_KEY)").option("--youtube-key <key>", "YouTube API key (default: env YOUTUBE_API_KEY)").option("--perplexity-key <key>", "Perplexity API key (default: env PERPLEXITY_API_KEY)").option("--once", "Process a single video and exit (no watching)").option("--brand <path>", "Path to brand.json config (default: ./brand.json)").option("--no-silence-removal", "Skip silence removal stage").option("--no-shorts", "Skip shorts generation").option("--no-medium-clips", "Skip medium clip generation").option("--no-social", "Skip social media post generation").option("--no-captions", "Skip caption generation/burning").option("--no-visual-enhancement", "Skip visual enhancement (AI image overlays)").option("--no-intro-outro", "Skip intro/outro concatenation").option("--no-social-publish", "Skip social media publishing/queue-build stage").option("--spec <nameOrPath>", "Pipeline spec preset name or YAML file path").option("--late-api-key <key>", "Late API key (default: env LATE_API_KEY)").option("--late-profile-id <id>", "Late profile ID (default: env LATE_PROFILE_ID)").option("--ideas <ids>", "Comma-separated idea IDs to link to this video").option("--publish-by <date>", "Publish-by deadline for auto-created ideas (ISO date or +Nd for relative, default: +7d)").option("-v, --verbose", "Verbose logging").option("--progress", "Emit structured JSON progress events to stderr").option("--doctor", "Check all prerequisites and exit").action(async (videoPath) => {
24970
26284
  const opts = defaultCmd.opts();
24971
26285
  if (opts.doctor) {