@gfxlabs/third-eye-cli 3.23.1 → 3.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin.mjs CHANGED
@@ -1,4 +1,5 @@
1
1
  #!/usr/bin/env node
2
+ import { createRequire } from "node:module";
2
3
  import path, { join, normalize } from "node:path";
3
4
  import { existsSync, mkdirSync, readFileSync, readdirSync, unlinkSync, writeFileSync } from "node:fs";
4
5
  import { fileURLToPath } from "node:url";
@@ -25,6 +26,9 @@ import { createORPCClient } from "@orpc/client";
25
26
  import { RPCLink } from "@orpc/client/fetch";
26
27
  import { execa } from "execa";
27
28
  import { XMLParser } from "fast-xml-parser";
29
+ //#region \0rolldown/runtime.js
30
+ var __require = /* @__PURE__ */ createRequire(import.meta.url);
31
+ //#endregion
28
32
  //#region src/log.ts
29
33
  const logMemory = [];
30
34
  const serializeError = (error) => ({
@@ -860,7 +864,7 @@ const collectLadleStories = async (ladleUrl) => {
860
864
  };
861
865
  //#endregion
862
866
  //#region src/crawler/storybook.ts
863
- const kebabCase = (str) => (str ?? "").replace(/([a-z\d])([A-Z])/g, "$1-$2").replace(/[\s_/]+/g, "-").toLowerCase();
867
/** Kebab-case helper for storybook filenames: splits camelCase boundaries, collapses whitespace/underscores/slashes to dashes, lowercases. */
const kebabCase$1 = (str) => {
  const withBoundaries = (str ?? "").replace(/([a-z\d])([A-Z])/g, "$1-$2");
  return withBoundaries.replace(/[\s_/]+/g, "-").toLowerCase();
};
864
868
  const getStoryBookUrl = (url) => {
865
869
  if (url.startsWith("http://") || url.startsWith("https://") || url.startsWith("file://")) return url;
866
870
  if (url.startsWith("/")) return `file://${url}`;
@@ -996,9 +1000,9 @@ const generateStoryUrl = (iframeUrl, storyId, args, breakpoint) => {
996
1000
  const generateFilename = (kind, story, prefix, suffix) => {
997
1001
  return [
998
1002
  prefix,
999
- kebabCase(kind),
1000
- kebabCase(story),
1001
- kebabCase(suffix)
1003
+ kebabCase$1(kind),
1004
+ kebabCase$1(story),
1005
+ kebabCase$1(suffix)
1002
1006
  ].filter(Boolean).join("--");
1003
1007
  };
1004
1008
  const generateStorybookShotItems = (baseUrl, stories, mask, modeBreakpoints, browser) => {
@@ -1278,6 +1282,28 @@ const takeScreenShot = async ({ browser, shotItem, logger }) => {
1278
1282
  try {
1279
1283
  await page.evaluate(() => document.fonts.ready);
1280
1284
  } catch {}
1285
+ try {
1286
+ await page.addStyleTag({ content: `
1287
+ *, *::before, *::after {
1288
+ animation-delay: -0.0001ms !important;
1289
+ animation-duration: 0s !important;
1290
+ animation-play-state: paused !important;
1291
+ transition-duration: 0s !important;
1292
+ transition-delay: 0s !important;
1293
+ }
1294
+ ` });
1295
+ await page.evaluate(() => {
1296
+ document.querySelectorAll("svg").forEach((svg) => {
1297
+ try {
1298
+ svg.pauseAnimations?.();
1299
+ } catch {}
1300
+ });
1301
+ document.getAnimations().forEach((anim) => {
1302
+ anim.pause();
1303
+ anim.currentTime = 0;
1304
+ });
1305
+ });
1306
+ } catch {}
1281
1307
  await sleep(shotItem?.waitBeforeScreenshot ?? config.waitBeforeScreenshot);
1282
1308
  try {
1283
1309
  if (shotItem.viewport) {
@@ -1474,7 +1500,12 @@ const collectHistoireStories = async (histoireUrl) => {
1474
1500
  };
1475
1501
  //#endregion
1476
1502
  //#region src/createShots.ts
1477
- const createShots = async () => {
1503
+ /**
1504
+ * @param turboSnapFilter - When provided, only stories whose shotName is in this set
1505
+ * will be screenshotted. Stories not in the set are skipped entirely (no Playwright
1506
+ * navigation), which is the main performance win of TurboSnap.
1507
+ */
1508
+ const createShots = async (turboSnapFilter) => {
1478
1509
  const { ladleShots, histoireShots, storybookShots, pageShots, customShots, imagePathCurrent } = config;
1479
1510
  let storybookShotItems = [];
1480
1511
  let ladleShotItems = [];
@@ -1558,10 +1589,12 @@ const createShots = async () => {
1558
1589
  await mapLimit(browsers, 1, async (browser) => {
1559
1590
  const shotItems = generateStorybookShotItems(storybookWebUrl, collection.stories, mask, storybookShots.breakpoints, browsers.length > 1 ? browser : void 0);
1560
1591
  const filterItemsToCheck = "filterItemsToCheck" in config ? config.filterItemsToCheck : void 0;
1561
- const filteredShotItems = filterItemsToCheck ? shotItems.filter((item) => filterItemsToCheck(item)) : shotItems;
1592
+ let filteredShotItems = filterItemsToCheck ? shotItems.filter((item) => filterItemsToCheck(item)) : shotItems;
1593
+ if (turboSnapFilter) filteredShotItems = filteredShotItems.filter((item) => turboSnapFilter.has(item.shotName) || turboSnapFilter.has(`${item.shotMode}/${item.shotName}`));
1562
1594
  storybookShotItems = shotItems;
1563
- log.process("info", "general", `Prepared ${filteredShotItems.length} stories for screenshots on ${browser.name()}`);
1564
- await takeScreenShots(filteredShotItems, browser);
1595
+ const capturedItems = filteredShotItems;
1596
+ log.process("info", "general", turboSnapFilter ? `Prepared ${capturedItems.length}/${shotItems.length} stories for screenshots on ${browser.name()} (TurboSnap: ${shotItems.length - capturedItems.length} skipped)` : `Prepared ${capturedItems.length} stories for screenshots on ${browser.name()}`);
1597
+ await takeScreenShots(capturedItems, browser);
1565
1598
  });
1566
1599
  localServer?.close();
1567
1600
  } catch (error) {
@@ -1604,6 +1637,76 @@ const createShots = async () => {
1604
1637
  ];
1605
1638
  };
1606
1639
  //#endregion
1640
+ //#region src/git.ts
1641
+ const INITIAL_BATCH_SIZE = 20;
1642
/**
 * Run a git command synchronously and return its trimmed stdout.
 * Any failure (not a repository, bad ref, 10s timeout) yields "" so
 * callers can treat "no output" and "error" uniformly.
 */
const execGit = (command) => {
  const options = {
    encoding: "utf-8",
    timeout: 1e4
  };
  try {
    const stdout = execSync(command, options);
    return stdout.trim();
  } catch {
    return "";
  }
};
1655
/**
 * Find the "covering" set of ancestor commits that have builds on the server.
 *
 * This mirrors Chromatic's approach:
 * 1. Walk git history with `git rev-list`
 * 2. Ask the server which commits have builds
 * 3. Use `--not` to exclude ancestors of commits with builds
 * 4. Repeat with exponentially larger batches until no more uncovered commits
 *
 * The result is the minimal set of ancestor commits with builds such that
 * every ancestor of HEAD either has a build or is an ancestor of a commit
 * with a build.
 *
 * @param hasBuildsWithCommits - async (commits: string[]) => string[]; returns
 *   the subset of the given commit hashes that have builds on the server.
 * @returns Covering ancestor commit hashes; [] when not in a git repository,
 *   at the initial commit, or when no ancestor has a build.
 */
const getParentCommits = async (hasBuildsWithCommits) => {
  if (!execGit("git rev-parse HEAD")) {
    log.process("info", "general", "Not a git repository, skipping ancestor detection");
    return [];
  }
  // `--skip=1` probes for a second commit; empty output means HEAD is the root.
  if (!execGit("git --no-pager log -n 1 --skip=1 --format=\"%H\"")) {
    log.process("info", "general", "Initial commit, no ancestors");
    return [];
  }
  let commitsWithBuilds = [];
  const commitsWithoutBuilds = [];
  // O(1) membership for already-classified commits; the previous
  // Array.includes filters made every batch O(n^2) over walked history.
  const seen = new Set();
  let limit = INITIAL_BATCH_SIZE;
  for (;;) {
    const notArgs = commitsWithBuilds.map((c) => c.trim()).join(" ");
    // Over-fetch by the number of known build-less commits so each batch can
    // still surface up to `limit` genuinely new candidates.
    const output = execGit(`git rev-list HEAD -n ${limit + commitsWithoutBuilds.length}${notArgs ? ` --not ${notArgs}` : ""}`);
    const candidates = (output ? output.split("\n").filter(Boolean) : []).filter((c) => !seen.has(c)).slice(0, limit);
    if (candidates.length === 0) break;
    log.process("info", "general", `🔍 Checking ${candidates.length} commits for builds (batch size ${limit})`);
    const newCommitsWithBuilds = await hasBuildsWithCommits(candidates);
    const withBuilds = new Set(newCommitsWithBuilds);
    commitsWithBuilds = [...commitsWithBuilds, ...newCommitsWithBuilds];
    // Mark everything the server reported too, in case it returns commits
    // outside the candidate batch.
    for (const c of newCommitsWithBuilds) seen.add(c);
    for (const c of candidates) {
      seen.add(c);
      if (!withBuilds.has(c)) commitsWithoutBuilds.push(c);
    }
    limit *= 2;
    if (commitsWithoutBuilds.length > 1e4) {
      log.process("info", "general", "Reached max history depth (10000 commits)");
      break;
    }
  }
  if (commitsWithBuilds.length === 0) {
    log.process("info", "general", "No ancestor builds found — this may be the first build");
    return [];
  }
  if (commitsWithBuilds.length > 1) {
    // Reduce to the maximal commits: `X^@` names all parents of X, so this
    // drops any build commit that is an ancestor of another build commit.
    const parentArgs = commitsWithBuilds.map((c) => `"${c}^@"`).join(" ");
    const maxOutput = execGit(`git rev-list ${commitsWithBuilds.join(" ")} --not ${parentArgs}`);
    const maxCommits = maxOutput ? maxOutput.split("\n").filter(Boolean) : [];
    if (maxCommits.length > 0) commitsWithBuilds = maxCommits;
  }
  log.process("info", "general", `📌 Found ${commitsWithBuilds.length} ancestor build(s): ${commitsWithBuilds.map((c) => c.slice(0, 7)).join(", ")}`);
  return commitsWithBuilds;
};
1709
+ //#endregion
1607
1710
  //#region ../shared/dist/client.js
1608
1711
  /**
1609
1712
  * Typed oRPC client factory.
@@ -1672,7 +1775,7 @@ const getApiToken = async (config) => {
1672
1775
  process.exit(1);
1673
1776
  }
1674
1777
  };
1675
- const sendInitToAPI = async (config, apiToken) => {
1778
+ const sendInitToAPI = async (config, apiToken, parentCommits) => {
1676
1779
  const client = createClient(config.thirdEyePlatform, void 0, apiToken);
1677
1780
  return withRetry("init", () => client.orgs.projects.builds.init({
1678
1781
  orgId: config.thirdEyeOrgId,
@@ -1681,9 +1784,18 @@ const sendInitToAPI = async (config, apiToken) => {
1681
1784
  branchName: config.commitRefName,
1682
1785
  buildNumber: config.ciBuildNumber,
1683
1786
  baseBranch: config.baseBranch || void 0,
1684
- prNumber: config.prNumber
1787
+ prNumber: config.prNumber,
1788
+ parentCommits
1685
1789
  }));
1686
1790
  };
1791
/**
 * Ask the platform which of the given commit hashes already have builds.
 * Returns the subset of `commits` known to the server.
 */
const sendHasBuildsWithCommitsToAPI = async (config, apiToken, commits) => {
  const client = createClient(config.thirdEyePlatform, void 0, apiToken);
  const response = await withRetry("hasBuildsWithCommits", () => client.orgs.projects.builds.hasBuildsWithCommits({
    orgId: config.thirdEyeOrgId,
    projectId: config.thirdEyeProjectId,
    commits
  }));
  return response.commits;
};
1687
1799
  const sendFinalizeToAPI = async (config, apiToken) => {
1688
1800
  const client = createClient(config.thirdEyePlatform, void 0, apiToken);
1689
1801
  return withRetry("finalize", () => client.orgs.projects.builds.finalize({
@@ -1773,7 +1885,7 @@ const uploadStorybookArchive = async (config, apiToken, projectId, buildId, arch
1773
1885
  };
1774
1886
  //#endregion
1775
1887
  //#region src/upload.ts
1776
- const uploadRequiredShots = async ({ config, apiToken, uploadToken, requiredFileHashes, extendedShotItems }) => {
1888
+ const uploadRequiredShots = async ({ config, apiToken, uploadToken, requiredFileHashes, extendedShotItems, dependencyMap }) => {
1777
1889
  if (requiredFileHashes.length > 0) {
1778
1890
  log.process("info", "api", "Uploading shots");
1779
1891
  const uploadStart = process.hrtime();
@@ -1803,7 +1915,8 @@ const uploadRequiredShots = async ({ config, apiToken, uploadToken, requiredFile
1803
1915
  ...shotItem.tags ? { tags: shotItem.tags } : {},
1804
1916
  ...shotItem.viewport ? { viewport: shotItem.viewport } : {},
1805
1917
  ...shotItem.breakpoint !== void 0 ? { breakpoint: shotItem.breakpoint } : {},
1806
- ...domHtml ? { dom_html: domHtml } : {}
1918
+ ...domHtml ? { dom_html: domHtml } : {},
1919
+ ...dependencyMap && shotItem.importPath && dependencyMap.has(shotItem.importPath) ? { dependencies: dependencyMap.get(shotItem.importPath) } : {}
1807
1920
  },
1808
1921
  logger
1809
1922
  });
@@ -1814,7 +1927,168 @@ const uploadRequiredShots = async ({ config, apiToken, uploadToken, requiredFile
1814
1927
  return true;
1815
1928
  };
1816
1929
  //#endregion
1930
+ //#region src/turbosnap.ts
1931
+ /**
1932
+ * TurboSnap: static import tracer for determining which stories are affected by code changes.
1933
+ *
1934
+ * Given a list of changed files (from git diff) and the storybook index (importPath per story),
1935
+ * this module traces each story file's transitive imports to determine which stories need
1936
+ * re-screenshotting. Stories whose dependency tree does not overlap with any changed file are
1937
+ * skipped entirely — they never get launched in Playwright, saving the majority of run time.
1938
+ *
1939
+ * The import tracer is intentionally simple: it parses `import`/`require` statements with a
1940
+ * regex, resolves relative and aliased paths, and follows them recursively with cycle detection.
1941
+ * It does not evaluate dynamic imports or re-exports — those are rare in component code and the
1942
+ * fallback is always "capture the story" (safe default).
1943
+ */
1944
// File extensions tried, in order, when resolving an extensionless import.
const RESOLVE_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"];
// Directory index candidates derived from the same extension list.
const INDEX_FILES = RESOLVE_EXTENSIONS.map((ext) => `index${ext}`);
/**
 * Regex that captures the string literal from import/require statements.
 * Handles:
 *   import ... from 'foo'
 *   import ... from "foo"
 *   import 'foo'
 *   export ... from 'foo'
 *   require('foo')
 */
const IMPORT_RE = /(?:import\s+(?:[\s\S]*?\s+from\s+)?|export\s+(?:[\s\S]*?\s+from\s+)?|require\s*\()["']([^"']+)["']/g;
1963
/**
 * Parse tsconfig-style path aliases into a simple prefix → directory map.
 * E.g. `{ "#/*": ["./src/app/*"] }` → `{ "#/": "/abs/path/src/app" }`
 * Returns {} when the file is missing or unparseable.
 */
const parsePathAliases = (tsconfigPath, projectRoot) => {
  const aliases = {};
  try {
    // Crude JSONC → JSON: drop line comments, block comments, trailing commas.
    // NOTE(review): the line-comment strip would also eat "//" inside string
    // values (e.g. URLs); acceptable for typical tsconfig "paths" content.
    const raw = readFileSync(tsconfigPath, "utf-8");
    const stripped = raw
      .replace(/\/\/[^\n]*/g, "")
      .replace(/\/\*[\s\S]*?\*\//g, "")
      .replace(/,(\s*[}\]])/g, "$1");
    const pathsConfig = JSON.parse(stripped).compilerOptions?.paths;
    if (!pathsConfig) return aliases;
    for (const [pattern, targets] of Object.entries(pathsConfig)) {
      if (targets.length === 0) continue;
      // Only the first target is honored; the trailing "*" is stripped from
      // both sides so prefix matching can be done with startsWith.
      const prefix = pattern.replace(/\*$/, "");
      aliases[prefix] = path.resolve(projectRoot, targets[0].replace(/\*$/, ""));
    }
  } catch {}
  return aliases;
};
1982
/**
 * Resolve a single import specifier to an absolute file path.
 * Returns undefined if the import is external (node_modules) or unresolvable.
 * An alias match takes precedence over relative resolution.
 */
const resolveImport = (specifier, fromFile, aliases) => {
  const aliasEntry = Object.entries(aliases).find(([prefix]) => specifier.startsWith(prefix));
  const isPathLike = specifier.startsWith(".") || specifier.startsWith("/");
  // Bare specifiers with no alias match are external packages — skip them.
  if (!isPathLike && !aliasEntry) return;
  let base;
  if (aliasEntry) {
    const [prefix, target] = aliasEntry;
    base = path.join(target, specifier.slice(prefix.length));
  } else {
    base = path.resolve(path.dirname(fromFile), specifier);
  }
  // Exact file hit (a bare directory is not a file match).
  if (existsSync(base) && !isDirectory(base)) return base;
  // Try each known extension appended to the specifier.
  for (const ext of RESOLVE_EXTENSIONS) {
    const candidate = base + ext;
    if (existsSync(candidate)) return candidate;
  }
  // Finally, try directory index files.
  for (const indexFile of INDEX_FILES) {
    const candidate = path.join(base, indexFile);
    if (existsSync(candidate)) return candidate;
  }
};
2004
/** True when the path exists and is a directory; false on any stat failure. */
const isDirectory = (p) => {
  try {
    const fsModule = __require("node:fs");
    return fsModule.statSync(p).isDirectory();
  } catch {
    return false;
  }
};
2012
/**
 * Extract all import/require specifiers from a source file.
 * Returns [] when the file cannot be read.
 */
const extractImports = (filePath) => {
  try {
    const source = readFileSync(filePath, "utf-8");
    // matchAll iterates on an internal clone, but it inherits lastIndex from
    // the shared /g regex — reset first so every call scans from the top.
    IMPORT_RE.lastIndex = 0;
    return [...source.matchAll(IMPORT_RE)]
      .map((match) => match[1])
      .filter(Boolean);
  } catch {
    return [];
  }
};
2027
/**
 * Trace all transitive dependencies of a file recursively.
 * Returns a Set of absolute file paths the file depends on (including itself).
 *
 * The shared cache doubles as cycle protection: a file's (possibly still
 * growing) Set is registered *before* its imports are walked, so revisiting
 * a file mid-cycle returns immediately instead of recursing forever. A
 * consequence is that files inside an import cycle may be cached with a
 * partially-populated Set — the safe direction for TurboSnap (never skips
 * more than it should... it skips less).
 */
const traceDependencies = (entryFile, projectRoot, aliases, _cache) => {
  const cache = _cache ?? new Map();
  const absEntry = path.isAbsolute(entryFile) ? entryFile : path.resolve(projectRoot, entryFile);
  const memoized = cache.get(absEntry);
  if (memoized) return memoized;
  const deps = new Set([absEntry]);
  // Register before recursing so cycles terminate.
  cache.set(absEntry, deps);
  if (!existsSync(absEntry)) return deps;
  for (const specifier of extractImports(absEntry)) {
    const resolved = resolveImport(specifier, absEntry, aliases);
    if (!resolved || deps.has(resolved)) continue;
    deps.add(resolved);
    for (const transitive of traceDependencies(resolved, projectRoot, aliases, cache)) {
      deps.add(transitive);
    }
  }
  return deps;
};
2049
/**
 * Determine which stories are affected by a set of changed files.
 *
 * @param stories - Array of { shotName, importPath } from the storybook index
 * @param changedFiles - Relative file paths from `git diff --name-only`
 * @param projectRoot - Absolute path to the project root (for resolving imports)
 * @returns { affected, skipped, total, dependencyMap } — which stories to
 *   capture, which to skip, and each story's traced dependency list
 *   (relative paths keyed by importPath).
 */
const getAffectedStoriesLocal = (stories, changedFiles, projectRoot) => {
  const aliases = parsePathAliases(path.join(projectRoot, "tsconfig.json"), projectRoot);
  const changedAbsolute = new Set(changedFiles.map((file) => path.resolve(projectRoot, file)));
  const affected = [];
  const skipped = [];
  const dependencyMap = new Map();
  // One cache shared across all stories so common modules are traced once.
  const depCache = new Map();
  for (const { shotName, importPath } of stories) {
    if (!importPath) {
      // No import path in the index — cannot trace, so capture it (safe default).
      affected.push(shotName);
      continue;
    }
    const deps = traceDependencies(path.resolve(projectRoot, importPath), projectRoot, aliases, depCache);
    dependencyMap.set(importPath, [...deps].map((dep) => path.relative(projectRoot, dep)));
    let touchesChange = false;
    for (const dep of deps) {
      if (changedAbsolute.has(dep)) {
        touchesChange = true;
        break;
      }
    }
    (touchesChange ? affected : skipped).push(shotName);
  }
  return {
    affected,
    skipped,
    total: stories.length,
    dependencyMap
  };
};
2087
+ //#endregion
1817
2088
  //#region src/runner.ts
2089
/** Kebab-case a string: split camelCase boundaries, collapse whitespace/underscores/slashes to dashes, lowercase. */
const kebabCase = (str) => {
  const value = str ?? "";
  return value
    .replace(/([a-z\d])([A-Z])/g, "$1-$2")
    .replace(/[\s_/]+/g, "-")
    .toLowerCase();
};
/** Generate the shot name from a storybook title and story name, matching the crawler's logic. */
const generateShotName = (title, name) => {
  const parts = [kebabCase(title), kebabCase(name)];
  return parts.filter(Boolean).join("--");
};
1818
2092
  /**
1819
2093
  * Get the list of files changed between the current HEAD and a base ref
1820
2094
  * using git diff. Returns an empty array if git is unavailable or fails.
@@ -1919,30 +2193,46 @@ const platformRunner = async (config, apiToken) => {
1919
2193
  `commitRefName = ${config.commitRefName}`,
1920
2194
  `commitHash = ${config.commitHash}`
1921
2195
  ].join("\n - "));
1922
- await sendInitToAPI(config, apiToken);
2196
+ log.process("info", "general", "🔍 Resolving ancestor builds from git history...");
2197
+ await sendInitToAPI(config, apiToken, await getParentCommits((commits) => sendHasBuildsWithCommitsToAPI(config, apiToken, commits)));
1923
2198
  if (!await checkForCachedBuild(config, apiToken)) {
1924
2199
  log.process("info", "general", "📂 Creating shot folders");
1925
2200
  const createShotsStart = process.hrtime();
1926
2201
  createShotsFolders();
1927
- log.process("info", "general", "📸 Creating shots");
1928
- let shotItems = await createShots();
2202
+ let turboSnapFilter;
2203
+ let turboSnapDependencyMap;
1929
2204
  if (config.turboSnap && config.baseBranch) {
1930
2205
  log.process("info", "general", `⚡ TurboSnap enabled, checking changed files against ${config.baseBranch}`);
1931
2206
  const changedFiles = getChangedFiles(config.baseBranch);
1932
2207
  if (changedFiles.length > 0) {
1933
2208
  log.process("info", "general", `Found ${changedFiles.length} changed file(s)`);
1934
2209
  try {
1935
- const turboResult = await getAffectedStories(config, apiToken, changedFiles);
1936
- log.process("info", "general", `TurboSnap: ${turboResult.affectedCount} affected, ${turboResult.skippedCount} skipped`);
1937
- if (turboResult.affected.length > 0) {
1938
- const affectedSet = new Set(turboResult.affected);
1939
- shotItems = shotItems.filter((item) => affectedSet.has(item.shotName) || affectedSet.has(`${item.shotMode}/${item.shotName}`));
2210
+ const storybookPath = config.storybookShots?.storybookUrl ?? "";
2211
+ const indexJsonPath = path.join(storybookPath.startsWith("http") ? "" : storybookPath, "index.json");
2212
+ if (existsSync(indexJsonPath)) {
2213
+ const indexJson = JSON.parse(fs.readFileSync(indexJsonPath, "utf-8"));
2214
+ const entries = indexJson.entries ?? indexJson.stories ?? {};
2215
+ const turboResult = getAffectedStoriesLocal(Object.values(entries).filter((e) => e.type !== "docs").map((e) => ({
2216
+ shotName: generateShotName(e.title, e.name),
2217
+ importPath: e.importPath
2218
+ })), changedFiles, process.cwd());
2219
+ log.process("info", "general", `⚡ TurboSnap: ${turboResult.affected.length} affected, ${turboResult.skipped.length} skipped out of ${turboResult.total} stories`);
2220
+ turboSnapFilter = new Set(turboResult.affected);
2221
+ turboSnapDependencyMap = turboResult.dependencyMap;
2222
+ } else try {
2223
+ const serverResult = await getAffectedStories(config, apiToken, changedFiles);
2224
+ log.process("info", "general", `⚡ TurboSnap (server): ${serverResult.affectedCount} affected, ${serverResult.skippedCount} skipped`);
2225
+ if (serverResult.affected.length > 0) turboSnapFilter = new Set(serverResult.affected);
2226
+ } catch (error) {
2227
+ if (error instanceof Error) log.process("warn", "general", `TurboSnap server query failed, capturing all stories: ${error.message}`);
1940
2228
  }
1941
2229
  } catch (error) {
1942
2230
  if (error instanceof Error) log.process("warn", "general", `TurboSnap filtering failed, capturing all stories: ${error.message}`);
1943
2231
  }
1944
2232
  } else log.process("info", "general", "TurboSnap: no changed files detected, capturing all stories");
1945
2233
  }
2234
+ log.process("info", "general", "📸 Creating shots");
2235
+ let shotItems = await createShots(turboSnapFilter);
1946
2236
  const shotNames = shotItems.map((shotItem) => shotItem.shotName);
1947
2237
  const uniqueShotNames = new Set(shotNames);
1948
2238
  if (shotNames.length !== uniqueShotNames.size) {
@@ -1978,7 +2268,8 @@ const platformRunner = async (config, apiToken) => {
1978
2268
  apiToken,
1979
2269
  uploadToken,
1980
2270
  requiredFileHashes,
1981
- extendedShotItems
2271
+ extendedShotItems,
2272
+ dependencyMap: turboSnapDependencyMap
1982
2273
  });
1983
2274
  await processShots(config, apiToken, uploadToken, shotItems.map((shotItem) => ({
1984
2275
  name: `${shotItem.shotMode}/${shotItem.shotName}`,