metro-file-map 0.83.5 → 0.83.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57):
  1. package/package.json +1 -1
  2. package/src/Watcher.d.ts +13 -9
  3. package/src/Watcher.js +66 -39
  4. package/src/Watcher.js.flow +84 -51
  5. package/src/cache/DiskCacheManager.d.ts +8 -1
  6. package/src/constants.d.ts +8 -1
  7. package/src/crawlers/node/hasNativeFindSupport.d.ts +8 -1
  8. package/src/crawlers/node/index.d.ts +10 -5
  9. package/src/crawlers/node/index.js +4 -1
  10. package/src/crawlers/node/index.js.flow +8 -6
  11. package/src/crawlers/watchman/index.d.ts +12 -12
  12. package/src/crawlers/watchman/index.js.flow +2 -6
  13. package/src/flow-types.d.ts +88 -32
  14. package/src/flow-types.js.flow +89 -29
  15. package/src/index.d.ts +11 -4
  16. package/src/index.js +145 -120
  17. package/src/index.js.flow +199 -149
  18. package/src/lib/FileProcessor.d.ts +8 -1
  19. package/src/lib/FileSystemChangeAggregator.d.ts +40 -0
  20. package/src/lib/FileSystemChangeAggregator.js +89 -0
  21. package/src/lib/FileSystemChangeAggregator.js.flow +143 -0
  22. package/src/lib/RootPathUtils.d.ts +8 -1
  23. package/src/lib/TreeFS.d.ts +23 -8
  24. package/src/lib/TreeFS.js +67 -16
  25. package/src/lib/TreeFS.js.flow +89 -16
  26. package/src/lib/checkWatchmanCapabilities.d.ts +8 -1
  27. package/src/lib/normalizePathSeparatorsToPosix.d.ts +8 -1
  28. package/src/lib/normalizePathSeparatorsToSystem.d.ts +8 -1
  29. package/src/lib/rootRelativeCacheKeys.d.ts +8 -1
  30. package/src/lib/sorting.d.ts +8 -1
  31. package/src/plugins/DependencyPlugin.d.ts +9 -13
  32. package/src/plugins/DependencyPlugin.js +1 -3
  33. package/src/plugins/DependencyPlugin.js.flow +1 -16
  34. package/src/plugins/HastePlugin.d.ts +10 -11
  35. package/src/plugins/HastePlugin.js +11 -11
  36. package/src/plugins/HastePlugin.js.flow +12 -12
  37. package/src/plugins/MockPlugin.d.ts +10 -5
  38. package/src/plugins/MockPlugin.js +17 -20
  39. package/src/plugins/MockPlugin.js.flow +18 -22
  40. package/src/plugins/dependencies/dependencyExtractor.d.ts +1 -1
  41. package/src/plugins/haste/DuplicateHasteCandidatesError.d.ts +8 -1
  42. package/src/plugins/haste/HasteConflictsError.d.ts +8 -1
  43. package/src/plugins/haste/computeConflicts.d.ts +8 -1
  44. package/src/plugins/haste/getPlatformExtension.d.ts +8 -1
  45. package/src/plugins/mocks/getMockName.d.ts +8 -1
  46. package/src/watchers/AbstractWatcher.d.ts +8 -1
  47. package/src/watchers/FallbackWatcher.d.ts +8 -1
  48. package/src/watchers/FallbackWatcher.js +19 -3
  49. package/src/watchers/FallbackWatcher.js.flow +28 -5
  50. package/src/watchers/NativeWatcher.d.ts +9 -2
  51. package/src/watchers/NativeWatcher.js +27 -5
  52. package/src/watchers/NativeWatcher.js.flow +33 -6
  53. package/src/watchers/RecrawlWarning.d.ts +8 -1
  54. package/src/watchers/WatchmanWatcher.d.ts +8 -1
  55. package/src/watchers/common.d.ts +10 -1
  56. package/src/watchers/common.js +6 -1
  57. package/src/watchers/common.js.flow +1 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "metro-file-map",
3
- "version": "0.83.5",
3
+ "version": "0.83.6",
4
4
  "description": "[Experimental] - 🚇 File crawling, watching and mapping for Metro",
5
5
  "main": "src/index.js",
6
6
  "exports": {
package/src/Watcher.d.ts CHANGED
@@ -4,26 +4,26 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
+ * @generated SignedSource<<25fee66c7d26ad53cdd5bbab454fe50b>>
9
+ *
10
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
11
+ * Original file: packages/metro-file-map/src/Watcher.js
12
+ * To regenerate, run:
13
+ * js1 build metro-ts-defs (internal) OR
14
+ * yarn run build-ts-defs (OSS)
8
15
  */
9
16
 
10
17
  import type {
11
18
  Console,
12
19
  CrawlerOptions,
13
- FileData,
14
- Path,
20
+ CrawlResult,
15
21
  PerfLogger,
16
22
  WatcherBackendChangeEvent,
17
- WatchmanClocks,
18
23
  } from './flow-types';
19
24
 
20
25
  import EventEmitter from 'events';
21
26
 
22
- type CrawlResult = {
23
- changedFiles: FileData;
24
- clocks?: WatchmanClocks;
25
- removedFiles: Set<Path>;
26
- };
27
27
  type WatcherOptions = {
28
28
  abortSignal: AbortSignal;
29
29
  computeSha1: boolean;
@@ -64,6 +64,10 @@ export type HealthCheckResult =
64
64
  export declare class Watcher extends EventEmitter {
65
65
  constructor(options: WatcherOptions);
66
66
  crawl(): Promise<CrawlResult>;
67
+ recrawl(
68
+ subpath: string,
69
+ currentFileSystem: CrawlerOptions['previousState']['fileSystem'],
70
+ ): Promise<CrawlResult>;
67
71
  watch(onChange: (change: WatcherBackendChangeEvent) => void): void;
68
72
  close(): void;
69
73
  checkHealth(timeout: number): Promise<HealthCheckResult>;
package/src/Watcher.js CHANGED
@@ -69,10 +69,32 @@ class Watcher extends _events.default {
69
69
  async crawl() {
70
70
  this.#options.perfLogger?.point("crawl_start");
71
71
  const options = this.#options;
72
+ const result = await this.#crawl({
73
+ previousState: options.previousState,
74
+ roots: options.roots,
75
+ useWatchman: options.useWatchman,
76
+ });
77
+ this.#options.perfLogger?.point("crawl_end");
78
+ return result;
79
+ }
80
+ async recrawl(subpath, currentFileSystem) {
81
+ return this.#crawl({
82
+ previousState: {
83
+ clocks: new Map(),
84
+ fileSystem: currentFileSystem,
85
+ },
86
+ roots: [path.join(this.#options.rootDir, subpath)],
87
+ subpath,
88
+ useWatchman: false,
89
+ });
90
+ }
91
+ async #crawl(crawlOptions) {
92
+ const options = this.#options;
93
+ const { useWatchman, subpath } = crawlOptions;
72
94
  const ignoreForCrawl = (filePath) =>
73
95
  options.ignoreForCrawl(filePath) ||
74
96
  path.basename(filePath).startsWith(this.#options.healthCheckFilePrefix);
75
- const crawl = options.useWatchman ? _watchman.default : _node.default;
97
+ const crawl = useWatchman ? _watchman.default : _node.default;
76
98
  let crawler = crawl === _watchman.default ? "watchman" : "node";
77
99
  options.abortSignal.throwIfAborted();
78
100
  const crawlerOptions = {
@@ -87,49 +109,47 @@ class Watcher extends _events.default {
87
109
  this.emit("status", status);
88
110
  },
89
111
  perfLogger: options.perfLogger,
90
- previousState: options.previousState,
112
+ previousState: crawlOptions.previousState,
91
113
  rootDir: options.rootDir,
92
- roots: options.roots,
114
+ roots: crawlOptions.roots,
115
+ subpath,
93
116
  };
94
- const retry = (error) => {
95
- if (crawl === _watchman.default) {
96
- crawler = "node";
97
- options.console.warn(
98
- "metro-file-map: Watchman crawl failed. Retrying once with node " +
99
- "crawler.\n" +
100
- " Usually this happens when watchman isn't running. Create an " +
101
- "empty `.watchmanconfig` file in your project's root folder or " +
102
- "initialize a git or hg repository in your project.\n" +
103
- " " +
104
- error.toString(),
105
- );
106
- return (0, _node.default)(crawlerOptions).catch((e) => {
107
- throw new Error(
108
- "Crawler retry failed:\n" +
109
- ` Original error: ${error.message}\n` +
110
- ` Retry error: ${e.message}\n`,
111
- );
112
- });
117
+ debug("Crawling roots: %s with %s crawler.", crawlOptions.roots, crawler);
118
+ let delta;
119
+ try {
120
+ delta = await crawl(crawlerOptions);
121
+ } catch (firstError) {
122
+ if (crawl !== _watchman.default) {
123
+ throw firstError;
113
124
  }
114
- throw error;
115
- };
116
- const logEnd = (delta) => {
117
- debug(
118
- 'Crawler "%s" returned %d added/modified, %d removed, %d clock(s).',
119
- crawler,
120
- delta.changedFiles.size,
121
- delta.removedFiles.size,
122
- delta.clocks?.size ?? 0,
125
+ crawler = "node";
126
+ options.console.warn(
127
+ "metro-file-map: Watchman crawl failed. Retrying once with node " +
128
+ "crawler.\n" +
129
+ " Usually this happens when watchman isn't running. Create an " +
130
+ "empty `.watchmanconfig` file in your project's root folder or " +
131
+ "initialize a git or hg repository in your project.\n" +
132
+ " " +
133
+ firstError.toString(),
123
134
  );
124
- this.#options.perfLogger?.point("crawl_end");
125
- return delta;
126
- };
127
- debug('Beginning crawl with "%s".', crawler);
128
- try {
129
- return crawl(crawlerOptions).catch(retry).then(logEnd);
130
- } catch (error) {
131
- return retry(error).then(logEnd);
135
+ try {
136
+ delta = await (0, _node.default)(crawlerOptions);
137
+ } catch (retryError) {
138
+ throw new Error(
139
+ "Crawler retry failed:\n" +
140
+ ` Original error: ${firstError.message}\n` +
141
+ ` Retry error: ${retryError.message}\n`,
142
+ );
143
+ }
132
144
  }
145
+ debug(
146
+ 'Crawler "%s" returned %d added/modified, %d removed, %d clock(s).',
147
+ crawler,
148
+ delta.changedFiles.size,
149
+ delta.removedFiles.size,
150
+ delta.clocks?.size ?? 0,
151
+ );
152
+ return delta;
133
153
  }
134
154
  async watch(onChange) {
135
155
  const { extensions, ignorePatternForWatch, useWatchman } = this.#options;
@@ -181,6 +201,13 @@ class Watcher extends _events.default {
181
201
  }
182
202
  return;
183
203
  }
204
+ if (change.event === "recrawl" && useWatchman) {
205
+ this.#options.console.error(
206
+ "metro-file-map: Received unexpected recrawl event while using " +
207
+ "Watchman. Watchman recrawls are not implemented.",
208
+ );
209
+ return;
210
+ }
184
211
  onChange(change);
185
212
  });
186
213
  await watcher.startWatching();
@@ -11,12 +11,11 @@
11
11
  import type {
12
12
  Console,
13
13
  CrawlerOptions,
14
- FileData,
14
+ CrawlResult,
15
15
  Path,
16
16
  PerfLogger,
17
17
  WatcherBackend,
18
18
  WatcherBackendChangeEvent,
19
- WatchmanClocks,
20
19
  } from './flow-types';
21
20
  import type {WatcherOptions as WatcherBackendOptions} from './watchers/common';
22
21
 
@@ -37,11 +36,12 @@ const debug = require('debug')('Metro:Watcher');
37
36
 
38
37
  const MAX_WAIT_TIME = 240000;
39
38
 
40
- type CrawlResult = {
41
- changedFiles: FileData,
42
- clocks?: WatchmanClocks,
43
- removedFiles: Set<Path>,
44
- };
39
+ type InternalCrawlOptions = Readonly<{
40
+ previousState: CrawlerOptions['previousState'],
41
+ roots: ReadonlyArray<string>,
42
+ subpath?: string,
43
+ useWatchman: boolean,
44
+ }>;
45
45
 
46
46
  type WatcherOptions = {
47
47
  abortSignal: AbortSignal,
@@ -86,12 +86,41 @@ export class Watcher extends EventEmitter {
86
86
 
87
87
  async crawl(): Promise<CrawlResult> {
88
88
  this.#options.perfLogger?.point('crawl_start');
89
+ const options = this.#options;
90
+
91
+ const result = await this.#crawl({
92
+ previousState: options.previousState,
93
+ roots: options.roots,
94
+ useWatchman: options.useWatchman,
95
+ });
89
96
 
97
+ this.#options.perfLogger?.point('crawl_end');
98
+ return result;
99
+ }
100
+
101
+ async recrawl(
102
+ subpath: string,
103
+ currentFileSystem: CrawlerOptions['previousState']['fileSystem'],
104
+ ): Promise<CrawlResult> {
105
+ return this.#crawl({
106
+ previousState: {
107
+ clocks: new Map(),
108
+ fileSystem: currentFileSystem,
109
+ },
110
+ roots: [path.join(this.#options.rootDir, subpath)],
111
+ subpath,
112
+ useWatchman: false,
113
+ });
114
+ }
115
+
116
+ async #crawl(crawlOptions: InternalCrawlOptions): Promise<CrawlResult> {
90
117
  const options = this.#options;
118
+ const {useWatchman, subpath} = crawlOptions;
119
+
91
120
  const ignoreForCrawl = (filePath: string) =>
92
121
  options.ignoreForCrawl(filePath) ||
93
122
  path.basename(filePath).startsWith(this.#options.healthCheckFilePrefix);
94
- const crawl = options.useWatchman ? watchmanCrawl : nodeCrawl;
123
+ const crawl = useWatchman ? watchmanCrawl : nodeCrawl;
95
124
  let crawler = crawl === watchmanCrawl ? 'watchman' : 'node';
96
125
 
97
126
  options.abortSignal.throwIfAborted();
@@ -108,55 +137,50 @@ export class Watcher extends EventEmitter {
108
137
  this.emit('status', status);
109
138
  },
110
139
  perfLogger: options.perfLogger,
111
- previousState: options.previousState,
140
+ previousState: crawlOptions.previousState,
112
141
  rootDir: options.rootDir,
113
- roots: options.roots,
142
+ roots: crawlOptions.roots,
143
+ subpath,
114
144
  };
115
145
 
116
- const retry = (error: Error): Promise<CrawlResult> => {
117
- if (crawl === watchmanCrawl) {
118
- crawler = 'node';
119
- options.console.warn(
120
- 'metro-file-map: Watchman crawl failed. Retrying once with node ' +
121
- 'crawler.\n' +
122
- " Usually this happens when watchman isn't running. Create an " +
123
- "empty `.watchmanconfig` file in your project's root folder or " +
124
- 'initialize a git or hg repository in your project.\n' +
125
- ' ' +
126
- error.toString(),
127
- );
128
- // $FlowFixMe[incompatible-type] Found when updating Promise type definition
129
- return nodeCrawl(crawlerOptions).catch<CrawlResult>(e => {
130
- throw new Error(
131
- 'Crawler retry failed:\n' +
132
- ` Original error: ${error.message}\n` +
133
- ` Retry error: ${e.message}\n`,
134
- );
135
- });
136
- }
137
-
138
- throw error;
139
- };
140
-
141
- const logEnd = (delta: CrawlResult): CrawlResult => {
142
- debug(
143
- 'Crawler "%s" returned %d added/modified, %d removed, %d clock(s).',
144
- crawler,
145
- delta.changedFiles.size,
146
- delta.removedFiles.size,
147
- delta.clocks?.size ?? 0,
148
- );
149
- this.#options.perfLogger?.point('crawl_end');
150
- return delta;
151
- };
146
+ debug('Crawling roots: %s with %s crawler.', crawlOptions.roots, crawler);
152
147
 
153
- debug('Beginning crawl with "%s".', crawler);
148
+ let delta: CrawlResult;
154
149
  try {
155
- // $FlowFixMe[incompatible-type] Found when updating Promise type definition
156
- return crawl(crawlerOptions).catch<CrawlResult>(retry).then(logEnd);
157
- } catch (error) {
158
- return retry(error).then(logEnd);
150
+ delta = await crawl(crawlerOptions);
151
+ } catch (firstError) {
152
+ if (crawl !== watchmanCrawl) {
153
+ throw firstError;
154
+ }
155
+ crawler = 'node';
156
+ options.console.warn(
157
+ 'metro-file-map: Watchman crawl failed. Retrying once with node ' +
158
+ 'crawler.\n' +
159
+ " Usually this happens when watchman isn't running. Create an " +
160
+ "empty `.watchmanconfig` file in your project's root folder or " +
161
+ 'initialize a git or hg repository in your project.\n' +
162
+ ' ' +
163
+ firstError.toString(),
164
+ );
165
+ try {
166
+ delta = await nodeCrawl(crawlerOptions);
167
+ } catch (retryError) {
168
+ throw new Error(
169
+ 'Crawler retry failed:\n' +
170
+ ` Original error: ${firstError.message}\n` +
171
+ ` Retry error: ${retryError.message}\n`,
172
+ );
173
+ }
159
174
  }
175
+
176
+ debug(
177
+ 'Crawler "%s" returned %d added/modified, %d removed, %d clock(s).',
178
+ crawler,
179
+ delta.changedFiles.size,
180
+ delta.removedFiles.size,
181
+ delta.clocks?.size ?? 0,
182
+ );
183
+ return delta;
160
184
  }
161
185
 
162
186
  async watch(onChange: (change: WatcherBackendChangeEvent) => void) {
@@ -214,6 +238,15 @@ export class Watcher extends EventEmitter {
214
238
  }
215
239
  return;
216
240
  }
241
+ // Watchman handles recrawls internally - receiving a recrawl event
242
+ // when using Watchman would indicate a bug. Log an error and ignore.
243
+ if (change.event === 'recrawl' && useWatchman) {
244
+ this.#options.console.error(
245
+ 'metro-file-map: Received unexpected recrawl event while using ' +
246
+ 'Watchman. Watchman recrawls are not implemented.',
247
+ );
248
+ return;
249
+ }
217
250
  onChange(change);
218
251
  });
219
252
  await watcher.startWatching();
@@ -4,8 +4,15 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
8
  * @oncall react_native
9
+ * @generated SignedSource<<9cdec2a3b7a46f0a893dd5dc392a5294>>
10
+ *
11
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
12
+ * Original file: packages/metro-file-map/src/cache/DiskCacheManager.js
13
+ * To regenerate, run:
14
+ * js1 build metro-ts-defs (internal) OR
15
+ * yarn run build-ts-defs (OSS)
9
16
  */
10
17
 
11
18
  import type {
@@ -4,7 +4,14 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
+ * @generated SignedSource<<733fae11203b79438dfb1ee2bbb6473d>>
9
+ *
10
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
11
+ * Original file: packages/metro-file-map/src/constants.js
12
+ * To regenerate, run:
13
+ * js1 build metro-ts-defs (internal) OR
14
+ * yarn run build-ts-defs (OSS)
8
15
  */
9
16
 
10
17
  import type {HType} from './flow-types';
@@ -4,8 +4,15 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
8
  * @oncall react_native
9
+ * @generated SignedSource<<8b6ff8a24f9156cd7991006c72edd296>>
10
+ *
11
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
12
+ * Original file: packages/metro-file-map/src/crawlers/node/hasNativeFindSupport.js
13
+ * To regenerate, run:
14
+ * js1 build metro-ts-defs (internal) OR
15
+ * yarn run build-ts-defs (OSS)
9
16
  */
10
17
 
11
18
  declare function hasNativeFindSupport(): Promise<boolean>;
@@ -4,13 +4,18 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
8
  * @oncall react_native
9
+ * @generated SignedSource<<27109494e4956802ba89ac6fd22aa277>>
10
+ *
11
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
12
+ * Original file: packages/metro-file-map/src/crawlers/node/index.js
13
+ * To regenerate, run:
14
+ * js1 build metro-ts-defs (internal) OR
15
+ * yarn run build-ts-defs (OSS)
9
16
  */
10
17
 
11
- import type {CanonicalPath, CrawlerOptions, FileData} from '../../flow-types';
18
+ import type {CrawlerOptions, CrawlResult} from '../../flow-types';
12
19
 
13
- declare function nodeCrawl(
14
- options: CrawlerOptions,
15
- ): Promise<{removedFiles: Set<CanonicalPath>; changedFiles: FileData}>;
20
+ declare function nodeCrawl(options: CrawlerOptions): Promise<CrawlResult>;
16
21
  export default nodeCrawl;
@@ -183,6 +183,7 @@ async function nodeCrawl(options) {
183
183
  perfLogger,
184
184
  roots,
185
185
  abortSignal,
186
+ subpath,
186
187
  } = options;
187
188
  abortSignal?.throwIfAborted();
188
189
  perfLogger?.point("nodeCrawl_start");
@@ -193,7 +194,9 @@ async function nodeCrawl(options) {
193
194
  debug("Using system find: %s", useNativeFind);
194
195
  return new Promise((resolve, reject) => {
195
196
  const callback = (fileData) => {
196
- const difference = previousState.fileSystem.getDifference(fileData);
197
+ const difference = previousState.fileSystem.getDifference(fileData, {
198
+ subpath,
199
+ });
197
200
  perfLogger?.point("nodeCrawl_end");
198
201
  try {
199
202
  abortSignal?.throwIfAborted();
@@ -10,9 +10,9 @@
10
10
  */
11
11
 
12
12
  import type {
13
- CanonicalPath,
14
13
  Console,
15
14
  CrawlerOptions,
15
+ CrawlResult,
16
16
  FileData,
17
17
  IgnoreMatcher,
18
18
  } from '../../flow-types';
@@ -170,10 +170,9 @@ function findNative(
170
170
  });
171
171
  }
172
172
 
173
- export default async function nodeCrawl(options: CrawlerOptions): Promise<{
174
- removedFiles: Set<CanonicalPath>,
175
- changedFiles: FileData,
176
- }> {
173
+ export default async function nodeCrawl(
174
+ options: CrawlerOptions,
175
+ ): Promise<CrawlResult> {
177
176
  const {
178
177
  console,
179
178
  previousState,
@@ -185,6 +184,7 @@ export default async function nodeCrawl(options: CrawlerOptions): Promise<{
185
184
  perfLogger,
186
185
  roots,
187
186
  abortSignal,
187
+ subpath,
188
188
  } = options;
189
189
 
190
190
  abortSignal?.throwIfAborted();
@@ -199,7 +199,9 @@ export default async function nodeCrawl(options: CrawlerOptions): Promise<{
199
199
 
200
200
  return new Promise((resolve, reject) => {
201
201
  const callback: Callback = fileData => {
202
- const difference = previousState.fileSystem.getDifference(fileData);
202
+ const difference = previousState.fileSystem.getDifference(fileData, {
203
+ subpath,
204
+ });
203
205
 
204
206
  perfLogger?.point('nodeCrawl_end');
205
207
 
@@ -4,20 +4,20 @@
4
4
  * This source code is licensed under the MIT license found in the
5
5
  * LICENSE file in the root directory of this source tree.
6
6
  *
7
- * @format
7
+ * @noformat
8
8
  * @oncall react_native
9
+ * @generated SignedSource<<bcfb58810773510450845bc00a93beae>>
10
+ *
11
+ * This file was translated from Flow by scripts/generateTypeScriptDefinitions.js
12
+ * Original file: packages/metro-file-map/src/crawlers/watchman/index.js
13
+ * To regenerate, run:
14
+ * js1 build metro-ts-defs (internal) OR
15
+ * yarn run build-ts-defs (OSS)
9
16
  */
10
17
 
11
- import type {
12
- CanonicalPath,
13
- CrawlerOptions,
14
- FileData,
15
- WatchmanClocks,
16
- } from '../../flow-types';
18
+ import type {CrawlerOptions, CrawlResult} from '../../flow-types';
17
19
 
18
- declare function watchmanCrawl($$PARAM_0$$: CrawlerOptions): Promise<{
19
- changedFiles: FileData;
20
- removedFiles: Set<CanonicalPath>;
21
- clocks: WatchmanClocks;
22
- }>;
20
+ declare function watchmanCrawl(
21
+ $$PARAM_0$$: CrawlerOptions,
22
+ ): Promise<CrawlResult>;
23
23
  export default watchmanCrawl;
@@ -13,10 +13,10 @@ import type {WatchmanClockSpec} from '../../flow-types';
13
13
  import type {
14
14
  CanonicalPath,
15
15
  CrawlerOptions,
16
+ CrawlResult,
16
17
  FileData,
17
18
  FileMetadata,
18
19
  Path,
19
- WatchmanClocks,
20
20
  } from '../../flow-types';
21
21
  import type {WatchmanQueryResponse, WatchmanWatchResponse} from 'fb-watchman';
22
22
 
@@ -57,11 +57,7 @@ export default async function watchmanCrawl({
57
57
  previousState,
58
58
  rootDir,
59
59
  roots,
60
- }: CrawlerOptions): Promise<{
61
- changedFiles: FileData,
62
- removedFiles: Set<CanonicalPath>,
63
- clocks: WatchmanClocks,
64
- }> {
60
+ }: CrawlerOptions): Promise<CrawlResult> {
65
61
  abortSignal?.throwIfAborted();
66
62
 
67
63
  const client = new watchman.Client();