nx 19.0.0-canary.20240426-ac9ad35 → 19.0.0-canary.20240427-f74aeab

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,6 @@
  /// <reference types="node" />
  import { ChildProcess } from 'child_process';
  import { FileData, ProjectGraph } from '../../config/project-graph';
- import { NxJsonConfiguration } from '../../config/nx-json';
  import { Hash } from '../../hasher/task-hasher';
  import { Task, TaskGraph } from '../../config/task-graph';
  import { ConfigurationSourceMaps } from '../../project-graph/utils/project-configuration-utils';
@@ -13,7 +12,7 @@ export type ChangedFile = {
  };
  export declare class DaemonClient {
  private readonly nxJson;
- constructor(nxJson: NxJsonConfiguration);
+ constructor();
  private queue;
  private socketMessenger;
  private currentMessage;
@@ -30,20 +30,25 @@ var DaemonStatus;
  DaemonStatus[DaemonStatus["CONNECTED"] = 2] = "CONNECTED";
  })(DaemonStatus || (DaemonStatus = {}));
  class DaemonClient {
- constructor(nxJson) {
- this.nxJson = nxJson;
+ constructor() {
  this._daemonStatus = DaemonStatus.DISCONNECTED;
  this._waitForDaemonReady = null;
  this._daemonReady = null;
  this._out = null;
  this._err = null;
+ try {
+ this.nxJson = (0, configuration_1.readNxJson)();
+ }
+ catch (e) {
+ this.nxJson = null;
+ }
  this.reset();
  }
  enabled() {
  if (this._enabled === undefined) {
  // TODO(v19): Add migration to move it out of existing configs and remove the ?? here.
- const useDaemonProcessOption = this.nxJson.useDaemonProcess ??
- this.nxJson.tasksRunnerOptions?.['default']?.options?.useDaemonProcess;
+ const useDaemonProcessOption = this.nxJson?.useDaemonProcess ??
+ this.nxJson?.tasksRunnerOptions?.['default']?.options?.useDaemonProcess;
  const env = process.env.NX_DAEMON;
  // env takes precedence
  // option=true,env=false => no daemon
@@ -329,7 +334,7 @@ class DaemonClient {
  }
  }
  exports.DaemonClient = DaemonClient;
- exports.daemonClient = new DaemonClient((0, configuration_1.readNxJson)());
+ exports.daemonClient = new DaemonClient();
  function isDocker() {
  try {
  (0, fs_1.statSync)('/.dockerenv');
@@ -6,6 +6,7 @@ const logger_1 = require("./logger");
  const socket_utils_1 = require("../socket-utils");
  const cache_1 = require("../cache");
  const plugins_1 = require("./plugins");
+ const error_types_1 = require("../../project-graph/error-types");
  exports.SERVER_INACTIVITY_TIMEOUT_MS = 10800000; // 10800000 ms = 3 hours
  let watcherInstance;
  function storeWatcherInstance(instance) {
@@ -69,9 +70,13 @@ function respondToClient(socket, response, description) {
  }
  exports.respondToClient = respondToClient;
  async function respondWithErrorAndExit(socket, description, error) {
+ const normalizedError = error instanceof error_types_1.DaemonProjectGraphError
+ ? error_types_1.ProjectGraphError.fromDaemonProjectGraphError(error)
+ : error;
  // print some extra stuff in the error message
- logger_1.serverLogger.requestLog(`Responding to the client with an error.`, description, error.message);
- console.error(error.stack);
+ logger_1.serverLogger.requestLog(`Responding to the client with an error.`, description, normalizedError.message);
+ console.error(normalizedError.stack);
+ // Respond with the original error
  await respondToClient(socket, (0, socket_utils_1.serializeResult)(error, null, null), null);
  }
  exports.respondWithErrorAndExit = respondWithErrorAndExit;
@@ -28,6 +28,7 @@ export interface RunCommandsOptions extends Json {
  __unparsed__: string[];
  usePty?: boolean;
  streamOutput?: boolean;
+ tty?: boolean;
  }
  export interface NormalizedRunCommandsOptions extends RunCommandsOptions {
  commands: {
@@ -40,6 +40,7 @@ const propKeys = [
  'streamOutput',
  'verbose',
  'forwardAllArgs',
+ 'tty',
  ];
  async function default_1(options, context) {
  registerProcessListener();
@@ -67,7 +68,7 @@ async function default_1(options, context) {
  }
  exports.default = default_1;
  async function runInParallel(options, context) {
- const procs = options.commands.map((c) => createProcess(null, c, options.readyWhen, options.color, calculateCwd(options.cwd, context), options.env ?? {}, true, options.usePty, options.streamOutput).then((result) => ({
+ const procs = options.commands.map((c) => createProcess(null, c, options.readyWhen, options.color, calculateCwd(options.cwd, context), options.env ?? {}, true, options.usePty, options.streamOutput, options.tty).then((result) => ({
  result,
  command: c.command,
  })));
@@ -144,7 +145,7 @@ async function runSerially(options, context) {
  pseudoTerminal ??= pseudo_terminal_1.PseudoTerminal.isSupported() ? (0, pseudo_terminal_1.getPseudoTerminal)() : null;
  let terminalOutput = '';
  for (const c of options.commands) {
- const result = await createProcess(pseudoTerminal, c, undefined, options.color, calculateCwd(options.cwd, context), options.env ?? {}, false, options.usePty, options.streamOutput);
+ const result = await createProcess(pseudoTerminal, c, undefined, options.color, calculateCwd(options.cwd, context), options.env ?? {}, false, options.usePty, options.streamOutput, options.tty);
  terminalOutput += result.terminalOutput;
  if (!result.success) {
  const output = `Warning: command "${c.command}" exited with non-zero status code`;
@@ -157,7 +158,7 @@ async function runSerially(options, context) {
  }
  return { success: true, terminalOutput };
  }
- async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cwd, env, isParallel, usePty = true, streamOutput = true) {
+ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cwd, env, isParallel, usePty = true, streamOutput = true, tty) {
  env = processEnv(color, cwd, env);
  // The rust runCommand is always a tty, so it will not look nice in parallel and if we need prefixes
  // currently does not work properly in windows
@@ -174,6 +175,7 @@ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cw
  cwd,
  jsEnv: env,
  quiet: !streamOutput,
+ tty,
  });
  childProcesses.add(cp);
  return new Promise((res) => {
@@ -100,8 +100,15 @@
  },
  "args": {
  "oneOf": [
- { "type": "array", "items": { "type": "string" } },
- { "type": "string" }
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "string"
+ }
  ],
  "description": "Extra arguments. You can pass them as follows: nx run project:target --args='--wait=100'. You can then use {args.wait} syntax to interpolate them in the workspace config file. See example [above](#chaining-commands-interpolating-args-and-setting-the-cwd)"
  },
@@ -140,6 +147,11 @@
  "type": "boolean",
  "description": "Whether arguments should be forwarded when interpolation is not present.",
  "default": true
+ },
+ "tty": {
+ "type": "boolean",
+ "description": "Whether commands should be run with a tty terminal",
+ "hidden": true
  }
  },
  "additionalProperties": true,
@@ -2,11 +2,18 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.createTreeWithEmptyV1Workspace = exports.createTreeWithEmptyWorkspace = void 0;
  const tree_1 = require("../tree");
+ const workspace_root_1 = require("../../utils/workspace-root");
  /**
  * Creates a host for testing.
  */
  function createTreeWithEmptyWorkspace(opts = {}) {
  const tree = new tree_1.FsTree('/virtual', false);
+ // Our unit tests are all written as though they are at the root of a workspace
+ // However, when they are run in a subdirectory of the workspaceRoot,
+ // the relative path between workspaceRoot and the directory the tests are run
+ // is prepended to the paths created in the virtual tree.
+ // Setting this envVar to workspaceRoot prevents this behaviour
+ process.env.INIT_CWD = workspace_root_1.workspaceRoot;
  return addCommonFiles(tree, opts.layout === 'apps-libs');
  }
  exports.createTreeWithEmptyWorkspace = createTreeWithEmptyWorkspace;
@@ -150,7 +150,7 @@ export class ChildProcess {
  }
  export class RustPseudoTerminal {
  constructor()
- runCommand(command: string, commandDir?: string | undefined | null, jsEnv?: Record<string, string> | undefined | null, quiet?: boolean | undefined | null): ChildProcess
+ runCommand(command: string, commandDir?: string | undefined | null, jsEnv?: Record<string, string> | undefined | null, quiet?: boolean | undefined | null, tty?: boolean | undefined | null): ChildProcess
  /**
  * This allows us to run a pseudoterminal with a fake node ipc channel
  * this makes it possible to be backwards compatible with the old implementation
@@ -33,39 +33,127 @@ function getPnpmLockfileDependencies(lockFileContent, lockFileHash, ctx) {
  return getDependencies(data, keyMap, isV6, ctx);
  }
  exports.getPnpmLockfileDependencies = getPnpmLockfileDependencies;
+ function matchPropValue(record, key, originalPackageName) {
+ if (!record) {
+ return undefined;
+ }
+ const index = Object.values(record).findIndex((version) => version === key);
+ if (index > -1) {
+ return Object.keys(record)[index];
+ }
+ // check if non-aliased name is found
+ if (record[originalPackageName] &&
+ key.startsWith(`/${originalPackageName}/${record[originalPackageName]}`)) {
+ return originalPackageName;
+ }
+ }
+ function matchedDependencyName(importer, key, originalPackageName) {
+ return (matchPropValue(importer.dependencies, key, originalPackageName) ||
+ matchPropValue(importer.optionalDependencies, key, originalPackageName) ||
+ matchPropValue(importer.peerDependencies, key, originalPackageName));
+ }
+ function createHashFromSnapshot(snapshot) {
+ return (snapshot.resolution?.['integrity'] ||
+ (snapshot.resolution?.['tarball']
+ ? (0, file_hasher_1.hashArray)([snapshot.resolution['tarball']])
+ : undefined));
+ }
+ function isLockFileKey(depVersion) {
+ return depVersion.startsWith('/');
+ }
  function getNodes(data, keyMap, isV6) {
  const nodes = new Map();
- Object.entries(data.packages).forEach(([key, snapshot]) => {
- findPackageNames(key, snapshot, data).forEach((packageName) => {
- const rawVersion = findVersion(key, packageName);
- const version = parseBaseVersion(rawVersion, isV6);
- // we don't need to keep duplicates, we can just track the keys
- const existingNode = nodes.get(packageName)?.get(version);
- if (existingNode) {
- keyMap.set(key, existingNode);
- return;
+ const maybeAliasedPackageVersions = new Map(); // <version, alias>
+ const packageNames = new Set();
+ for (const [key, snapshot] of Object.entries(data.packages)) {
+ const originalPackageName = extractNameFromKey(key);
+ if (!originalPackageName) {
+ continue;
+ }
+ // snapshot already has a name
+ if (snapshot.name) {
+ packageNames.add({
+ key,
+ packageName: snapshot.name,
+ hash: createHashFromSnapshot(snapshot),
+ });
+ }
+ const rootDependencyName = matchedDependencyName(data.importers['.'], key, originalPackageName) ||
+ // only root importers have devDependencies
+ matchPropValue(data.importers['.'].devDependencies, key, originalPackageName);
+ if (rootDependencyName) {
+ packageNames.add({
+ key,
+ packageName: rootDependencyName,
+ hash: createHashFromSnapshot(snapshot),
+ });
+ }
+ if (!snapshot.name && !rootDependencyName) {
+ packageNames.add({
+ key,
+ packageName: originalPackageName,
+ hash: createHashFromSnapshot(snapshot),
+ });
+ }
+ if (snapshot.peerDependencies) {
+ for (const [depName, depVersion] of Object.entries(snapshot.peerDependencies)) {
+ if (isLockFileKey(depVersion)) {
+ maybeAliasedPackageVersions.set(depVersion, depName);
+ }
+ }
+ }
+ if (snapshot.optionalDependencies) {
+ for (const [depName, depVersion] of Object.entries(snapshot.optionalDependencies)) {
+ if (isLockFileKey(depVersion)) {
+ maybeAliasedPackageVersions.set(depVersion, depName);
+ }
  }
+ }
+ if (snapshot.dependencies) {
+ for (const [depName, depVersion] of Object.entries(snapshot.dependencies)) {
+ if (isLockFileKey(depVersion)) {
+ maybeAliasedPackageVersions.set(depVersion, depName);
+ }
+ }
+ }
+ const aliasedDep = maybeAliasedPackageVersions.get(key);
+ if (aliasedDep) {
+ packageNames.add({
+ key,
+ packageName: aliasedDep,
+ hash: createHashFromSnapshot(snapshot),
+ });
+ }
+ }
+ for (const { key, packageName, hash } of packageNames) {
+ const rawVersion = findVersion(key, packageName);
+ if (!rawVersion) {
+ continue;
+ }
+ const version = parseBaseVersion(rawVersion, isV6);
+ if (!version) {
+ continue;
+ }
+ if (!nodes.has(packageName)) {
+ nodes.set(packageName, new Map());
+ }
+ if (!nodes.get(packageName).has(version)) {
  const node = {
  type: 'npm',
  name: version ? `npm:${packageName}@${version}` : `npm:${packageName}`,
  data: {
  version,
  packageName,
- hash: snapshot.resolution?.['integrity'] ||
- (0, file_hasher_1.hashArray)(snapshot.resolution?.['tarball']
- ? [snapshot.resolution['tarball']]
- : [packageName, version]),
+ hash: hash ?? (0, file_hasher_1.hashArray)([packageName, version]),
  },
  };
+ nodes.get(packageName).set(version, node);
  keyMap.set(key, node);
- if (!nodes.has(packageName)) {
- nodes.set(packageName, new Map([[version, node]]));
- }
- else {
- nodes.get(packageName).set(version, node);
- }
- });
- });
+ }
+ else {
+ keyMap.set(key, nodes.get(packageName).get(version));
+ }
+ }
  const hoistedDeps = (0, pnpm_normalizer_1.loadPnpmHoistedDepsDefinition)();
  const results = {};
  for (const [packageName, versionMap] of nodes.entries()) {
@@ -228,52 +316,6 @@ function findVersion(key, packageName) {
  // for tarball package the entire key is the version spec
  return key;
  }
- function findPackageNames(key, snapshot, data) {
- const packageNames = new Set();
- const originalPackageName = extractNameFromKey(key);
- const matchPropValue = (record) => {
- if (!record) {
- return undefined;
- }
- const index = Object.values(record).findIndex((version) => version === key);
- if (index > -1) {
- return Object.keys(record)[index];
- }
- // check if non aliased name is found
- if (record[originalPackageName] &&
- key.startsWith(`/${originalPackageName}/${record[originalPackageName]}`)) {
- return originalPackageName;
- }
- };
- const matchedDependencyName = (importer) => {
- return (matchPropValue(importer.dependencies) ||
- matchPropValue(importer.optionalDependencies) ||
- matchPropValue(importer.peerDependencies));
- };
- // snapshot already has a name
- if (snapshot.name) {
- packageNames.add(snapshot.name);
- }
- // it'a a root dependency
- const rootDependencyName = matchedDependencyName(data.importers['.']) ||
- // only root importers have devDependencies
- matchPropValue(data.importers['.'].devDependencies);
- if (rootDependencyName) {
- packageNames.add(rootDependencyName);
- }
- // find a snapshot that has a dependency that points to this snapshot
- const snapshots = Object.values(data.packages);
- for (let i = 0; i < snapshots.length; i++) {
- const dependencyName = matchedDependencyName(snapshots[i]);
- if (dependencyName) {
- packageNames.add(dependencyName);
- }
- }
- if (packageNames.size === 0) {
- packageNames.add(originalPackageName);
- }
- return Array.from(packageNames);
- }
  function getVersion(key, packageName) {
  const KEY_NAME_SEPARATOR_LENGTH = 2; // leading and trailing slash
  return key.slice(packageName.length + KEY_NAME_SEPARATOR_LENGTH);
@@ -15,7 +15,9 @@ const getTouchedProjects = (touchedFiles, projectGraphNodes) => {
  };
  exports.getTouchedProjects = getTouchedProjects;
  const getImplicitlyTouchedProjects = (fileChanges, projectGraphNodes, nxJson) => {
- const implicits = {};
+ const implicits = {
+ 'nx.json': '*',
+ };
  Object.values(projectGraphNodes || {}).forEach((node) => {
  const namedInputs = {
  ...nxJson.namedInputs,
@@ -5,7 +5,7 @@ exports.LoadPluginError = exports.DaemonProjectGraphError = exports.isMergeNodes
  const tslib_1 = require("tslib");
  class ProjectGraphError extends Error {
  constructor(errors, partialProjectGraph, partialSourceMaps) {
- super(`Failed to process project graph.`);
+ super(`Failed to process project graph. Run "nx reset" to fix this. Please report the issue if you keep seeing it.`);
  _ProjectGraphError_errors.set(this, void 0);
  _ProjectGraphError_partialProjectGraph.set(this, void 0);
  _ProjectGraphError_partialSourceMaps.set(this, void 0);
@@ -15,6 +15,8 @@ export interface PluginWorkerLoadResult {
  type: 'load-result';
  payload: {
  name: string;
+ include?: string[];
+ exclude?: string[];
  createNodesPattern: string;
  hasCreateDependencies: boolean;
  hasProcessProjectGraph: boolean;
@@ -73,11 +73,13 @@ function createWorkerHandler(worker, pending, onload, onloadError) {
  return (0, messaging_1.consumeMessage)(message, {
  'load-result': (result) => {
  if (result.success) {
- const { name, createNodesPattern } = result;
+ const { name, createNodesPattern, include, exclude } = result;
  pluginName = name;
  pluginNames.set(worker, pluginName);
  onload({
  name,
+ include,
+ exclude,
  createNodes: createNodesPattern
  ? [
  createNodesPattern,
@@ -22,6 +22,8 @@ process.on('message', async (message) => {
  type: 'load-result',
  payload: {
  name: plugin.name,
+ include: plugin.include,
+ exclude: plugin.exclude,
  createNodesPattern: plugin.createNodes?.[0],
  hasCreateDependencies: 'createDependencies' in plugin && !!plugin.createDependencies,
  hasProcessProjectGraph: 'processProjectGraph' in plugin && !!plugin.processProjectGraph,
@@ -44,7 +44,6 @@ async function runCreateNodesInParallel(configFiles, plugin, options, context) {
  const errors = [];
  const results = [];
  const promises = configFiles.map(async (file) => {
- performance.mark(`${plugin.name}:createNodes:${file} - start`);
  try {
  const value = await plugin.createNodes[1](file, options, context);
  if (value) {
@@ -62,10 +61,6 @@ async function runCreateNodesInParallel(configFiles, plugin, options, context) {
  file,
  }));
  }
- finally {
- performance.mark(`${plugin.name}:createNodes:${file} - end`);
- performance.measure(`${plugin.name}:createNodes:${file}`, `${plugin.name}:createNodes:${file} - start`, `${plugin.name}:createNodes:${file} - end`);
- }
  });
  await Promise.all(promises).then(() => {
  performance.mark(`${plugin.name}:createNodes - end`);
@@ -11,10 +11,11 @@ export declare class PseudoTerminal {
  static isSupported(): boolean;
  constructor(rustPseudoTerminal: RustPseudoTerminal);
  init(): Promise<void>;
- runCommand(command: string, { cwd, jsEnv, quiet, }?: {
+ runCommand(command: string, { cwd, jsEnv, quiet, tty, }?: {
  cwd?: string;
  jsEnv?: Record<string, string>;
  quiet?: boolean;
+ tty?: boolean;
  }): PseudoTtyProcess;
  fork(id: string, script: string, { cwd, jsEnv, quiet, }: {
  cwd?: string;
@@ -32,8 +32,8 @@ class PseudoTerminal {
  await this.pseudoIPC.init();
  this.initialized = true;
  }
- runCommand(command, { cwd, jsEnv, quiet, } = {}) {
- return new PseudoTtyProcess(this.rustPseudoTerminal.runCommand(command, cwd, jsEnv, quiet));
+ runCommand(command, { cwd, jsEnv, quiet, tty, } = {}) {
+ return new PseudoTtyProcess(this.rustPseudoTerminal.runCommand(command, cwd, jsEnv, quiet, tty));
  }
  async fork(id, script, { cwd, jsEnv, quiet, }) {
  if (!this.initialized) {
@@ -30,7 +30,7 @@ export interface NxArgs {
  type?: string;
  batch?: boolean;
  }
- export declare function createOverrides(__overrides_unparsed__?: string[]): any;
+ export declare function createOverrides(__overrides_unparsed__?: string[]): Record<string, any>;
  export declare function splitArgsIntoNxArgsAndOverrides(args: {
  [k: string]: any;
  }, mode: 'run-one' | 'run-many' | 'affected' | 'print-affected', options: {
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.getPromptsForSchema = exports.convertSmartDefaultsIntoNamedParams = exports.warnDeprecations = exports.combineOptionsForGenerator = exports.combineOptionsForExecutor = exports.applyVerbosity = exports.setDefaults = exports.validateObject = exports.validateOptsAgainstSchema = exports.SchemaError = exports.convertAliases = exports.coerceTypesInOptions = exports.convertToCamelCase = exports.handleErrors = void 0;
  const logger_1 = require("./logger");
  const output_1 = require("./output");
+ const client_1 = require("../daemon/client/client");
  const LIST_CHOICE_DISPLAY_LIMIT = 10;
  async function handleErrors(isVerbose, fn) {
  try {
@@ -41,6 +42,9 @@ async function handleErrors(isVerbose, fn) {
  logger_1.logger.info(err.stack);
  }
  }
+ if (client_1.daemonClient.enabled()) {
+ client_1.daemonClient.reset();
+ }
  return 1;
  }
  }