@uss-stargazer/job-queue 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,4 +1,4 @@
- # Job Queue
+ # job-queue
  
  The concept is modeling your todo-list as bite-sized jobs in a First-In, First-Out (FIFO) queue.
  The goal is to increase productivity.
@@ -8,7 +8,7 @@ The goal is to increase productivity.
  
  ## Install
  
- Can be installed system-wide if you have Node/NPM installed:
+ Can be installed system-wide if you have Node.js installed:
  
  ```
  npm install --global @uss-stargazer/job-queue
@@ -22,7 +22,33 @@ job-queue --help
  jobq --help
  ```
  
- ## Workflow
+ ## Syncing with gists
+
+ `job-queue` allows you to sync its data files with one or more gists on your GitHub account. This provides
+ cloud storage and makes the program more portable.
+
+ To link a data file to a gist, provide the gist ID and a GitHub access token in config.json, like:
+
+ ```json
+ {
+  "$schema": "/job-queue/schemas/config.schema.json",
+  "jobqueue": {
+   "local": "/job-queue/jobqueue.json",
+   "ghGistId": "00000000000000000000000000000000",
+   "ghAccessToken": "ghp_000000000000000000000000000000000000"
+  },
+  "projectpool": {
+   "local": "/job-queue/projectpool.json",
+   "ghGistId": "00000000000000000000000000000000",
+   "ghAccessToken": "ghp_000000000000000000000000000000000000"
+  },
+  "schemas": "/job-queue/schemas"
+ }
+ ```
+
+ Your gist ID can be copied from the gist's URL, which usually looks like `https://gist.github.com/USERNAME/GIST_ID`.
+
+ ## Sample Workflow
  
  - _\[Daemon\]_ Spontaneous, not fleshed out ideas get immediately added to the project pool as "inactive".
  - You should generally have an idea of a few projects you want to focus on at a time. For each, push
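The gist fields are optional per data file: the updated `ConfigSchema` (in `dist/data/config.js` below) still accepts a plain path string for `jobqueue` and `projectpool`. A local-only config might look like the following sketch; the paths and the editor command are illustrative, not values from the package:

```json
{
 "$schema": "/job-queue/schemas/config.schema.json",
 "jobqueue": "/job-queue/jobqueue.json",
 "projectpool": "/job-queue/projectpool.json",
 "schemas": "/job-queue/schemas",
 "editor": "nano",
 "confirmGistUpdates": true,
 "confirmOffline": true
}
```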
package/dist/actions.js CHANGED
@@ -170,7 +170,7 @@ const actions = {
  await projectpool.sync();
  }
  catch (error) {
- if (error instanceof AbortError)
+ if (!(error instanceof AbortError))
  throw error;
  console.log(chalk.red('[e]'), error.message);
  pool.push(project);
package/dist/data/config.js CHANGED
@@ -3,13 +3,13 @@ import envPaths from 'env-paths';
3
3
  import path from 'path';
4
4
  import fs from 'fs/promises';
5
5
  import { existsSync } from 'fs';
6
- import { makeJsonData } from '../utils/jsonData.js';
7
6
  import chalk from 'chalk';
8
7
  import { JobQueueSchema, JobSchema } from './jobqueue.js';
9
- import { ProjectPoolSchema, ProjectSchema, } from './projectpool.js';
8
+ import { ProjectPoolSchema, ProjectSchema } from './projectpool.js';
10
9
  import { confirm } from '@inquirer/prompts';
11
10
  import { fileURLToPath, pathToFileURL } from 'url';
12
11
  import pkg from '../../package.json' with { type: 'json' };
12
+ import { getDataTargetNicely } from '../utils/promptUser.js';
13
13
  const jsonSchemaNames = [
14
14
  'job',
15
15
  'jobqueue',
@@ -18,12 +18,36 @@ const jsonSchemaNames = [
18
18
  'config',
19
19
  ];
20
20
  const NonemptyString = z.string().nonempty();
21
+ const GistDataObject = z.object({
22
+ local: NonemptyString.meta({ title: "Local JSON path", description: "Path to local JSON file for data linked to the Gist." }),
23
+ ghGistId: NonemptyString.regex(/^[A-Fa-f0-9]{32}$/, { error: "Is not valid GitHub gist ID" }).meta({ title: "Gist ID", description: "ID of gist to link (can get from URL of the gist you created)." }),
24
+ ghAccessToken: NonemptyString.regex(/^(gh[ps]_[a-zA-Z0-9]{36}|github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59})$/, { error: "Is not valid GitHub access token" }).meta({ title: "Gist Access Token", description: "GitHub access token for read/write access to your gist." })
25
+ }).meta({ title: "Gist-Linked JSON", description: "Data for a configuration/data file linked to a GitHub gist." });
21
26
  export const ConfigSchema = z.object({
22
- jobqueue: NonemptyString.optional().meta({ title: "Jobqueue path", description: "Path to jobqueue.json." }),
23
- projectpool: NonemptyString.optional().meta({ title: "Projectpool path", description: "Path to projectpool.json." }),
24
- editor: NonemptyString.optional().meta({ title: "Editor command", description: "Command to run editor. Will be run like `<editor> /some/data.json` so make sure it waits." }),
27
+ jobqueue: z.union([
28
+ NonemptyString.meta({ title: "Jobqueue path", description: "Path to local jobqueue.json." }),
29
+ GistDataObject.meta({ title: "Gist-Linked Jobqueue", description: "Data for jobqueue.json locally and linked to gist on GitHub." })
30
+ ]),
31
+ projectpool: z.union([
32
+ NonemptyString.meta({ title: "Projectpool path", description: "Path to local projectpool.json." }),
33
+ GistDataObject.meta({ title: "Gist-Linked Jobqueue", description: "Data for projectpool.json locally and linked to gist on GitHub." })
34
+ ]),
25
35
  schemas: NonemptyString.meta({ title: "Schemas directory", description: `Path to directory containing: ${jsonSchemaNames.map(base => `${base}.schema.json`).join(", ")}.` }).transform((schemasDir) => Object.fromEntries(jsonSchemaNames.map((base) => [base, pathToFileURL(path.resolve(schemasDir, `${base}.schema.json`)).href]))),
36
+ editor: NonemptyString.optional().meta({ title: "Editor command", description: "Command to run editor. Will be run like `<editor> /some/data.json` so make sure it waits." }),
37
+ confirmGistUpdates: z.boolean().optional().meta({ description: "Whether to prompt before pushing or pulling a gist from GitHub." }),
38
+ confirmOffline: z.boolean().optional().meta({ description: "Whether to confirm before using offline (if there are gists in config)." })
26
39
  });
40
+ export const toInputConfig = (outputData) => {
41
+ const schemaDir = Object.values(outputData.schemas).reduce((schemaDir, schemaPath) => {
42
+ const thisSchemaDir = path.dirname(fileURLToPath(schemaPath));
43
+ return schemaDir === null || schemaDir === thisSchemaDir
44
+ ? thisSchemaDir
45
+ : (() => {
46
+ throw new Error('Decoded individual schemas are not in the same directory.');
47
+ })();
48
+ }, null);
49
+ return { ...outputData, schemas: schemaDir };
50
+ };
27
51
  const jsonSchemas = {
28
52
  job: JobSchema,
29
53
  jobqueue: JobQueueSchema,
@@ -52,7 +76,9 @@ const getJsonSchemaId = (schemaFileUrl) => path.posix.join(pkg.version, path.bas
52
76
  const versionFromJsonSchemaId = (id) => path.dirname(id);
53
77
  const updateJsonSchema = async (schemaSchema, schemaFileUrl) => {
54
78
  const schemaPath = fileURLToPath(schemaFileUrl);
55
- await fs.mkdir(path.dirname(schemaPath), { recursive: true });
79
+ const schemaDir = path.dirname(schemaPath);
80
+ if (!existsSync(schemaDir))
81
+ await fs.mkdir(schemaDir, { recursive: true });
56
82
  return await fs.writeFile(schemaPath, JSON.stringify({
57
83
  $id: getJsonSchemaId(schemaFileUrl),
58
84
  ...schemaSchema.toJSONSchema({
@@ -61,10 +87,7 @@ const updateJsonSchema = async (schemaSchema, schemaFileUrl) => {
61
87
  }),
62
88
  }, undefined, ' '));
63
89
  };
64
- const defaultData = {
65
- jobqueue: { queue: [] },
66
- projectpool: { pool: [] },
67
- };
90
+ export const getDataPathFromConfig = (data) => (typeof data === 'string' ? data : data.local);
68
91
  const createConfig = async (configDir, override) => {
69
92
  const config = {
70
93
  jobqueue: path.resolve(configDir, 'jobqueue.json'),
@@ -73,39 +96,18 @@ const createConfig = async (configDir, override) => {
73
96
  ...override,
74
97
  };
75
98
  const decodedConfig = ConfigSchema.decode(config);
76
- for (const key of ['jobqueue', 'projectpool']) {
77
- if (!existsSync(config[key])) {
78
- console.log(chalk.blue('[i]'), `Creating ${path.join('{config}', path.relative(configDir, config[key]))}...`);
79
- await fs.writeFile(config[key], JSON.stringify({
80
- $schema: decodedConfig.schemas[key],
81
- ...defaultData[key],
82
- }, undefined, ' '));
83
- }
84
- }
85
- await fs.mkdir(config.schemas);
99
+ if (!existsSync(config.schemas))
100
+ await fs.mkdir(config.schemas);
86
101
  for (const schema of jsonSchemaNames) {
87
102
  if (!existsSync(decodedConfig.schemas[schema])) {
88
103
  console.log(chalk.blue('[i]'), `Creating ${path.join('{config}', path.relative(configDir, fileURLToPath(decodedConfig.schemas[schema])))}...`);
89
104
  await updateJsonSchema(jsonSchemas[schema], decodedConfig.schemas[schema]);
90
105
  }
91
106
  }
92
- return { encoded: config, decoded: decodedConfig };
107
+ return { encoded: config, jsonSchemaUrl: decodedConfig.schemas.config };
93
108
  };
94
- const checkConfig = async (config, configPath) => {
109
+ const checkConfigSchemas = async (config, configPath) => {
95
110
  try {
96
- for (const key of ['jobqueue', 'projectpool']) {
97
- if (!existsSync(config[key]))
98
- if (['jobqueue', 'projectpool'].includes(key) &&
99
- (await confirm({
100
- message: `File at config.${key} does not exist. Want to create it?`,
101
- })))
102
- await fs.writeFile(config[key], JSON.stringify({
103
- $schema: config.schemas[key],
104
- ...defaultData[key],
105
- }, undefined, ' '));
106
- else
107
- throw new Error(`File '${config[key]}' in config does not exist`);
108
- }
109
111
  const outdatedSchemas = [];
110
112
  for (const schema of Object.keys(config.schemas)) {
111
113
  const schemaPath = fileURLToPath(config.schemas[schema]);
@@ -115,7 +117,7 @@ const checkConfig = async (config, configPath) => {
115
117
  }));
116
118
  const version = schemaJson.$id && versionFromJsonSchemaId(schemaJson.$id);
117
119
  if (!version || !/^\d+\.\d+\.\d+$/.test(version)) {
118
- console.log(chalk.yellow('[w]'), `${path.join('{config}', path.basename(config.schemas[schema]))} is malformed. Overwriting...`);
120
+ console.log(chalk.yellow('[w]'), `${path.join('{schemas}', path.basename(config.schemas[schema]))} is malformed. Overwriting...`);
119
121
  await updateJsonSchema(jsonSchemas[schema], config.schemas[schema]);
120
122
  }
121
123
  else if (version !== pkg.version) {
@@ -123,7 +125,7 @@ const checkConfig = async (config, configPath) => {
123
125
  }
124
126
  }
125
127
  else {
126
- console.log(chalk.blue('[i]'), `${path.join('{config}', path.basename(config.schemas[schema]))} does not exist. Creating...`);
128
+ console.log(chalk.blue('[i]'), `${path.join('{schemas}', path.basename(config.schemas[schema]))} does not exist. Creating...`);
127
129
  await updateJsonSchema(jsonSchemas[schema], config.schemas[schema]);
128
130
  }
129
131
  }
@@ -142,17 +144,14 @@ const checkConfig = async (config, configPath) => {
142
144
  export const getConfig = async (overrideConfigDir, overrideConfig) => {
143
145
  const configDir = overrideConfigDir ?? path.resolve(envPaths('job-queue').config);
144
146
  const configPath = path.resolve(configDir, 'config.json');
145
- await fs.mkdir(configDir, { recursive: true });
146
- if (!existsSync(configPath)) {
147
- console.log(chalk.blue('[i]'), `Creating config at '${configPath}'...`);
148
- const { encoded: config, decoded: decodedConfig } = await createConfig(configDir, overrideConfig);
149
- await fs.writeFile(configPath, JSON.stringify({
150
- $schema: decodedConfig.schemas['config'],
151
- ...config,
152
- }, undefined, ' '));
153
- }
154
- const configData = await makeJsonData(configPath, ConfigSchema);
155
- await checkConfig(configData.data, configPath);
147
+ if (!existsSync(configDir))
148
+ await fs.mkdir(configDir, { recursive: true });
149
+ const { data: configData, hadToCreate } = await getDataTargetNicely(ConfigSchema, { name: 'config.json', expectedPath: configPath }, () => createConfig(configDir, overrideConfig), false, toInputConfig);
156
150
  await updateNestedObject(configData.data, ConfigSchema.partial().decode(overrideConfig));
157
- return configData;
151
+ await checkConfigSchemas(configData.data, configPath);
152
+ return {
153
+ config: configData,
154
+ path: configPath,
155
+ hadToCreate,
156
+ };
158
157
  };
package/dist/data/index.js CHANGED
@@ -1,9 +1,50 @@
1
1
  import { JobQueueSchema } from './jobqueue.js';
2
2
  import { ProjectPoolSchema } from './projectpool.js';
3
- import { ConfigSchema } from './config.js';
3
+ import { ConfigSchema, getDataPathFromConfig, toInputConfig, } from './config.js';
4
+ import { select } from '@inquirer/prompts';
5
+ import { ExitPromptError } from '@inquirer/core';
6
+ import { AbortError } from '../utils/index.js';
7
+ import chalk from 'chalk';
8
+ import { haveUserUpdateData } from '../utils/promptUser.js';
4
9
  export const dataNames = ['jobqueue', 'projectpool', 'config'];
5
10
  export const dataSchemas = {
6
11
  jobqueue: JobQueueSchema,
7
12
  projectpool: ProjectPoolSchema,
8
13
  config: ConfigSchema,
9
14
  };
15
+ export async function editData(data, configPath) {
16
+ try {
17
+ const target = await select({
18
+ message: 'Select data to edit',
19
+ choices: [
20
+ { name: 'jobqueue.json', value: 'jobqueue' },
21
+ { name: 'projectpool.json', value: 'projectpool' },
22
+ { name: 'config.json', value: 'config' },
23
+ ],
24
+ });
25
+ const targetSchema = dataSchemas[target];
26
+ const targetPath = target === 'config'
27
+ ? configPath
28
+ : getDataPathFromConfig(data.config.data[target]);
29
+ await haveUserUpdateData(targetSchema, target === 'config'
30
+ ? toInputConfig(data[target].data)
31
+ : data[target].data, {
32
+ editor: data.config.data.editor,
33
+ errorHead: 'Edit data failed',
34
+ jsonSchemaUrl: data.config.data.schemas[target],
35
+ file: { type: 'abs', path: targetPath },
36
+ }, {
37
+ preparse(raw) {
38
+ return raw.trim().length === 0
39
+ ? { errMessage: 'Cannot delete data file.' }
40
+ : 'continue';
41
+ },
42
+ });
43
+ }
44
+ catch (error) {
45
+ if (error instanceof AbortError || error instanceof ExitPromptError)
46
+ console.log(chalk.red('[e]'), error.message);
47
+ else
48
+ throw error;
49
+ }
50
+ }
package/dist/data/jobqueue.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import * as z from 'zod';
2
- import { makeJsonData } from '../utils/jsonData.js';
3
2
  import { checkProjectName } from './projectpool.js';
4
- import { haveUserUpdateData } from '../utils/index.js';
3
+ import { makeGistData } from '../utils/gistData.js';
4
+ import { getDataTargetNicely, handleInvalidGist, haveUserUpdateData, makeGistConflictHandler, } from '../utils/promptUser.js';
5
5
  export const JobSchema = z.object({
6
6
  name: z.string().trim().nonempty().meta({ title: 'Job name', description: 'Short name of the job (think a single commit message). Used as job identifier.' }),
7
7
  objectivies: z.array(z.string().trim().nonempty()).meta({ title: 'Job objectivies', description: 'A list of objectives to complete for this job. Purely for your benefit.' }),
@@ -11,7 +11,11 @@ export const JobSchema = z.object({
11
11
  export const JobQueueSchema = z.object({
12
12
  queue: z.array(JobSchema).meta({ title: 'Job queue' }),
13
13
  }).meta({ title: 'Job queue root', description: 'Root object for jobs/tasks FIFO queue model.' });
14
- export const getJobQueue = async (jsonPath) => await makeJsonData(jsonPath, JobQueueSchema);
14
+ const defaultJobQueue = { queue: [] };
15
+ export const getJobQueue = async (jsonPath, jsonSchemaUrl, autoCreateFiles = false, editor) => {
16
+ const { data } = await getDataTargetNicely(JobQueueSchema, { name: 'jobqueue.json', expectedPath: jsonPath, jsonSchemaUrl }, async () => ({ encoded: defaultJobQueue }), autoCreateFiles);
17
+ return await makeGistData(JobQueueSchema, data, 'jobqueue.json', makeGistConflictHandler(JobQueueSchema, editor), handleInvalidGist, "JSON to keep track of `jobqueue` data for this user's job-queue implementation (this is probably autogenerated).");
18
+ };
15
19
  export const updateJob = async (job, projectPool, config) => {
16
20
  const pool = projectPool.data.pool;
17
21
  let userDeletedJob = false;
package/dist/data/projectpool.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import { confirm } from '@inquirer/prompts';
2
2
  import * as z from 'zod';
3
- import { makeJsonData } from '../utils/jsonData.js';
4
- import { haveUserUpdateData } from '../utils/index.js';
3
+ import { makeGistData } from '../utils/gistData.js';
4
+ import { getDataTargetNicely, handleInvalidGist, haveUserUpdateData, makeGistConflictHandler, } from '../utils/promptUser.js';
5
5
  export const ProjectSchema = z.object({
6
6
  name: z.string().trim().nonempty().meta({ title: 'Project name', description: 'Name of the project. Used as identifier.' }),
7
7
  description: z.string().optional().meta({ title: 'Project description', description: 'Longer description of the project. For your benefit, optional.' }),
@@ -11,7 +11,11 @@ export const ProjectSchema = z.object({
11
11
  export const ProjectPoolSchema = z.object({
12
12
  pool: z.array(ProjectSchema).meta({ description: 'Set of projects, works in progress or just ideas.' }),
13
13
  }).meta({ title: 'Root project pool', description: 'Root object for pool of projects.' });
14
- export const getProjectPool = async (jsonPath) => await makeJsonData(jsonPath, ProjectPoolSchema);
14
+ const defaultProjectPool = { pool: [] };
15
+ export const getProjectPool = async (jsonPath, jsonSchemaUrl, autoCreateFiles = false, editor) => {
16
+ const { data } = await getDataTargetNicely(ProjectPoolSchema, { name: 'projectpool.json', expectedPath: jsonPath, jsonSchemaUrl }, async () => ({ encoded: defaultProjectPool }), autoCreateFiles);
17
+ return await makeGistData(ProjectPoolSchema, data, 'projectpool.json', makeGistConflictHandler(ProjectPoolSchema, editor), handleInvalidGist, "JSON to keep track of `projectpool` data for this user's job-queue implementation (this is probably autogenerated).");
18
+ };
15
19
  export const checkProjectName = (name, projects) => projects.some((project) => project.name === name);
16
20
  export const updateProject = async (project, projectPool, jobQueue, config) => {
17
21
  const pool = projectPool.data.pool;
package/dist/index.js CHANGED
@@ -2,45 +2,117 @@ import chalk from 'chalk';
2
2
  import clear from 'clear';
3
3
  import figlet from 'figlet';
4
4
  import { select } from '@inquirer/prompts';
5
- import { ExitPromptError } from '@inquirer/core';
5
+ import { ExitPromptError, Separator } from '@inquirer/core';
6
6
  import actions, { actionNames, actionsDependentOnJobs, actionsDependentOnProjects, } from './actions.js';
7
- import { getConfig } from './data/config.js';
7
+ import { getConfig, getDataPathFromConfig } from './data/config.js';
8
8
  import { getJobQueue } from './data/jobqueue.js';
9
9
  import { getProjectPool } from './data/projectpool.js';
10
+ import { editData } from './data/index.js';
11
+ import { simpleDeepCompare } from './utils/index.js';
12
+ import dns from 'dns/promises';
13
+ import { inquirerConfirm } from './utils/promptUser.js';
14
+ async function loadData(overrideConfigDir, overrideConfig, previousData) {
15
+ const { config, path: configPath, hadToCreate: autoCreateOtherFiles, } = await getConfig(overrideConfigDir, overrideConfig);
16
+ const data = {
17
+ config,
18
+ configPath,
19
+ jobqueue: await getJobQueue(getDataPathFromConfig(config.data.jobqueue), config.data.schemas.jobqueue, autoCreateOtherFiles, config.data.editor),
20
+ projectpool: await getProjectPool(getDataPathFromConfig(config.data.projectpool), config.data.schemas.projectpool, autoCreateOtherFiles, config.data.editor),
21
+ };
22
+ let connectionError = undefined;
23
+ for (const key of ['jobqueue', 'projectpool']) {
24
+ if (typeof config.data[key] === 'object') {
25
+ if (previousData &&
26
+ simpleDeepCompare(config.data[key], previousData.config.data[key])) {
27
+ if (key === 'jobqueue')
28
+ data['jobqueue'] = previousData['jobqueue'];
29
+ else if (key === 'projectpool')
30
+ data['projectpool'] = previousData['projectpool'];
31
+ }
32
+ else {
33
+ if (connectionError === undefined) {
34
+ const resolved = await dns
35
+ .resolve('github.com')
36
+ .catch((error) => new Error(`Could not resolve github.com: ${error}`));
37
+ connectionError = resolved instanceof Error ? resolved : null;
38
+ if (connectionError)
39
+ break;
40
+ }
41
+ await data[key].linkGist({
42
+ id: config.data[key].ghGistId,
43
+ accessToken: config.data[key].ghAccessToken,
44
+ });
45
+ if (!config.data.confirmGistUpdates ||
46
+ (await inquirerConfirm(`Pull gist for ${key}?`))) {
47
+ console.log(chalk.blue('[i]'), 'initial pull of gist for', key);
48
+ await data[key].pull();
49
+ }
50
+ }
51
+ }
52
+ }
53
+ if (connectionError) {
54
+ console.log(chalk.red('[e]'), connectionError.message);
55
+ if (config.data.confirmOffline &&
56
+ !(await inquirerConfirm('Continue offline?')))
57
+ throw connectionError;
58
+ console.log(chalk.blue('[i]'), 'Continuing offline...');
59
+ }
60
+ return data;
61
+ }
62
+ async function syncData(data) {
63
+ for (const key of ['jobqueue', 'projectpool']) {
64
+ await data[key].sync();
65
+ if (data[key].isLinked &&
66
+ (!data.config.data.confirmGistUpdates ||
67
+ (await inquirerConfirm(`Push gist for ${key}?`)))) {
68
+ console.log(chalk.blue('[i]'), 'sync push of gist for', key);
69
+ await data[key].push();
70
+ }
71
+ }
72
+ }
10
73
  export default async function main(overrideConfigDir, overrideConfig) {
11
74
  clear();
12
75
  console.log(chalk.yellow(figlet.textSync('JobQueue', { horizontalLayout: 'full' })));
13
- const config = await getConfig(overrideConfigDir, overrideConfig);
14
- const data = {
15
- config: config,
16
- jobqueue: await getJobQueue(config.data.jobqueue),
17
- projectpool: await getProjectPool(config.data.projectpool),
18
- };
76
+ let data = await loadData(overrideConfigDir, overrideConfig);
19
77
  console.log();
20
78
  try {
21
79
  while (true) {
22
80
  const action = await select({
23
81
  message: 'Select action',
24
- choices: actionNames.map((action) => {
25
- if (actionsDependentOnJobs.includes(action) &&
26
- data.jobqueue.data.queue.length === 0)
27
- return {
28
- name: action,
29
- value: action,
30
- disabled: '(Empty job queue)',
31
- };
32
- else if (actionsDependentOnProjects.includes(action) &&
33
- data.projectpool.data.pool.length === 0)
34
- return {
35
- name: action,
36
- value: action,
37
- disabled: '(Empty project pool)',
38
- };
39
- else
40
- return { name: action, value: action };
41
- }),
82
+ choices: [
83
+ ...actionNames.map((action) => {
84
+ if (actionsDependentOnJobs.includes(action) &&
85
+ data.jobqueue.data.queue.length === 0)
86
+ return {
87
+ name: action,
88
+ value: action,
89
+ disabled: '(Empty job queue)',
90
+ };
91
+ else if (actionsDependentOnProjects.includes(action) &&
92
+ data.projectpool.data.pool.length === 0)
93
+ return {
94
+ name: action,
95
+ value: action,
96
+ disabled: '(Empty project pool)',
97
+ };
98
+ else
99
+ return { name: action, value: action };
100
+ }),
101
+ new Separator(),
102
+ { name: 'editData', value: 'editData' },
103
+ { name: 'sync', value: 'sync' },
104
+ ],
105
+ pageSize: actionNames.length + 3,
42
106
  });
43
- await actions[action](data);
107
+ if (action === 'editData') {
108
+ await editData(data, data.configPath);
109
+ data = await loadData(overrideConfigDir, overrideConfig, data);
110
+ }
111
+ else if (action === 'sync') {
112
+ await syncData(data);
113
+ }
114
+ else
115
+ await actions[action](data);
44
116
  console.log();
45
117
  }
46
118
  }
@@ -48,4 +120,5 @@ export default async function main(overrideConfigDir, overrideConfig) {
48
120
  if (!(error instanceof ExitPromptError))
49
121
  throw error;
50
122
  }
123
+ await syncData(data);
51
124
  }
package/dist/utils/gistData.js ADDED
@@ -0,0 +1,108 @@
1
+ import * as z from 'zod';
2
+ import { Octokit } from '@octokit/rest';
3
+ import { simpleDeepCompare } from './index.js';
4
+ export const isGistData = (obj) => typeof obj === 'object' &&
5
+ obj !== null &&
6
+ 'isLinked' in obj &&
7
+ typeof obj.isLinked === 'boolean' &&
8
+ 'pull' in obj &&
9
+ typeof obj.pull === 'function' &&
10
+ 'push' in obj &&
11
+ typeof obj.push === 'function' &&
12
+ 'linkGist' in obj &&
13
+ typeof obj.linkGist === 'function';
14
+ export class GistDataError extends Error {
15
+ constructor(message) {
16
+ super(message);
17
+ }
18
+ }
19
+ const pullGist = async (octokit, filename, gistId) => {
20
+ const resp = await octokit.gists.get({ gist_id: gistId });
21
+ if (resp.status < 200 || resp.status >= 300)
22
+ throw new GistDataError(`Could not pull gist for '${filename}': HTTP ${resp.status}`);
23
+ const gistFiles = Object.keys(resp.data.files);
24
+ if (!gistFiles.includes(filename))
25
+ throw new GistDataError(`Could not pull gist for '${filename}': '${filename}' not in gist files (${gistFiles.join(', ')})`);
26
+ const gistFile = resp.data.files[filename];
27
+ return {
28
+ contents: gistFile.content,
29
+ description: resp.data.description,
30
+ };
31
+ };
32
+ const pushGist = async (octokit, filename, gistId, gist) => {
33
+ if (!gist.description && !gist.contents)
34
+ throw new TypeError(`pushGist called with nothing to change (for '${filename}')`);
35
+ await octokit.gists.update({
36
+ gist_id: gistId,
37
+ description: gist.description,
38
+ files: {
39
+ [filename]: {
40
+ content: gist.contents,
41
+ },
42
+ },
43
+ });
44
+ };
45
+ export const makeGistData = async (schema, jsonData, filename, handleConflict, handleInvalidGist, overwriteDescription) => {
46
+ let octokit = null;
47
+ let gist = null;
48
+ let lastGistData = null;
49
+ const unlinkedPull = async () => {
50
+ throw new Error('Cannot pull unlinked gist');
51
+ };
52
+ const unlinkedPush = async () => {
53
+ throw new Error('Cannot push unlinked gist');
54
+ };
55
+ const gistData = jsonData;
56
+ gistData.isLinked = false;
57
+ gistData.pull = unlinkedPull;
58
+ gistData.push = unlinkedPush;
59
+ gistData.linkGist = async (newGistParams) => {
60
+ if (!newGistParams && !gistData.isLinked) {
61
+ return;
62
+ }
63
+ else if (!newGistParams && gistData.isLinked) {
64
+ gistData.pull = unlinkedPull;
65
+ gistData.push = unlinkedPush;
66
+ gistData.isLinked = false;
67
+ }
68
+ else {
69
+ const { id: gistId, accessToken } = newGistParams;
70
+ octokit = new Octokit({ auth: accessToken });
71
+ gistData.pull = async () => {
72
+ gist = await pullGist(octokit, filename, gistId);
73
+ if (gist.contents && gist.contents.trim().length > 0) {
74
+ let parsedData;
75
+ try {
76
+ parsedData = schema.parse(JSON.parse(gist.contents));
77
+ }
78
+ catch (error) {
79
+ if (!(error instanceof z.ZodError || error instanceof SyntaxError))
80
+ throw error;
81
+ const newData = await handleInvalidGist(gist.contents, error);
82
+ if (newData !== null)
83
+ gistData.data = newData;
84
+ }
85
+ lastGistData = parsedData;
86
+ if (!simpleDeepCompare(gistData.data, parsedData))
87
+ gistData.data = await handleConflict(gistData.data, parsedData);
88
+ await gistData.sync();
89
+ }
90
+ };
91
+ gistData.push = async () => {
92
+ if (!gist)
93
+ console.warn('pushing GistData without pulling first; will overwrite');
94
+ await gistData.sync();
95
+ if (!simpleDeepCompare(lastGistData, gistData.data))
96
+ await pushGist(octokit, filename, gistId, {
97
+ description: overwriteDescription &&
98
+ (!gist || gist.description !== overwriteDescription)
99
+ ? overwriteDescription
100
+ : undefined,
101
+ contents: JSON.stringify(schema.encode(gistData.data), undefined, ' '),
102
+ });
103
+ };
104
+ gistData.isLinked = true;
105
+ }
106
+ };
107
+ return gistData;
108
+ };
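The object returned by `makeGistData` decorates the plain JSON-data wrapper with `isLinked`, `linkGist`, `pull`, and `push` (see `isGistData` above). A rough usage sketch, following the flow of `loadData()`/`syncData()` in `dist/index.js`; the path, schema URL, gist ID, token, and editor below are placeholders:

```js
// Sketch only — not part of the package; mirrors loadData()/syncData() in dist/index.js.
import { getJobQueue } from './data/jobqueue.js';

const jobqueue = await getJobQueue(
  '/job-queue/jobqueue.json', // local path taken from config
  'file:///job-queue/schemas/jobqueue.schema.json', // jsonSchemaUrl (placeholder)
  false, // autoCreateFiles
  'nano' // editor, used by makeGistConflictHandler if a conflict needs manual resolution
);

// Link a gist, then pull: pull() re-parses the gist contents with JobQueueSchema, hands
// invalid contents to handleInvalidGist, and calls the conflict handler when gist and
// local data differ.
await jobqueue.linkGist({
  id: '00000000000000000000000000000000',
  accessToken: 'ghp_000000000000000000000000000000000000',
});
await jobqueue.pull();

// ...mutate jobqueue.data via the interactive actions...

await jobqueue.sync(); // write the local JSON
await jobqueue.push(); // update the gist only if the data actually changed
```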
package/dist/utils/index.js CHANGED
@@ -1,8 +1,3 @@
1
- import * as tmp from 'tmp-promise';
2
- import chalk from 'chalk';
3
- import * as z from 'zod';
4
- import { confirm } from '@inquirer/prompts';
5
- import { editInteractively } from 'edit-like-git';
6
1
  export class AbortError extends Error {
7
2
  constructor(message) {
8
3
  super(message);
@@ -16,68 +11,11 @@ export function reorder(array, newIndecies) {
16
11
  array[idx] = originalArray[originalIdx];
17
12
  });
18
13
  }
19
- export const haveUserUpdateData = async (schema, data, options, checks) => {
20
- const file = options?.file?.type && options.file.type === 'abs'
21
- ? { path: options.file.path }
22
- : await tmp.file({
23
- prefix: options?.file?.type === 'tmp' ? options.file.prefix : undefined,
24
- postfix: '.json',
25
- });
26
- let contents = JSON.stringify(schema instanceof z.ZodObject && options?.jsonSchemaUrl
27
- ? { $schema: options?.jsonSchemaUrl, ...data }
28
- : data, undefined, ' ');
29
- let updatedResult = undefined;
30
- while (true) {
31
- const controller = new AbortController();
32
- const signal = controller.signal;
33
- const editorPromise = editInteractively(file.path, contents, options?.editor, options?.tooltips);
34
- await new Promise((resolve) => setTimeout(resolve, 1000));
35
- const abortPromise = confirm({ message: 'Type `y` to abort...' }, { signal })
36
- .finally(() => {
37
- process.stdout.moveCursor(0, -1);
38
- process.stdout.clearLine(1);
39
- })
40
- .then(async (shouldAbort) => {
41
- if (shouldAbort) {
42
- if (file.cleanup)
43
- await file.cleanup();
44
- throw new AbortError('User aborted action');
45
- }
46
- });
47
- contents = await Promise.race([editorPromise, abortPromise]);
48
- controller.abort();
49
- if (typeof contents !== 'string')
50
- return undefined;
51
- if (checks?.preparse) {
52
- const preparseError = checks?.preparse(contents);
53
- if (preparseError === 'pass') {
54
- updatedResult = undefined;
55
- break;
56
- }
57
- else if (typeof preparseError === 'object') {
58
- console.log(chalk.red(`${options?.errorHead}:`), preparseError.errMessage);
59
- continue;
60
- }
61
- }
62
- updatedResult = schema.safeParse(JSON.parse(contents));
63
- if (updatedResult.success) {
64
- if (checks?.postparse) {
65
- const postparseError = checks?.postparse(updatedResult.data);
66
- if (typeof postparseError === 'object') {
67
- console.log(chalk.red(`${options?.errorHead}:`), postparseError.errMessage);
68
- continue;
69
- }
70
- }
71
- break;
72
- }
73
- console.log(chalk.red(`${options?.errorHead}:`), `JSON invalid: ${updatedResult.error.message}`);
14
+ export const clearNLines = (n) => {
15
+ for (let i = 0; i < n; i++) {
16
+ process.stdout.moveCursor(0, -1);
17
+ process.stdout.clearLine(1);
74
18
  }
75
- if (file.cleanup)
76
- await file.cleanup();
77
- return (updatedResult &&
78
- (updatedResult.success
79
- ? updatedResult.data
80
- : (() => {
81
- throw new Error(`${options?.errorHead}: JSON invalid: ${updatedResult.error.message}`);
82
- })()));
19
+ process.stdout.cursorTo(0);
83
20
  };
21
+ export const simpleDeepCompare = (a, b, replacer) => JSON.stringify(a, replacer) === JSON.stringify(b, replacer);
package/dist/utils/jsonData.js CHANGED
@@ -1,17 +1,14 @@
1
1
  import fs from 'fs/promises';
2
- import * as z from 'zod';
3
- export const makeJsonData = async (jsonPath, schema) => {
2
+ export const makeJsonData = async (jsonPath, schema, overrideJsonSchemaUrl, ...[toInput]) => {
4
3
  const json = JSON.parse(await fs.readFile(jsonPath, 'utf8').catch((error) => {
5
4
  throw new Error(`Couldn't open '${jsonPath}': ${error}`);
6
5
  }));
7
- const schemaUrl = json['$schema'];
8
- const parsed = schema.safeParse(json);
9
- if (!parsed.success)
10
- throw new Error(`JSON at '${jsonPath}' does not match schema: ${z.treeifyError(parsed.error)}`);
6
+ const schemaUrl = overrideJsonSchemaUrl ?? json['$schema'];
7
+ const data = schema.parse(json);
11
8
  const jsonData = {
12
- data: parsed.data,
13
- async sync() {
14
- const encoded = schema.encode(jsonData.data);
9
+ data,
10
+ sync() {
11
+ const encoded = toInput ? toInput(jsonData.data) : jsonData.data;
15
12
  return fs.writeFile(jsonPath, JSON.stringify(typeof schemaUrl === 'string'
16
13
  ? {
17
14
  $schema: schemaUrl,
package/dist/utils/promptUser.js ADDED
@@ -0,0 +1,215 @@
1
+ import * as z from 'zod';
2
+ import * as tmp from 'tmp-promise';
3
+ import chalk from 'chalk';
4
+ import { confirm, select } from '@inquirer/prompts';
5
+ import { editInteractively } from 'edit-like-git';
6
+ import fs from 'fs/promises';
7
+ import { AbortError, clearNLines } from './index.js';
8
+ import { makeJsonData } from './jsonData.js';
9
+ import { existsSync } from 'fs';
10
+ import path from 'path';
11
+ import { diffLines } from 'diff';
12
+ export const haveUserUpdateContents = async (contents, options, check) => {
13
+ const file = options?.file?.type && options.file.type === 'abs'
14
+ ? { path: options.file.path }
15
+ : await tmp.file({
16
+ prefix: options?.file?.type === 'tmp' ? options.file.prefix : undefined,
17
+ postfix: '.json',
18
+ });
19
+ const originalContents = await fs.readFile(file.path, { encoding: 'utf8' });
20
+ while (true) {
21
+ const controller = new AbortController();
22
+ const signal = controller.signal;
23
+ const editorPromise = editInteractively(file.path, contents, options?.editor, options?.tooltips);
24
+ await new Promise((resolve) => setTimeout(resolve, 1000));
25
+ const abortPromise = confirm({ message: 'Type `y` to abort...' }, { signal })
26
+ .finally(() => clearNLines(1))
27
+ .then(async (shouldAbort) => {
28
+ if (shouldAbort) {
29
+ if (file.cleanup)
30
+ await file.cleanup();
31
+ else
32
+ await fs.writeFile(file.path, originalContents, {
33
+ encoding: 'utf8',
34
+ });
35
+ throw new AbortError('User aborted action');
36
+ }
37
+ return undefined;
38
+ });
39
+ contents = await Promise.race([editorPromise, abortPromise]);
40
+ controller.abort();
41
+ if (typeof contents !== 'string')
42
+ return undefined;
43
+ if (check) {
44
+ const error = check(contents);
45
+ if (error === 'pass') {
46
+ break;
47
+ }
48
+ else if (typeof error === 'object') {
49
+ console.log(chalk.red(`${options?.errorHead}:`), error.errMessage);
50
+ continue;
51
+ }
52
+ }
53
+ }
54
+ if (file.cleanup)
55
+ await file.cleanup();
56
+ return contents;
57
+ };
58
+ export const haveUserUpdateData = async (schema, data, options, checks) => {
59
+ const contents = JSON.stringify(schema instanceof z.ZodObject && options?.jsonSchemaUrl
60
+ ? { $schema: options?.jsonSchemaUrl, ...data }
61
+ : data, undefined, ' ');
62
+ let updatedData = undefined;
63
+ await haveUserUpdateContents(contents, options, (contents) => {
64
+ if (checks?.preparse) {
65
+ const preparseError = checks.preparse(contents);
66
+ if (preparseError !== 'continue') {
67
+ updatedData = undefined;
68
+ return preparseError;
69
+ }
70
+ }
71
+ try {
72
+ updatedData = schema.parse(JSON.parse(contents));
73
+ if (checks?.postparse) {
74
+ const postparseError = checks.postparse(updatedData);
75
+ if (typeof postparseError === 'object')
76
+ return postparseError;
77
+ }
78
+ return 'pass';
79
+ }
80
+ catch (error) {
81
+ if (error instanceof SyntaxError)
82
+ return { errMessage: `Received invalid JSON: ${error.message}` };
83
+ else if (error instanceof z.ZodError)
84
+ return {
85
+ errMessage: `JSON does not match schema:\n${z.prettifyError(error)}`,
86
+ };
87
+ else
88
+ throw error;
89
+ }
90
+ });
91
+ return updatedData;
92
+ };
93
+ export async function getDataTargetNicely(schema, target, recreateData, autoCreateFiles = false, ...conversionArg) {
94
+ let jsonData = undefined;
95
+ let hadToCreate = false;
96
+ try {
97
+ if (!existsSync(target.expectedPath))
98
+ throw new AbortError(`File '${target.expectedPath}' for ${target.name} does not exist`);
99
+ jsonData = await makeJsonData(target.expectedPath, schema, target.jsonSchemaUrl, ...conversionArg);
100
+ }
101
+ catch (error) {
102
+ if (error instanceof SyntaxError) {
103
+ console.log(chalk.red('[e]'), `Invalid JSON at '${target.expectedPath}'`);
104
+ }
105
+ else if (error instanceof z.ZodError) {
106
+ console.log(chalk.red('[e]'), `JSON at '${target.expectedPath}' does not match schema:\n${z.prettifyError(error)}`);
107
+ }
108
+ else if (error instanceof AbortError) {
109
+ console.log(chalk.red('[e]'), error.message);
110
+ }
111
+ else
112
+ throw error;
113
+ const shouldRegenerate = autoCreateFiles ||
114
+ (await confirm({
115
+ message: `Want to regenerate ${target.name}?`,
116
+ }).finally(() => clearNLines(1)));
117
+ if (shouldRegenerate) {
118
+ clearNLines(1);
119
+ console.log(chalk.blue('[i]'), `Creating ${target.name} at '${target.expectedPath}'...`);
120
+ hadToCreate = true;
121
+ const { encoded, newJsonPath, newJsonSchemaUrl } = await recreateData();
122
+ if (newJsonPath)
123
+ target.expectedPath = newJsonPath;
124
+ if (newJsonSchemaUrl)
125
+ target.jsonSchemaUrl = newJsonSchemaUrl;
126
+ const dir = path.dirname(target.expectedPath);
127
+ if (!existsSync(dir))
128
+ await fs.mkdir(dir, { recursive: true });
129
+ await fs.writeFile(target.expectedPath, JSON.stringify(typeof encoded === 'object' && target.jsonSchemaUrl
130
+ ? { $schema: target.jsonSchemaUrl, ...encoded }
131
+ : encoded, undefined, ' '));
132
+ jsonData = await makeJsonData(newJsonPath ?? target.expectedPath, schema, target.jsonSchemaUrl, ...conversionArg);
133
+ }
134
+ else
135
+ throw error;
136
+ }
137
+ if (!jsonData)
138
+ throw new Error(`Could not get data target ${target.name}`);
139
+ return { data: jsonData, hadToCreate };
140
+ }
141
+ export const inquirerConfirm = (message) => confirm({ message }).finally(() => clearNLines(1));
142
+ const formatGitChange = (ours, theirs, removeExtraLine = false) => `<<<<<<< ours
143
+ ${ours}${removeExtraLine ? '' : '\n'}=======
144
+ ${theirs}${removeExtraLine ? '' : '\n'}>>>>>>> theirs
145
+ `;
146
+ export const resolveConflictLikeGit = async (schema, ours, theirs, editor) => {
147
+ const objStrings = [ours, theirs].map((o) => JSON.stringify(o, undefined, ' '));
148
+ const diff = diffLines(...objStrings);
149
+ let changeGroup = null;
150
+ const diffLikeGit = diff.reduce((diffString, change, idx) => {
151
+ if (!change.added && !change.removed) {
152
+ if (changeGroup) {
153
+ diffString += formatGitChange(...changeGroup, true);
154
+ changeGroup = null;
155
+ }
156
+ diffString += change.value;
157
+ }
158
+ else {
159
+ if (!changeGroup)
160
+ changeGroup = ['', ''];
161
+ changeGroup[change.added ? 1 : 0] += change.value;
162
+ }
163
+ if (changeGroup && idx === diff.length - 1)
164
+ diffString += formatGitChange(...changeGroup, true);
165
+ return diffString;
166
+ }, '');
167
+ let resolved = undefined;
168
+ await haveUserUpdateContents(diffLikeGit, {
169
+ editor,
170
+ errorHead: 'Resolve conflict failed',
171
+ file: { type: 'tmp', prefix: 'gist-conflict' },
172
+ tooltips: ['Resolve the conflict.'],
173
+ }, (contents) => {
174
+ try {
175
+ resolved = schema.parse(JSON.parse(contents));
176
+ return 'pass';
177
+ }
178
+ catch (error) {
179
+ if (error instanceof SyntaxError)
180
+ return { errMessage: `Received invalid JSON: ${error.message}` };
181
+ else if (error instanceof z.ZodError)
182
+ return {
183
+ errMessage: `JSON does not match schema:\n${z.prettifyError(error)}`,
184
+ };
185
+ else
186
+ throw error;
187
+ }
188
+ });
189
+ return resolved;
190
+ };
191
+ export const makeGistConflictHandler = (schema, editor) => async (ours, theirs) => {
192
+ console.log(chalk.yellow('[w]'), `Pulled gist is different than local JSON`);
193
+ const oursOrTheirs = await select({
194
+ message: 'Want to use ours or theirs?',
195
+ choices: [
196
+ { value: 'ours', description: '(local JSON)' },
197
+ { value: 'theirs', description: '(the gist)' },
198
+ { value: 'resolve', description: 'resolve manually' },
199
+ ],
200
+ });
201
+ return oursOrTheirs === 'theirs'
202
+ ? theirs
203
+ : oursOrTheirs === 'ours'
204
+ ? ours
205
+ : await resolveConflictLikeGit(schema, ours, theirs, editor);
206
+ };
207
+ export const handleInvalidGist = async (_, error) => {
208
+ let filename;
209
+ console.log(chalk.red('[e]'), `Pulled gist for '${filename}' is invalid: doesn't match schema:\n${error instanceof z.ZodError ? z.prettifyError(error) : error.message}"`);
210
+ const shouldContinue = await confirm({
211
+ message: 'Ignore gist contents and continue?',
212
+ });
213
+ if (!shouldContinue)
214
+ throw new Error(`Invalid gist for '${filename}' and user aborted`);
215
+ };
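For context on `makeGistConflictHandler`: choosing `resolve manually` runs `resolveConflictLikeGit`, which diffs the two pretty-printed JSON serializations with `diffLines` and opens the result in the editor with git-style markers. Conflicting regions look roughly like this (contents paraphrased, not literal output):

```
<<<<<<< ours
 (the conflicting lines from your local JSON)
=======
 (the conflicting lines from the pulled gist)
>>>>>>> theirs
```

The edited buffer is re-parsed against the data schema until it validates, so the markers have to be removed before the resolution is accepted.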
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@uss-stargazer/job-queue",
- "version": "0.0.1",
+ "version": "0.0.2",
  "description": "A CLI app to keep track of jobs/tasks built around a couple of JSON files.",
  "author": "uss-stargazer",
  "license": "Apache-2.0",
@@ -23,15 +23,18 @@
  "build:release": "npm run clean && tsc -p tsconfig.release.json",
  "lint": "eslint .",
  "prettier": "prettier \"{src,__{tests}__}/**/*.{ts,mts}\" --config .prettierrc --write",
- "prettier:check": "prettier \"{src,__{tests}__}/**/*.{ts,mts}\" --config .prettierrc --check"
+ "prettier:check": "prettier \"{src,__{tests}__}/**/*.{ts,mts}\" --config .prettierrc --check",
+ "test": "cross-env NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest"
  },
  "dependencies": {
  "@inquirer/prompts": "^8.2.0",
  "@inquirer/search": "^4.1.0",
+ "@octokit/rest": "^22.0.1",
  "ansi-escapes": "^7.2.0",
  "chalk": "^5.6.2",
  "clear": "^0.1.0",
  "commander": "^14.0.2",
+ "diff": "^8.0.3",
  "edit-like-git": "^1.0.2",
  "env-paths": "^4.0.0",
  "figlet": "^1.9.4",
@@ -42,14 +45,19 @@
  "devDependencies": {
  "@eslint/js": "^9.39.2",
  "@types/eslint__js": "^8.42.3",
+ "@types/jest": "^30.0.0",
  "@types/node": "^25.0.6",
  "@typescript-eslint/parser": "^8.52.0",
+ "cross-env": "^10.1.0",
+ "dotenv": "^17.2.3",
  "eslint": "^9.39.2",
  "eslint-config-prettier": "^10.1.8",
  "globals": "^17.0.0",
+ "jest": "^30.2.0",
  "prettier": "^3.7.4",
  "rimraf": "^6.1.2",
  "ts-api-utils": "^2.4.0",
+ "ts-jest": "^29.4.6",
  "typescript": "^5.9.3",
  "typescript-eslint": "^8.52.0"
  }