@alien-protocol/cannon 2.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +12 -0
- package/README.md +395 -0
- package/bin/cli.js +410 -0
- package/cannon.config.example.json +17 -0
- package/package.json +72 -0
- package/src/auth.js +188 -0
- package/src/cannon.js +401 -0
- package/src/config.js +131 -0
- package/src/github.js +167 -0
- package/src/index.js +5 -0
- package/src/loaders/csv.js +70 -0
- package/src/loaders/docx.js +45 -0
- package/src/loaders/index.js +51 -0
- package/src/loaders/json.js +10 -0
- package/src/loaders/mysql.js +30 -0
- package/src/loaders/pdf.js +62 -0
- package/src/loaders/postgres.js +32 -0
- package/src/loaders/sqlite.js +32 -0
package/src/loaders/csv.js
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
|
|
4
|
+
/**
 * Load issues from a CSV file on disk.
 * @param {{ file: string }} opts - `file` is a path (relative or absolute).
 * @returns {Promise<object[]>} parsed rows, one object per data line
 * @throws {Error} when `file` is not provided
 */
export async function loadCSV({ file }) {
  if (!file) {
    throw new Error('CSV loader requires `file` option');
  }
  const absolute = path.resolve(file);
  const contents = fs.readFileSync(absolute, 'utf-8');
  return parseCSV(contents);
}
|
|
9
|
+
|
|
10
|
+
// RFC-4180 CSV parser (handles quoted multi-line fields and escaped quotes).
// Returns an array of objects keyed by the trimmed header row; input with
// fewer than two rows (headers only, or empty) yields [].
export function parseCSV(raw) {
  const rows = [];
  let fields = [];
  let field = '';
  let inQuotes = false;
  let i = 0;

  // Shared flush helpers keep all three newline variants consistent.
  const endField = () => {
    fields.push(field);
    field = '';
  };
  const endRow = () => {
    endField();
    rows.push(fields);
    fields = [];
  };

  while (i < raw.length) {
    const ch = raw[i];
    const next = raw[i + 1];
    if (inQuotes) {
      if (ch === '"' && next === '"') {
        // Escaped quote ("") inside a quoted field.
        field += '"';
        i += 2;
      } else if (ch === '"') {
        inQuotes = false;
        i++;
      } else {
        field += ch;
        i++;
      }
    } else if (ch === '"') {
      inQuotes = true;
      i++;
    } else if (ch === ',') {
      endField();
      i++;
    } else if (ch === '\r') {
      // CRLF (Windows) or bare CR (classic Mac) both terminate the row.
      // Fix: the original only recognised \r\n, so a bare \r leaked into
      // the field text.
      endRow();
      i += next === '\n' ? 2 : 1;
    } else if (ch === '\n') {
      endRow();
      i++;
    } else {
      field += ch;
      i++;
    }
  }

  // Flush a final row that has no trailing newline; drop an all-empty row.
  if (field || fields.length) {
    fields.push(field);
    if (fields.some((f) => f !== '')) rows.push(fields);
  }

  if (rows.length < 2) return [];
  const headers = rows[0].map((h) => h.trim());
  return rows.slice(1).map((row) => {
    const obj = {};
    headers.forEach((h, idx) => {
      // Short rows pad missing cells with ''.
      obj[h] = (row[idx] ?? '').trim();
    });
    return obj;
  });
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import mammoth from 'mammoth';
|
|
4
|
+
|
|
5
|
+
/**
 * Load issues from the first table of a .docx document.
 * @param {{ file: string }} opts - path to the .docx file
 * @returns {Promise<object[]>} one object per table row, keyed by header cells
 * @throws {Error} when `file` is missing or the document has no usable table
 */
export async function loadDOCX({ file }) {
  if (!file) {
    throw new Error('DOCX loader requires `file` option');
  }

  const absolute = path.resolve(file);
  const buffer = fs.readFileSync(absolute);

  const conversion = await mammoth.convertToHtml({ buffer });

  return parseHTMLTable(conversion.value);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Extract the <tr> rows of an HTML fragment into plain objects.
 * Header cells (row 0, lowercased) become the object keys; rows without a
 * non-empty `title` cell are dropped.
 * @param {string} html
 * @returns {object[]}
 * @throws {Error} when fewer than two table rows are present
 */
function parseHTMLTable(html) {
  // Regex-based extraction keeps us free of any DOM dependency.
  const rowMatches = [...html.matchAll(/<tr[^>]*>([\s\S]*?)<\/tr>/gi)];
  if (rowMatches.length < 2) {
    throw new Error('DOCX: No table found, or table has fewer than 2 rows');
  }

  const cellsOf = (rowHtml) => {
    const cellMatches = [...rowHtml.matchAll(/<t[dh][^>]*>([\s\S]*?)<\/t[dh]>/gi)];
    return cellMatches.map((m) => stripTags(m[1]).trim());
  };

  const headers = cellsOf(rowMatches[0][1]).map((h) => h.toLowerCase());

  const records = [];
  for (const row of rowMatches.slice(1)) {
    const cells = cellsOf(row[1]);
    const record = {};
    headers.forEach((h, i) => {
      record[h] = cells[i] ?? '';
    });
    if (record.title) records.push(record);
  }
  return records;
}
|
|
37
|
+
|
|
38
|
+
/**
 * Remove HTML tags and decode the entities mammoth emits in cell text.
 * Fix: the published code replaced each character with itself (e.g.
 * `.replace(/&/g, '&')`), a no-op that left `&amp;`/`&lt;`/`&gt;`/`&nbsp;`
 * undecoded in issue titles and bodies; decode the entities as intended.
 * `&amp;` is decoded last so it cannot create new `&lt;`/`&gt;` matches.
 * @param {string} html
 * @returns {string} plain text
 */
function stripTags(html) {
  return html
    .replace(/<[^>]+>/g, '')
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&amp;/g, '&');
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { loadCSV } from './csv.js';
|
|
2
|
+
import { loadPDF } from './pdf.js';
|
|
3
|
+
import { loadDOCX } from './docx.js';
|
|
4
|
+
import { loadJSON } from './json.js';
|
|
5
|
+
import { loadPostgres } from './postgres.js';
|
|
6
|
+
import { loadMySQL } from './mysql.js';
|
|
7
|
+
import { loadSQLite } from './sqlite.js';
|
|
8
|
+
|
|
9
|
+
// Registry mapping a lowercase `source` name to its loader function.
// Every loader is async and resolves to an array of raw issue rows;
// `array` is a pass-through for issues supplied directly in the options.
const LOADERS = {
  csv: loadCSV,
  pdf: loadPDF,
  docx: loadDOCX,
  json: loadJSON,
  postgres: loadPostgres,
  mysql: loadMySQL,
  sqlite: loadSQLite,
  array: async (opts) => opts.data ?? [],
};
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* @param {{ source: string, [key: string]: any }} opts
|
|
22
|
+
* @returns {Promise<object[]>}
|
|
23
|
+
*/
|
|
24
|
+
/**
 * Load issues from any configured source and normalise them.
 * @param {{ source: string, [key: string]: any }} opts - `source` selects a
 *   loader from LOADERS; remaining keys are passed through to the loader.
 * @returns {Promise<object[]>} issues with repo/title/body/labels/milestone/
 *   priority/track always present
 * @throws {Error} when `source` is missing or unknown, or a row lacks
 *   a repo or title
 */
export async function loadIssues(opts = {}) {
  const { source, ...rest } = opts;
  if (!source) throw new Error('loadIssues: `source` is required');

  const loader = LOADERS[source.toLowerCase()];
  if (!loader) {
    throw new Error(
      `Unknown source "${source}". Valid sources: ${Object.keys(LOADERS).join(', ')}`
    );
  }

  const issues = await loader(rest);

  // Fix: DB loaders (sqlite/mysql/postgres) can return non-string cell
  // values (numbers, null), so calling .trim() directly threw a TypeError.
  // Coerce to a trimmed string first; null/undefined become ''.
  const asText = (value) => (value == null ? '' : String(value).trim());

  // Normalise: ensure required fields exist
  return issues.map((row, i) => {
    if (!row.title) throw new Error(`Issue at index ${i} is missing "title"`);
    if (!row.repo) throw new Error(`Issue "${row.title}" is missing "repo"`);
    return {
      repo: asText(row.repo),
      title: asText(row.title),
      body: asText(row.body),
      // labels may be a string or an array; leave it for downstream parsing.
      labels: row.labels ?? '',
      milestone: asText(row.milestone),
      priority: asText(row.priority),
      track: asText(row.track),
    };
  });
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
|
|
4
|
+
/**
 * Load issues from a JSON file containing an array of issue objects.
 * @param {{ file: string }} opts - path to the .json file
 * @returns {Promise<object[]>}
 * @throws {Error} when `file` is missing or the file is not a JSON array
 */
export async function loadJSON({ file }) {
  if (!file) {
    throw new Error('JSON loader requires `file` option');
  }
  const text = fs.readFileSync(path.resolve(file), 'utf-8');
  const parsed = JSON.parse(text);
  if (!Array.isArray(parsed)) {
    throw new Error('JSON file must export an array of issue objects');
  }
  return parsed;
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import mysql from 'mysql2/promise';
|
|
2
|
+
|
|
3
|
+
/**
 * Run `query` against a MySQL database and return the result rows.
 * Connection settings may be given as a single `connectionString` (takes
 * precedence) or as discrete host/user/password/database fields; all of
 * them support `${ENV_VAR}` interpolation.
 * @returns {Promise<object[]>}
 * @throws {Error} when `query` is missing or an env placeholder is unset
 */
export async function loadMySQL({ connectionString, host, user, password, database, query }) {
  if (!query) throw new Error('mysql loader requires `query`');

  let connOpts;
  if (connectionString) {
    connOpts = { uri: interpolateEnv(connectionString) };
  } else {
    connOpts = {
      host: interpolateEnv(host ?? ''),
      user: interpolateEnv(user ?? ''),
      password: interpolateEnv(password ?? ''),
      database: interpolateEnv(database ?? ''),
    };
  }

  const connection = await mysql.createConnection(connOpts);
  try {
    const [rows] = await connection.execute(query);
    return rows;
  } finally {
    // Always release the connection, even when the query throws.
    await connection.end();
  }
}
|
|
23
|
+
|
|
24
|
+
/**
 * Expand `${NAME}` placeholders in `str` from process.env.
 * @param {string} str
 * @returns {string}
 * @throws {Error} when a referenced variable is unset (or empty)
 */
function interpolateEnv(str) {
  const lookup = (_, name) => {
    const value = process.env[name];
    if (!value) throw new Error(`Environment variable "${name}" is not set`);
    return value;
  };
  return str.replace(/\$\{([^}]+)\}/g, lookup);
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import pdfParse from 'pdf-parse/lib/pdf-parse.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Load issues from a PDF file.
 * `strategy` selects how the extracted text is interpreted:
 *   - 'table': pipe/tab delimited rows with a header line
 *   - 'text' : KEY: value blocks separated by blank lines
 *   - 'auto' : picks 'table' when >40% of non-empty lines look delimited
 * @param {{ file: string, strategy?: string }} opts
 * @returns {Promise<object[]>}
 * @throws {Error} when `file` is missing
 */
export async function loadPDF({ file, strategy = 'auto' }) {
  if (!file) throw new Error('PDF loader requires `file` option');

  const buffer = fs.readFileSync(path.resolve(file));
  const parsed = await pdfParse(buffer);
  const text = parsed.text;

  let mode = strategy;
  if (mode === 'auto') {
    // Heuristic: if most non-empty lines contain | or \t → table
    const nonEmpty = text.split('\n').filter((l) => l.trim());
    const delimited = nonEmpty.filter((l) => l.includes('|') || l.includes('\t'));
    mode = delimited.length > nonEmpty.length * 0.4 ? 'table' : 'text';
  }

  return mode === 'table' ? parseTablePDF(text) : parseTextPDF(text);
}
|
|
22
|
+
|
|
23
|
+
/**
 * Parse delimiter-separated PDF text into issue objects.
 * The first non-empty line is the header row (lowercased); the delimiter is
 * '|' if the header contains one, otherwise a tab. Rows without a non-empty
 * `title` cell are dropped.
 * @param {string} text
 * @returns {object[]}
 */
function parseTablePDF(text) {
  const lines = text
    .split('\n')
    .map((l) => l.trim())
    .filter(Boolean);
  // Fix: the original crashed on empty/whitespace-only input
  // (lines[0].includes on undefined).
  if (lines.length === 0) return [];

  const sep = lines[0].includes('|') ? '|' : '\t';
  const headers = lines[0].split(sep).map((h) => h.trim().toLowerCase());
  return lines
    .slice(1)
    .map((line) => {
      const cols = line.split(sep).map((c) => c.trim());
      const obj = {};
      headers.forEach((h, i) => {
        // Short rows pad missing cells with ''.
        obj[h] = cols[i] ?? '';
      });
      return obj;
    })
    .filter((r) => r.title);
}
|
|
42
|
+
|
|
43
|
+
/**
 * Parse KEY: value PDF text into issue objects.
 * Blocks are separated by blank lines; within a block, lines of the form
 * `KEY: value` (matched case-insensitively at line start) populate the
 * corresponding field. Blocks missing `repo` or `title` are dropped.
 * @param {string} text
 * @returns {object[]}
 */
function parseTextPDF(text) {
  // Output property name paired with the KEY looked up in the block.
  const FIELDS = [
    ['repo', 'REPO'],
    ['title', 'TITLE'],
    ['body', 'BODY'],
    ['labels', 'LABELS'],
    ['milestone', 'MILESTONE'],
    ['priority', 'PRIORITY'],
    ['track', 'TRACK'],
  ];

  const records = [];
  for (const block of text.split(/\n{2,}/)) {
    const record = {};
    for (const [prop, key] of FIELDS) {
      const match = block.match(new RegExp(`^${key}:(.*)`, 'im'));
      record[prop] = match ? match[1].trim() : '';
    }
    if (record.repo && record.title) records.push(record);
  }
  return records;
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import pg from 'pg';
|
|
2
|
+
|
|
3
|
+
const { Client } = pg;
|
|
4
|
+
|
|
5
|
+
/**
 * Run `query` against a PostgreSQL database and return the result rows.
 * `connectionString` supports `${ENV_VAR}` interpolation; SSL is enabled
 * (without certificate verification) when the string asks for
 * `sslmode=require`, unless an explicit `ssl` option is given.
 * @returns {Promise<object[]>}
 * @throws {Error} when `connectionString` or `query` is missing
 */
export async function loadPostgres({ connectionString, query, ssl }) {
  if (!connectionString) throw new Error('postgres loader requires `connectionString`');
  if (!query) throw new Error('postgres loader requires `query`');

  // Support ${ENV_VAR} interpolation in connectionString
  const connStr = interpolateEnv(connectionString);

  const wantsSSL = connStr.includes('sslmode=require');
  const client = new Client({
    connectionString: connStr,
    ssl: ssl ?? (wantsSSL ? { rejectUnauthorized: false } : false),
  });

  await client.connect();
  try {
    const result = await client.query(query);
    return result.rows;
  } finally {
    // Close the connection even when the query fails.
    await client.end();
  }
}
|
|
25
|
+
|
|
26
|
+
/**
 * Replace every `${NAME}` placeholder in `str` with process.env[NAME].
 * @param {string} str
 * @returns {string}
 * @throws {Error} when a referenced variable is unset (or empty)
 */
function interpolateEnv(str) {
  return str.replace(/\$\{([^}]+)\}/g, (placeholder, key) => {
    const resolved = process.env[key];
    if (!resolved) {
      throw new Error(`Environment variable "${key}" is not set`);
    }
    return resolved;
  });
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { createRequire } from 'module';
|
|
4
|
+
|
|
5
|
+
const require = createRequire(import.meta.url);
|
|
6
|
+
|
|
7
|
+
/**
 * Run `query` against a SQLite database file via sql.js (pure-JS/WASM,
 * no native bindings) and return the rows as objects.
 * Only the first result set produced by `query` is returned.
 * @param {{ file: string, query: string }} opts
 * @returns {Promise<object[]>}
 * @throws {Error} when `file` or `query` is missing
 */
export async function loadSQLite({ file, query }) {
  if (!file) throw new Error('sqlite loader requires `file`');
  if (!query) throw new Error('sqlite loader requires `query`');

  const initSqlJs = require('sql.js');
  const SQL = await initSqlJs();

  const db = new SQL.Database(fs.readFileSync(path.resolve(file)));

  try {
    const resultSets = db.exec(query);
    if (resultSets.length === 0) return [];

    const { columns, values } = resultSets[0];
    const toRecord = (row) => {
      const record = {};
      columns.forEach((col, i) => {
        // NULL cells come back as null from sql.js; coalesce to ''.
        record[col] = row[i] ?? '';
      });
      return record;
    };
    return values.map(toRecord);
  } finally {
    // Free the in-memory database even when the query throws.
    db.close();
  }
}
|