catport 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +94 -0
- package/CONTRIBUTING.md +133 -0
- package/LICENSE +21 -0
- package/README.md +414 -0
- package/bin/catport +8 -0
- package/package.json +48 -0
- package/src/cli/args.js +133 -0
- package/src/cli/main.js +78 -0
- package/src/cli/parser.js +152 -0
- package/src/cli/ui.js +78 -0
- package/src/config/constants.js +62 -0
- package/src/config/ignores.js +119 -0
- package/src/config/loader.js +15 -0
- package/src/config/options.js +181 -0
- package/src/core/analyzer.js +23 -0
- package/src/core/bundler.js +165 -0
- package/src/core/extractor.js +76 -0
- package/src/core/ignore.js +65 -0
- package/src/core/processor.js +59 -0
- package/src/core/scanner.js +184 -0
- package/src/formatters/index.js +78 -0
- package/src/formatters/json.js +284 -0
- package/src/formatters/markdown.js +164 -0
- package/src/formatters/multipart.js +127 -0
- package/src/formatters/xml.js +221 -0
- package/src/formatters/yaml.js +147 -0
- package/src/index.js +11 -0
- package/src/optimizers/definitions.js +79 -0
- package/src/optimizers/index.js +96 -0
- package/src/optimizers/langs/batch.js +3 -0
- package/src/optimizers/langs/c_family.js +3 -0
- package/src/optimizers/langs/clojure.js +3 -0
- package/src/optimizers/langs/css.js +3 -0
- package/src/optimizers/langs/go.js +5 -0
- package/src/optimizers/langs/haskell.js +4 -0
- package/src/optimizers/langs/html.js +4 -0
- package/src/optimizers/langs/ini.js +4 -0
- package/src/optimizers/langs/javascript.js +11 -0
- package/src/optimizers/langs/lua.js +4 -0
- package/src/optimizers/langs/markdown.js +3 -0
- package/src/optimizers/langs/perl.js +3 -0
- package/src/optimizers/langs/php.js +4 -0
- package/src/optimizers/langs/powershell.js +5 -0
- package/src/optimizers/langs/python.js +5 -0
- package/src/optimizers/langs/ruby.js +4 -0
- package/src/optimizers/langs/rust.js +3 -0
- package/src/optimizers/langs/shell.js +4 -0
- package/src/optimizers/langs/sql.js +4 -0
- package/src/optimizers/langs/xml.js +3 -0
- package/src/optimizers/langs/yaml.js +3 -0
- package/src/optimizers/tokenizer.js +444 -0
- package/src/utils/git.js +35 -0
- package/src/utils/io.js +79 -0
- package/src/utils/logger.js +25 -0
- package/src/utils/path.js +59 -0
- package/src/utils/style.js +59 -0
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
/**
 * CLI option registry, keyed by camelCase option name.
 *
 * Each entry describes one flag:
 *   short    - single-character alias (e.g. `-h`)
 *   type     - 'boolean' | 'string' | 'number' | 'array'
 *   desc     - help text shown in usage output
 *   default  - value used when the flag is absent (only on some entries)
 *   metavar  - placeholder name for the flag's argument (only on some entries)
 *   category - help-screen grouping: 'General' | 'Bundling' | 'Extraction'
 *   optional - the flag's argument may be omitted (only on gitDiff)
 *
 * NOTE(review): insertion order is preserved deliberately — consumers that
 * iterate this object presumably render options in this order; confirm
 * against the help renderer before reordering.
 */
export const OPTIONS = {
  // --- General ----------------------------------------------------------
  help: { short: 'h', type: 'boolean', desc: 'Display this help message and exit.', category: 'General' },
  version: { short: 'V', type: 'boolean', desc: 'Display version information and exit.', category: 'General' },
  verbose: { short: 'v', type: 'boolean', desc: 'Enable verbose logging to stderr.', category: 'General' },
  output: { short: 'o', type: 'string', desc: 'Write output to <FILE> instead of stdout.', metavar: 'FILE', category: 'General' },

  // --- Bundling ---------------------------------------------------------
  format: { short: 'f', type: 'string', desc: 'Output format: "md", "xml", "json", "yaml", or "multipart".', default: 'md', metavar: 'FMT', category: 'Bundling' },
  replyFormat: { short: 'R', type: 'string', desc: 'Instruct LLM to reply in this format (default: same as output).', metavar: 'FMT', category: 'Bundling' },
  context: { short: 'C', type: 'string', desc: 'Prepend a custom context block to the header.', metavar: 'TEXT', category: 'Bundling' },
  task: { short: 'T', type: 'string', desc: 'Append a specific task instruction to the footer.', metavar: 'TEXT', category: 'Bundling' },
  noInstruct: { short: 'I', type: 'boolean', desc: 'Disable automatic "how to use" instructions.', category: 'Bundling' },
  noStructure: { short: 'n', type: 'boolean', desc: 'Disable directory structure generation.', category: 'Bundling' },
  listDirs: { short: 'l', type: 'boolean', desc: 'Include directories in the structure listing.', category: 'Bundling' },
  skeleton: { short: 'k', type: 'boolean', desc: 'Output directory structure only, omitting content.', category: 'Bundling' },
  extensions: { short: 'e', type: 'string', desc: 'Filter by comma-separated extensions (e.g. "js,ts").', metavar: 'LIST', category: 'Bundling' },
  ignore: { short: 'i', type: 'array', desc: 'Add a glob pattern to the ignore list.', metavar: 'GLOB', category: 'Bundling' },
  noIgnore: { short: 'u', type: 'boolean', desc: 'Unrestricted mode. Ignore .gitignore and defaults.', category: 'Bundling' },
  gitDiff: { short: 'g', type: 'string', desc: 'Bundle only files changed relative to <REF>.', metavar: 'REF', category: 'Bundling', optional: true },
  budget: { short: 'b', type: 'number', desc: 'Stop processing after <INT> tokens.', metavar: 'INT', category: 'Bundling' },
  priority: { short: 'p', type: 'array', desc: 'Set priority rules (e.g. "README.md:100").', metavar: 'RULE', category: 'Bundling' },
  optimize: { short: 'O', type: 'string', desc: 'Mode ("whitespace", "comments", "minify") OR shell command like ("terser" or "wc -l {}").', default: 'none', metavar: 'MODE|CMD', category: 'Bundling' },
  maxSize: { short: 'S', type: 'string', desc: 'Max file size to process (e.g. "1MB", "500KB").', default: '10MB', metavar: 'SIZE', category: 'Bundling' },
  charsPerToken: { short: 'c', type: 'number', desc: 'Ratio for token estimation (default: 4.2).', default: 4.2, metavar: 'NUM', category: 'Bundling' },
  concurrency: { short: 'P', type: 'number', desc: 'Maximum concurrent file reads.', default: 32, metavar: 'INT', category: 'Bundling' },
  xmlMode: { short: 'X', type: 'string', desc: 'XML strategy: "auto", "cdata", or "escape".', metavar: 'MODE', category: 'Bundling' },

  // --- Extraction -------------------------------------------------------
  extract: { short: 'x', type: 'boolean', desc: 'Switch to extraction mode.', category: 'Extraction' },
  extractDir: { short: 'd', type: 'string', desc: 'Target directory for extracted files.', default: '.', metavar: 'DIR', category: 'Extraction' },
  unsafe: { short: 'U', type: 'boolean', desc: 'Disable path traversal protection.', category: 'Extraction' }
};
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * Lightweight content-analysis helpers used by the bundling pipeline.
 */
export const Analyzer = {
  /** A NUL byte anywhere in the sampled buffer marks it as binary. */
  isBinary: (buf) => buf.indexOf(0) !== -1,

  /**
   * Estimate the token cost of `content` at `charsPerToken` characters per
   * token (falls back to 4.2 when absent or zero, avoiding division by
   * zero). Never returns less than 1, so even an empty string costs one
   * token.
   */
  countTokens: (content, charsPerToken) => {
    const ratio = charsPerToken || 4.2;
    const estimate = Math.ceil(content.length / ratio);
    return estimate > 1 ? estimate : 1;
  },

  /**
   * Return the score of the first rule whose regex matches `path`, or 1
   * (the default low priority) when no rule matches or no rules are given.
   */
  getPriority: (path, rules = []) => {
    const hit = (rules ?? []).find((rule) => rule.regex && rule.regex.test(path));
    return hit ? hit.score : 1;
  }
};
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
import { basename, resolve, dirname } from 'node:path';
|
|
2
|
+
import { Logger as DefaultLoggerFactory } from '../utils/logger.js';
|
|
3
|
+
import { Scanner as DefaultScanner } from './scanner.js';
|
|
4
|
+
import { Analyzer as DefaultAnalyzer } from './analyzer.js';
|
|
5
|
+
import { Processor as DefaultProcessor } from './processor.js';
|
|
6
|
+
import { Formatter as DefaultFormatter } from '../formatters/index.js';
|
|
7
|
+
import { Optimizer as DefaultOptimizer } from '../optimizers/index.js';
|
|
8
|
+
import { Path } from '../utils/path.js';
|
|
9
|
+
import { FORMAT, LOG } from '../config/constants.js';
|
|
10
|
+
|
|
11
|
+
/**
 * Orchestrates a full bundling pass: scan the tree, prioritize candidates,
 * read/optimize files in concurrent batches, format each block, and stream
 * everything to stdout or an output file.
 */
export const Bundler = {
  /**
   * Run the bundler end to end.
   *
   * @param {object} config   Resolved options (format, paths, output, budget,
   *                          concurrency, skeleton, priorityRules, ...).
   * @param {object} io       I/O adapter (mkdir, createWriteStream,
   *                          writeStdout, plus whatever scanner/processor use).
   * @param {object} services Optional overrides for pipeline pieces
   *                          (scanner, analyzer, processor, formatter,
   *                          optimizer, logger); defaults are the project
   *                          implementations imported at the top of the file.
   * @returns {Promise<{files: number, tokens: number}>} number of file blocks
   *          written and the estimated token total consumed.
   */
  async run(config, io, services = {}) {
    const {
      scanner = DefaultScanner,
      analyzer = DefaultAnalyzer,
      processor = DefaultProcessor,
      formatter = DefaultFormatter,
      optimizer = DefaultOptimizer,
      logger = null
    } = services;

    const log = logger || DefaultLoggerFactory(config.logLevel || LOG.WARN, io);
    const fmt = formatter.get(config.format);

    // Compile "pattern:score" style priority rules; silently drop any
    // pattern that Path.toRegex cannot compile.
    const priorityRules = (config.priorityRules || []).map(r => ({
      regex: Path.toRegex(r.pattern)?.regex,
      score: r.score
    })).filter(r => r.regex);

    const candidates = [];
    const structure = [];

    // Walk the scan stream once, splitting directory entries (structure
    // listing only) from file entries (structure + processing candidates).
    for await (const item of scanner.scan(config, io)) {
      if (item.isDir) {
        if (config.listDirs) {
          structure.push(`${item.rel}/`);
        }
      } else {
        item.priority = analyzer.getPriority(item.rel, priorityRules);
        candidates.push(item);
        structure.push(item.rel);
      }
    }

    if (candidates.length === 0 && structure.length === 0) {
      log.warn('No files matched.');
      return { files: 0, tokens: 0 };
    }

    // Highest priority first; ties broken alphabetically for stable output.
    candidates.sort((a, b) => {
      const pDiff = b.priority - a.priority;
      if (pDiff !== 0) {
        return pDiff;
      }
      return a.rel.localeCompare(b.rel);
    });

    const BATCH_SIZE = config.concurrency || 32;
    const hasBudget = config.budget > 0;

    // Reads/optimizes one candidate; a failure is logged and mapped to null
    // so one unreadable file does not abort the whole batch.
    const processBatchItem = async (item) => {
      const result = await processor.run(item, config, io, { analyzer, optimizer });
      if (result.error) {
        log.warn(`Failed to read ${item.rel}: ${result.error.message}`);
        return null;
      }
      return result;
    };

    const rootName = config.paths?.[0] ? basename(resolve(config.paths[0])) : 'project';

    // Ensure the output file's parent directory exists before streaming.
    if (config.output) {
      const outDir = dirname(resolve(config.output));
      await io.mkdir(outDir);
    }

    // Without -o, emulate a stream over stdout with a no-op end().
    const outStream = config.output ? io.createWriteStream(config.output) : {
      write: io.writeStdout,
      end: () => {}
    };

    // Optional "how the LLM should reply" text; uses the reply format's
    // instruction when the formatter provides one.
    let instructionText = '';
    if (config.instruct) {
      const replyFmt = formatter.get(config.replyFormat || config.format);
      if (replyFmt.getInstruction) {
        instructionText = replyFmt.getInstruction();
      }
    }

    const head = fmt.header({
      name: rootName,
      tree: config.structure ? structure.join('\n') : null,
      context: config.context,
      task: config.task
    });

    const foot = fmt.footer({
      task: config.task,
      instructionText
    });

    // Budget accounting starts with the fixed header/footer cost.
    let usedTokens = analyzer.countTokens(head + foot, config.charsPerToken);
    if (hasBudget && usedTokens >= config.budget) {
      log.warn(`Budget exceeded by directory tree and metadata alone (${usedTokens} > ${config.budget}). Outputting skeleton only.`);
    }

    // Skeleton path: explicit -k, or the metadata alone already blew the
    // budget. The skeleton header always includes the full tree, even when
    // config.structure is off.
    if ((hasBudget && usedTokens >= config.budget) || config.skeleton) {
      const skeletonHead = fmt.header({
        name: rootName,
        tree: structure.join('\n'),
        context: config.context,
        task: config.task
      });
      outStream.write(skeletonHead);
      outStream.write(foot);
      if (config.output) {
        outStream.end();
      }
      // NOTE(review): usedTokens here was estimated from head, not
      // skeletonHead — the two may differ slightly when config.structure
      // is off; confirm whether that drift matters to callers.
      return { files: 0, tokens: usedTokens };
    }

    outStream.write(head);

    const isJson = config.format === FORMAT.JSON;
    let writtenCount = 0;

    // Process candidates in batches of BATCH_SIZE concurrent reads;
    // results are written in batch order so output stays deterministic.
    for (let i = 0; i < candidates.length; i += BATCH_SIZE) {
      if (hasBudget && usedTokens >= config.budget) {
        break;
      }

      const batch = candidates.slice(i, i + BATCH_SIZE);
      const results = await Promise.all(batch.map(processBatchItem));

      for (const res of results) {
        if (!res) {
          continue;
        }

        const formatted = fmt.file(res, config);
        // JSON output needs a comma between array elements after the first.
        const prefix = (isJson && writtenCount > 0) ? ',\n' : '';
        const fullBlock = prefix + formatted;

        const blockTokens = analyzer.countTokens(fullBlock, config.charsPerToken);

        // Over-budget blocks are skipped (not truncated); later, smaller
        // blocks in the same pass may still fit.
        if (hasBudget && (usedTokens + blockTokens > config.budget)) {
          log.debug(`Skipping ${res.rel} (budget exceeded)`);
          continue;
        }

        usedTokens += blockTokens;
        outStream.write(fullBlock);
        writtenCount++;
      }
    }

    outStream.write(foot);

    if (config.output) {
      outStream.end();
    }

    return { files: writtenCount, tokens: usedTokens };
  }
};
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import { resolve, join, dirname, relative, isAbsolute } from 'node:path';
|
|
2
|
+
import { Path } from '../utils/path.js';
|
|
3
|
+
import { Logger as DefaultLoggerFactory } from '../utils/logger.js';
|
|
4
|
+
import { Formatter as DefaultFormatter } from '../formatters/index.js';
|
|
5
|
+
import { LOG } from '../config/constants.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Reverse of the bundler: parse a previously generated bundle (from a file
 * argument or stdin) and write the contained files back to disk.
 */
export const Extractor = {
  /**
   * Run extraction.
   *
   * @param {object} config   Options: paths (bundle file), extractDir
   *                          (target directory, default '.'), safeMode
   *                          (path-traversal protection, defaults to true
   *                          and is written back onto config).
   * @param {object} io       I/O adapter (readText, readStdin, mkdir,
   *                          writeFile).
   * @param {object} services Optional overrides: formatter (must provide
   *                          detect(content) -> { parse }), logger.
   * @throws {Error} when the input bundle is empty.
   */
  async run(config, io, services = {}) {
    const {
      formatter = DefaultFormatter,
      logger = null
    } = services;

    // Traversal protection is opt-out: only an explicit false disables it.
    if (config.safeMode === undefined) {
      config.safeMode = true;
    }

    const log = logger || DefaultLoggerFactory(config.logLevel || LOG.WARN, io);
    let content = '';

    // Bundle source: first positional path if given, otherwise stdin.
    if (config.paths && config.paths.length > 0) {
      content = await io.readText(config.paths[0]);
    } else {
      log.info('Reading stdin...');
      for await (const chunk of io.readStdin()) {
        content += chunk;
      }
    }

    if (!content.trim()) {
      throw new Error('Empty input');
    }

    // Let the formatter sniff which bundle format produced this content.
    const fmt = formatter.detect(content);
    const files = fmt.parse(content, log);

    if (files.length === 0) {
      log.warn('No files found.');
      return;
    }

    const outDir = resolve(config.extractDir || '.');
    if (config.extractDir) {
      await io.mkdir(outDir);
    }

    log.info(`Extracting ${files.length} files to ${outDir}`);

    let count = 0;
    for (const f of files) {
      const safeRel = Path.sanitize(f.path);
      // Final destination — computed once here; previously it was built
      // twice (once shadowed inside the safeMode block, once after it).
      const abs = join(outDir, safeRel);

      if (config.safeMode) {
        // Fast rejection of obviously hostile paths.
        if (isAbsolute(safeRel) || safeRel.includes('../') || safeRel.startsWith('..')) {
          log.warn(`[SECURITY] Skipping traversal attempt: ${f.path}`);
          continue;
        }

        // Defense in depth: re-resolve and confirm the destination is
        // still inside outDir after normalization.
        const rel = relative(outDir, resolve(abs));
        if (isAbsolute(rel) || rel.startsWith('..')) {
          log.warn(`[SECURITY] Skipping traversal attempt: ${f.path}`);
          continue;
        }
      }

      await io.mkdir(dirname(abs));
      await io.writeFile(abs, f.content);
      count++;
    }
    log.info(`Extracted ${count} files.`);
  }
};
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
import { relative } from 'node:path';
|
|
4
|
+
import { Path } from '../utils/path.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Build a gitignore-style matcher over an accumulated rule list.
 *
 * `newPatterns` are compiled via Path.toRegex (uncompilable patterns are
 * dropped) and appended to `existingRules`. The returned object exposes:
 *   test(rel)        -> boolean, whether `rel` is ignored
 *   extend(patterns) -> a new matcher with additional patterns layered on
 */
const createMatcher = (existingRules, newPatterns) => {
  const compiled = newPatterns.map(Path.toRegex).filter(Boolean);
  const rules = existingRules ? existingRules.concat(compiled) : compiled;

  const test = (rel) => {
    // Last matching rule wins, mirroring gitignore semantics: a later
    // negated ("!") rule can re-include a path an earlier rule ignored.
    let ignored = false;
    for (const rule of rules) {
      if (rule.regex.test(rel)) {
        ignored = !rule.isNegated;
      }
    }
    return ignored;
  };

  const extend = (morePatterns) => createMatcher(rules, morePatterns);

  return { test, extend };
};
|
|
26
|
+
|
|
27
|
+
/**
 * Parsing and matching of .gitignore-style pattern files.
 */
export const Ignore = {
  /**
   * Translate raw ignore-file text into pattern strings scoped to `dir`
   * (the directory containing the ignore file), expressed relative to the
   * scan root `root`. Blank lines and '#' comments are dropped; a leading
   * '!' (negation) survives the rewrite.
   */
  parse: (content, dir, root) => {
    if (!content) {
      return [];
    }

    const entries = content
      .split(/\r?\n/)
      .map((line) => line.trim())
      .filter((line) => line && !line.startsWith('#'));

    return entries.map((entry) => {
      const isNeg = entry.startsWith('!');
      let p = isNeg ? entry.slice(1) : entry;

      // Git semantics: "foo/bar" anchors to the ignore file's directory,
      // bare "foo" matches recursively, and a trailing slash alone
      // ("foo/") does not anchor — hence the slice before the check.
      const anchored = p.startsWith('/') || p.slice(0, -1).includes('/');

      if (anchored) {
        const relDir = relative(root, dir).replace(/\\/g, '/');
        if (relDir) {
          // Prefix with the ignore file's directory relative to the root.
          p = `/${relDir}${p.startsWith('/') ? p : '/' + p}`;
        } else if (!p.startsWith('/')) {
          p = '/' + p;
        }
      }

      return isNeg ? '!' + p : p;
    });
  },

  /** Build a matcher seeded with `patterns` and no inherited rules. */
  create: (patterns) => createMatcher([], patterns)
};
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Quote a filesystem path for interpolation into a shell command line.
 *
 * Windows: wrap in double quotes, doubling any embedded double quotes.
 * POSIX: wrap in single quotes, splicing embedded single quotes as '\''.
 */
const quotePath = (path) => {
  const isWindows = process.platform === 'win32';
  return isWindows
    ? `"${path.replace(/"/g, '""')}"`
    : `'${path.replace(/'/g, "'\\''")}'`;
};
|
|
7
|
+
|
|
8
|
+
/**
 * Reads a single scanned file and produces its bundle-ready content.
 */
export const Processor = {
  /**
   * Process one candidate file.
   *
   * Resolves to `{ ...item, content }` on success; on any I/O or
   * subprocess failure resolves to `{ error }` so the caller can log and
   * continue instead of aborting the whole batch.
   *
   * @param {object} item     Scanned entry with `path` and `rel`.
   * @param {object} config   Options: skeleton, maxSize, optimizeCmd,
   *                          optimize.
   * @param {object} io       I/O adapter (readSample, stat, readText,
   *                          exec, execPipe).
   * @param {object} services `{ analyzer, optimizer }`.
   */
  run: async (item, config, io, services) => {
    const { analyzer, optimizer } = services;

    // Skeleton mode: the file is listed but its body is omitted entirely.
    if (config.skeleton) {
      return { ...item, content: '' };
    }

    try {
      // Sniff the first 8 KiB before committing to a full read.
      const SAMPLE_SIZE = 1024 * 8;
      const sample = await io.readSample(item.path, SAMPLE_SIZE);

      if (analyzer.isBinary(sample)) {
        return { ...item, content: '(binary omitted)' };
      }

      let raw;
      if (sample.length < SAMPLE_SIZE) {
        // The sample already covered the whole file; no second read needed.
        raw = sample.toString('utf8');
      } else {
        const sizeCap = config.maxSize !== undefined ? config.maxSize : 0;
        const stats = await io.stat(item.path);
        const tooLarge = sizeCap > 0 && stats.size > sizeCap;

        // An external optimize command may shrink huge files itself, so
        // the size cap only applies when no such command is configured.
        if (tooLarge && !config.optimizeCmd) {
          return { ...item, content: `(file too large for processing: ${stats.size} bytes)` };
        }

        raw = await io.readText(item.path);
      }

      let content;
      if (!config.optimizeCmd) {
        // Built-in optimizer, keyed by the file's extension.
        const ext = item.rel.split('.').pop();
        content = optimizer.run(raw, ext, config.optimize);
      } else if (config.optimizeCmd.includes('{}')) {
        // xargs-style convention: "{}" stands in for the (quoted) path.
        const cmd = config.optimizeCmd.split('{}').join(quotePath(item.path));
        content = await io.exec(cmd);
      } else {
        // No placeholder: stream the file's content to the command's stdin.
        content = await io.execPipe(config.optimizeCmd, raw);
      }

      return { ...item, content };
    } catch (err) {
      return { error: err };
    }
  }
};
|