@cspell/cspell-tools 9.5.0 → 9.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin.mjs +1 -1
- package/cspell-tools.config.schema.json +29 -0
- package/dist/app.d.mts +35 -0
- package/dist/app.mjs +292 -0
- package/dist/build-OgMPaXPZ.mjs +1403 -0
- package/dist/index.d.mts +35 -0
- package/dist/index.mjs +3 -0
- package/package.json +10 -9
- package/dist/AppOptions.d.ts +0 -31
- package/dist/AppOptions.js +0 -2
- package/dist/FeatureFlags/FeatureFlags.d.ts +0 -34
- package/dist/FeatureFlags/FeatureFlags.js +0 -94
- package/dist/FeatureFlags/index.d.ts +0 -3
- package/dist/FeatureFlags/index.js +0 -3
- package/dist/FeatureFlags/parseFlags.d.ts +0 -3
- package/dist/FeatureFlags/parseFlags.js +0 -20
- package/dist/app.d.ts +0 -4
- package/dist/app.js +0 -86
- package/dist/build.d.ts +0 -12
- package/dist/build.js +0 -36
- package/dist/compile.d.ts +0 -5
- package/dist/compile.js +0 -41
- package/dist/compiler/CompileOptions.d.ts +0 -35
- package/dist/compiler/CompileOptions.js +0 -2
- package/dist/compiler/CompilerDefinitions.d.ts +0 -2
- package/dist/compiler/CompilerDefinitions.js +0 -2
- package/dist/compiler/Reader.d.ts +0 -3
- package/dist/compiler/Reader.js +0 -24
- package/dist/compiler/SourceReader.d.ts +0 -28
- package/dist/compiler/SourceReader.js +0 -44
- package/dist/compiler/WordsCollection.d.ts +0 -20
- package/dist/compiler/WordsCollection.js +0 -3
- package/dist/compiler/compile.d.ts +0 -26
- package/dist/compiler/compile.js +0 -270
- package/dist/compiler/createCompileRequest.d.ts +0 -4
- package/dist/compiler/createCompileRequest.js +0 -84
- package/dist/compiler/createWordsCollection.d.ts +0 -10
- package/dist/compiler/createWordsCollection.js +0 -111
- package/dist/compiler/index.d.ts +0 -4
- package/dist/compiler/index.js +0 -3
- package/dist/compiler/legacyLineToWords.d.ts +0 -4
- package/dist/compiler/legacyLineToWords.js +0 -20
- package/dist/compiler/logWithTimestamp.d.ts +0 -3
- package/dist/compiler/logWithTimestamp.js +0 -5
- package/dist/compiler/logger.d.ts +0 -4
- package/dist/compiler/logger.js +0 -9
- package/dist/compiler/readers/ReaderOptions.d.ts +0 -19
- package/dist/compiler/readers/ReaderOptions.js +0 -2
- package/dist/compiler/readers/readHunspellFiles.d.ts +0 -3
- package/dist/compiler/readers/readHunspellFiles.js +0 -57
- package/dist/compiler/readers/readTextFile.d.ts +0 -3
- package/dist/compiler/readers/readTextFile.js +0 -20
- package/dist/compiler/readers/regHunspellFile.d.ts +0 -2
- package/dist/compiler/readers/regHunspellFile.js +0 -2
- package/dist/compiler/readers/textFileReader.d.ts +0 -3
- package/dist/compiler/readers/textFileReader.js +0 -11
- package/dist/compiler/readers/trieFileReader.d.ts +0 -3
- package/dist/compiler/readers/trieFileReader.js +0 -16
- package/dist/compiler/splitCamelCaseIfAllowed.d.ts +0 -5
- package/dist/compiler/splitCamelCaseIfAllowed.js +0 -65
- package/dist/compiler/streamSourceWordsFromFile.d.ts +0 -3
- package/dist/compiler/streamSourceWordsFromFile.js +0 -6
- package/dist/compiler/text.d.ts +0 -10
- package/dist/compiler/text.js +0 -28
- package/dist/compiler/wordListCompiler.d.ts +0 -21
- package/dist/compiler/wordListCompiler.js +0 -205
- package/dist/compiler/wordListParser.d.ts +0 -61
- package/dist/compiler/wordListParser.js +0 -184
- package/dist/compiler/writeTextToFile.d.ts +0 -2
- package/dist/compiler/writeTextToFile.js +0 -16
- package/dist/config/config.d.ts +0 -200
- package/dist/config/config.js +0 -2
- package/dist/config/configDefaults.d.ts +0 -9
- package/dist/config/configDefaults.js +0 -9
- package/dist/config/configUtils.d.ts +0 -5
- package/dist/config/configUtils.js +0 -14
- package/dist/config/index.d.ts +0 -4
- package/dist/config/index.js +0 -3
- package/dist/config/normalizeConfig.d.ts +0 -8
- package/dist/config/normalizeConfig.js +0 -13
- package/dist/gzip/compressFiles.d.ts +0 -16
- package/dist/gzip/compressFiles.js +0 -42
- package/dist/gzip/gzip.d.ts +0 -3
- package/dist/gzip/gzip.js +0 -10
- package/dist/gzip/index.d.ts +0 -3
- package/dist/gzip/index.js +0 -3
- package/dist/shasum/checksum.d.ts +0 -7
- package/dist/shasum/checksum.js +0 -19
- package/dist/shasum/index.d.ts +0 -3
- package/dist/shasum/index.js +0 -3
- package/dist/shasum/shasum.d.ts +0 -38
- package/dist/shasum/shasum.js +0 -150
- package/dist/test/TestHelper.d.ts +0 -53
- package/dist/test/TestHelper.js +0 -121
- package/dist/test/console.d.ts +0 -9
- package/dist/test/console.js +0 -34
- package/dist/test/escapeRegEx.d.ts +0 -7
- package/dist/test/escapeRegEx.js +0 -9
- package/dist/test/normalizeOutput.d.ts +0 -3
- package/dist/test/normalizeOutput.js +0 -20
- package/dist/types.d.ts +0 -7
- package/dist/types.js +0 -2
- package/dist/util/errors.d.ts +0 -6
- package/dist/util/errors.js +0 -11
- package/dist/util/globP.d.ts +0 -7
- package/dist/util/globP.js +0 -7
- package/dist/util/index.d.ts +0 -2
- package/dist/util/index.js +0 -4
- package/dist/util/textRegex.d.ts +0 -3
- package/dist/util/textRegex.js +0 -109
package/dist/compiler/writeTextToFile.js
DELETED
@@ -1,16 +0,0 @@
- import { Buffer } from 'node:buffer';
- import { promises as fs } from 'node:fs';
- import { compress } from '../gzip/index.js';
- const isGzFile = /\.gz$/;
- export async function writeTextToFile(filename, data, useGzCompress) {
-     const dataStr = typeof data === 'string' ? data : Array.isArray(data) ? data.join('') : [...data].join('');
-     const hasGzExt = isGzFile.test(filename);
-     const useGz = useGzCompress ?? hasGzExt;
-     if (useGz && !hasGzExt) {
-         filename += '.gz';
-     }
-     const buf = Buffer.from(dataStr, 'utf8');
-     const buffer = useGz ? await compress(buf) : buf;
-     await fs.writeFile(filename, buffer);
- }
- //# sourceMappingURL=writeTextToFile.js.map
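For reference, a minimal self-contained sketch of what the removed `writeTextToFile` helper did, using only Node built-ins. The `writeTextToFileSketch` name is ours, not part of the package: it gzips the payload when `useGzCompress` is set or the filename ends in `.gz`, appending the extension when needed.

```ts
import { Buffer } from 'node:buffer';
import { promises as fs } from 'node:fs';
import { promisify } from 'node:util';
import { gzip as gzipCB } from 'node:zlib';

const gzipAsync = promisify(gzipCB);

// Sketch only: reproduces the behavior of the removed helper, not the 9.6.0 API.
async function writeTextToFileSketch(filename: string, data: string | Iterable<string>, useGzCompress?: boolean): Promise<void> {
    const dataStr = typeof data === 'string' ? data : [...data].join('');
    const hasGzExt = filename.endsWith('.gz');
    const useGz = useGzCompress ?? hasGzExt;                        // default to whatever the extension implies
    const target = useGz && !hasGzExt ? `${filename}.gz` : filename;
    const buf = Buffer.from(dataStr, 'utf8');
    await fs.writeFile(target, useGz ? await gzipAsync(buf) : buf);
}
```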
package/dist/config/config.d.ts
DELETED
@@ -1,200 +0,0 @@
- export interface RunConfig extends Partial<Omit<CompileRequest, 'targets'>> {
-     /**
-      * Url to JSON Schema
-      * @default "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/packages/cspell-tools/cspell-tools.config.schema.json"
-      */
-     $schema?: string | undefined;
-     /**
-      * Optional Target Dictionaries to create.
-      */
-     targets?: Target[] | undefined;
-     /**
-      * Specify the directory where all relative paths will resolved against.
-      * By default, all relative paths are relative to the location of the
-      * config file.
-      */
-     rootDir?: string | undefined;
- }
- export interface CompileRequest extends CompileTargetOptions, CompileSourceOptions {
-     /**
-      * Specify the directory where all relative paths will resolved against.
-      * By default, all relative paths are relative to the current directory.
-      */
-     rootDir?: string | undefined;
-     /**
-      * Target Dictionaries to create.
-      */
-     targets: Target[];
-     /**
-      * Path to checksum file. `true` - defaults to `./checksum.txt`.
-      */
-     checksumFile?: string | boolean | undefined;
- }
- export interface Experimental {
-     /**
-      * Experimental flags
-      */
-     experimental?: string[] | undefined;
- }
- export interface CompileTargetOptions {
-     /**
-      * Generate lower case / accent free versions of words.
-      * @default false
-      */
-     generateNonStrict?: boolean | undefined;
-     /**
-      * Sort the words in the resulting dictionary.
-      * Does not apply to `trie` based formats.
-      * @default true
-      */
-     sort?: boolean | undefined;
-     /**
-      * Words in the `allowedSplitWords` are considered correct and can be used
-      * as a basis for splitting compound words.
-      *
-      * If entries can be split so that all the words in the entry are allowed,
-      * then only the individual words are added, otherwise the entire entry is added.
-      * This is to prevent misspellings in CamelCase words from being introduced into the
-      * dictionary.
-      */
-     allowedSplitWords?: FilePath | FilePath[] | undefined;
-     /**
-      * Injects `cspell-dictionary` directives into the dictionary header.
-      *
-      * Example:
-      *
-      * ```ini
-      * # cspell-dictionary: no-generate-alternatives
-      * ```
-      *
-      * Known Directives:
-      * ```yaml
-      * - split # Tell the dictionary loader to split words
-      * - no-split # Tell the dictionary loader to not split words (default)
-      * - generate-alternatives # Tell the dictionary loader to generate alternate spellings (default)
-      * - no-generate-alternatives # Tell the dictionary loader to not generate alternate spellings
-      * ```
-      */
-     dictionaryDirectives?: string[] | undefined;
-     /**
-      * Remove duplicate words, favor lower case words over mixed case words.
-      * Combine compound prefixes where possible.
-      * @default false
-      */
-     removeDuplicates?: boolean | undefined;
- }
- export interface Target extends CompileTargetOptions {
-     /**
-      * Name of target, used as the basis of target file name.
-      */
-     name: string;
-     /**
-      * The target directory
-      * @default current directory
-      */
-     targetDirectory?: FilePath | undefined;
-     /**
-      * Setting this value to true will create a `.gz` dictionary file.
-      * Use `keepUncompressed` to also keep an uncompressed version.
-      * @default false
-      */
-     compress?: boolean | undefined;
-     /**
-      * If `compress` is true, setting this value to true will also keep an uncompressed version of the dictionary.
-      */
-     keepUncompressed?: boolean | undefined;
-     /**
-      * Format of the dictionary.
-      */
-     format: DictionaryFormats;
-     /**
-      * File sources used to build the dictionary.
-      */
-     sources: DictionarySource[];
-     /**
-      * Words from the sources that are found in `excludeWordsFrom` files
-      * will NOT be added to the dictionary.
-      *
-      * @since 8.3.2
-      */
-     excludeWordsFrom?: FilePath[] | undefined;
-     /**
-      * Words from the sources that are NOT found in `excludeWordsNotFoundIn` files
-      * will NOT be added to the dictionary.
-      *
-      * @since 8.19.4
-      */
-     excludeWordsNotFoundIn?: FilePath[] | undefined;
-     /**
-      * Words from the sources that match the regex in `excludeWordsMatchingRegex`
-      * will NOT be added to the dictionary.
-      *
-      * Note: The regex must be a valid JavaScript literal regex expression including the `/` delimiters.
-      *
-      * @since 8.19.4
-      */
-     excludeWordsMatchingRegex?: string[] | undefined;
-     /**
-      * Advanced: Set the trie base number. A value between 10 and 36
-      * Set numeric base to use.
-      * 10 is the easiest to read.
-      * 16 is common hex format.
-      * 36 is the most compact.
-      */
-     trieBase?: number | undefined;
- }
- export type DictionaryFormats = 'plaintext' | 'trie' | 'trie3' | 'trie4';
- /**
-  * Note: All relative paths are relative to the config file location.
-  */
- export type FilePath = string;
- export type FilePathOrFilePathArray = FilePath | FilePath[];
- export type DictionarySource = FilePath | FileSource | FileListSource;
- export interface FileSource extends CompileSourceOptions {
-     filename: FilePath;
- }
- export interface FileListSource extends CompileSourceOptions {
-     listFile: FilePath;
- }
- export interface CompileSourceOptions {
-     /**
-      * Maximum number of nested Hunspell Rules to apply.
-      * This is needed for recursive dictionaries like Hebrew.
-      */
-     maxDepth?: number | undefined;
-     /**
-      * Split lines into words.
-      * @default false
-      */
-     split?: boolean | 'legacy' | undefined;
-     /**
-      * Do not generate lower case / accent free versions of words.
-      * @default false
-      */
-     keepRawCase?: boolean | undefined;
-     /**
-      * Words in the `allowedSplitWords` are considered correct and can be used
-      * as a basis for splitting compound words.
-      *
-      * If entries can be split so that all the words in the entry are allowed,
-      * then only the individual words are added, otherwise the entire entry is added.
-      * This is to prevent misspellings in CamelCase words from being introduced into the
-      * dictionary.
-      */
-     allowedSplitWords?: FilePathOrFilePathArray | undefined;
-     /**
-      * Camel case words that have been split using the `allowedSplitWords` are added to the dictionary as compoundable words.
-      * These words are prefixed / suffixed with `*`.
-      * @default false
-      */
-     storeSplitWordsAsCompounds?: boolean | undefined;
-     /**
-      * Controls the minimum length of a compound word when storing words using `storeSplitWordsAsCompounds`.
-      * The compound words are prefixed / suffixed with `*`, to allow them to be combined with other compound words.
-      * If the length is too low, then the dictionary will consider many misspelled words as correct.
-      * @default 4
-      */
-     minCompoundLength?: number | undefined;
- }
- export declare const configFileSchemaURL = "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/packages/cspell-tools/cspell-tools.config.schema.json";
- //# sourceMappingURL=config.d.ts.map
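To make the deleted declarations above concrete, here is a hedged sketch of a build configuration shaped to the `RunConfig`/`Target` interfaces. The import path, target name, and file paths are illustrative assumptions, not values taken from the diff.

```ts
// Illustrative sketch only: an object matching the RunConfig/Target shapes above.
// The import path and every file name here are assumptions.
import type { RunConfig } from '@cspell/cspell-tools';

const config: RunConfig = {
    $schema: 'https://raw.githubusercontent.com/streetsidesoftware/cspell/main/packages/cspell-tools/cspell-tools.config.schema.json',
    rootDir: '.',                        // relative paths resolve against the config file location
    targets: [
        {
            name: 'example-words',       // basis of the generated dictionary file name (hypothetical)
            format: 'plaintext',         // 'plaintext' | 'trie' | 'trie3' | 'trie4'
            sources: ['src/words.txt'],  // FilePath, FileSource, or FileListSource entries (hypothetical path)
            compress: true,              // also emit a .gz dictionary
            removeDuplicates: true,
        },
    ],
};

export default config;
```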
package/dist/config/config.js
DELETED
package/dist/config/configDefaults.d.ts
DELETED
@@ -1,9 +0,0 @@
- export declare const defaultCompileSourceOptions: {
-     readonly maxDepth: undefined;
-     readonly split: false;
-     readonly keepRawCase: false;
-     readonly allowedSplitWords: undefined;
-     readonly storeSplitWordsAsCompounds: false;
-     readonly minCompoundLength: 4;
- };
- //# sourceMappingURL=configDefaults.d.ts.map
package/dist/config/configUtils.d.ts
DELETED
@@ -1,5 +0,0 @@
- import type { DictionarySource, FileListSource, FilePath, FileSource } from '../config/index.js';
- export declare function isFilePath(source: DictionarySource): source is FilePath;
- export declare function isFileSource(source: DictionarySource): source is FileSource;
- export declare function isFileListSource(source: DictionarySource): source is FileListSource;
- //# sourceMappingURL=configUtils.d.ts.map
package/dist/config/configUtils.js
DELETED
@@ -1,14 +0,0 @@
- export function isFilePath(source) {
-     return typeof source === 'string';
- }
- export function isFileSource(source) {
-     if (!source || isFilePath(source))
-         return false;
-     return source.filename !== undefined;
- }
- export function isFileListSource(source) {
-     if (!source || isFilePath(source))
-         return false;
-     return source.listFile !== undefined;
- }
- //# sourceMappingURL=configUtils.js.map
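A small self-contained sketch of how the `isFilePath` / `isFileSource` / `isFileListSource` guards above discriminate a `DictionarySource`. The local type aliases and the sample sources are invented for the example.

```ts
// Local copies of the shapes, so the sketch stands alone.
type FilePath = string;
interface FileSource { filename: FilePath; }
interface FileListSource { listFile: FilePath; }
type DictionarySource = FilePath | FileSource | FileListSource;

const isFilePath = (s: DictionarySource): s is FilePath => typeof s === 'string';
const isFileSource = (s: DictionarySource): s is FileSource => !isFilePath(s) && (s as FileSource).filename !== undefined;
const isFileListSource = (s: DictionarySource): s is FileListSource => !isFilePath(s) && (s as FileListSource).listFile !== undefined;

function describeSource(source: DictionarySource): string {
    if (isFilePath(source)) return `path: ${source}`;
    if (isFileSource(source)) return `file source: ${source.filename}`;
    if (isFileListSource(source)) return `list file: ${source.listFile}`;
    return 'unknown source';
}

console.log(describeSource('words.txt'));                 // path: words.txt
console.log(describeSource({ listFile: 'sources.txt' })); // list file: sources.txt
```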
package/dist/config/index.d.ts
DELETED
@@ -1,4 +0,0 @@
- export type { CompileRequest, CompileSourceOptions, CompileTargetOptions, DictionaryFormats, DictionarySource, FileListSource, FilePath, FileSource, RunConfig, Target, } from './config.js';
- export { isFileListSource, isFilePath, isFileSource } from './configUtils.js';
- export { normalizeConfig } from './normalizeConfig.js';
- //# sourceMappingURL=index.d.ts.map
package/dist/config/index.js
DELETED
package/dist/config/normalizeConfig.js
DELETED
@@ -1,13 +0,0 @@
- import * as path from 'node:path';
- export function normalizeConfig(cfg) {
-     if (!cfg)
-         return cfg;
-     const dir = path.dirname(cfg.filepath);
-     const config = cfg.config;
-     const result = { ...config, rootDir: path.resolve(dir, config.rootDir || '.') };
-     return {
-         config: result,
-         filepath: cfg.filepath,
-     };
- }
- //# sourceMappingURL=normalizeConfig.js.map
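The removed `normalizeConfig` resolves a relative `rootDir` against the directory containing the config file. A minimal self-contained sketch of that resolution; the `CfgFile` shape, the function name, and the sample path are ours.

```ts
import * as path from 'node:path';

// Hypothetical minimal shape; the real module used the RunConfig type.
interface CfgFile { filepath: string; config: { rootDir?: string } }

function normalizeRootDir(cfg: CfgFile): CfgFile {
    const dir = path.dirname(cfg.filepath);
    const rootDir = path.resolve(dir, cfg.config.rootDir || '.');
    return { filepath: cfg.filepath, config: { ...cfg.config, rootDir } };
}

// e.g. a config at '/repo/dictionaries/cspell-tools.config.yaml' with no rootDir set
// normalizes to rootDir '/repo/dictionaries'.
```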
package/dist/gzip/compressFiles.d.ts
DELETED
@@ -1,16 +0,0 @@
- import type { Buffer } from 'node:buffer';
- export declare enum OSFlags {
-     auto = -1,
-     FAT = 0,
-     Unix = 3,
-     HPFS = 6,// cspell:ignore hpfs
-     MACOS = 7,
-     NTFS = 11
- }
- export declare function compressFile(file: string, os?: OSFlags): Promise<string>;
- export declare function compress(buf: string | Uint8Array | Buffer, os?: OSFlags): Promise<Uint8Array>;
- export declare function compressSync(buf: string | Uint8Array | Buffer, os?: OSFlags): Uint8Array;
- export declare function decompress(buf: Uint8Array | Buffer, encoding?: undefined): Promise<Uint8Array>;
- export declare function decompress(buf: Uint8Array | Buffer, encoding: 'utf8'): Promise<string>;
- export declare function decompress(buf: Uint8Array | Buffer, encoding: 'utf8' | undefined): Promise<string | Uint8Array>;
- //# sourceMappingURL=compressFiles.d.ts.map
package/dist/gzip/compressFiles.js
DELETED
@@ -1,42 +0,0 @@
- import { readFile, writeFile } from 'node:fs/promises';
- import { promisify } from 'node:util';
- import { gunzip as gunzipCB, gzip as gz, gzipSync } from 'node:zlib';
- const gzip = promisify(gz);
- const gunzip = promisify(gunzipCB);
- export var OSFlags;
- (function (OSFlags) {
-     OSFlags[OSFlags["auto"] = -1] = "auto";
-     OSFlags[OSFlags["FAT"] = 0] = "FAT";
-     OSFlags[OSFlags["Unix"] = 3] = "Unix";
-     OSFlags[OSFlags["HPFS"] = 6] = "HPFS";
-     OSFlags[OSFlags["MACOS"] = 7] = "MACOS";
-     OSFlags[OSFlags["NTFS"] = 11] = "NTFS";
- })(OSFlags || (OSFlags = {}));
- // https://docs.fileformat.com/compression/gz/#:~:text=A%20GZ%20file%20is%20a,compression%20formats%20on%20UNIX%20systems.
- const OSSystemIDOffset = 9;
- export async function compressFile(file, os) {
-     if (file.endsWith('.gz'))
-         return file;
-     const targetFile = file + '.gz';
-     const zBuf = await compress(await readFile(file), os);
-     await writeFile(targetFile, zBuf);
-     return targetFile;
- }
- export async function compress(buf, os) {
-     return fixOSSystemID(await gzip(buf), os);
- }
- export function compressSync(buf, os) {
-     return fixOSSystemID(gzipSync(buf), os);
- }
- function fixOSSystemID(zBuf, os = OSFlags.Unix) {
-     const osFlag = os == OSFlags.auto ? zBuf[OSSystemIDOffset] : os;
-     zBuf[OSSystemIDOffset] = osFlag;
-     return zBuf;
- }
- export async function decompress(buf, encoding) {
-     const dBuf = gunzip(buf);
-     if (!encoding)
-         return dBuf;
-     return (await dBuf).toString(encoding);
- }
- //# sourceMappingURL=compressFiles.js.map
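The notable detail in the removed module is `fixOSSystemID`: after gzipping, byte 9 of the gzip header (the OS id) is pinned to `3` (Unix) so the compressed output is byte-identical across platforms. A minimal sketch of that round trip with Node built-ins; the names here are ours.

```ts
import { promisify } from 'node:util';
import { gunzip as gunzipCB, gzip as gzipCB } from 'node:zlib';

const gzipAsync = promisify(gzipCB);
const gunzipAsync = promisify(gunzipCB);

const OS_UNIX = 3;       // gzip header OS id for Unix
const OS_ID_OFFSET = 9;  // byte position of the OS id in a gzip stream

async function compressStable(data: string | Uint8Array): Promise<Uint8Array> {
    const zBuf = await gzipAsync(data);
    zBuf[OS_ID_OFFSET] = OS_UNIX;  // pin the OS id for reproducible .gz output
    return zBuf;
}

async function demo(): Promise<void> {
    const packed = await compressStable('hello dictionary\n');
    const unpacked = (await gunzipAsync(packed)).toString('utf8');
    console.log(unpacked === 'hello dictionary\n'); // true
}
demo();
```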
package/dist/gzip/gzip.d.ts
DELETED
package/dist/gzip/gzip.js
DELETED
@@ -1,10 +0,0 @@
- import { globP } from '../util/globP.js';
- import { compressFile } from './compressFiles.js';
- // cspell:ignore nodir
- export async function gzip(globs, os) {
-     const files = await globP(globs, { nodir: true });
-     for (const fileName of files) {
-         await compressFile(fileName, os);
-     }
- }
- //# sourceMappingURL=gzip.js.map
package/dist/gzip/index.d.ts
DELETED
package/dist/gzip/index.js
DELETED
package/dist/shasum/checksum.d.ts
DELETED
@@ -1,7 +0,0 @@
- type HashAlgorithm = 'SHA1';
- export declare function calcChecksum(buf: Buffer, alg?: HashAlgorithm): string;
- export declare function checkChecksum(checksum: string, buf: Buffer, alg?: HashAlgorithm): boolean;
- export declare function calcFileChecksum(filename: string, alg?: HashAlgorithm): Promise<string>;
- export declare function checkFile(checksum: string, filename: string, alg?: HashAlgorithm): Promise<boolean>;
- export {};
- //# sourceMappingURL=checksum.d.ts.map
package/dist/shasum/checksum.js
DELETED
@@ -1,19 +0,0 @@
- import { createHash } from 'node:crypto';
- import { readFile } from 'node:fs/promises';
- export function calcChecksum(buf, alg = 'SHA1') {
-     const hash = createHash(alg);
-     hash.update(buf);
-     return hash.digest('hex');
- }
- export function checkChecksum(checksum, buf, alg) {
-     const value = calcChecksum(buf, alg);
-     return value === checksum;
- }
- export async function calcFileChecksum(filename, alg) {
-     const buf = await readFile(filename);
-     return calcChecksum(buf, alg);
- }
- export async function checkFile(checksum, filename, alg) {
-     return (await calcFileChecksum(filename, alg)) === checksum;
- }
- //# sourceMappingURL=checksum.js.map
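The removed checksum helpers reduce to a SHA-1 hex digest of the file contents. A self-contained sketch of `calcFileChecksum` / `checkFile` using Node built-ins; the `Sketch` suffix marks names that are not part of the package.

```ts
import { createHash } from 'node:crypto';
import { readFile } from 'node:fs/promises';

async function calcFileChecksumSketch(filename: string): Promise<string> {
    // SHA-1 hex digest over the raw file bytes, matching the default 'SHA1' algorithm above.
    return createHash('SHA1').update(await readFile(filename)).digest('hex');
}

async function checkFileSketch(expectedChecksum: string, filename: string): Promise<boolean> {
    return (await calcFileChecksumSketch(filename)) === expectedChecksum;
}
```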
package/dist/shasum/index.d.ts
DELETED
package/dist/shasum/index.js
DELETED
package/dist/shasum/shasum.d.ts
DELETED
@@ -1,38 +0,0 @@
- export interface CheckShasumFileResult {
-     passed: boolean;
-     results: CheckFileResult[];
- }
- export interface CheckFileResult {
-     filename: string;
-     passed: boolean;
-     error?: Error;
- }
- export declare function shasumFile(filename: string, root: string | undefined): Promise<string>;
- /**
-  *
-  * @param filename - name of checksum file
-  * @param files - optional list of files to check
-  * @param root - optional root, default cwd.
-  */
- export declare function checkShasumFile(filename: string, files: string[] | undefined, root?: string): Promise<CheckShasumFileResult>;
- export interface ChecksumEntry {
-     filename: string;
-     checksum: string;
-     lineNumber: number;
- }
- export declare function readAndParseShasumFile(filename: string): Promise<ChecksumEntry[]>;
- export declare function parseShasumFile(content: string): ChecksumEntry[];
- interface ReportResult {
-     report: string;
-     passed: boolean;
- }
- interface ReportOptions {
-     root?: string | undefined;
-     listFile?: string[] | undefined;
- }
- export declare function reportChecksumForFiles(files: string[], options: ReportOptions): Promise<ReportResult>;
- export declare function reportCheckChecksumFile(filename: string, files: string[] | undefined, options: ReportOptions): Promise<ReportResult>;
- export declare function calcUpdateChecksumForFiles(filename: string, files: string[], options: ReportOptions): Promise<string>;
- export declare function updateChecksumForFiles(filename: string, files: string[], options: ReportOptions): Promise<ReportResult>;
- export {};
- //# sourceMappingURL=shasum.d.ts.map
package/dist/shasum/shasum.js
DELETED
@@ -1,150 +0,0 @@
- import { readFile, writeFile } from 'node:fs/promises';
- import { resolve, sep as pathSep } from 'node:path';
- import { toError } from '../util/errors.js';
- import { isDefined } from '../util/index.js';
- import { calcFileChecksum, checkFile } from './checksum.js';
- export async function shasumFile(filename, root) {
-     try {
-         const file = resolve(root || '.', filename);
-         const checksum = await calcFileChecksum(file);
-         return `${checksum}  ${filename}`;
-     }
-     catch {
-         // const err = toError(error);
-         // Reject with a string.
-         // eslint-disable-next-line unicorn/no-useless-promise-resolve-reject
-         return Promise.reject(`shasum: ${filename}: Unable to read file.`);
-     }
- }
- /**
-  *
-  * @param filename - name of checksum file
-  * @param files - optional list of files to check
-  * @param root - optional root, default cwd.
-  */
- export async function checkShasumFile(filename, files, root) {
-     files = !files ? files : files.length ? files : undefined;
-     const shaFiles = await readAndParseShasumFile(filename);
-     const filesToCheck = !files ? shaFiles.map(({ filename }) => filename) : files;
-     const mapNameToChecksum = new Map(shaFiles.map((r) => [normalizeFilename(r.filename), r.checksum]));
-     const resolvedRoot = resolve(root || '.');
-     const results = await Promise.all(filesToCheck.map(normalizeFilename).map((filename) => {
-         return tryToCheckFile(filename, resolvedRoot, mapNameToChecksum.get(filename));
-     }));
-     const passed = !results.some((v) => !v.passed);
-     return { passed, results };
- }
- async function tryToCheckFile(filename, root, checksum) {
-     if (!checksum) {
-         return { filename, passed: false, error: new Error('Missing Checksum.') };
-     }
-     const file = resolve(root, filename);
-     try {
-         const passed = await checkFile(checksum, file);
-         return { filename, passed };
-     }
-     catch {
-         return { filename, passed: false, error: new Error('Failed to read file.') };
-     }
- }
- const regLine = /([a-f0-9]{40,}) {2}(.*)/;
- export async function readAndParseShasumFile(filename) {
-     const content = await readFile(resolve(filename), 'utf8');
-     const shaFiles = parseShasumFile(content);
-     return shaFiles;
- }
- export function parseShasumFile(content) {
-     const lines = content.split(/\r?\n|\r/g);
-     return lines.map(parseLine).filter(isDefined);
-     function parseLine(line, index) {
-         const m = line.match(regLine);
-         const lineNumber = index + 1;
-         if (!m) {
-             if (line.trim()) {
-                 throw new Error(`Failed to parse line ${lineNumber} of checksum file.`);
-             }
-             return undefined;
-         }
-         const checksum = m[1];
-         const filename = m[2];
-         return { checksum, filename, lineNumber };
-     }
- }
- export async function reportChecksumForFiles(files, options) {
-     const root = options.root;
-     const filesToCheck = await resolveFileList(files, options.listFile);
-     let numFailed = 0;
-     const result = await Promise.all(filesToCheck.map((file) => shasumFile(file, root).catch((e) => {
-         ++numFailed;
-         if (typeof e !== 'string')
-             throw e;
-         return e;
-     })));
-     const report = result.join('\n');
-     const passed = !numFailed;
-     return { report, passed };
- }
- export async function reportCheckChecksumFile(filename, files, options) {
-     const root = options.root;
-     const filesToCheck = await resolveFileList(files, options.listFile);
-     const checkResult = await checkShasumFile(filename, filesToCheck, root);
-     const results = checkResult.results;
-     const lines = results.map(({ filename, passed, error }) => `${filename}: ${passed ? 'OK' : 'FAILED'} ${error ? '- ' + error.message : ''}`.trim());
-     const withErrors = results.filter((a) => !a.passed);
-     const passed = !withErrors.length;
-     if (!passed) {
-         lines.push(`shasum: WARNING: ${withErrors.length} computed checksum${withErrors.length > 1 ? 's' : ''} did NOT match`);
-     }
-     return { report: lines.join('\n'), passed };
- }
- async function resolveFileList(files, listFile) {
-     files = files || [];
-     listFile = listFile || [];
-     const setOfFiles = new Set(files);
-     const pending = listFile.map((filename) => readFile(filename, 'utf8'));
-     for await (const content of pending) {
-         content
-             .split('\n')
-             .map((a) => a.trim())
-             .filter((a) => a)
-             .forEach((file) => setOfFiles.add(file));
-     }
-     return [...setOfFiles].map(normalizeFilename);
- }
- export async function calcUpdateChecksumForFiles(filename, files, options) {
-     const root = options.root || '.';
-     const filesToCheck = await resolveFileList(files, options.listFile);
-     const currentEntries = (await readAndParseShasumFile(filename).catch((err) => {
-         const e = toError(err);
-         if (e.code !== 'ENOENT')
-             throw e;
-         return [];
-     })).map((entry) => ({ ...entry, filename: normalizeFilename(entry.filename) }));
-     const entriesToUpdate = new Set([...filesToCheck, ...currentEntries.map((e) => e.filename)]);
-     const mustExist = new Set(filesToCheck);
-     const checksumMap = new Map(currentEntries.map(({ filename, checksum }) => [filename, checksum]));
-     for (const file of entriesToUpdate) {
-         try {
-             const checksum = await calcFileChecksum(resolve(root, file));
-             checksumMap.set(file, checksum);
-         }
-         catch (e) {
-             if (mustExist.has(file) || toError(e).code !== 'ENOENT')
-                 throw e;
-             checksumMap.delete(file);
-         }
-     }
-     const updatedEntries = [...checksumMap]
-         .map(([filename, checksum]) => ({ filename, checksum }))
-         .sort((a, b) => (a.filename < b.filename ? -1 : 1));
-     return updatedEntries.map((e) => `${e.checksum}  ${e.filename}`).join('\n') + '\n';
- }
- export async function updateChecksumForFiles(filename, files, options) {
-     const content = await calcUpdateChecksumForFiles(filename, files, options);
-     await writeFile(filename, content);
-     return { passed: true, report: content };
- }
- function normalizeFilename(filename) {
-     return filename.split(pathSep).join('/');
- }
- //# sourceMappingURL=shasum.js.map
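The checksum files read and written above use the standard `shasum` line format: a 40+ character hex digest, two spaces, then the file path (see `regLine` and the entry formatting in the code). A small sketch that parses such a file; the digests and paths below are placeholders, not values from the package.

```ts
// Same pattern as the removed parser: hex digest, exactly two spaces, filename.
const regLine = /([a-f0-9]{40,}) {2}(.*)/;

const sampleChecksumFile = [
    'da39a3ee5e6b4b0d3255bfef95601890afd80709  dictionaries/example-words.txt.gz',
    'da39a3ee5e6b4b0d3255bfef95601890afd80709  dictionaries/example-words.trie.gz',
].join('\n');

for (const [index, line] of sampleChecksumFile.split('\n').entries()) {
    const m = line.match(regLine);
    if (m) console.log({ lineNumber: index + 1, checksum: m[1], filename: m[2] });
}
```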