@parcel/utils 2.8.3 → 2.8.4-nightly.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.js +1342 -580
- package/lib/index.js.map +1 -1
- package/package.json +11 -9
- package/src/BitSet.js +126 -0
- package/src/alternatives.js +2 -0
- package/src/collection.js +6 -3
- package/src/config.js +72 -50
- package/src/hash.js +15 -0
- package/src/index.js +8 -1
- package/src/prettyDiagnostic.js +8 -4
- package/src/shared-buffer.js +0 -1
- package/test/BitSet.test.js +119 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@parcel/utils",
-  "version": "2.8.3",
+  "version": "2.8.4-nightly.0+7b79c6d",
   "description": "Blazing fast, zero configuration web application bundler",
   "license": "MIT",
   "publishConfig": {
@@ -33,19 +33,20 @@
     }
   },
   "dependencies": {
-    "@parcel/codeframe": "2.8.3",
-    "@parcel/diagnostic": "2.8.3",
-    "@parcel/hash": "2.8.3",
-    "@parcel/logger": "2.8.3",
-    "@parcel/markdown-ansi": "2.8.3",
+    "@parcel/codeframe": "2.8.4-nightly.0+7b79c6d",
+    "@parcel/diagnostic": "2.8.4-nightly.0+7b79c6d",
+    "@parcel/hash": "2.8.4-nightly.0+7b79c6d",
+    "@parcel/logger": "2.8.4-nightly.0+7b79c6d",
+    "@parcel/markdown-ansi": "2.8.4-nightly.0+7b79c6d",
     "@parcel/source-map": "^2.1.1",
-    "chalk": "^4.1.0"
+    "chalk": "^4.1.0",
+    "nullthrows": "^1.1.1"
   },
   "devDependencies": {
     "@iarna/toml": "^2.2.0",
     "ansi-html-community": "0.0.8",
     "clone": "^2.1.1",
-    "fast-glob": "3.
+    "fast-glob": "^3.2.12",
     "fastest-levenshtein": "^1.0.8",
     "is-glob": "^4.0.0",
     "is-url": "^1.2.2",
@@ -56,6 +57,7 @@
     "nullthrows": "^1.1.1",
     "open": "^7.0.3",
     "random-int": "^1.0.0",
+    "strip-ansi": "^6.0.0",
     "terminal-link": "^2.1.1"
   },
   "browser": {
@@ -63,5 +65,5 @@
     "./src/http-server.js": false,
     "./src/openInBrowser.js": false
   },
-  "gitHead": "
+  "gitHead": "7b79c6d69ffabef89810a8db61e9abdeb70d6990"
 }
package/src/BitSet.js
ADDED
@@ -0,0 +1,126 @@
+// @flow strict-local
+import nullthrows from 'nullthrows';
+
+// As our current version of flow doesn't support BigInt's, these values/types
+// have been hoisted to keep the flow errors to a minimum. This can be removed
+// if we upgrade to a flow version that supports BigInt's
+// $FlowFixMe
+type TmpBigInt = bigint;
+// $FlowFixMe
+const BIGINT_ZERO = 0n;
+// $FlowFixMe
+const BIGINT_ONE = 1n;
+// $FlowFixMe
+let numberToBigInt = (v: number): TmpBigInt => BigInt(v);
+
+let bitUnion = (a: TmpBigInt, b: TmpBigInt): TmpBigInt => a | b;
+
+export class BitSet<Item> {
+  _value: TmpBigInt;
+  _lookup: Map<Item, TmpBigInt>;
+  _items: Array<Item>;
+
+  constructor({
+    initial,
+    items,
+    lookup,
+  }: {|
+    items: Array<Item>,
+    lookup: Map<Item, number>,
+    initial?: BitSet<Item> | TmpBigInt,
+  |}) {
+    if (initial instanceof BitSet) {
+      this._value = initial?._value;
+    } else if (initial) {
+      this._value = initial;
+    } else {
+      this._value = BIGINT_ZERO;
+    }
+
+    this._items = items;
+    this._lookup = lookup;
+  }
+
+  static from(items: Array<Item>): BitSet<Item> {
+    let lookup: Map<Item, TmpBigInt> = new Map();
+    for (let i = 0; i < items.length; i++) {
+      lookup.set(items[i], numberToBigInt(i));
+    }
+
+    return new BitSet({items, lookup});
+  }
+
+  static union(a: BitSet<Item>, b: BitSet<Item>): BitSet<Item> {
+    return new BitSet({
+      initial: bitUnion(a._value, b._value),
+      lookup: a._lookup,
+      items: a._items,
+    });
+  }
+
+  #getIndex(item: Item) {
+    return nullthrows(this._lookup.get(item), 'Item is missing from BitSet');
+  }
+
+  add(item: Item) {
+    this._value |= BIGINT_ONE << this.#getIndex(item);
+  }
+
+  delete(item: Item) {
+    this._value &= ~(BIGINT_ONE << this.#getIndex(item));
+  }
+
+  has(item: Item): boolean {
+    return Boolean(this._value & (BIGINT_ONE << this.#getIndex(item)));
+  }
+
+  intersect(v: BitSet<Item>) {
+    this._value = this._value & v._value;
+  }
+
+  union(v: BitSet<Item>) {
+    this._value = bitUnion(this._value, v._value);
+  }
+
+  clear() {
+    this._value = BIGINT_ZERO;
+  }
+
+  cloneEmpty(): BitSet<Item> {
+    return new BitSet({
+      lookup: this._lookup,
+      items: this._items,
+    });
+  }
+
+  clone(): BitSet<Item> {
+    return new BitSet({
+      lookup: this._lookup,
+      items: this._items,
+      initial: this._value,
+    });
+  }
+
+  values(): Array<Item> {
+    let values = [];
+    let tmpValue = this._value;
+    let i;
+
+    // This implementation is optimized for BitSets that contain a very small percentage
+    // of items compared to the total number of potential items. This makes sense for
+    // our bundler use-cases where Sets often contain <1% coverage of the total item count.
+    // In cases where Sets contain a larger percentage of the total items, a regular looping
+    // strategy would be more performant.
+    while (tmpValue > BIGINT_ZERO) {
+      // Get last set bit
+      i = tmpValue.toString(2).length - 1;
+
+      values.push(this._items[i]);
+
+      // Unset last set bit
+      tmpValue &= ~(BIGINT_ONE << numberToBigInt(i));
+    }
+
+    return values;
+  }
+}
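A minimal usage sketch of the new BitSet API, assuming the re-export added to src/index.js later in this diff; the item universe and variable names are illustrative, not taken from the package:

// Illustrative only: a BitSet over a fixed, pre-known item universe.
import {BitSet} from '@parcel/utils';

let assets = ['a.js', 'b.js', 'c.js', 'd.js'];
let reachable = BitSet.from(assets); // maps each item to a bit index

reachable.add('a.js');
reachable.add('c.js');
reachable.has('c.js'); // true
reachable.values();    // ['c.js', 'a.js'] (highest set bit first)

let other = reachable.cloneEmpty(); // same universe, no bits set
other.add('c.js');
other.add('d.js');

reachable.intersect(other);                  // mutates reachable: only 'c.js' remains
let merged = BitSet.union(reachable, other); // new set, inputs untouched
merged.values();                             // ['d.js', 'c.js']

reachable.add('e.js'); // throws: 'Item is missing from BitSet'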
package/src/alternatives.js
CHANGED
package/src/collection.js
CHANGED
@@ -34,7 +34,10 @@ function sortEntry(entry: mixed) {
   return entry;
 }
 
-export function setDifference<T>(a: Set<T>, b: Set<T>): Set<T> {
+export function setDifference<T>(
+  a: $ReadOnlySet<T>,
+  b: $ReadOnlySet<T>,
+): Set<T> {
   let difference = new Set();
   for (let e of a) {
     if (!b.has(e)) {
@@ -49,7 +52,7 @@ export function setDifference<T>(a: Set<T>, b: Set<T>): Set<T> {
   return difference;
 }
 
-export function setIntersect<T>(a: Set<T>, b: Set<T>): void {
+export function setIntersect<T>(a: Set<T>, b: $ReadOnlySet<T>): void {
   for (let entry of a) {
     if (!b.has(entry)) {
       a.delete(entry);
@@ -61,7 +64,7 @@ export function setUnion<T>(a: Iterable<T>, b: Iterable<T>): Set<T> {
   return new Set([...a, ...b]);
 }
 
-export function setEqual<T>(a: Set<T>, b: Set<T>): boolean {
+export function setEqual<T>(a: $ReadOnlySet<T>, b: $ReadOnlySet<T>): boolean {
   if (a.size != b.size) {
     return false;
   }
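A short sketch of the touched collection helpers, assuming the package-root exports; the sets are illustrative. Only setIntersect mutates its first argument, which is why it still takes a writable Set while the read-only parameters are now typed as $ReadOnlySet:

// Illustrative only.
import {setDifference, setIntersect, setEqual} from '@parcel/utils';

let a: Set<number> = new Set([1, 2, 3]);
let b: $ReadOnlySet<number> = new Set([2, 3, 4]);

let diff = setDifference(a, b); // returns a new Set; neither input is modified

setIntersect(a, b);             // deletes entries of `a` that are not in `b`; a is now {2, 3}

setEqual(a, new Set([2, 3]));   // true: same size, same members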
package/src/config.js
CHANGED
@@ -74,7 +74,7 @@ export async function loadConfig(
     if (extname === 'js' || extname === 'cjs') {
       let output = {
         // $FlowFixMe
-        config: clone(require(configFile)),
+        config: clone(module.require(configFile)),
         files: [{filePath: configFile}],
       };
 
@@ -82,55 +82,7 @@ export async function loadConfig(
       return output;
     }
 
-
-
-    let config;
-    if (parse === false) {
-      config = configContent;
-    } else {
-      let parse = opts?.parser ?? getParser(extname);
-      try {
-        config = parse(configContent);
-      } catch (e) {
-        if (extname !== '' && extname !== 'json') {
-          throw e;
-        }
-
-        let pos = {
-          line: e.lineNumber,
-          column: e.columnNumber,
-        };
-
-        throw new ThrowableDiagnostic({
-          diagnostic: {
-            message: `Failed to parse ${path.basename(configFile)}`,
-            origin: '@parcel/utils',
-            codeFrames: [
-              {
-                language: 'json5',
-                filePath: configFile,
-                code: configContent,
-                codeHighlights: [
-                  {
-                    start: pos,
-                    end: pos,
-                    message: e.message,
-                  },
-                ],
-              },
-            ],
-          },
-        });
-      }
-    }
-
-    let output = {
-      config,
-      files: [{filePath: configFile}],
-    };
-
-    configCache.set(String(parse) + configFile, output);
-    return output;
+    return readConfig(fs, configFile, opts);
   } catch (err) {
     if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
       return null;
@@ -148,6 +100,76 @@ loadConfig.clear = () => {
   resolveCache.clear();
 };
 
+export async function readConfig(
+  fs: FileSystem,
+  configFile: FilePath,
+  opts: ?ConfigOptions,
+): Promise<ConfigOutput | null> {
+  let parse = opts?.parse ?? true;
+  let cachedOutput = configCache.get(String(parse) + configFile);
+  if (cachedOutput) {
+    return cachedOutput;
+  }
+
+  try {
+    let configContent = await fs.readFile(configFile, 'utf8');
+    let config;
+    if (parse === false) {
+      config = configContent;
+    } else {
+      let extname = path.extname(configFile).slice(1);
+      let parse = opts?.parser ?? getParser(extname);
+      try {
+        config = parse(configContent);
+      } catch (e) {
+        if (extname !== '' && extname !== 'json') {
+          throw e;
+        }
+
+        let pos = {
+          line: e.lineNumber,
+          column: e.columnNumber,
+        };
+
+        throw new ThrowableDiagnostic({
+          diagnostic: {
+            message: `Failed to parse ${path.basename(configFile)}`,
+            origin: '@parcel/utils',
+            codeFrames: [
+              {
+                language: 'json5',
+                filePath: configFile,
+                code: configContent,
+                codeHighlights: [
+                  {
+                    start: pos,
+                    end: pos,
+                    message: e.message,
+                  },
+                ],
+              },
+            ],
+          },
+        });
+      }
+    }
+
+    let output = {
+      config,
+      files: [{filePath: configFile}],
+    };
+
+    configCache.set(String(parse) + configFile, output);
+    return output;
+  } catch (err) {
+    if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
+      return null;
+    }
+
+    throw err;
+  }
+}
+
 function getParser(extname) {
   switch (extname) {
     case 'toml':
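The read-and-parse half of loadConfig is now exposed as readConfig, which takes an already-resolved path rather than resolving a file name itself. A hedged sketch of calling it, assuming a FileSystem implementation from @parcel/fs; the path is a placeholder:

// Illustrative only.
import {readConfig} from '@parcel/utils';
import {NodeFS} from '@parcel/fs';

const fs = new NodeFS();

async function loadTsConfig() {
  // readConfig picks a parser from the file extension (or opts.parser),
  // caches the output keyed by `String(parse) + configFile`, and returns
  // null when the file does not exist.
  let result = await readConfig(fs, '/app/tsconfig.json', {parse: true});
  return result?.config ?? null;
}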
package/src/hash.js
CHANGED
@@ -29,6 +29,21 @@ export function hashObject(obj: {+[string]: mixed, ...}): string {
   return hashString(JSON.stringify(objectSortedEntriesDeep(obj)));
 }
 
+let testCache: {|[string]: Promise<string>|} = {
+  /*:: ...null */
+};
 export function hashFile(fs: FileSystem, filePath: string): Promise<string> {
+  if (process.env.PARCEL_BUILD_ENV === 'test') {
+    // Development builds of these native modules are especially big and slow to hash.
+    if (
+      /parcel-swc\.[^\\/]+\.node$|lightningcss.[^\\/]+.node$/.test(filePath)
+    ) {
+      let cacheEntry = testCache[filePath];
+      if (cacheEntry) return cacheEntry;
+      let v = hashStream(fs.createReadStream(filePath));
+      testCache[filePath] = v;
+      return v;
+    }
+  }
   return hashStream(fs.createReadStream(filePath));
 }
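The public call shape of hashFile is unchanged; the new branch only memoizes hashes of the large native *.node binaries, and only when PARCEL_BUILD_ENV === 'test'. For reference, a sketch of the call (the path is illustrative):

// Illustrative only.
import {hashFile} from '@parcel/utils';
import {NodeFS} from '@parcel/fs';

hashFile(new NodeFS(), '/app/package.json').then(hash => {
  // `hash` is the streamed content hash of the file
  console.log(hash);
});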
package/src/index.js
CHANGED
@@ -40,7 +40,12 @@ export {
   setIntersect,
   setUnion,
 } from './collection';
-export {resolveConfig, resolveConfigSync, loadConfig} from './config';
+export {
+  resolveConfig,
+  resolveConfigSync,
+  loadConfig,
+  readConfig,
+} from './config';
 export {DefaultMap, DefaultWeakMap} from './DefaultMap';
 export {makeDeferredWithPromise} from './Deferred';
 export {getProgressMessage} from './progress-message.js';
@@ -73,3 +78,5 @@ export {
   loadSourceMap,
   remapSourceLocation,
 } from './sourcemap';
+export {BitSet} from './BitSet';
+export {default as stripAnsi} from 'strip-ansi';
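The new exports surface through the package root; assuming the nightly build, a single import covers them:

// Illustrative only.
import {BitSet, readConfig, stripAnsi} from '@parcel/utils';

stripAnsi('\u001b[31mred\u001b[39m'); // 'red'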
package/src/prettyDiagnostic.js
CHANGED
@@ -77,10 +77,14 @@ export default async function prettyDiagnostic(
       });
     }
 
-    let location
-
-
-
+    let location;
+    if (typeof filePath !== 'string') {
+      location = '';
+    } else if (highlights.length === 0) {
+      location = filePath;
+    } else {
+      location = `${filePath}:${highlights[0].start.line}:${highlights[0].start.column}`;
+    }
     result.codeframe += location ? chalk.gray.underline(location) + '\n' : '';
     result.codeframe += formattedCodeFrame;
     if (codeFrame !== codeFrames[codeFrames.length - 1]) {
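The rewritten branch builds the underlined location header used on the next line. A standalone mirror of the logic, for illustration only (the helper name is hypothetical and the inputs are made up):

// Hypothetical helper mirroring the branch above.
function formatLocation(filePath, highlights) {
  if (typeof filePath !== 'string') return '';
  if (highlights.length === 0) return filePath;
  return `${filePath}:${highlights[0].start.line}:${highlights[0].start.column}`;
}

formatLocation(undefined, []);           // ''
formatLocation('/app/src/index.js', []); // '/app/src/index.js'
formatLocation('/app/src/index.js', [{start: {line: 12, column: 5}}]); // '/app/src/index.js:12:5'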
package/src/shared-buffer.js
CHANGED
package/test/BitSet.test.js
ADDED
@@ -0,0 +1,119 @@
+// @flow strict-local
+
+import assert from 'assert';
+import {BitSet} from '../src/BitSet';
+
+function assertValues<Item>(set: BitSet<Item>, values: Array<Item>) {
+  let setValues = set.values();
+
+  for (let value of values) {
+    assert(set.has(value), 'Set.has returned false');
+    assert(
+      setValues.some(v => v === value),
+      'Set values is missing value',
+    );
+  }
+
+  assert(
+    setValues.length === values.length,
+    `Expected ${values.length} values but got ${setValues.length}`,
+  );
+}
+
+describe('BitSet', () => {
+  it('cloneEmpty should return an empty set', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    let set2 = set1.cloneEmpty();
+
+    assertValues(set2, []);
+  });
+
+  it('clone should return a set with the same values', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    let set2 = set1.clone();
+
+    assertValues(set2, [1, 3]);
+  });
+
+  it('clear should remove all values from the set', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    set1.clear();
+
+    assertValues(set1, []);
+  });
+
+  it('delete should remove values from the set', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+    set1.add(5);
+
+    set1.delete(3);
+
+    assertValues(set1, [1, 5]);
+  });
+
+  it('should intersect with another BitSet', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    let set2 = set1.cloneEmpty();
+    set2.add(3);
+    set2.add(5);
+
+    set1.intersect(set2);
+    assertValues(set1, [3]);
+  });
+
+  it('should union with another BitSet', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    let set2 = set1.cloneEmpty();
+    set2.add(3);
+    set2.add(5);
+
+    set1.union(set2);
+    assertValues(set1, [1, 3, 5]);
+  });
+
+  it('BitSet.union should create a new BitSet with the union', () => {
+    let set1 = BitSet.from([1, 2, 3, 4, 5]);
+    set1.add(1);
+    set1.add(3);
+
+    let set2 = set1.cloneEmpty();
+    set2.add(3);
+    set2.add(5);
+
+    let set3 = BitSet.union(set1, set2);
+    assertValues(set1, [1, 3]);
+    assertValues(set2, [3, 5]);
+    assertValues(set3, [1, 3, 5]);
+  });
+
+  it('returns an array of all values', () => {
+    let set = BitSet.from([1, 2, 3, 4]);
+    set.add(1);
+    set.add(3);
+
+    assertValues(set, [3, 1]);
+  });
+
+  it('should return an error if a new item is added', () => {
+    let set = BitSet.from([1, 2, 3, 4]);
+
+    assert.throws(() => set.add(5), /Item is missing from BitSet/);
+  });
+});