@iebh/reflib 2.5.3 → 2.5.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/lib/formats.js +16 -0
- package/lib/writeFile.js +6 -1
- package/modules/default.js +2 -0
- package/modules/endnoteEnl.js +237 -0
- package/modules/endnoteEnlX.js +85 -0
- package/modules/endnoteXml.js +5 -2
- package/package.json +12 -8
package/README.md
CHANGED
@@ -1,5 +1,5 @@
-Reflib
-======
+@IEBH/Reflib
+============
 Reference library processing for Node.
 
 This library provides various read/write functionality to process citation libraries and handle individual references (henceforth "Refs").
@@ -13,8 +13,8 @@ Compatibility
 | Library                | Extension(s)    | Read               | Write              |
 |------------------------|-----------------|--------------------|--------------------|
 | Comma Separated Values | `.csv`          | :x:                | :x:                |
-| EndNote ENL            | `.enl`          | :
-| EndNote ENLX           | `.enlx`         | :
+| EndNote ENL            | `.enl`          | :heavy_check_mark: | (untested)         |
+| EndNote ENLX           | `.enlx`         | :heavy_check_mark: | :x:                |
 | EndNote XML            | `.xml`          | :heavy_check_mark: | :heavy_check_mark: |
 | JSON                   | `.json`         | :heavy_check_mark: | :heavy_check_mark: |
 | Medline                | `.nbib`         | :heavy_check_mark: | :heavy_check_mark: |
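The practical upshot of the table change is that `.enl` libraries can now be read (and, tentatively, written - the README still flags ENL writing as untested). A minimal usage sketch, assuming writeFile() from lib/writeFile.js (changed later in this diff) is re-exported from the package entry point and that the ref fields shown survive the ENL column mappings:

    // Hedged sketch: write a couple of refs out as an EndNote .enl library.
    // Assumes @iebh/reflib re-exports writeFile() from its entry point.
    import {writeFile} from '@iebh/reflib';

    let refs = [
        {recNumber: 1, title: 'Example paper', date: '2024', abstract: 'An illustrative record'},
        {recNumber: 2, title: 'Another paper', date: '2023'},
    ];

    // The '.enl' extension resolves to the new endnoteEnl format entry
    writeFile('library.enl', refs)
        .then(()=> console.log('Wrote', refs.length, 'refs'));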
package/lib/formats.js
CHANGED
@@ -18,6 +18,22 @@ export let formats = {
 		canRead: false,
 		canWrite: false,
 	},
+	endnoteEnl: {
+		id: 'endnoteEnl',
+		title: 'EndNote ENL',
+		titleShort: 'EndNote',
+		ext: ['.enl'],
+		canRead: true,
+		canWrite: true,
+	},
+	endnoteEnlX: {
+		id: 'endnoteEnlX',
+		title: 'EndNote ENLX',
+		titleShort: 'EndNote ENLX',
+		ext: ['.enlx'],
+		canRead: true,
+		canWrite: false,
+	},
 	endnoteXml: {
 		id: 'endnoteXml',
 		title: 'EndNoteXML',
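These registry entries are what extension-based detection resolves against (lib/writeFile.js below consumes them via identifyFormat(path)?.id). A rough sketch of that kind of lookup, using a local stand-in for the registry rather than the library's actual identifyFormat():

    // Hedged sketch of an extension -> format lookup over entries shaped like the above.
    // `formats` here is a local stand-in, not an import of package/lib/formats.js.
    const formats = {
        endnoteEnl: {id: 'endnoteEnl', ext: ['.enl'], canRead: true, canWrite: true},
        endnoteEnlX: {id: 'endnoteEnlX', ext: ['.enlx'], canRead: true, canWrite: false},
    };

    const guessFormat = path => Object.values(formats)
        .find(f => f.ext.some(ext => path.toLowerCase().endsWith(ext)));

    console.log(guessFormat('refs.enlx')?.id); // 'endnoteEnlX'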
package/lib/writeFile.js
CHANGED
@@ -1,4 +1,5 @@
 import {createWriteStream} from 'node:fs';
+import {finished as streamFinished} from 'node:stream/promises';
 import {identifyFormat} from './identifyFormat.js';
 import {writeStream} from './writeStream.js';
 
@@ -16,11 +17,15 @@ export function writeFile(path, refs, options) {
 	let format = options?.module || identifyFormat(path)?.id;
 	if (!format) throw new Error(`Unable to identify reference library format when saving file "${path}"`);
 
-	let
+	let fileStream = createWriteStream(path);
+	let writer = writeStream(format, fileStream, options);
+
 	return Promise.resolve()
 		.then(()=> writer.start())
 		.then(()=> refs.reduce((chain, ref) => // Write all refs as a series of promises
 			chain.then(()=> writer.write(ref))
 		, Promise.resolve()))
 		.then(()=> writer.end())
+		.then(()=> fileStream.close())
+		.then(()=> streamFinished(fileStream))
 }
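The refactor keeps its own handle on the file stream so it can be closed and flushed (via node:stream/promises' finished()) before the returned promise resolves, rather than leaving that to the writer module. The promise chain it drives is the module writer protocol (start() / write() / end(), also implemented by the new endnoteEnl module later in this diff), restated here against a generic writer stand-in:

    // Sketch of the writer protocol writeFile() drives above, against any object
    // exposing start()/write()/end() methods that each return promises.
    function writeAll(writer, refs) {
        return Promise.resolve()
            .then(()=> writer.start())
            .then(()=> refs.reduce( // Chain one write() per ref so they run strictly in series
                (chain, ref) => chain.then(()=> writer.write(ref)),
                Promise.resolve(),
            ))
            .then(()=> writer.end());
    }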
package/modules/default.js
CHANGED
package/modules/endnoteEnl.js
ADDED
@@ -0,0 +1,237 @@
+import Emitter from '../shared/emitter.js';
+import {default as SQLite} from 'sql.js';
+
+/**
+* EndNote/SDB column to RefLib column mappings
+* This must match EXACTLY a known, working .sdb Sqlite database table pragma
+* Column ID's not present within this list are generally scrapped
+*
+* @type {Array<Object>}
+* @property {String} sl The Sqlite/SDB column within the `refs` table
+* @property {String} rl The Reflib column (see README.md)
+* @property {'TEXT'|'INTEGER'} [type='TEXT'] SQLite column type, used for encoding when writing
+* @property {Function} [value] Optional column value mapper. Called as `(val:Any, ref:Object, col:Object, colIndex:Number, refIndex:Number)`
+* @property {*} [slDefault=''] Overriding SQLite column value if the field is empty
+*/
+export const columnMappings = [
+	// FIXME: This list is incomplete
+	{sl: 'id', rl: 'recNumber', type: 'INTEGER', value: (val, ref, col, colIndex, refIndex) => ref.recNumber || refIndex + 1},
+	// 'trash_state',
+	// 'text_styles',
+	// 'reference_type',
+	{sl: 'author', rl: 'author'},
+	// 'year',
+	{sl: 'title', rl: 'title'},
+	{sl: 'pages', rl: 'pages'},
+	// 'secondary_title',
+	{sl: 'volume', rl: 'volume'},
+	{sl: 'number', rl: 'number'},
+	// 'number_of_volumes',
+	// 'secondary_author',
+	// 'place_published',
+	// 'publisher',
+	// 'subsidiary_author',
+	// 'edition',
+	// 'keywords',
+	// 'type_of_work',
+	{sl: 'date', rl: 'date'},
+	{sl: 'abstract', rl: 'abstract'},
+	{sl: 'label', rl: 'label'},
+	// 'url',
+	// 'tertiary_title',
+	// 'tertiary_author',
+	{sl: 'notes', rl: 'notes'},
+	{sl: 'isbn', rl: 'isbn'},
+	{sl: 'custom_1', rl: 'custom1'},
+	{sl: 'custom_2', rl: 'custom2'},
+	{sl: 'custom_3', rl: 'custom3'},
+	{sl: 'custom_4', rl: 'custom4'},
+	// 'alternate_title',
+	// 'accession_number',
+	// 'call_number',
+	// 'short_title',
+	{sl: 'custom_5', rl: 'custom5'},
+	{sl: 'custom_6', rl: 'custom6'},
+	{sl: 'section', rl: 'section'},
+	// 'original_publication',
+	// 'reprint_edition',
+	// 'reviewed_item',
+	// 'author_address',
+	{sl: 'caption', rl: 'caption'},
+	{sl: 'custom_7', rl: 'custom7'},
+	// 'electronic_resource_number',
+	// 'translated_author',
+	// 'translated_title',
+	// 'name_of_database',
+	// 'database_provider',
+	{sl: 'research_notes', rl: 'researchNotes'},
+	{sl: 'language', rl: 'language'},
+	// 'access_date',
+	// 'last_modified_date',
+	// 'record_properties',
+	// 'added_to_library',
+	// 'record_last_updated',
+	// 'reserved3',
+	// 'fulltext_downloads',
+	// 'read_status',
+	// 'rating',
+	// 'reserved7',
+	// 'reserved8',
+	// 'reserved9',
+	// 'reserved10'
+];
+
+export const baseSql = [
+	'CREATE TABLE refs(id INTEGER PRIMARY KEY AUTOINCREMENT,trash_state INTEGER NOT NULL DEFAULT 0,text_styles TEXT NOT NULL DEFAULT "",reference_type INTEGER NOT NULL DEFAULT 0,author TEXT NOT NULL DEFAULT "",year TEXT NOT NULL DEFAULT "",title TEXT NOT NULL DEFAULT "",pages TEXT NOT NULL DEFAULT "",secondary_title TEXT NOT NULL DEFAULT "",volume TEXT NOT NULL DEFAULT "",number TEXT NOT NULL DEFAULT "",number_of_volumes TEXT NOT NULL DEFAULT "",secondary_author TEXT NOT NULL DEFAULT "",place_published TEXT NOT NULL DEFAULT "",publisher TEXT NOT NULL DEFAULT "",subsidiary_author TEXT NOT NULL DEFAULT "",edition TEXT NOT NULL DEFAULT "",keywords TEXT NOT NULL DEFAULT "",type_of_work TEXT NOT NULL DEFAULT "",date TEXT NOT NULL DEFAULT "",abstract TEXT NOT NULL DEFAULT "",label TEXT NOT NULL DEFAULT "",url TEXT NOT NULL DEFAULT "",tertiary_title TEXT NOT NULL DEFAULT "",tertiary_author TEXT NOT NULL DEFAULT "",notes TEXT NOT NULL DEFAULT "",isbn TEXT NOT NULL DEFAULT "",custom_1 TEXT NOT NULL DEFAULT "",custom_2 TEXT NOT NULL DEFAULT "",custom_3 TEXT NOT NULL DEFAULT "",custom_4 TEXT NOT NULL DEFAULT "",alternate_title TEXT NOT NULL DEFAULT "",accession_number TEXT NOT NULL DEFAULT "",call_number TEXT NOT NULL DEFAULT "",short_title TEXT NOT NULL DEFAULT "",custom_5 TEXT NOT NULL DEFAULT "",custom_6 TEXT NOT NULL DEFAULT "",section TEXT NOT NULL DEFAULT "",original_publication TEXT NOT NULL DEFAULT "",reprint_edition TEXT NOT NULL DEFAULT "",reviewed_item TEXT NOT NULL DEFAULT "",author_address TEXT NOT NULL DEFAULT "",caption TEXT NOT NULL DEFAULT "",custom_7 TEXT NOT NULL DEFAULT "",electronic_resource_number TEXT NOT NULL DEFAULT "",translated_author TEXT NOT NULL DEFAULT "",translated_title TEXT NOT NULL DEFAULT "",name_of_database TEXT NOT NULL DEFAULT "",database_provider TEXT NOT NULL DEFAULT "",research_notes TEXT NOT NULL DEFAULT "",language TEXT NOT NULL DEFAULT "",access_date TEXT NOT NULL DEFAULT "",last_modified_date TEXT NOT NULL DEFAULT "",record_properties TEXT NOT NULL DEFAULT "",added_to_library INTEGER NOT NULL DEFAULT 0,record_last_updated INTEGER NOT NULL DEFAULT 0,reserved3 INTEGER NOT NULL DEFAULT 0,fulltext_downloads TEXT NOT NULL DEFAULT "",read_status TEXT NOT NULL DEFAULT "",rating TEXT NOT NULL DEFAULT "",reserved7 TEXT NOT NULL DEFAULT "",reserved8 TEXT NOT NULL DEFAULT "",reserved9 TEXT NOT NULL DEFAULT "",reserved10 TEXT NOT NULL DEFAULT "")',
+].join(';');
+
+/**
+* Lookup object for Sqlite columns to the column mapping object
+*
+* @type {Object<Object>} Each column mapping item with the Sqlite column name as the key
+*/
+export const columnMappingSL2RL = Object.fromEntries(
+	columnMappings
+		.map(cm => [cm.sl, cm])
+)
+
+
+/**
+* Read an EndNote(@11+) / SQLite database, returning an Emitter analogue
+*
+* @see modules/inhterface.js
+*
+* @param {Stream} stream Stream primative to encapsulate
+*
+* @returns {Object} An Emitter analogue defined in `../shared/Emitter.js`
+*/
+export function readStream(stream) {
+	let emitter = Emitter();
+
+	// Queue up the parser in the next tick (so we can return the emitter first)
+	setTimeout(()=> {
+		let chunks = []; // Gathered buffer chunks, used to make an arrayBuffer later
+
+		stream
+			.on('data', chunk => chunks.push(chunk))
+			.on('error', e => emitter.emit('error', e))
+			.on('end', ()=> {
+				Promise.resolve()
+					.then(()=> { // Parse chunks into an arrayBuffer
+						let buf = new Uint8Array(
+							chunks.reduce((total, chunk) => total + chunk.length, 0)
+						);
+
+						let position = 0;
+						chunks.forEach(chunk => {
+							buf.set(chunk, position);
+							position += chunk.length;
+						});
+
+						// Release chunks to free up memory
+						chunks = [];
+
+						return buf;
+					})
+					.then(buf => this.readBuffer(buf, { // Hand arrayBuffer off to readBuffer() for processing
+						onRef(ref) {
+							emitter.emit('ref', ref);
+						},
+					}))
+					.finally(()=> emitter.emit('end'))
+			})
+	});
+
+	return emitter;
+}
+
+
+/**
+* Actual reader function
+* Accepts a buffer and tiggers the emitter when references are extracted + when ending
+*
+* @param {Uint8Array} buf The buffer to process
+*
+* @param {Object} [options] Additional options to mutate behaviour
+* @param {Function} [options.onRef] Function to trigger when we extract a reference from the buffer. Called as `(ref:RefLibRef)`
+* @param {String} [options.refTable='enl_refs'] The name of the reference table to query
+*
+* @returns {Promise} A promise which will eventually resolve when the read operation on the buffer has completed
+*/
+export function readBuffer(buf, options) {
+	let settings = {
+		refTable: 'refs',
+		onRef() {},
+		...options,
+	};
+	return Promise.resolve()
+		.then(()=> SQLite()) // Init database
+		.then(sqli => new sqli.Database(buf)) // Create SQLite database
+		.then(db => db.exec(`SELECT * from ${settings.refTable}`)) // Slurp all references
+		.then(([{columns, values}]) => { // Digest references
+			values.forEach(v => {
+				let ref = columns.reduce((ref, col, colIndex) => {
+					if (columnMappingSL2RL[col]) // Is a column we have a mapping for (implied else - ignore the data)
+						ref[columnMappingSL2RL[col].rl] = v[colIndex];
+
+					return ref;
+				}, {});
+
+				settings.onRef(ref);
+			})
+		})
+}
+
+
+/**
+* Write references to an EndNote(@11+)) SQLite database file
+*
+* @see modules/interface.js
+*
+* @param {Stream} stream Writable stream to output to
+*
+* @returns {Object} A writable stream analogue defined in `modules/interface.js`
+*/
+export function writeStream(stream) {
+	let db; // Database we are writing to
+	let refIndex = 0;
+	let insertOp; // Prepared query to insert a single ref
+
+	return {
+		start() {
+			return SQLite()
+				.then(sqli => new sqli.Database())
+				.then(res => db = res)
+				.then(()=> db.exec(baseSql))
+				.then(()=> insertOp = db.prepare(
+					'INSERT INTO refs'
+					+ '('
+					+ columnMappings.map(cm => cm.sl).join(', ')
+					+ ') '
+					+ 'VALUES ('
+					+ columnMappings.map(cm => cm.sl).map(k => ':' + k).join(', ')
+					+ ')'
+				))
+		},
+
+		write(ref) {
+			// Compose ref object we are going to throw at SQLite
+			let slRef = columnMappings
+				.reduce((r, col, colIndex) => {
+					r[':' + col.sl] =
+						col.value ? col.value(ref[col.rl], ref, col, colIndex, refIndex++)
+						: ref[col.rl] || col.slDefault || '';
+
+					return r;
+				}, {});
+
+			return insertOp.run(slRef);
+		},
+
+		end() {
+			return Promise.resolve()
+				.then(()=> stream.write(db.export()))
+				.then(()=> stream.end());
+		},
+	};
+}
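Because readBuffer() only needs a Uint8Array and an onRef callback, the new ENL reader can be exercised directly against a file on disk. A hedged sketch, assuming the module is importable at the path shown (the deep-import path is a guess, not a documented entry point):

    // Hedged sketch: read an .enl (SQLite) library through readBuffer().
    import {readFile} from 'node:fs/promises';
    import * as EndNoteEnl from '@iebh/reflib/modules/endnoteEnl.js'; // Assumed import path

    let refs = [];
    let buf = await readFile('library.enl'); // Node Buffers are Uint8Array subclasses, so sql.js accepts them

    await EndNoteEnl.readBuffer(buf, {
        onRef: ref => refs.push(ref), // Collect each mapped row (recNumber, author, title, ...)
    });

    console.log('Read', refs.length, 'refs');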
package/modules/endnoteEnlX.js
ADDED
@@ -0,0 +1,85 @@
+import * as EndNoteEnl from './endnoteEnl.js';
+import Emitter from '../shared/emitter.js';
+import {BlobReader as ZipBlobReader, ZipReader, Writer} from '@zip.js/zip.js';
+
+/**
+* Utility class to read a Zip file into a Uint8Array
+* This extends the deafult @zip.js/zip.js Writer class so its works without a crowbar
+*/
+class BinaryStringWriter extends Writer {
+	chunks = [];
+
+	writeUint8Array(chunk) {
+		this.chunks.push(chunk);
+	}
+
+	getData() {
+		let buf = new Uint8Array(
+			this.chunks.reduce((total, chunk) => total + chunk.length, 0)
+		);
+
+		let position = 0;
+		this.chunks.forEach(chunk => {
+			buf.set(chunk, position);
+			position += chunk.length;
+		});
+
+		// Flush buffer to free up memory
+		this.chunks = [];
+
+		return buf;
+	}
+}
+
+
+/**
+* Read an EndNote(@11+) / Zip+SQLite database EnlX returning an Emitter analogue
+*
+* Since EnlX files are really just a Zip file containing the SQLite database we're actually intrestesd in, most of this module
+* is handling the Zip container and handing the extracted buffer to the regular modules/endnoteEnl.js driver
+*
+* @see modules/inhterface.js
+*
+* @param {Stream} stream Stream primative to encapsulate
+*
+* @returns {Object} An Emitter analogue defined in `../shared/Emitter.js`
+*/
+export function readStream(stream) {
+	let emitter = Emitter();
+
+	// Queue up the parser in the next tick (so we can return the emitter first)
+	setTimeout(()=> {
+		let chunks = []; // Gathered buffer chunks, used to make an arrayBuffer later
+
+		stream
+			.on('data', chunk => chunks.push(chunk))
+			.on('error', e => emitter.emit('error', e))
+			.on('end', ()=> {
+				Promise.resolve()
+					.then(()=> { // Parse chunks into a Blob
+						let blob = new Blob(chunks);
+
+						// Release chunks to free up memory
+						chunks = [];
+						return blob;
+					})
+					.then(blob => new ZipReader( // Create zipReader
+						new ZipBlobReader(blob),
+					))
+					.then(zip => zip.getEntries()) // Fetch files
+					.then(files => files.find(f => f.filename == 'sdb/sdb.eni')) // Find the file we want
+					.then(sdb => { // Extract that files stream
+						let writer = new BinaryStringWriter();
+						return sdb.getData(writer);
+					})
+					.then(buf => EndNoteEnl.readBuffer(buf, {
+						onRef(ref) {
+							emitter.emit('ref', ref);
+						},
+					}))
+					.finally(()=> emitter.emit('end'))
+			})
+	});
+
+	return emitter;
+}
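The .enlx reader follows the same Emitter-based contract as the other read modules. A hedged usage sketch, assuming the shared Emitter analogue exposes mitt-style on() handlers and that the module is importable at the path shown:

    // Hedged sketch: stream an .enlx (zipped SQLite) library and log extracted refs.
    import {createReadStream} from 'node:fs';
    import {readStream} from '@iebh/reflib/modules/endnoteEnlX.js'; // Assumed import path

    let emitter = readStream(createReadStream('library.enlx'));

    emitter.on('ref', ref => console.log('Got ref:', ref.title));
    emitter.on('error', err => console.error('Failed to read library:', err));
    emitter.on('end', ()=> console.log('Done'));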
package/modules/endnoteXml.js
CHANGED
@@ -5,10 +5,11 @@ import Emitter from '../shared/emitter.js';
 import { WritableStream as XMLParser } from 'htmlparser2/lib/WritableStream';
 
 /**
-* Read an EndnoteXML file, returning
+* Read an EndnoteXML file, returning an Emitter analogue
+*
 * @see modules/inhterface.js
 * @param {Stream} stream Stream primative to encapsulate
-* @returns {Object}
+* @returns {Object} An Emitter analogue defined in `../shared/Emitter.js`
 */
 export function readStream(stream) {
 	let emitter = Emitter();
@@ -121,6 +122,8 @@ export function readStream(stream) {
 
 
 /**
+* Write references to a file
+*
 * @see modules/interface.js
 *
 * @param {Stream} stream Writable stream to output to
package/package.json
CHANGED
@@ -1,11 +1,12 @@
 {
 	"name": "@iebh/reflib",
-	"version": "2.5.3",
+	"version": "2.5.4",
 	"description": "Reference / Citation reference library utilities",
 	"scripts": {
 		"lint": "eslint .",
 		"test": "mocha",
-		"test:browser": "cd test/browser && npm run dev"
+		"test:browser": "cd test/browser && npm run dev",
+		"test:watch": "nodemon --exec npm run test"
 	},
 	"repository": {
 		"type": "git",
@@ -44,18 +45,21 @@
 		"JSONStream": "./modules/shims/JSONStream-browser.js"
 	},
 	"devDependencies": {
-		"@momsfriendlydevco/eslint-config": "^2.
-		"chai": "^5.
-		"eslint": "^9.
-		"mocha": "^
+		"@momsfriendlydevco/eslint-config": "^2.1.2",
+		"chai": "^5.2.0",
+		"eslint": "^9.21.0",
+		"mocha": "^11.1.0",
 		"mocha-logger": "^1.0.8",
+		"nodemon": "^3.1.9",
 		"temp": "^0.9.4",
 		"vite-plugin-replace": "^0.1.1"
 	},
 	"dependencies": {
-		"@iebh/cacx": "^1.0.
+		"@iebh/cacx": "^1.0.3",
+		"@zip.js/zip.js": "^2.7.57",
 		"htmlparser2": "^9.1.0",
 		"JSONStream": "^1.3.5",
-		"mitt": "^3.0.1"
+		"mitt": "^3.0.1",
+		"sql.js": "^1.12.0"
 	}
 }