@lde/pipeline 0.3.0 → 0.3.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +14 -11
- package/dist/analyzer.d.ts +0 -18
- package/dist/analyzer.d.ts.map +0 -1
- package/dist/analyzer.js +0 -34
- package/dist/writer/fileWriter.d.ts +0 -21
- package/dist/writer/fileWriter.d.ts.map +0 -1
- package/dist/writer/fileWriter.js +0 -29
- package/dist/writer/mergeWriter.d.ts +0 -12
- package/dist/writer/mergeWriter.d.ts.map +0 -1
- package/dist/writer/mergeWriter.js +0 -10
- package/dist/writer.d.ts +0 -6
- package/dist/writer.d.ts.map +0 -1
- package/dist/writer.js +0 -1
package/package.json
CHANGED
@@ -1,30 +1,33 @@
 {
   "name": "@lde/pipeline",
-  "version": "0.3.0",
+  "version": "0.3.7",
+  "repository": {
+    "url": "https://github.com/ldengine/lde"
+  },
   "type": "module",
-  "main": "./dist/index.js",
-  "module": "./dist/index.js",
-  "types": "./dist/index.d.ts",
   "exports": {
     "./package.json": "./package.json",
     ".": {
-      "development": "./src/index.ts",
       "types": "./dist/index.d.ts",
       "import": "./dist/index.js",
+      "development": "./src/index.ts",
       "default": "./dist/index.js"
     }
   },
+  "main": "./dist/index.js",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
   "files": [
     "dist",
     "!**/*.tsbuildinfo"
   ],
   "dependencies": {
-    "@lde/dataset": "0.
-    "@lde/dataset-registry-client": "0.3
-    "@lde/sparql-importer": "0.0.
-    "@lde/sparql-server": "0.2.
+    "@lde/dataset": "0.4.1",
+    "@lde/dataset-registry-client": "0.4.3",
+    "@lde/sparql-importer": "0.0.8",
+    "@lde/sparql-server": "0.2.2",
+    "@rdfjs/types": "^2.0.1",
     "fetch-sparql-endpoint": "^6.0.0",
-    "tslib": "^2.3.0"
-    "@rdfjs/types": "^2.0.1"
+    "tslib": "^2.3.0"
   }
 }
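Note on the exports change: Node.js matches conditional exports in key order, so with "development" now listed after "types" and "import", a plain ESM import resolves to ./dist/index.js even when a "development" condition is enabled; in 0.3.0 that condition came first and pointed tooling at ./src/index.ts. A minimal sketch to check the resolution on an installed copy (an illustrative example, not part of the package; assumes Node 20.6+ for synchronous import.meta.resolve):

// resolve-check.ts — print which file the "." export of @lde/pipeline resolves to.
// Conditional exports are matched in key order, so "import" (./dist/index.js) wins
// over "development" (./src/index.ts) even under `node --conditions=development`.
console.log(import.meta.resolve('@lde/pipeline'));
// expected: file:///…/node_modules/@lde/pipeline/dist/index.js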
package/dist/analyzer.d.ts
DELETED
@@ -1,18 +0,0 @@
-import { DataEmittingStep, NotSupported } from './step.js';
-import { Dataset } from '@lde/dataset';
-import { SparqlEndpointFetcher } from 'fetch-sparql-endpoint';
-export interface Args {
-    identifier: string;
-    query: string;
-    fetcher?: SparqlEndpointFetcher;
-}
-export declare class SparqlQueryAnalyzer implements DataEmittingStep {
-    readonly identifier: string;
-    private readonly query;
-    private readonly fetcher;
-    constructor({ identifier, query, fetcher }: Args);
-    execute(dataset: Dataset): Promise<NotSupported | (import("readable-stream").Readable & import("@rdfjs/types").Stream<import("@rdfjs/types").Quad>)>;
-    static fromFile(filename: string): Promise<SparqlQueryAnalyzer>;
-}
-export declare function fromFile(filename: string): Promise<string>;
-//# sourceMappingURL=analyzer.d.ts.map
package/dist/analyzer.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"analyzer.d.ts","sourceRoot":"","sources":["../src/analyzer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,qBAAqB,EAAE,MAAM,uBAAuB,CAAC;AAI9D,MAAM,WAAW,IAAI;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,qBAAqB,CAAC;CACjC;AAED,qBAAa,mBAAoB,YAAW,gBAAgB;IAC1D,SAAgB,UAAU,SAAC;IAC3B,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC;IACvB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;gBAEb,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,IAAI;IAM1C,OAAO,CAAC,OAAO,EAAE,OAAO;WAqBV,QAAQ,CAAC,QAAQ,EAAE,MAAM;CAM9C;AAED,wBAAsB,QAAQ,CAAC,QAAQ,EAAE,MAAM,mBAE9C"}
package/dist/analyzer.js
DELETED
@@ -1,34 +0,0 @@
-import { NotSupported } from './step.js';
-import { SparqlEndpointFetcher } from 'fetch-sparql-endpoint';
-import { readFile } from 'node:fs/promises';
-import { resolve } from 'node:path';
-export class SparqlQueryAnalyzer {
-    identifier;
-    query;
-    fetcher;
-    constructor({ identifier, query, fetcher }) {
-        this.identifier = identifier;
-        this.query = query;
-        this.fetcher = fetcher ?? new SparqlEndpointFetcher();
-    }
-    async execute(dataset) {
-        const distribution = dataset.getSparqlDistribution();
-        if (null === distribution) {
-            return new NotSupported('No SPARQL distribution available');
-        }
-        const query = this.query
-            .replace('#subjectFilter#', distribution.subjectFilter ?? '')
-            .replace('?dataset', `<${dataset.iri}>`)
-            .replace('#namedGraph#', distribution.namedGraph ? `FROM <${distribution.namedGraph}>` : '');
-        return await this.fetcher.fetchTriples(distribution.accessUrl.toString(), query);
-    }
-    static async fromFile(filename) {
-        return new SparqlQueryAnalyzer({
-            identifier: filename,
-            query: await fromFile(filename),
-        });
-    }
-}
-export async function fromFile(filename) {
-    return (await readFile(resolve(filename))).toString();
-}
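Note: the removed SparqlQueryAnalyzer filled three placeholders into a SPARQL query template (loaded from a file via fromFile) before fetching triples from the dataset's SPARQL endpoint. A standalone sketch of that substitution; the template and values below are made up, and only the .replace chain mirrors the code above:

// Illustrative template; the real templates were loaded from query files.
const queryTemplate = `
CONSTRUCT { ?dataset <http://rdfs.org/ns/void#triples> ?count }
#namedGraph#
WHERE {
  { SELECT (COUNT(*) AS ?count) WHERE { ?s ?p ?o . #subjectFilter# } }
}`;

const datasetIri = 'https://example.org/dataset/1';  // hypothetical
const namedGraph = 'https://example.org/graph/main'; // hypothetical; may be empty
const subjectFilter = '';                            // optional extra pattern

// String#replace with a string pattern substitutes only the first occurrence,
// so each placeholder should appear once in the template.
const query = queryTemplate
  .replace('#subjectFilter#', subjectFilter)
  .replace('?dataset', `<${datasetIri}>`)
  .replace('#namedGraph#', namedGraph ? `FROM <${namedGraph}>` : '');

console.log(query);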
package/dist/writer/fileWriter.d.ts
DELETED
@@ -1,21 +0,0 @@
-import { Writer } from './../writer.js';
-import { Dataset, RdfFormat } from '@lde/dataset';
-import { Readable } from 'node:stream';
-import { Quad, Stream } from '@rdfjs/types';
-export declare class FileWriter implements Writer {
-    private readonly directory;
-    private readonly datasetToFilename;
-    private readonly format;
-    private readonly prefixes;
-    constructor({ directory, datasetToFilename, format, prefixes, }: {
-        directory?: string;
-        datasetToFilename: (identifier: string, dataset: Dataset, format: RdfFormat) => string;
-        format: RdfFormat;
-        prefixes?: {
-            [key: string]: string;
-        };
-    });
-    write(identifier: string, dataset: Dataset, data: Stream<Quad> & Readable): Promise<void>;
-    private writeStream;
-}
-//# sourceMappingURL=fileWriter.d.ts.map
package/dist/writer/fileWriter.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"fileWriter.d.ts","sourceRoot":"","sources":["../../src/writer/fileWriter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AACxC,OAAO,EAAE,OAAO,EAAE,SAAS,EAA4B,MAAM,cAAc,CAAC;AAE5E,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAKvC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAE5C,qBAAa,UAAW,YAAW,MAAM;IACvC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,iBAAiB,CAItB;IACZ,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAY;IACnC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAA4B;gBAEzC,EACV,SAAoB,EACpB,iBAG0C,EAC1C,MAA+B,EAC/B,QAAa,GACd,EAAE;QACD,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,iBAAiB,EAAE,CACjB,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,OAAO,EAChB,MAAM,EAAE,SAAS,KACd,OAAO,CAAC;QACb,MAAM,EAAE,SAAS,CAAC;QAClB,QAAQ,CAAC,EAAE;YAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;SAAE,CAAC;KACtC;IAOY,KAAK,CAChB,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,QAAQ;IAY/B,OAAO,CAAC,WAAW;CASpB"}
package/dist/writer/fileWriter.js
DELETED
@@ -1,29 +0,0 @@
-import { RdfFormat, rdfFormatToFileExtension } from '@lde/dataset';
-import fs from 'node:fs';
-import path from 'node:path';
-import { pipeline } from 'node:stream/promises';
-import filenamyifyUrl from 'filenamify-url';
-import { StreamWriter } from 'n3';
-export class FileWriter {
-    directory;
-    datasetToFilename;
-    format;
-    prefixes;
-    constructor({ directory = 'output', datasetToFilename = (identifier, dataset, format) => `${filenamyifyUrl(dataset.iri.toString(), {
-        replacement: '-',
-    })}.${rdfFormatToFileExtension(format)}`, format = RdfFormat['N-Triples'], prefixes = {}, }) {
-        this.directory = directory;
-        this.datasetToFilename = datasetToFilename;
-        this.format = format;
-        this.prefixes = prefixes;
-    }
-    async write(identifier, dataset, data) {
-        await pipeline(data, new StreamWriter({
-            prefixes: this.prefixes,
-            format: this.format,
-        }), this.writeStream(identifier, dataset));
-    }
-    writeStream(identifier, dataset) {
-        return fs.createWriteStream(path.join(this.directory, this.datasetToFilename(identifier, dataset, this.format)), { flags: 'a' });
-    }
-}
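Note: the removed FileWriter piped quads through n3's StreamWriter into a file opened with flags 'a', so repeated writes for the same dataset append to one file. Its default file name is derived from the dataset IRI; a small sketch of that derivation, with a made-up IRI and a hard-coded extension (the real code asks @lde/dataset's rdfFormatToFileExtension for it):

import filenamifyUrl from 'filenamify-url';

const iri = 'https://example.org/my/dataset'; // hypothetical dataset IRI
const extension = 'nt';                       // assumed extension for the default N-Triples format

// Mirrors the default datasetToFilename above: make the IRI filename-safe,
// using '-' as the replacement character, then append the format's extension.
const filename = `${filenamifyUrl(iri, { replacement: '-' })}.${extension}`;
console.log(filename); // something like 'example.org-my-dataset.nt'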
package/dist/writer/mergeWriter.d.ts
DELETED
@@ -1,12 +0,0 @@
-import { Writer } from './../writer.js';
-import { Dataset } from '@lde/dataset';
-import { Quad, Stream } from '@rdfjs/types';
-/**
- * Buffers output from steps and merges it so you get a single output per dataset.
- */
-export declare class MergeWriter implements Writer {
-    private readonly decorated;
-    constructor(decorated: Writer);
-    write(dataset: Dataset, data: Stream<Quad>): Promise<void>;
-}
-//# sourceMappingURL=mergeWriter.d.ts.map
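Note: per the doc comment above, MergeWriter is a decorator that buffers and merges step output before handing a single stream per dataset to the wrapped Writer. A sketch of how the two removed writers could have been composed; it assumes both classes were re-exported from the package entry point, which this diff does not show:

// Assumption: FileWriter and MergeWriter are importable from the package root.
import { FileWriter, MergeWriter } from '@lde/pipeline';
import { RdfFormat } from '@lde/dataset';

// Wrap a FileWriter so each dataset ends up in a single merged file.
const writer = new MergeWriter(
  new FileWriter({
    format: RdfFormat['N-Triples'],
    // Hypothetical naming scheme; the declaration above only fixes the signature.
    datasetToFilename: (identifier, _dataset, _format) => `${identifier}.nt`,
  })
);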
package/dist/writer/mergeWriter.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"mergeWriter.d.ts","sourceRoot":"","sources":["../../src/writer/mergeWriter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAE5C;;GAEG;AACH,qBAAa,WAAY,YAAW,MAAM;IAC5B,OAAO,CAAC,QAAQ,CAAC,SAAS;gBAAT,SAAS,EAAE,MAAM;IAEjC,KAAK,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CACxE"}
package/dist/writer.d.ts
DELETED
package/dist/writer.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"writer.d.ts","sourceRoot":"","sources":["../src/writer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAC5C,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAEvC,MAAM,WAAW,MAAM;IACrB,KAAK,CACH,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GACjB,OAAO,CAAC,IAAI,CAAC,CAAC;CAClB"}
package/dist/writer.js
DELETED
@@ -1 +0,0 @@
-export {};