@oceanum/datamesh 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/lib/connector.d.ts +97 -0
- package/dist/lib/connector.d.ts.map +1 -0
- package/dist/lib/datamodel.d.ts +123 -0
- package/dist/lib/datamodel.d.ts.map +1 -0
- package/dist/lib/datasource.d.ts +118 -0
- package/dist/lib/datasource.d.ts.map +1 -0
- package/dist/lib/query.d.ts +161 -0
- package/dist/lib/query.d.ts.map +1 -0
- package/dist/lib/zarr.d.ts +20 -0
- package/dist/lib/zarr.d.ts.map +1 -0
- package/dist/test/fixtures.d.ts +8 -0
- package/dist/test/fixtures.d.ts.map +1 -0
- package/dist/tsconfig.lib.tsbuildinfo +1 -0
- package/eslint.config.js +22 -0
- package/package.json +22 -0
- package/project.json +4 -0
- package/src/index.ts +2 -0
- package/src/lib/connector.ts +228 -0
- package/src/lib/datamodel.ts +458 -0
- package/src/lib/datasource.ts +139 -0
- package/src/lib/query.ts +236 -0
- package/src/lib/workers/sw.js +44 -0
- package/src/lib/zarr.ts +112 -0
- package/src/test/dataset.test.ts +60 -0
- package/src/test/datasource.test.ts +28 -0
- package/src/test/fixtures.ts +169 -0
- package/src/test/query.test.ts +49 -0
- package/tsconfig.json +13 -0
- package/tsconfig.lib.json +24 -0
- package/tsconfig.spec.json +31 -0
- package/typedoc.json +3 -0
- package/vite.config.ts +57 -0
- package/vitest.config.ts +10 -0
package/src/lib/query.ts
ADDED
@@ -0,0 +1,236 @@
import { Feature } from "geojson";
import dayjs from "dayjs";
import duration from "dayjs/plugin/duration";

dayjs.extend(duration);

/**
 * GeoFilterType enum representing types of geofilters.
 */
enum GeoFilterType {
  Feature = "feature",
  Bbox = "bbox",
}

/**
 * GeoFilterInterp enum representing interpolation methods for geofilters.
 */
enum GeoFilterInterp {
  Nearest = "nearest",
  Linear = "linear",
}

/**
 * LevelFilterInterp enum representing interpolation methods for level filters.
 */
enum LevelFilterInterp {
  Nearest = "nearest",
  Linear = "linear",
}

/**
 * TimeFilterType enum representing types of time filters.
 */
enum TimeFilterType {
  Range = "range",
  Series = "series",
  Trajectory = "trajectory",
}

/**
 * LevelFilterType enum representing types of level filters.
 */
enum LevelFilterType {
  Range = "range",
  Series = "series",
}

/**
 * ResampleType enum representing types of resampling.
 */
enum ResampleType {
  Mean = "mean",
  Nearest = "nearest",
  Slinear = "linear",
}

/**
 * AggregateOps enum representing aggregation operations.
 */
enum AggregateOps {
  Mean = "mean",
  Min = "min",
  Max = "max",
  Std = "std",
  Sum = "sum",
}

/**
 * Container enum representing data container types.
 */
enum Container {
  GeoDataFrame = "geodataframe",
  DataFrame = "dataframe",
  Dataset = "dataset",
}

/**
 * GeoFilter type representing a spatial subset or interpolation.
 */
export type GeoFilter = {
  type: GeoFilterType;
  geom: Array<number[]> | Feature;
  interp?: GeoFilterInterp;
  resolution?: number;
  alltouched?: boolean;
};

/**
 * LevelFilter type representing a vertical subset or interpolation.
 */
type LevelFilter = {
  type: LevelFilterType;
  levels: Array<number | null>;
  interp?: LevelFilterInterp;
};

/**
 * TimeFilter type representing a temporal subset or interpolation.
 */
export type TimeFilter = {
  type?: TimeFilterType;
  times: Array<Date | dayjs.Dayjs | duration.Duration | string>;
  resolution?: string;
  resample?: ResampleType;
};

const stringifyTime = (
  t: Date | dayjs.Dayjs | duration.Duration | string
): string => {
  if (t instanceof Date) {
    return dayjs(t).toISOString();
  } else if (dayjs.isDayjs(t)) {
    return t.toISOString();
  } else if (dayjs.isDuration(t)) {
    return t.toISOString();
  } else {
    try {
      return dayjs.duration(t as string).toISOString();
    } catch {
      return dayjs(t as string).toISOString();
    }
  }
};

const timeFilterValidate = (timefilter: TimeFilter): TimeFilter => {
  const times = timefilter.times.map((t) => stringifyTime(t));

  return {
    type: timefilter.type || TimeFilterType.Range,
    times,
    resolution: timefilter.resolution,
    resample: timefilter.resample,
  };
};

/**
 * Aggregate type representing aggregation operations.
 */
type Aggregate = {
  operations: AggregateOps[];
  spatial?: boolean;
  temporal?: boolean;
};

/**
 * CoordSelector type representing coordinate selection.
 */
type CoordSelector = {
  coord: string;
  values: Array<string | number>;
};

/**
 * Query interface representing a Datamesh query.
 */
export interface IQuery {
  datasource: string;
  parameters?: Record<string, number | string | number[] | string[]>;
  description?: string;
  variables?: string[];
  timefilter?: TimeFilter;
  geofilter?: GeoFilter;
  levelfilter?: LevelFilter;
  coordfilter?: CoordSelector[];
  crs?: string | number;
  aggregate?: Aggregate;
  limit?: number;
  id?: string;
}

/**
 * Stage type representing the result of staging a query.
 */
export type Stage = {
  query: Query;
  qhash: string;
  formats: string[];
  size: number;
  dlen: number;
  coordmap: Record<string, string>;
  container: Container;
  sig: string;
};

/**
 * Query class representing a Datamesh query.
 */
export class Query implements IQuery {
  datasource: string;
  parameters?: Record<string, number | string | number[] | string[]>;
  description?: string;
  variables?: string[];
  timefilter?: TimeFilter;
  geofilter?: GeoFilter;
  levelfilter?: LevelFilter;
  coordfilter?: CoordSelector[];
  crs?: string | number;
  aggregate?: Aggregate;
  limit?: number;
  id?: string;

  constructor(query: IQuery) {
    this.datasource = query.datasource;
    this.parameters = query.parameters;
    this.description = query.description;
    this.variables = query.variables;
    this.timefilter = query.timefilter && timeFilterValidate(query.timefilter);
    this.geofilter = query.geofilter;
    this.levelfilter = query.levelfilter;
    this.coordfilter = query.coordfilter;
    this.crs = query.crs;
    this.aggregate = query.aggregate;
    this.limit = query.limit;
    this.id = query.id;
  }

  /**
   * Returns the query as a JSON object.
   */
  toJSON(): Record<string, unknown> {
    return {
      datasource: this.datasource,
      parameters: this.parameters,
      description: this.description,
      variables: this.variables,
      timefilter: this.timefilter,
      geofilter: this.geofilter,
      levelfilter: this.levelfilter,
      coordfilter: this.coordfilter,
      crs: this.crs,
      aggregate: this.aggregate,
      limit: this.limit,
      id: this.id,
    };
  }
}
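For orientation, a minimal sketch of how the Query class above might be used. This is an illustration only: the datasource id is a placeholder, and the relative import path assumes code living alongside src/lib (whether Query is re-exported from the package entry point is not shown in this diff). The constructor passes any timefilter through timeFilterValidate, so Date values are converted to ISO strings and the filter type defaults to "range".

import { Query } from "./lib/query";

// Hypothetical query against a placeholder datasource id
const query = new Query({
  datasource: "my-datasource-id",
  variables: ["temperature"],
  timefilter: {
    // Dates are normalised to ISO strings; type defaults to "range"
    times: [new Date("2023-01-01"), new Date("2023-01-07")],
  },
  limit: 1000,
});

// toJSON() returns the plain object that would be serialised for the Datamesh API
console.log(JSON.stringify(query.toJSON()));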
package/src/lib/workers/sw.js
ADDED
@@ -0,0 +1,44 @@
/*
Service worker for interacting with the Datamesh metadata server
*/

const addResourcesToCache = async (resources) => {
  const cache = await caches.open("datamesh");
  await cache.addAll(resources);
};

const putInCache = async (request, response) => {
  const cache = await caches.open("datamesh");
  await cache.put(request, response);
};

const cacheFirst = async ({ request }) => {
  // First try to get the resource from the cache
  const responseFromCache = await caches.match(request);
  if (responseFromCache) {
    return responseFromCache;
  }

  // Next try to get the resource from the network
  try {
    const responseFromNetwork = await fetch(request.clone());
    // response may be used only once
    // we need to save clone to put one copy in cache
    // and serve second one
    putInCache(request, responseFromNetwork.clone());
    return responseFromNetwork;
  } catch (error) {
    return new Response("Datamesh error: " + error.message, {
      status: 408,
      headers: { "Content-Type": "text/plain" },
    });
  }
};

self.addEventListener("fetch", (event) => {
  event.respondWith(
    cacheFirst({
      request: event.request,
    })
  );
});
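The worker above implements a cache-first strategy over the "datamesh" cache. A hedged sketch of registering it from a browser application follows; the served path /sw.js is an assumption (this diff does not show how connector.ts wires the worker in), and registration only works in a secure context.

// Hypothetical registration; the worker's served path is an assumption.
if ("serviceWorker" in navigator) {
  navigator.serviceWorker
    .register("/sw.js")
    .then((reg) => console.log("Datamesh service worker scope:", reg.scope))
    .catch((err) => console.error("Service worker registration failed:", err));
}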
package/src/lib/zarr.ts
ADDED
@@ -0,0 +1,112 @@
import {
  set as set_cache,
  get as get_cache,
  del as del_cache,
  createStore,
  UseStore,
} from "idb-keyval";
import hash from "object-hash";
import { AsyncReadable, AsyncMutable, AbsolutePath } from "@zarrita/storage";

function delay(t: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, t));
}

export class CachedHTTPStore implements AsyncReadable {
  cache: UseStore | undefined;
  url: string;
  cache_prefix: string;
  fetchOptions: RequestInit;
  constructor(
    root: string,
    authHeaders: Record<string, string>,
    parameters?: Record<string, string | number>,
    chunks?: string,
    downsample?: Record<string, number>,
    nocache?: boolean
  ) {
    const headers = { ...authHeaders };
    if (parameters) headers["x-parameters"] = JSON.stringify(parameters);
    if (chunks) headers["x-chunks"] = chunks;
    if (downsample) headers["x-downsample"] = JSON.stringify(downsample);
    headers["x-filtered"] = "True";
    this.fetchOptions = { headers };
    this.url = root;
    if (nocache) {
      this.cache = undefined;
    } else {
      this.cache = createStore("zarr", "cache");
    }
    this.cache_prefix = hash({ ...parameters, chunks, downsample });
  }

  async get(
    item: string,
    options?: RequestInit,
    retry = 0
  ): Promise<Uint8Array | undefined> {
    const key = `${this.cache_prefix}${item}`;
    let data = null;
    if (this.cache) {
      data = await get_cache(key, this.cache);
      if (data === "pending") {
        await delay(200);
        if (retry > 5 * 60) {
          await del_cache(key, this.cache);
          throw new Error("Zarr timeout");
        } else {
          return await this.get(item, options, retry + 1);
        }
      }
    }
    if (!data || !this.cache) {
      if (this.cache) set_cache(key, "pending", this.cache);
      //console.log(`${this.url}/${item}`);
      //console.log(this.fetchOptions.headers);
      const response = await fetch(`${this.url}${item}`, {
        ...this.fetchOptions,
        ...options,
      });

      if (response.status === 404) {
        // Item is not found
        if (this.cache) await del_cache(key, this.cache);
        return undefined;
      } else if (response.status !== 200) {
        // Item is found but there was an error
        if (this.cache) await del_cache(key, this.cache);
        throw new Error(String(response.status));
      }
      data = new Uint8Array(await response.arrayBuffer());
      if (this.cache) await set_cache(key, data, this.cache);
    }
    return data;
  }
}

export class IDBStore implements AsyncMutable {
  cache: UseStore;
  constructor(public root: string) {
    this.cache = createStore(root, "store");
  }

  async get(key: AbsolutePath): Promise<Uint8Array | undefined> {
    try {
      return await get_cache(key, this.cache);
    } catch {
      return undefined;
    }
  }

  async has(key: AbsolutePath): Promise<boolean> {
    return (await get_cache(key, this.cache)) !== undefined;
  }

  async set(key: AbsolutePath, value: Uint8Array): Promise<void> {
    await set_cache(key, value, this.cache);
  }

  async delete(key: AbsolutePath): Promise<void> {
    await del_cache(key, this.cache);
  }
}
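A minimal sketch of using CachedHTTPStore directly, assuming the gateway's zarr proxy URL shape seen in dataset.test.ts below (DATAMESH_GATEWAY + "/zarr/" + id); the datasource id and token are placeholders, and the ".zmetadata" key is the zarr v2 consolidated-metadata convention assumed to be served by the proxy. get() fetches a key relative to the root, caches the bytes in IndexedDB (unless nocache is set), and parks concurrent requests for the same key behind a "pending" marker.

// Hypothetical usage; datasource id and token are placeholders.
const store = new CachedHTTPStore(
  "https://gateway.datamesh.oceanum.io/zarr/my-datasource-id",
  { Authorization: "Token my-datamesh-token" }
);

async function readMetadata(): Promise<void> {
  // Fetch consolidated zarr metadata through the caching layer
  const bytes = await store.get("/.zmetadata");
  if (bytes) console.log(new TextDecoder().decode(bytes));
}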
package/src/test/dataset.test.ts
ADDED
@@ -0,0 +1,60 @@
import { assertType, test, expect } from "vitest";
import { Dataset, IDataVar } from "../lib/datamodel";
import { Connector } from "../lib/connector";
import { dataset, datameshTest, DATAMESH_GATEWAY, HEADERS } from "./fixtures";

test("dataset init", async () => {
  const dstest = await Dataset.init(dataset);
  assertType<Record<string, unknown>>(dstest.attrs);
  assertType<Record<string, IDataVar>>(dstest.data_vars);
  const datatest = await dstest.data_vars.temperature.get();
  expect(datatest).toBeInstanceOf(Array);
  expect(datatest.length).toBe(10);
  expect(datatest[0].length).toBe(30);
  expect(datatest[0][0].length).toBe(20);
  expect(datatest[3][4][5]).toEqual(
    dataset.data_vars.temperature.data[3][4][5]
  );
  const datatest0 = await dstest.data_vars.scalar.get();
  expect(datatest0[0]).closeTo(10.1, 0.0001);
});

datameshTest(
  "dataset zarr",
  async ({ dataset }) => {
    // Test the zarr proxy endpoint directly
    const dstest = await Dataset.zarr(
      DATAMESH_GATEWAY + "/zarr/" + dataset.attrs.id,
      HEADERS
    );
    assertType<Record<string, unknown>>(dstest.attrs);
    assertType<Record<string, IDataVar>>(dstest.data_vars);
    let datatest = await dstest.data_vars.temperature.get();
    expect(datatest).toBeInstanceOf(Array);
    expect(datatest.length).toBe(10);
    expect(datatest[0].length).toBe(30);
    expect(datatest[0][0].length).toBe(20);
    expect(datatest[3][4][5]).toEqual(
      dataset.data_vars.temperature.data[3][4][5]
    );
    datatest = await dstest.data_vars.scalar.get();
    expect(datatest[0]).closeTo(10.1, 0.0001);

    // Now test with the connector
    const datamesh = new Connector(process.env.DATAMESH_TOKEN);
    const dstest2 = await datamesh.loadDatasource(dataset.attrs.id);
    assertType<Record<string, unknown>>(dstest2.attrs);
    assertType<Record<string, IDataVar>>(dstest2.data_vars);
    datatest = await dstest2.data_vars.temperature.get();
    expect(datatest).toBeInstanceOf(Array);
    expect(datatest.length).toBe(10);
    expect(datatest[0].length).toBe(30);
    expect(datatest[0][0].length).toBe(20);
    expect(datatest[3][4][5]).toEqual(
      dataset.data_vars.temperature.data[3][4][5]
    );
    datatest = await dstest2.data_vars.scalar.get();
    expect(datatest[0]).closeTo(10.1, 0.0001);
  },
  { timeout: 100000 }
);
package/src/test/datasource.test.ts
ADDED
@@ -0,0 +1,28 @@
import { assertType, test, expect } from "vitest";
import { Datasource } from "../lib/datasource";
import Connector from "../lib/connector";
import { datameshTest } from "./fixtures";

test("datasource type", () => {
  const datasource: Datasource = {
    id: "1",
    name: "test",
    description: "test",
    schema: {
      attrs: {},
      dims: {},
      data_vars: {},
    },
  };
  assertType<Record<string, unknown>>(datasource.schema.attrs);
  assertType<Record<string, unknown>>(datasource.schema.dims);
  assertType<Record<string, unknown>>(datasource.schema.data_vars);
});

datameshTest("datasource_metadata", async ({ metadata }) => {
  const connector = new Connector(process.env.DATAMESH_TOKEN);
  const datasource = await connector.getDatasource(metadata.id);
  assertType<Datasource>(datasource);
  expect(datasource.id).toBe(metadata.id);
  expect(datasource.name).toBe(metadata.name);
});
package/src/test/fixtures.ts
ADDED
@@ -0,0 +1,169 @@
/// <reference lib="dom" />

import { test } from "vitest";
import { Datasource, Schema } from "../lib/datasource";

const DATAMESH_TOKEN: string = process.env.DATAMESH_TOKEN || "$DATAMESH_TOKEN";
export const HEADERS: HeadersInit = {
  Authorization: `Token ${DATAMESH_TOKEN}`,
  "X-DATAMESH-TOKEN": DATAMESH_TOKEN,
  Accept: "application/json",
  "Content-Type": "application/json",
};
export const DATAMESH_SERVICE =
  process.env.DATAMESH_SERVICE || "https://datamesh.oceanum.io";
export const DATAMESH_GATEWAY =
  process.env.DATAMESH_GATEWAY || "https://gateway.datamesh.oceanum.io";

const datasource: Datasource = {
  id: "datamesh-js-test",
  name: "test",
  description: "datamesh-js test registration",
  schema: {
    attrs: {},
    dims: {},
    data_vars: {},
  },
  coordinates: { t: "time" },
  driver: "onzarr",
};

// Create multidimensional random array of typed data
const createFloatArray = (
  dims: number[],
  depth = 0,
  typed = false
): number[] | Float32Array | Float32Array[] => {
  const size = dims[depth];
  const array =
    typed && depth == dims.length - 1
      ? new Float32Array(size)
      : new Array(size);
  if (depth === dims.length - 1) {
    for (let i = 0; i < size; i++) {
      array[i] = Math.random();
    }
  } else {
    for (let i = 0; i < size; i++) {
      array[i] = createFloatArray(dims, depth + 1, typed);
    }
  }
  return array;
};

const jsonify = (data: Record<string, unknown>): string => {
  return JSON.stringify(data, function (key, value) {
    if (
      value instanceof Int8Array ||
      value instanceof Uint8Array ||
      value instanceof Uint8ClampedArray ||
      value instanceof Int16Array ||
      value instanceof Uint16Array ||
      value instanceof Int32Array ||
      value instanceof Uint32Array ||
      value instanceof Float32Array ||
      value instanceof Float64Array
    ) {
      return Array.from(value);
    }
    return value;
  });
};

const scalar = new Float32Array(1);
scalar[0] = 10.1;
export const dataset: Schema = {
  dims: { one: 1, time: 10, lon: 20, lat: 30 },
  coords: {
    time: {
      dims: ["time"],
      attrs: { units: "unix timestamp" },
      data: [...Array(10).keys()].map((i) => 86400000 * i),
    },
    lon: {
      dims: ["lon"],
      attrs: { units: "degrees east" },
      data: [...Array(20).keys()].map((i) => i),
    },
    lat: {
      dims: ["lat"],
      attrs: { units: "degrees north" },
      data: [...Array(30).keys()].map((i) => -i),
    },
  },
  data_vars: {
    temperature: {
      dims: ["time", "lat", "lon"],
      attrs: { units: "C" },
      data: createFloatArray([10, 30, 20], 0, true),
    },
    elevation: {
      dims: ["lat", "lon"],
      attrs: { units: "m" },
      data: createFloatArray([30, 20], 0, false),
    },
    scalar: {
      dims: ["one"],
      attrs: { units: "m" },
      data: scalar,
    },
  },
};

export const datameshTest = test.extend({
  metadata: async ({}, use: Function) => {
    // setup the fixture before each test function
    console.log(HEADERS);
    const resp = await fetch(DATAMESH_SERVICE + "/datasource/", {
      method: "POST",
      headers: HEADERS,
      body: JSON.stringify(datasource),
    });
    if (resp.status !== 201) {
      const text = await resp.text();
      throw new Error("Failed to register datasource: " + text);
    }

    // use the fixture value
    await use(datasource);

    // cleanup the fixture after each test function
    await fetch(DATAMESH_SERVICE + "/datasource/" + datasource.id, {
      method: "DELETE",
      headers: HEADERS,
    });
  },
  dataset: async ({}, use: Function) => {
    // setup the fixture before each test function

    let resp = await fetch(DATAMESH_GATEWAY + "/data/oceanum-js-test/", {
      method: "PUT",
      headers: HEADERS,
      body: jsonify(dataset),
    });
    if (resp.status !== 200) {
      throw new Error("Failed to write dataset");
    }
    const patch = jsonify({
      coordinates: { t: "time", x: "lon", y: "lat" },
    });
    resp = await fetch(DATAMESH_SERVICE + "/datasource/oceanum-js-test/", {
      method: "PATCH",
      headers: HEADERS,
      body: patch,
    });
    if (resp.status !== 200) {
      throw new Error("Failed to register dataset");
    }
    dataset.attrs = { id: "oceanum-js-test" };

    // use the fixture value
    await use(dataset);

    // cleanup the fixture after each test function
    await fetch(DATAMESH_GATEWAY + "/data/oceanum-js-test", {
      method: "DELETE",
      headers: HEADERS,
    });
  },
});
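As a side note on the helpers above: jsonify exists because JSON.stringify serialises typed arrays as index-keyed objects, and the replacer converts them to plain arrays first. A small illustration, usable only within this module since neither helper is exported; values are random so only the shape matters.

// 2 x 3 block with the innermost dimension stored as a Float32Array
const block = createFloatArray([2, 3], 0, true);
console.log(jsonify({ data: block }));
// -> {"data":[[...3 numbers...],[...3 numbers...]]} rather than index-keyed objects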
package/src/test/query.test.ts
ADDED
@@ -0,0 +1,49 @@
import { assertType, test, expect } from "vitest";
import { Dataset } from "../lib/datamodel";
import { Connector } from "../lib/connector";
import { IQuery } from "../lib/query";
import { dataset, datameshTest } from "./fixtures";

test("datasource type", () => {
  const query: IQuery = {
    datasource: "test",
    geofilter: {
      type: "feature",
      geom: {
        type: "Feature",
        geometry: {
          type: "Point",
          coordinates: [10, -10],
        },
      },
    },
    variables: ["temperature"],
  };
  assertType<Record<string, unknown>>(query.geofilter);
});

datameshTest(
  "datamesh query",
  async ({ dataset }) => {
    const datamesh = new Connector(process.env.DATAMESH_TOKEN);
    const query = {
      datasource: dataset.attrs.id,
      geofilter: {
        type: "feature",
        geom: {
          type: "Feature",
          geometry: {
            type: "Point",
            coordinates: [10, -10],
          },
        },
      },
    };
    const dstest = await datamesh.query(query);
    assertType<Dataset>(dstest);
    const datatest = await dstest.data_vars.temperature.get();
    expect(datatest).toBeInstanceOf(Float64Array);
    expect(datatest.length).toBe(10);
    expect(datatest[5]).toEqual(dataset.data_vars.temperature.data[5][10][10]);
  },
  { timeout: 100000 }
);