@oceanum/datamesh 0.4.3 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@oceanum/datamesh",
3
- "version": "0.4.3",
3
+ "version": "0.5.1",
4
4
  "scripts": {
5
5
  "build:docs": "typedoc"
6
6
  },
@@ -0,0 +1,42 @@
1
+ //Example Cloudflare worker reverse proxy
2
+ //Add your datamesh token as a secret in the Cloudflare worker environment
3
+
4
+ const DATAMESH = "https://datamesh-v1.oceanum.io";
5
+
6
+ export default {
7
+ async fetch(request, env, ctx) {
8
+ const url = new URL(request.url);
9
+ const DATAMESH_TOKEN = env.DATAMESH_TOKEN;
10
+
11
+ // Extract the path and construct the datamesh API URL
12
+ const datameshUrl = new URL(url.pathname + url.search, DATAMESH);
13
+
14
+ // Clone the request to modify headers
15
+ const modifiedRequest = new Request(datameshUrl.toString(), {
16
+ method: request.method,
17
+ headers: request.headers,
18
+ body: request.body,
19
+ });
20
+
21
+ // Inject the DATAMESH_TOKEN header
22
+ modifiedRequest.headers.set("x-DATAMESH-TOKEN", `${DATAMESH_TOKEN}`);
23
+
24
+ // Forward the request to datamesh
25
+ const response = await fetch(modifiedRequest);
26
+
27
+ // Return the response with CORS headers
28
+ const modifiedResponse = new Response(response.body, {
29
+ status: response.status,
30
+ statusText: response.statusText,
31
+ headers: {
32
+ ...response.headers,
33
+ "Access-Control-Allow-Origin": "*",
34
+ "Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
35
+ "Access-Control-Allow-Headers":
36
+ "Content-Type, Authorization, X-DATAMESH-TOKEN",
37
+ },
38
+ });
39
+
40
+ return modifiedResponse;
41
+ },
42
+ };
@@ -0,0 +1,54 @@
1
+ # Node Express Proxy (Example)
2
+
3
+ Example reverse proxy for Oceanum Datamesh using Node.js + Express.
4
+
5
+ ## Prerequisites
6
+
7
+ - Node.js v18+ (native `fetch` and web streams)
8
+ - Environment variable `DATAMESH_TOKEN` set to your Datamesh token
9
+ - Optional: `DATAMESH_URL` (defaults to `https://datamesh.oceanum.io`)
10
+ - Optional: `PORT` (defaults to `8787`)
11
+
12
+ ## Install and run
13
+
14
+ Create a small project in this directory and install Express:
15
+
16
+ ```sh
17
+ npm init -y
18
+ npm install express
19
+ ```
20
+
21
+ Run the proxy:
22
+
23
+ ```sh
24
+ DATAMESH_TOKEN=your_token_here node index.js
25
+ # or with a custom upstream and port
26
+ DATAMESH_TOKEN=your_token_here DATAMESH_URL=https://datamesh.oceanum.io PORT=8080 node index.js
27
+ ```
28
+
29
+ ## What it does
30
+
31
+ - Forwards all incoming requests to `DATAMESH_URL`.
32
+ - Injects/overwrites the `x-DATAMESH-TOKEN` header with your secret token.
33
+ - Adds permissive CORS headers for browser apps.
34
+
35
+ ## Use with @oceanum/datamesh
36
+
37
+ Point both `service` and `gateway` to your proxy origin:
38
+
39
+ ```ts
40
+ import { Connector } from "@oceanum/datamesh";
41
+
42
+ const PROXY_URL = "http://localhost:8787"; // or your deployed proxy
43
+
44
+ const connector = new Connector("proxy", {
45
+ service: PROXY_URL,
46
+ gateway: PROXY_URL,
47
+ });
48
+ ```
49
+
50
+ ## Security notes
51
+
52
+ - Never commit your real token. Use env vars or a secret store.
53
+ - Consider tightening CORS and limiting allowed origins in production.
54
+ - This is an example; adapt logging/error handling and header allowlists to your needs.
@@ -0,0 +1,126 @@
1
+ /*
2
+ Example Node Express reverse proxy for Oceanum Datamesh
3
+
4
+ Requirements
5
+ - Node.js v18+ (for native fetch and web streams)
6
+ - Environment variable DATAMESH_TOKEN set to your Datamesh token
7
+ - Optional: DATAMESH_URL (defaults to https://datamesh.oceanum.io)
8
+ - Optional: PORT (defaults to 8787)
9
+
10
+ Usage
11
+ DATAMESH_TOKEN=xxxx npm start
12
+ # or
13
+ DATAMESH_TOKEN=xxxx node index.js
14
+ */
15
+
16
+ const express = require("express");
17
+
18
+ const app = express();
19
+ const PORT = process.env.PORT || 8787;
20
+ const TARGET = process.env.DATAMESH_URL || "https://datamesh.oceanum.io";
21
+
22
+ if (!process.env.DATAMESH_TOKEN) {
23
+ console.warn(
24
+ "Warning: DATAMESH_TOKEN is not set. Requests to Datamesh will fail with 401/403."
25
+ );
26
+ }
27
+
28
+ // Basic CORS support (adjust for production as needed)
29
+ app.use((req, res, next) => {
30
+ res.setHeader("Access-Control-Allow-Origin", "*");
31
+ res.setHeader(
32
+ "Access-Control-Allow-Methods",
33
+ "GET, POST, PUT, DELETE, OPTIONS"
34
+ );
35
+ res.setHeader(
36
+ "Access-Control-Allow-Headers",
37
+ "Content-Type, Authorization, x-requested-with, x-datamesh-token"
38
+ );
39
+ if (req.method === "OPTIONS") {
40
+ return res.sendStatus(204);
41
+ }
42
+ next();
43
+ });
44
+
45
+ // Proxy all routes to Datamesh, injecting the token
46
+ app.use(async (req, res) => {
47
+ try {
48
+ const upstreamUrl = new URL(req.originalUrl, TARGET);
49
+
50
+ // Rebuild headers to a fresh Headers instance
51
+ const headers = new Headers();
52
+ for (const [key, value] of Object.entries(req.headers)) {
53
+ if (typeof value === "string") headers.set(key, value);
54
+ else if (Array.isArray(value)) headers.set(key, value.join(", "));
55
+ }
56
+
57
+ // Ensure correct Host and token header (overwrite any client-supplied values)
58
+ headers.delete("host");
59
+ headers.set("x-DATAMESH-TOKEN", process.env.DATAMESH_TOKEN || "");
60
+
61
+ // Prepare request init
62
+ const method = req.method.toUpperCase();
63
+ const hasBody = !["GET", "HEAD"].includes(method);
64
+ const init = {
65
+ method,
66
+ headers,
67
+ body: hasBody ? req : undefined, // Stream the incoming request body
68
+ };
69
+
70
+ const response = await fetch(upstreamUrl, init);
71
+
72
+ // Forward headers from upstream
73
+ response.headers.forEach((value, key) => {
74
+ // Skip hop-by-hop or problematic headers if necessary
75
+ if (key.toLowerCase() === "transfer-encoding") return;
76
+ res.setHeader(key, value);
77
+ });
78
+
79
+ // We already added CORS above; ensure it's present in case upstream overrides
80
+ res.setHeader("Access-Control-Allow-Origin", "*");
81
+ res.setHeader(
82
+ "Access-Control-Allow-Methods",
83
+ "GET, POST, PUT, DELETE, OPTIONS"
84
+ );
85
+ res.setHeader(
86
+ "Access-Control-Allow-Headers",
87
+ "Content-Type, Authorization, x-requested-with"
88
+ );
89
+
90
+ // Status
91
+ res.status(response.status);
92
+
93
+ // Stream body
94
+ if (response.body) {
95
+ response.body
96
+ .pipeTo(
97
+ new WritableStream({
98
+ write(chunk) {
99
+ res.write(Buffer.from(chunk));
100
+ },
101
+ close() {
102
+ res.end();
103
+ },
104
+ abort(err) {
105
+ console.error("Proxy stream aborted:", err);
106
+ res.end();
107
+ },
108
+ })
109
+ )
110
+ .catch((err) => {
111
+ console.error("Proxy piping error:", err);
112
+ res.end();
113
+ });
114
+ } else {
115
+ res.end();
116
+ }
117
+ } catch (err) {
118
+ console.error("Proxy error:", err);
119
+ res.status(502).json({ detail: "Proxy error", error: String(err) });
120
+ }
121
+ });
122
+
123
+ app.listen(PORT, () => {
124
+ console.log(`Datamesh proxy listening on http://localhost:${PORT}`);
125
+ console.log(`Forwarding to ${TARGET}`);
126
+ });
package/proxy/guide.md ADDED
@@ -0,0 +1,140 @@
1
+ ---
2
+ title: Proxy Guide
3
+ group: Documents
4
+ category: Guides
5
+ ---
6
+
7
+ # Using a Datamesh Proxy with @oceanum/datamesh
8
+
9
+ A reverse proxy lets you call Datamesh from a public web app without exposing your Datamesh token and helps you avoid CORS issues.
10
+
11
+ This repo includes example proxies you can deploy quickly, and shows how to point the `Connector` to them.
12
+
13
+ - Cloudflare Worker: `packages/datamesh/proxy/cloudflare/index.js`
14
+ - Node/Express: `packages/datamesh/proxy/express/index.js`
15
+ - Client package: `@oceanum/datamesh`
16
+
17
+ ## Why use a proxy?
18
+
19
+ - Protect secrets: keep your Datamesh token server-side.
20
+ - Simpler CORS: proxy can add permissive CORS headers.
21
+ - Stable domain: front your app with your own domain.
22
+
23
+ ## Cloudflare Worker proxy (example)
24
+
25
+ The example Cloudflare Worker:
26
+
27
+ - Forwards all requests to the Datamesh API.
28
+ - Overwrites the `x-DATAMESH-TOKEN` header using a Worker Secret.
29
+ - Adds permissive CORS headers for browser apps.
30
+
31
+ ```js
32
+ // Example Cloudflare Worker reverse proxy
33
+ // Add your datamesh token as a secret in the Cloudflare worker environment
34
+
35
+ const DATAMESH = "https://datamesh.oceanum.io";
36
+
37
+ export default {
38
+ async fetch(request, env) {
39
+ const url = new URL(request.url);
40
+ const DATAMESH_TOKEN = env.DATAMESH_TOKEN;
41
+
42
+ // Build the upstream request URL
43
+ const datameshUrl = new URL(url.pathname + url.search, DATAMESH);
44
+
45
+ // Clone request and forward body/headers/method
46
+ const modifiedRequest = new Request(datameshUrl.toString(), {
47
+ method: request.method,
48
+ headers: request.headers,
49
+ body: request.body,
50
+ });
51
+
52
+ // Inject/overwrite the token
53
+ modifiedRequest.headers.set("x-DATAMESH-TOKEN", `${DATAMESH_TOKEN}`);
54
+
55
+ // Forward
56
+ const response = await fetch(modifiedRequest);
57
+
58
+ // Add CORS headers for the browser
59
+ return new Response(response.body, {
60
+ status: response.status,
61
+ statusText: response.statusText,
62
+ headers: {
63
+ ...response.headers,
64
+ "Access-Control-Allow-Origin": "*",
65
+ "Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
66
+ "Access-Control-Allow-Headers":
67
+ "Content-Type, Authorization, x-requested-with, x-datamesh-token",
68
+ },
69
+ });
70
+ },
71
+ };
72
+ ```
73
+
74
+ ### Deploy on Cloudflare
75
+
76
+ 1. Create a new Worker in the Cloudflare dashboard.
77
+ 2. Paste the contents of `packages/datamesh/proxy/cloudflare/index.js`.
78
+ 3. Add a Worker Secret named `DATAMESH_TOKEN` and set it to your Datamesh token.
79
+ 4. (Optional) Change `DATAMESH` to point at a different upstream if needed.
80
+ 5. Deploy, and note your Worker URL, e.g. `https://your-proxy.workers.dev`.
81
+
82
+ ### Local testing tips
83
+
84
+ - You can test the Worker locally with `wrangler dev`.
85
+ - If you front the Worker with your own domain, ensure HTTPS is enabled and the domain is added to your app’s allowed origins if you use restrictive CORS elsewhere.
86
+
87
+ ## Node/Express proxy (example)
88
+
89
+ You can also run a simple Node/Express reverse proxy locally or deploy it to your own infrastructure.
90
+
91
+ - Example: `packages/datamesh/proxy/express/index.js`
92
+
93
+ ### Run locally
94
+
95
+ 1. Install dependencies in the example folder:
96
+
97
+ ```sh
98
+ cd packages/datamesh/proxy/express
99
+ npm init -y
100
+ npm install express
101
+ ```
102
+
103
+ 2. Start the proxy:
104
+
105
+ ```sh
106
+ DATAMESH_TOKEN=your_token_here node index.js
107
+ # Optional:
108
+ # DATAMESH_URL=https://datamesh.oceanum.io PORT=8787 DATAMESH_TOKEN=your_token_here node index.js
109
+ ```
110
+
111
+ The proxy will listen on `http://localhost:8787` by default and forward all requests to `DATAMESH_URL`, injecting `x-DATAMESH-TOKEN` and adding permissive CORS headers.
112
+
113
+ ## Configure @oceanum/datamesh to use the proxy
114
+
115
+ Point both `service` and `gateway` to your proxy origin. The proxy injects the token, so you can pass any non-empty string for the required `token` parameter.
116
+
117
+ ```ts
118
+ import { Connector } from "@oceanum/datamesh";
119
+
120
+ const PROXY_URL = "https://your-proxy.workers.dev"; // or your custom domain
121
+
122
+ const connector = new Connector("proxy", {
123
+ service: PROXY_URL,
124
+ gateway: PROXY_URL,
125
+ // Optional: tweak caching and session duration as needed
126
+ // nocache: true,
127
+ // sessionDuration: 1,
128
+ });
129
+ ```
130
+
131
+ Notes:
132
+
133
+ - The constructor requires a `token`. When using the proxy, the token you pass here is ignored by the upstream because the proxy overwrites the `x-DATAMESH-TOKEN` header with the secret.
134
+ - The connector will probe `GET /session` on the `gateway` to detect the API version. Ensure your proxy forwards that path.
135
+
136
+ ## Security considerations
137
+
138
+ - Never commit your Datamesh token. Store it as a Worker Secret (or equivalent secret store) in your hosting platform.
139
+ - Consider limiting origins or tightening CORS in production if your app does not need broad access.
140
+ - Audit which headers you forward. The example purposely overwrites `x-DATAMESH-TOKEN` to prevent client-supplied values from leaking upstream.
@@ -12,7 +12,9 @@ import { Session } from "./session";
12
12
  *
13
13
  */
14
14
  const DATAMESH_SERVICE =
15
- process.env.DATAMESH_SERVICE || "https://datamesh.oceanum.io";
15
+ typeof process !== "undefined" && process.env?.DATAMESH_SERVICE
16
+ ? process.env.DATAMESH_SERVICE
17
+ : "https://datamesh.oceanum.io";
16
18
 
17
19
  export class Connector {
18
20
  static LAZY_LOAD_SIZE = 1e8;
@@ -41,7 +43,9 @@ export class Connector {
41
43
  * @throws {Error} - If a valid token is not provided.
42
44
  */
43
45
  constructor(
44
- token = process.env.DATAMESH_TOKEN || "$DATAMESH_TOKEN",
46
+ token = typeof process !== "undefined" && process.env?.DATAMESH_TOKEN
47
+ ? process.env.DATAMESH_TOKEN
48
+ : "$DATAMESH_TOKEN",
45
49
  options?: {
46
50
  service?: string;
47
51
  gateway?: string;
@@ -70,8 +74,7 @@ export class Connector {
70
74
  };
71
75
 
72
76
  /* This is for testing the gateway service is not always the same as the service domain */
73
- this._gateway =
74
- options?.gateway || `${url.protocol}//gateway.${url.hostname}`;
77
+ this._gateway = options?.gateway || this._host;
75
78
 
76
79
  if (
77
80
  this._host.split(".").slice(-1)[0] !==
@@ -308,19 +311,9 @@ export class Connector {
308
311
  const dataset = await Dataset.fromArrow(table, stage.coordkeys);
309
312
  return dataset;
310
313
  }
311
- let url = null;
312
- let params = undefined;
313
- if (
314
- query.timefilter ||
315
- query.geofilter ||
316
- query.levelfilter ||
317
- query.coordfilter
318
- ) {
319
- url = `${this._gateway}/zarr/${stage.qhash}`;
320
- } else {
321
- url = `${this._gateway}/zarr/${query.datasource}`;
322
- params = query.parameters;
323
- }
314
+
315
+ const url = `${this._gateway}/zarr/${this._isV1 ? "query/" : ""}${stage.qhash}`;
316
+ const params = query.parameters;
324
317
 
325
318
  // Get headers with session information if available
326
319
  const headers = await this.getSessionHeaders();
@@ -15,7 +15,7 @@ import { Geometry, Feature, FeatureCollection } from "geojson";
15
15
  import { Geometry as WkxGeometry } from "wkx-ts";
16
16
  import { Buffer } from "buffer/index";
17
17
 
18
- import { CachedHTTPStore } from "./zarr";
18
+ import { CachedHTTPStore, zarr_open_v2_datetime } from "./zarr";
19
19
  import { Schema, Coordkeys } from "./datasource";
20
20
  import { measureTime } from "./observe";
21
21
 
@@ -163,7 +163,7 @@ const ravel = (data: Data) => {
163
163
  }
164
164
  };
165
165
 
166
- function get_strides(shape: readonly number[]) {
166
+ const get_strides = (shape: readonly number[]) => {
167
167
  const ndim = shape.length;
168
168
  const stride: number[] = Array(ndim);
169
169
  for (let i = ndim - 1, step = 1; i >= 0; i--) {
@@ -171,14 +171,14 @@ function get_strides(shape: readonly number[]) {
171
171
  step *= shape[i];
172
172
  }
173
173
  return stride;
174
- }
174
+ };
175
175
 
176
- function unravel<T extends DataType>(
176
+ const unravel = <T extends DataType>(
177
177
  data: TypedArray<T>,
178
178
  shape: number[],
179
179
  stride: number[],
180
180
  offset = 0
181
- ): Data {
181
+ ): Data => {
182
182
  // @ts-expect-error: Is array
183
183
  if (shape.length === 0) return data[0];
184
184
  if (shape.length === 1) {
@@ -199,7 +199,34 @@ function unravel<T extends DataType>(
199
199
  );
200
200
  }
201
201
  return arr;
202
- }
202
+ };
203
+
204
+ const npdatetime_to_posixtime = (data: Chunk<DataType>, dtype: string) => {
205
+ const [_, unit] = dtype.split("<M8");
206
+ const _data = new Float64Array(data.data.length);
207
+ let _divisor = 1n;
208
+ switch (unit) {
209
+ case "[ms]":
210
+ _divisor = 1000n;
211
+ break;
212
+ case "[us]":
213
+ _divisor = 1000000n;
214
+ break;
215
+ case "[ns]":
216
+ _divisor = 1000000000n;
217
+ break;
218
+ default:
219
+ _divisor = 1n;
220
+ break;
221
+ }
222
+ for (let i = 0; i < data.data.length; i++) {
223
+ // When dtype is numpy datetime (<M8...), underlying storage corresponds to int64
224
+ // so we can treat the chunk data as a BigInt64Array for conversion.
225
+ const v = (data.data as unknown as BigInt64Array)[i];
226
+ _data[i] = Number(v / _divisor);
227
+ }
228
+ return unravel(_data, data.shape, data.stride);
229
+ };
203
230
 
204
231
  const flatten = (
205
232
  data: Record<string, DataVariable>,
@@ -232,7 +259,7 @@ const flatten = (
232
259
  for (const k in data) {
233
260
  if (data[k].dimensions.includes(dim[0])) {
234
261
  subdata[k] = {
235
- attributes: {},
262
+ attributes: data[k].attributes,
236
263
  // @ts-expect-error: Is array because include dims
237
264
  data: data[k].data[i],
238
265
  dimensions: data[k].dimensions.slice(1),
@@ -323,6 +350,13 @@ export class DataVar<
323
350
  } else if (this.arr.dtype == "bool") {
324
351
  return [..._data.data] as Data;
325
352
  } else {
353
+ // Safely inspect potential numpy datetime dtype stored in attrs
354
+ const dtype = (this.arr.attrs as Record<string, unknown>)._dtype as
355
+ | string
356
+ | undefined;
357
+ if (dtype?.startsWith("<M8")) {
358
+ return npdatetime_to_posixtime(_data, dtype) as Data;
359
+ }
326
360
  return unravel(_data.data, _data.shape, _data.stride);
327
361
  }
328
362
  }
@@ -405,9 +439,23 @@ export class Dataset<S extends HttpZarr | TempZarr> {
405
439
  const dims = {} as Record<string, number>;
406
440
  for (const item of _zarr.contents()) {
407
441
  if (item.kind == "array") {
408
- const arr = await zarr.open(root.resolve(item.path), {
409
- kind: "array",
410
- });
442
+ let arr;
443
+ try {
444
+ arr = await zarr.open(root.resolve(item.path), {
445
+ kind: "array",
446
+ });
447
+ } catch (e: unknown) {
448
+ const message =
449
+ typeof e === "object" && e && "message" in e
450
+ ? String((e as { message?: unknown }).message)
451
+ : undefined;
452
+ if (message && message.includes("<M8")) {
453
+ //A python <M8 type fails to load
454
+ arr = await zarr_open_v2_datetime(root.resolve(item.path));
455
+ } else {
456
+ throw e;
457
+ }
458
+ }
411
459
  const array_dims = arr.attrs._ARRAY_DIMENSIONS as string[] | null;
412
460
  const vid = item.path.split("/").pop() as string;
413
461
  vars[vid] = new DataVar<DataType, HttpZarr>(
@@ -12,7 +12,7 @@ export class Session {
12
12
  creationTime!: Date;
13
13
  endTime!: Date;
14
14
  write!: boolean;
15
- verified: boolean = false;
15
+ verified = false;
16
16
  private _connection!: any;
17
17
 
18
18
  /**
@@ -36,7 +36,7 @@ export class Session {
36
36
  session.user = "dummy_user";
37
37
  session.creationTime = new Date();
38
38
  session.endTime = new Date(
39
- Date.now() + (options.duration || 1) * 60 * 60 * 1000
39
+ Date.now() + (options.duration || 3600) * 1000
40
40
  );
41
41
  session.write = false;
42
42
  session.verified = false;
@@ -55,9 +55,11 @@ export class Session {
55
55
  try {
56
56
  const headers = { ...connection._authHeaders };
57
57
  headers["Cache-Control"] = "no-store";
58
- const params = { duration: options.duration || 1 };
58
+ const qs = new URLSearchParams({
59
+ duration: String(options.duration ?? 3600),
60
+ });
59
61
  const response = await fetch(
60
- `${connection._gateway}/session/?${params}`,
62
+ `${connection._gateway}/session/?${qs.toString()}`,
61
63
  { headers }
62
64
  );
63
65
 
@@ -113,7 +115,7 @@ export class Session {
113
115
  * @param finaliseWrite - Whether to finalise any write operations. Defaults to false.
114
116
  * @throws {Error} - If the session cannot be closed and finaliseWrite is true.
115
117
  */
116
- async close(finaliseWrite: boolean = false): Promise<void> {
118
+ async close(finaliseWrite = false): Promise<void> {
117
119
  // Back-compatibility with beta version (ignoring)
118
120
  if (!this._connection._isV1) {
119
121
  return;
package/src/lib/zarr.ts CHANGED
@@ -6,7 +6,8 @@ import {
6
6
  UseStore,
7
7
  } from "idb-keyval";
8
8
  import hash from "object-hash";
9
- import { AsyncReadable, AsyncMutable, AbsolutePath } from "@zarrita/storage";
9
+ import { AsyncReadable, AsyncMutable, Readable, AbsolutePath } from "@zarrita/storage";
10
+ import { Array as ZArray, Location, ArrayMetadata, DataType } from "@zarrita/core";
10
11
 
11
12
  function delay(t: number): Promise<void> {
12
13
  return new Promise((resolve) => setTimeout(resolve, t));
@@ -172,3 +173,64 @@ export class IDBStore implements AsyncMutable {
172
173
  await del_cache(key, this.cache);
173
174
  }
174
175
  }
176
+
177
+ const load_meta = async <S extends Readable>(
178
+ location: Location<S>,
179
+ item = ".zarray"
180
+ ) => {
181
+ const { path } = location.resolve(item);
182
+ const meta = await location.store.get(path);
183
+ if (!meta) {
184
+ return {};
185
+ }
186
+ return JSON.parse(new TextDecoder().decode(meta));
187
+ };
188
+
189
+ //This is modified from the zarrita core library to patch for datetime support
190
+ export async function zarr_open_v2_datetime<Store extends Readable>(
191
+ location: Location<Store>
192
+ ) {
193
+ const attrs = await load_meta(location, ".zattrs");
194
+ const meta = await load_meta(location);
195
+ if (meta.dtype.startsWith("<M8")) {
196
+ attrs._dtype = meta.dtype;
197
+ }
198
+ const codecs: any[] = [];
199
+
200
+ if (meta.order === "F") {
201
+ codecs.push({ name: "transpose", configuration: { order: "F" } });
202
+ }
203
+ // Detect big-endian from v2 dtype string (e.g., ">i4"). If so, add a bytes codec.
204
+ if (typeof meta.dtype === "string" && meta.dtype.startsWith(">")) {
205
+ codecs.push({ name: "bytes", configuration: { endian: "big" } });
206
+ }
207
+ for (const { id, ...configuration } of meta.filters ?? []) {
208
+ codecs.push({ name: id, configuration });
209
+ }
210
+ if (meta.compressor) {
211
+ const { id, ...configuration } = meta.compressor;
212
+ codecs.push({ name: id, configuration });
213
+ }
214
+ const v3_metadata: ArrayMetadata<DataType> = {
215
+ zarr_format: 3,
216
+ node_type: "array",
217
+ shape: meta.shape,
218
+ data_type: "int64",
219
+ chunk_grid: {
220
+ name: "regular",
221
+ configuration: {
222
+ chunk_shape: meta.chunks,
223
+ },
224
+ },
225
+ chunk_key_encoding: {
226
+ name: "v2",
227
+ configuration: {
228
+ separator: meta.dimension_separator ?? ".",
229
+ },
230
+ },
231
+ codecs,
232
+ fill_value: meta.fill_value,
233
+ attributes: attrs,
234
+ };
235
+ return new ZArray(location.store, location.path, v3_metadata);
236
+ }
package/typedoc.json CHANGED
@@ -3,5 +3,6 @@
3
3
  "entryPoints": ["./src/index.ts"],
4
4
  "readme": "./README.md",
5
5
  "out": "../../docs/datamesh",
6
- "mergeReadme": true
6
+ "mergeReadme": true,
7
+ "projectDocuments": ["proxy/guide.md"]
7
8
  }