nanograph-db 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Cargo.toml ADDED
@@ -0,0 +1,20 @@
1
# Cargo manifest for the native Node.js addon (napi-rs) backing the
# `nanograph-db` npm package.
# NOTE(review): the crate is named `nanograph-ts` while the npm package is
# `nanograph-db` — confirm the mismatch is intentional.
[package]
name = "nanograph-ts"
version = "0.8.0"
edition = "2024"
description = "NanoGraph TypeScript/Node.js SDK via napi-rs"
repository = "https://github.com/aaltshuler/nanograph"
license = "MIT"

[lib]
# cdylib: produce a C-compatible shared library loadable as a `.node` addon.
crate-type = ["cdylib"]

[dependencies]
# Core engine crate; version pinned to this binding's release.
nanograph = { path = "../nanograph", version = "0.8.0" }
napi = { workspace = true }
napi-derive = { workspace = true }
tokio = { workspace = true }
serde_json = { workspace = true }

[build-dependencies]
# Emits the linker flags napi-rs needs at build time (see build.rs).
napi-build = "2"
package/build.rs ADDED
@@ -0,0 +1,5 @@
1
+ extern crate napi_build;
2
+
3
+ fn main() {
4
+ napi_build::setup();
5
+ }
package/index.d.ts ADDED
@@ -0,0 +1,126 @@
1
/* nanograph-db — TypeScript type definitions (hand-written) */

/** Schema definition of a single node or edge property. */
export interface PropDef {
  name: string;
  propId: number;
  /** Scalar type name as declared in the schema (e.g. "String", "I64"). */
  type: string;
  nullable: boolean;
  /** Optional flags; omitted by the native layer when false/empty. */
  list?: boolean;
  key?: boolean;
  unique?: boolean;
  index?: boolean;
  enumValues?: string[];
  embedSource?: string;
}

/** A node type from schema introspection (see `Database.describe`). */
export interface NodeType {
  name: string;
  typeId: number;
  properties: PropDef[];
}

/** An edge type from schema introspection (see `Database.describe`). */
export interface EdgeType {
  name: string;
  srcType: string;
  dstType: string;
  typeId: number;
  properties: PropDef[];
}

/** Result of `Database.describe`. */
export interface DescribeResult {
  nodeTypes: NodeType[];
  edgeTypes: EdgeType[];
}

/** Per-query typecheck outcome returned by `Database.check`. */
export interface CheckResult {
  name: string;
  kind: "read" | "mutation";
  status: "ok" | "error";
  /** Present only when `status` is "error". */
  error?: string;
}

/** Result of running a mutation query via `Database.run`. */
export interface MutationResult {
  affectedNodes: number;
  affectedEdges: number;
}

/** Options for `Database.compact`; all fields are optional. */
export interface CompactOptions {
  targetRowsPerFragment?: number;
  materializeDeletions?: boolean;
  /** Fraction between 0.0 and 1.0. */
  materializeDeletionsThreshold?: number;
}

/** Statistics reported by `Database.compact`. */
export interface CompactResult {
  datasetsConsidered: number;
  datasetsCompacted: number;
  fragmentsRemoved: number;
  fragmentsAdded: number;
  filesRemoved: number;
  filesAdded: number;
  manifestCommitted: boolean;
}

/** Options for `Database.cleanup`; all fields are optional. */
export interface CleanupOptions {
  retainTxVersions?: number;
  retainDatasetVersions?: number;
}

/** Statistics reported by `Database.cleanup`. */
export interface CleanupResult {
  txRowsRemoved: number;
  txRowsKept: number;
  cdcRowsRemoved: number;
  cdcRowsKept: number;
  datasetsCleaned: number;
  datasetOldVersionsRemoved: number;
  datasetBytesRemoved: number;
}

/** Health report produced by `Database.doctor`. */
export interface DoctorResult {
  healthy: boolean;
  issues: string[];
  warnings: string[];
  manifestDbVersion: number;
  datasetsChecked: number;
  txRows: number;
  cdcRows: number;
}

/** Handle to a NanoGraph database backed by the native addon. */
export class Database {
  /** Create a new database from a schema string. */
  static init(dbPath: string, schemaSource: string): Promise<Database>;

  /** Open an existing database. */
  static open(dbPath: string): Promise<Database>;

  /** Load JSONL data into the database. */
  load(dataSource: string, mode: "overwrite" | "append" | "merge"): Promise<void>;

  /**
   * Execute a named query from query source text.
   * Read queries return an array of row objects.
   * Mutation queries return `{ affectedNodes, affectedEdges }`.
   */
  run(
    querySource: string,
    queryName: string,
    params?: Record<string, unknown>,
  ): Promise<Record<string, unknown>[] | MutationResult>;

  /** Typecheck all queries against the database schema. */
  check(querySource: string): Promise<CheckResult[]>;

  /** Return schema introspection. */
  describe(): Promise<DescribeResult>;

  /** Compact Lance datasets to reduce fragmentation. */
  compact(options?: CompactOptions): Promise<CompactResult>;

  /** Clean up old dataset versions and prune logs. */
  cleanup(options?: CleanupOptions): Promise<CleanupResult>;

  /** Run health checks on the database. */
  doctor(): Promise<DoctorResult>;

  /** Close the database, releasing resources. */
  close(): Promise<void>;
}
package/index.js ADDED
@@ -0,0 +1,128 @@
1
/* eslint-disable */
/* auto-generated by NAPI-RS */
// Platform/arch dispatch loader: prefers a prebuilt `.node` binary sitting
// next to this file, then falls back to requiring the matching
// `nanograph-db-<platform>-<arch>` package.

const { existsSync, readFileSync } = require('fs')
const { join } = require('path')

const { platform, arch } = process

let nativeBinding = null
let localFileExisted = false
let loadError = null

function isMusl() {
  // For Node 12, is-linux-musl is not available so check /usr/bin/ldd
  if (!process.report || typeof process.report.getReport !== 'function') {
    try {
      const lddPath = require('child_process').execSync('which ldd').toString().trim()
      return readFileSync(lddPath, 'utf8').includes('musl')
    } catch {
      // NOTE(review): a failed probe defaults to "musl", which aborts loading
      // below — confirm this conservative default is intended.
      return true
    }
  } else {
    // Modern Node exposes libc info in the diagnostic report header.
    const { glibcVersionRuntime } = process.report.getReport().header
    return !glibcVersionRuntime
  }
}

switch (platform) {
  case 'darwin':
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(join(__dirname, 'nanograph.darwin-x64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./nanograph.darwin-x64.node')
          } else {
            nativeBinding = require('nanograph-db-darwin-x64')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm64':
        localFileExisted = existsSync(join(__dirname, 'nanograph.darwin-arm64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./nanograph.darwin-arm64.node')
          } else {
            nativeBinding = require('nanograph-db-darwin-arm64')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on macOS: ${arch}`)
    }
    break
  case 'linux':
    switch (arch) {
      case 'x64':
        if (isMusl()) {
          throw new Error('musl libc is not supported')
        } else {
          localFileExisted = existsSync(join(__dirname, 'nanograph.linux-x64-gnu.node'))
          try {
            if (localFileExisted) {
              nativeBinding = require('./nanograph.linux-x64-gnu.node')
            } else {
              nativeBinding = require('nanograph-db-linux-x64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'arm64':
        if (isMusl()) {
          throw new Error('musl libc is not supported')
        } else {
          localFileExisted = existsSync(join(__dirname, 'nanograph.linux-arm64-gnu.node'))
          try {
            if (localFileExisted) {
              nativeBinding = require('./nanograph.linux-arm64-gnu.node')
            } else {
              nativeBinding = require('nanograph-db-linux-arm64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      default:
        throw new Error(`Unsupported architecture on Linux: ${arch}`)
    }
    break
  case 'win32':
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(join(__dirname, 'nanograph.win32-x64-msvc.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./nanograph.win32-x64-msvc.node')
          } else {
            nativeBinding = require('nanograph-db-win32-x64-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Windows: ${arch}`)
    }
    break
  default:
    throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}

if (!nativeBinding) {
  // Prefer the underlying require error when one was captured above.
  if (loadError) {
    throw loadError
  }
  throw new Error(`Failed to load native binding`)
}

const { Database } = nativeBinding

module.exports.Database = Database
package/package.json ADDED
@@ -0,0 +1,41 @@
1
+ {
2
+ "name": "nanograph-db",
3
+ "version": "0.8.0",
4
+ "description": "NanoGraph — embedded typed property graph DB for TypeScript/Node.js",
5
+ "main": "index.js",
6
+ "types": "index.d.ts",
7
+ "napi": {
8
+ "binaryName": "nanograph",
9
+ "targets": [
10
+ "aarch64-apple-darwin",
11
+ "x86_64-apple-darwin",
12
+ "x86_64-unknown-linux-gnu",
13
+ "aarch64-unknown-linux-gnu",
14
+ "x86_64-pc-windows-msvc"
15
+ ]
16
+ },
17
+ "scripts": {
18
+ "install": "napi build --platform --js index.js --release",
19
+ "build": "napi build --platform --js index.js --release",
20
+ "build:debug": "napi build --platform --js index.js",
21
+ "test": "node --test __test__/index.spec.mjs"
22
+ },
23
+ "dependencies": {
24
+ "@napi-rs/cli": "^3.0.0"
25
+ },
26
+ "engines": {
27
+ "node": ">=18"
28
+ },
29
+ "license": "MIT",
30
+ "repository": {
31
+ "type": "git",
32
+ "url": "https://github.com/aaltshuler/nanograph"
33
+ },
34
+ "files": [
35
+ "index.js",
36
+ "index.d.ts",
37
+ "src/",
38
+ "build.rs",
39
+ "Cargo.toml"
40
+ ]
41
+ }
package/src/convert.rs ADDED
@@ -0,0 +1,461 @@
1
+ use nanograph::ParamMap;
2
+ use nanograph::json_output::{JS_MAX_SAFE_INTEGER_U64, is_js_safe_integer_i64};
3
+ use nanograph::query::ast::{Literal, Param};
4
+ use nanograph::store::database::{CleanupOptions, CompactOptions, LoadMode};
5
+
6
+ fn parse_i64_param(key: &str, value: &serde_json::Value) -> napi::Result<i64> {
7
+ match value {
8
+ serde_json::Value::Number(n) => {
9
+ let parsed = if let Some(parsed) = n.as_i64() {
10
+ parsed
11
+ } else if let Some(parsed) = n.as_f64() {
12
+ if !parsed.is_finite() || parsed.fract() != 0.0 {
13
+ return Err(napi::Error::from_reason(format!(
14
+ "param '{}': expected integer, got number",
15
+ key
16
+ )));
17
+ }
18
+ if parsed < i64::MIN as f64 || parsed > i64::MAX as f64 {
19
+ return Err(napi::Error::from_reason(format!(
20
+ "param '{}': integer {} is outside i64 range",
21
+ key, parsed
22
+ )));
23
+ }
24
+ parsed as i64
25
+ } else {
26
+ return Err(napi::Error::from_reason(format!(
27
+ "param '{}': expected integer, got number",
28
+ key
29
+ )));
30
+ };
31
+ if !is_js_safe_integer_i64(parsed) {
32
+ return Err(napi::Error::from_reason(format!(
33
+ "param '{}': integer {} exceeds JS safe integer range; pass a decimal string for exact values",
34
+ key, parsed
35
+ )));
36
+ }
37
+ Ok(parsed)
38
+ }
39
+ serde_json::Value::String(s) => s.parse::<i64>().map_err(|_| {
40
+ napi::Error::from_reason(format!(
41
+ "param '{}': expected integer string, got '{}'",
42
+ key, s
43
+ ))
44
+ }),
45
+ _ => Err(napi::Error::from_reason(format!(
46
+ "param '{}': expected integer, got {}",
47
+ key,
48
+ json_type_name(value)
49
+ ))),
50
+ }
51
+ }
52
+
53
/// Parse a JSON value into a `u64` query parameter.
///
/// Accepts JSON numbers (non-negative integral floats included) and decimal
/// strings. Values above `JS_MAX_SAFE_INTEGER_U64` (2^53 − 1) are rejected
/// because they cannot round-trip through a JS `number` exactly.
fn parse_u64_param(key: &str, value: &serde_json::Value) -> napi::Result<u64> {
    match value {
        serde_json::Value::Number(n) => {
            // Prefer the exact u64 representation; fall back to f64 for
            // numbers serde_json stored as floats.
            let parsed = if let Some(parsed) = n.as_u64() {
                parsed
            } else if let Some(parsed) = n.as_f64() {
                // Must be a finite, non-negative whole number.
                if !parsed.is_finite() || parsed.fract() != 0.0 || parsed < 0.0 {
                    return Err(napi::Error::from_reason(format!(
                        "param '{}': expected unsigned integer, got number",
                        key
                    )));
                }
                if parsed > u64::MAX as f64 {
                    return Err(napi::Error::from_reason(format!(
                        "param '{}': integer {} is outside u64 range",
                        key, parsed
                    )));
                }
                parsed as u64
            } else {
                return Err(napi::Error::from_reason(format!(
                    "param '{}': expected unsigned integer, got number",
                    key
                )));
            };
            // Enforce exact JS representability regardless of how the number
            // was encoded.
            if parsed > JS_MAX_SAFE_INTEGER_U64 {
                return Err(napi::Error::from_reason(format!(
                    "param '{}': integer {} exceeds JS safe integer range; pass a decimal string for exact values",
                    key, parsed
                )));
            }
            Ok(parsed)
        }
        // Decimal strings allow callers to pass values exactly.
        serde_json::Value::String(s) => s.parse::<u64>().map_err(|_| {
            napi::Error::from_reason(format!(
                "param '{}': expected unsigned integer string, got '{}'",
                key, s
            ))
        }),
        _ => Err(napi::Error::from_reason(format!(
            "param '{}': expected unsigned integer, got {}",
            key,
            json_type_name(value)
        ))),
    }
}
99
+
100
+ fn parse_i32_param(key: &str, value: &serde_json::Value) -> napi::Result<i32> {
101
+ let parsed = parse_i64_param(key, value)?;
102
+ i32::try_from(parsed).map_err(|_| {
103
+ napi::Error::from_reason(format!(
104
+ "param '{}': value {} exceeds I32 range",
105
+ key, parsed
106
+ ))
107
+ })
108
+ }
109
+
110
+ /// Convert a JS params object to a ParamMap, using query param type declarations
111
+ /// for type-guided conversion.
112
+ pub fn js_object_to_param_map(
113
+ params: Option<&serde_json::Value>,
114
+ query_params: &[Param],
115
+ ) -> napi::Result<ParamMap> {
116
+ let mut map = ParamMap::new();
117
+ let obj = match params {
118
+ Some(serde_json::Value::Object(obj)) => obj,
119
+ Some(serde_json::Value::Null) | None => return Ok(map),
120
+ Some(other) => {
121
+ return Err(napi::Error::from_reason(format!(
122
+ "params must be an object, got {}",
123
+ json_type_name(other)
124
+ )));
125
+ }
126
+ };
127
+
128
+ for (key, value) in obj {
129
+ let decl = query_params.iter().find(|p| p.name == *key);
130
+ let lit = if let Some(decl) = decl {
131
+ convert_with_type_hint(key, value, &decl.type_name)?
132
+ } else {
133
+ convert_inferred(key, value)?
134
+ };
135
+ map.insert(key.clone(), lit);
136
+ }
137
+ Ok(map)
138
+ }
139
+
140
/// Convert a JS value to a Literal using the declared query param type.
///
/// `type_name` is the schema-declared parameter type (e.g. "String", "I64",
/// "Vector(128)"). Unrecognized names fall through to the final arm, which
/// treats them as enum-like and requires a string.
fn convert_with_type_hint(
    key: &str,
    value: &serde_json::Value,
    type_name: &str,
) -> napi::Result<Literal> {
    match type_name {
        "String" => match value {
            serde_json::Value::String(s) => Ok(Literal::String(s.clone())),
            other => Err(napi::Error::from_reason(format!(
                "param '{}': expected string, got {}",
                key,
                json_type_name(other)
            ))),
        },
        "I32" => {
            // Range-checked via parse_i32_param, then widened back to i64
            // because Literal has a single integer variant.
            let n = parse_i32_param(key, value)?;
            Ok(Literal::Integer(n as i64))
        }
        "I64" => {
            let n = parse_i64_param(key, value)?;
            Ok(Literal::Integer(n))
        }
        "U32" => {
            let n = parse_u64_param(key, value)?;
            if n > u32::MAX as u64 {
                return Err(napi::Error::from_reason(format!(
                    "param '{}': value {} exceeds U32 range",
                    key, n
                )));
            }
            Ok(Literal::Integer(n as i64))
        }
        "U64" => {
            let n = parse_u64_param(key, value)?;
            // Literal::Integer is i64, so u64 values above i64::MAX cannot be
            // represented by the engine today.
            let n = i64::try_from(n).map_err(|_| {
                napi::Error::from_reason(format!(
                    "param '{}': value {} exceeds current engine range for U64 parameters (max {})",
                    key,
                    n,
                    i64::MAX
                ))
            })?;
            Ok(Literal::Integer(n))
        }
        "F32" | "F64" => {
            let f = value.as_f64().ok_or_else(|| {
                napi::Error::from_reason(format!(
                    "param '{}': expected float, got {}",
                    key,
                    json_type_name(value)
                ))
            })?;
            Ok(Literal::Float(f))
        }
        "Bool" => {
            let b = value.as_bool().ok_or_else(|| {
                napi::Error::from_reason(format!(
                    "param '{}': expected boolean, got {}",
                    key,
                    json_type_name(value)
                ))
            })?;
            Ok(Literal::Bool(b))
        }
        // Date/DateTime are passed through as strings; validation happens
        // downstream in the engine, not here.
        "Date" => match value {
            serde_json::Value::String(s) => Ok(Literal::Date(s.clone())),
            other => Err(napi::Error::from_reason(format!(
                "param '{}': expected date string, got {}",
                key,
                json_type_name(other)
            ))),
        },
        "DateTime" => match value {
            serde_json::Value::String(s) => Ok(Literal::DateTime(s.clone())),
            other => Err(napi::Error::from_reason(format!(
                "param '{}': expected datetime string, got {}",
                key,
                json_type_name(other)
            ))),
        },
        other if other.starts_with("Vector(") => {
            // Vector(N): require an array of exactly N numeric elements.
            let expected_dim = parse_vector_dim(other).ok_or_else(|| {
                napi::Error::from_reason(format!(
                    "param '{}': invalid vector type '{}' (expected Vector(N))",
                    key, other
                ))
            })?;
            let items = value.as_array().ok_or_else(|| {
                napi::Error::from_reason(format!(
                    "param '{}': expected array for {}, got {}",
                    key,
                    other,
                    json_type_name(value)
                ))
            })?;
            if items.len() != expected_dim {
                return Err(napi::Error::from_reason(format!(
                    "param '{}': expected {} values for {}, got {}",
                    key,
                    expected_dim,
                    other,
                    items.len()
                )));
            }
            let mut out = Vec::with_capacity(items.len());
            for item in items {
                let num = item.as_f64().ok_or_else(|| {
                    napi::Error::from_reason(format!(
                        "param '{}': vector element '{}' is not numeric",
                        key, item
                    ))
                })?;
                out.push(Literal::Float(num));
            }
            Ok(Literal::List(out))
        }
        _ => {
            // Enum or unknown type — require string
            match value {
                serde_json::Value::String(s) => Ok(Literal::String(s.clone())),
                other => Err(napi::Error::from_reason(format!(
                    "param '{}': expected string for type '{}', got {}",
                    key, type_name, json_type_name(other)
                ))),
            }
        }
    }
}
269
+
270
/// Infer Literal type from JS value when no type declaration is available.
///
/// Strings, booleans, numbers, and (recursively) arrays are supported.
/// `null` and nested objects are rejected — the query engine has no untyped
/// representation for them. Integers outside the JS safe range are rejected
/// since they cannot have arrived from JS intact.
fn convert_inferred(key: &str, value: &serde_json::Value) -> napi::Result<Literal> {
    match value {
        serde_json::Value::String(s) => Ok(Literal::String(s.clone())),
        serde_json::Value::Bool(b) => Ok(Literal::Bool(*b)),
        serde_json::Value::Number(n) => {
            if let Some(i) = n.as_i64() {
                if !is_js_safe_integer_i64(i) {
                    return Err(napi::Error::from_reason(format!(
                        "param '{}': integer {} exceeds JS safe integer range; use a decimal string and a typed query parameter for exact values",
                        key, i
                    )));
                }
                Ok(Literal::Integer(i))
            } else if let Some(u) = n.as_u64() {
                // Reached only when as_i64 failed, i.e. u > i64::MAX, which is
                // always beyond the JS safe limit — so the safe-range check
                // below rejects it and the try_from is a defensive fallback.
                if u > JS_MAX_SAFE_INTEGER_U64 {
                    return Err(napi::Error::from_reason(format!(
                        "param '{}': integer {} exceeds JS safe integer range; use a decimal string and a typed query parameter for exact values",
                        key, u
                    )));
                }
                let i = i64::try_from(u).map_err(|_| {
                    napi::Error::from_reason(format!(
                        "param '{}': integer {} exceeds supported range (max {})",
                        key,
                        u,
                        i64::MAX
                    ))
                })?;
                Ok(Literal::Integer(i))
            } else if let Some(f) = n.as_f64() {
                Ok(Literal::Float(f))
            } else {
                Err(napi::Error::from_reason(format!(
                    "param '{}': unsupported number value",
                    key
                )))
            }
        }
        serde_json::Value::Array(arr) => {
            // Element types are inferred individually; heterogeneous lists are
            // possible here and left for the engine to validate.
            let mut items = Vec::with_capacity(arr.len());
            for item in arr {
                items.push(convert_inferred(key, item)?);
            }
            Ok(Literal::List(items))
        }
        serde_json::Value::Null => Err(napi::Error::from_reason(format!(
            "param '{}': null values are not supported as query parameters",
            key
        ))),
        serde_json::Value::Object(_) => Err(napi::Error::from_reason(format!(
            "param '{}': object values are not supported as query parameters",
            key
        ))),
    }
}
326
+
327
/// Extract the dimension `N` from a `Vector(N)` type name.
///
/// Returns `None` for malformed names and for the degenerate zero dimension.
fn parse_vector_dim(type_name: &str) -> Option<usize> {
    let inner = type_name.strip_prefix("Vector(")?.strip_suffix(')')?;
    match inner.parse::<usize>() {
        Ok(0) | Err(_) => None,
        Ok(dim) => Some(dim),
    }
}
335
+
336
+ fn json_type_name(v: &serde_json::Value) -> &'static str {
337
+ match v {
338
+ serde_json::Value::Null => "null",
339
+ serde_json::Value::Bool(_) => "boolean",
340
+ serde_json::Value::Number(_) => "number",
341
+ serde_json::Value::String(_) => "string",
342
+ serde_json::Value::Array(_) => "array",
343
+ serde_json::Value::Object(_) => "object",
344
+ }
345
+ }
346
+
347
+ pub fn parse_load_mode(mode: &str) -> napi::Result<LoadMode> {
348
+ match mode {
349
+ "overwrite" => Ok(LoadMode::Overwrite),
350
+ "append" => Ok(LoadMode::Append),
351
+ "merge" => Ok(LoadMode::Merge),
352
+ _ => Err(napi::Error::from_reason(format!(
353
+ "invalid load mode '{}': expected 'overwrite', 'append', or 'merge'",
354
+ mode
355
+ ))),
356
+ }
357
+ }
358
+
359
/// Parse the optional JS `compact(options)` object into [`CompactOptions`].
///
/// `None`/`null` yields the defaults. Unknown keys are rejected up front so
/// typos fail loudly instead of being silently ignored.
pub fn parse_compact_options(opts: Option<&serde_json::Value>) -> napi::Result<CompactOptions> {
    let mut result = CompactOptions::default();
    let obj = match opts {
        Some(serde_json::Value::Object(obj)) => obj,
        Some(serde_json::Value::Null) | None => return Ok(result),
        Some(_) => {
            return Err(napi::Error::from_reason(
                "compact options must be an object",
            ));
        }
    };
    // Validate key names before parsing any values.
    for key in obj.keys() {
        match key.as_str() {
            "targetRowsPerFragment" | "materializeDeletions" | "materializeDeletionsThreshold" => {
            }
            _ => {
                return Err(napi::Error::from_reason(format!(
                    "unknown compact option '{}'",
                    key
                )));
            }
        }
    }
    if let Some(v) = obj.get("targetRowsPerFragment") {
        let parsed = v.as_u64().ok_or_else(|| {
            napi::Error::from_reason("targetRowsPerFragment must be a positive integer")
        })?;
        if parsed == 0 {
            return Err(napi::Error::from_reason(
                "targetRowsPerFragment must be a positive integer",
            ));
        }
        // usize conversion can fail on 32-bit targets.
        result.target_rows_per_fragment = usize::try_from(parsed).map_err(|_| {
            napi::Error::from_reason("targetRowsPerFragment is too large for this platform")
        })?;
    }
    if let Some(v) = obj.get("materializeDeletions") {
        result.materialize_deletions = v
            .as_bool()
            .ok_or_else(|| napi::Error::from_reason("materializeDeletions must be a boolean"))?;
    }
    if let Some(v) = obj.get("materializeDeletionsThreshold") {
        let threshold = v.as_f64().ok_or_else(|| {
            napi::Error::from_reason("materializeDeletionsThreshold must be a number")
        })?;
        // The threshold is a fraction; clamp-free validation keeps bad input
        // explicit rather than silently coerced.
        if !(0.0..=1.0).contains(&threshold) {
            return Err(napi::Error::from_reason(
                "materializeDeletionsThreshold must be between 0.0 and 1.0",
            ));
        }
        result.materialize_deletions_threshold = threshold as f32;
    }
    Ok(result)
}
413
+
414
+ pub fn parse_cleanup_options(opts: Option<&serde_json::Value>) -> napi::Result<CleanupOptions> {
415
+ let mut result = CleanupOptions::default();
416
+ let obj = match opts {
417
+ Some(serde_json::Value::Object(obj)) => obj,
418
+ Some(serde_json::Value::Null) | None => return Ok(result),
419
+ Some(_) => {
420
+ return Err(napi::Error::from_reason(
421
+ "cleanup options must be an object",
422
+ ));
423
+ }
424
+ };
425
+ for key in obj.keys() {
426
+ match key.as_str() {
427
+ "retainTxVersions" | "retainDatasetVersions" => {}
428
+ _ => {
429
+ return Err(napi::Error::from_reason(format!(
430
+ "unknown cleanup option '{}'",
431
+ key
432
+ )));
433
+ }
434
+ }
435
+ }
436
+ if let Some(v) = obj.get("retainTxVersions") {
437
+ let parsed = v.as_u64().ok_or_else(|| {
438
+ napi::Error::from_reason("retainTxVersions must be a positive integer")
439
+ })?;
440
+ if parsed == 0 {
441
+ return Err(napi::Error::from_reason(
442
+ "retainTxVersions must be a positive integer",
443
+ ));
444
+ }
445
+ result.retain_tx_versions = parsed;
446
+ }
447
+ if let Some(v) = obj.get("retainDatasetVersions") {
448
+ let parsed = v.as_u64().ok_or_else(|| {
449
+ napi::Error::from_reason("retainDatasetVersions must be a positive integer")
450
+ })?;
451
+ if parsed == 0 {
452
+ return Err(napi::Error::from_reason(
453
+ "retainDatasetVersions must be a positive integer",
454
+ ));
455
+ }
456
+ result.retain_dataset_versions = usize::try_from(parsed).map_err(|_| {
457
+ napi::Error::from_reason("retainDatasetVersions is too large for this platform")
458
+ })?;
459
+ }
460
+ Ok(result)
461
+ }
package/src/lib.rs ADDED
@@ -0,0 +1,358 @@
1
+ #![recursion_limit = "256"]
2
+
3
+ mod convert;
4
+
5
+ use std::path::PathBuf;
6
+ use std::sync::Arc;
7
+
8
+ use napi::bindgen_prelude::*;
9
+ use napi_derive::napi;
10
+ use tokio::sync::Mutex;
11
+
12
+ use nanograph::error::NanoError;
13
+ use nanograph::json_output::record_batches_to_json_rows;
14
+ use nanograph::query::parser::parse_query;
15
+ use nanograph::query::typecheck::{CheckedQuery, typecheck_query, typecheck_query_decl};
16
+ use nanograph::store::database::Database;
17
+ use nanograph::{ParamMap, execute_mutation, execute_query, lower_mutation_query, lower_query};
18
+
19
+ use convert::{
20
+ js_object_to_param_map, parse_cleanup_options, parse_compact_options, parse_load_mode,
21
+ };
22
+
23
+ fn to_napi_err(e: NanoError) -> napi::Error {
24
+ napi::Error::from_reason(e.to_string())
25
+ }
26
+
27
/// JS-facing database handle, exported to Node.js as `Database`.
///
/// The inner `Option` becomes `None` after `close()`; every method re-checks
/// it so calls on a closed handle fail cleanly. A `tokio::sync::Mutex` is
/// used because the guard is held across `.await` points in the methods.
#[napi(js_name = "Database")]
pub struct JsDatabase {
    inner: Arc<Mutex<Option<Database>>>,
}
31
+
32
+ impl JsDatabase {
33
+ fn db_path(path: &str) -> PathBuf {
34
+ PathBuf::from(path)
35
+ }
36
+ }
37
+
38
/// Serialize a schema `PropDef` into the camelCase JSON shape exposed to JS
/// (see `PropDef` in index.d.ts).
///
/// Required fields are always present; optional flags are appended only when
/// set, keeping the introspection payload compact.
fn prop_def_to_json(prop: &nanograph::schema_ir::PropDef) -> serde_json::Value {
    let mut obj = serde_json::json!({
        "name": prop.name,
        "propId": prop.prop_id,
        "type": prop.scalar_type,
        "nullable": prop.nullable,
    });
    if prop.list {
        obj["list"] = serde_json::Value::Bool(true);
    }
    if prop.key {
        obj["key"] = serde_json::Value::Bool(true);
    }
    if prop.unique {
        obj["unique"] = serde_json::Value::Bool(true);
    }
    if prop.index {
        obj["index"] = serde_json::Value::Bool(true);
    }
    if !prop.enum_values.is_empty() {
        obj["enumValues"] = serde_json::json!(prop.enum_values);
    }
    if let Some(ref src) = prop.embed_source {
        obj["embedSource"] = serde_json::Value::String(src.clone());
    }
    obj
}
65
+
66
#[napi]
impl JsDatabase {
    /// Create a new database from a schema string.
    ///
    /// ```js
    /// const db = await Database.init("my.nano", schemaSource);
    /// ```
    #[napi(factory)]
    pub async fn init(db_path: String, schema_source: String) -> Result<Self> {
        let path = Self::db_path(&db_path);
        let db = Database::init(&path, &schema_source)
            .await
            .map_err(to_napi_err)?;
        Ok(JsDatabase {
            inner: Arc::new(Mutex::new(Some(db))),
        })
    }

    /// Open an existing database.
    ///
    /// ```js
    /// const db = await Database.open("my.nano");
    /// ```
    #[napi(factory)]
    pub async fn open(db_path: String) -> Result<Self> {
        let path = Self::db_path(&db_path);
        let db = Database::open(&path).await.map_err(to_napi_err)?;
        Ok(JsDatabase {
            inner: Arc::new(Mutex::new(Some(db))),
        })
    }

    /// Load JSONL data into the database.
    ///
    /// `mode` is validated by `parse_load_mode` before the lock is taken.
    ///
    /// ```js
    /// await db.load(jsonlString, "overwrite");
    /// ```
    #[napi]
    pub async fn load(&self, data_source: String, mode: String) -> Result<()> {
        let load_mode = parse_load_mode(&mode)?;
        // Lock held across the await: loads are serialized with mutations.
        let mut guard = self.inner.lock().await;
        let db = guard
            .as_mut()
            .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
        db.load_with_mode(&data_source, load_mode)
            .await
            .map_err(to_napi_err)
    }

    /// Execute a named query from query source text.
    ///
    /// For read queries, returns an array of row objects.
    /// For mutation queries, returns `{ affectedNodes, affectedEdges }`.
    ///
    /// ```js
    /// const rows = await db.run(querySource, "findPeople", { minAge: 21 });
    /// ```
    #[napi]
    pub async fn run(
        &self,
        query_source: String,
        query_name: String,
        params: Option<serde_json::Value>,
    ) -> Result<serde_json::Value> {
        // Parse the whole source, then select the requested query by name.
        let queries = parse_query(&query_source).map_err(to_napi_err)?;
        let query = queries
            .queries
            .iter()
            .find(|q| q.name == query_name)
            .ok_or_else(|| napi::Error::from_reason(format!("query '{}' not found", query_name)))?
            .clone();

        let param_map: ParamMap = js_object_to_param_map(params.as_ref(), &query.params)?;

        if query.mutation.is_some() {
            // Mutation path — holds lock for the entire operation
            let mut guard = self.inner.lock().await;
            let db = guard
                .as_mut()
                .ok_or_else(|| napi::Error::from_reason("database is closed"))?;

            let checked = typecheck_query_decl(db.catalog(), &query).map_err(to_napi_err)?;
            if !matches!(checked, CheckedQuery::Mutation(_)) {
                return Err(napi::Error::from_reason("expected mutation query"));
            }

            let mutation_ir = lower_mutation_query(&query).map_err(to_napi_err)?;
            let result = execute_mutation(&mutation_ir, db, &param_map)
                .await
                .map_err(to_napi_err)?;

            Ok(serde_json::json!({
                "affectedNodes": result.affected_nodes,
                "affectedEdges": result.affected_edges,
            }))
        } else {
            // Read path — clone catalog + snapshot, then release lock
            // so concurrent reads do not serialize on query execution.
            let (catalog, storage) = {
                let guard = self.inner.lock().await;
                let db = guard
                    .as_ref()
                    .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
                (db.catalog().clone(), db.snapshot())
            };

            let type_ctx = typecheck_query(&catalog, &query).map_err(to_napi_err)?;
            let ir = lower_query(&catalog, &query, &type_ctx).map_err(to_napi_err)?;
            let results = execute_query(&ir, storage, &param_map)
                .await
                .map_err(to_napi_err)?;

            let rows = record_batches_to_json_rows(&results);
            Ok(serde_json::Value::Array(rows))
        }
    }

    /// Typecheck all queries in the source text against the database schema.
    ///
    /// Returns an array of `{ name, kind, status, error? }` objects.
    /// Never fails on individual query errors — each is reported per entry.
    ///
    /// ```js
    /// const checks = await db.check(querySource);
    /// ```
    #[napi]
    pub async fn check(&self, query_source: String) -> Result<serde_json::Value> {
        let queries = parse_query(&query_source).map_err(to_napi_err)?;

        // Clone the catalog and release the lock before typechecking.
        let guard = self.inner.lock().await;
        let db = guard
            .as_ref()
            .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
        let catalog = db.catalog().clone();
        drop(guard);

        let mut checks = Vec::with_capacity(queries.queries.len());
        for q in &queries.queries {
            match typecheck_query_decl(&catalog, q) {
                Ok(CheckedQuery::Read(_)) => {
                    checks.push(serde_json::json!({
                        "name": q.name,
                        "kind": "read",
                        "status": "ok",
                    }));
                }
                Ok(CheckedQuery::Mutation(_)) => {
                    checks.push(serde_json::json!({
                        "name": q.name,
                        "kind": "mutation",
                        "status": "ok",
                    }));
                }
                Err(e) => {
                    checks.push(serde_json::json!({
                        "name": q.name,
                        "kind": if q.mutation.is_some() { "mutation" } else { "read" },
                        "status": "error",
                        "error": e.to_string(),
                    }));
                }
            }
        }

        Ok(serde_json::Value::Array(checks))
    }

    /// Return schema introspection as a JSON object.
    ///
    /// Shape matches `DescribeResult` in index.d.ts.
    ///
    /// ```js
    /// const info = await db.describe();
    /// ```
    #[napi]
    pub async fn describe(&self) -> Result<serde_json::Value> {
        // Clone the schema IR and release the lock before serializing.
        let ir = {
            let guard = self.inner.lock().await;
            let db = guard
                .as_ref()
                .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
            db.schema_ir.clone()
        };

        let mut node_types = Vec::new();
        for nt in ir.node_types() {
            let props: Vec<serde_json::Value> =
                nt.properties.iter().map(prop_def_to_json).collect();
            node_types.push(serde_json::json!({
                "name": nt.name,
                "typeId": nt.type_id,
                "properties": props,
            }));
        }

        let mut edge_types = Vec::new();
        for et in ir.edge_types() {
            let props: Vec<serde_json::Value> =
                et.properties.iter().map(prop_def_to_json).collect();
            edge_types.push(serde_json::json!({
                "name": et.name,
                "srcType": et.src_type_name,
                "dstType": et.dst_type_name,
                "typeId": et.type_id,
                "properties": props,
            }));
        }

        Ok(serde_json::json!({
            "nodeTypes": node_types,
            "edgeTypes": edge_types,
        }))
    }

    /// Compact Lance datasets to reduce fragmentation.
    ///
    /// ```js
    /// const result = await db.compact({ targetRowsPerFragment: 1048576 });
    /// ```
    #[napi]
    pub async fn compact(&self, options: Option<serde_json::Value>) -> Result<serde_json::Value> {
        let opts = parse_compact_options(options.as_ref())?;
        // Lock held across the await: compaction is exclusive.
        let mut guard = self.inner.lock().await;
        let db = guard
            .as_mut()
            .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
        let result = db.compact(opts).await.map_err(to_napi_err)?;
        Ok(serde_json::json!({
            "datasetsConsidered": result.datasets_considered,
            "datasetsCompacted": result.datasets_compacted,
            "fragmentsRemoved": result.fragments_removed,
            "fragmentsAdded": result.fragments_added,
            "filesRemoved": result.files_removed,
            "filesAdded": result.files_added,
            "manifestCommitted": result.manifest_committed,
        }))
    }

    /// Clean up old dataset versions and prune transaction/CDC logs.
    ///
    /// ```js
    /// const result = await db.cleanup({ retainTxVersions: 128 });
    /// ```
    #[napi]
    pub async fn cleanup(&self, options: Option<serde_json::Value>) -> Result<serde_json::Value> {
        let opts = parse_cleanup_options(options.as_ref())?;
        // Lock held across the await: cleanup is exclusive.
        let mut guard = self.inner.lock().await;
        let db = guard
            .as_mut()
            .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
        let result = db.cleanup(opts).await.map_err(to_napi_err)?;
        Ok(serde_json::json!({
            "txRowsRemoved": result.tx_rows_removed,
            "txRowsKept": result.tx_rows_kept,
            "cdcRowsRemoved": result.cdc_rows_removed,
            "cdcRowsKept": result.cdc_rows_kept,
            "datasetsCleaned": result.datasets_cleaned,
            "datasetOldVersionsRemoved": result.dataset_old_versions_removed,
            "datasetBytesRemoved": result.dataset_bytes_removed,
        }))
    }

    /// Run health checks on the database.
    ///
    /// ```js
    /// const report = await db.doctor();
    /// ```
    #[napi]
    pub async fn doctor(&self) -> Result<serde_json::Value> {
        let guard = self.inner.lock().await;
        let db = guard
            .as_ref()
            .ok_or_else(|| napi::Error::from_reason("database is closed"))?;
        let report = db.doctor().await.map_err(to_napi_err)?;
        Ok(serde_json::json!({
            "healthy": report.healthy,
            "issues": report.issues,
            "warnings": report.warnings,
            "manifestDbVersion": report.manifest_db_version,
            "datasetsChecked": report.datasets_checked,
            "txRows": report.tx_rows,
            "cdcRows": report.cdc_rows,
        }))
    }

    /// Close the database, releasing resources.
    ///
    /// Subsequent method calls fail with "database is closed". Idempotent.
    ///
    /// ```js
    /// await db.close();
    /// ```
    #[napi]
    pub async fn close(&self) -> Result<()> {
        let mut guard = self.inner.lock().await;
        // Dropping the Database here releases its underlying resources.
        *guard = None;
        Ok(())
    }
}