geolookup-plugin 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +485 -0
- package/config.yaml +26 -0
- package/data/alabama.tar.gz +0 -0
- package/data/alaska.tar.gz +0 -0
- package/data/american samoa.tar.gz +0 -0
- package/data/arizona.tar.gz +0 -0
- package/data/arkansas.tar.gz +0 -0
- package/data/california.tar.gz +0 -0
- package/data/cnmi.tar.gz +0 -0
- package/data/colorado.tar.gz +0 -0
- package/data/connecticut.tar.gz +0 -0
- package/data/dc.tar.gz +0 -0
- package/data/delaware.tar.gz +0 -0
- package/data/florida.tar.gz +0 -0
- package/data/georgia.tar.gz +0 -0
- package/data/guam.tar.gz +0 -0
- package/data/hawaii.tar.gz +0 -0
- package/data/idaho.tar.gz +0 -0
- package/data/illinois.tar.gz +0 -0
- package/data/indiana.tar.gz +0 -0
- package/data/iowa.tar.gz +0 -0
- package/data/kansas.tar.gz +0 -0
- package/data/kentucky.tar.gz +0 -0
- package/data/louisiana.tar.gz +0 -0
- package/data/maine.tar.gz +0 -0
- package/data/maryland.tar.gz +0 -0
- package/data/massachusetts.tar.gz +0 -0
- package/data/michigan.tar.gz +0 -0
- package/data/minnesota.tar.gz +0 -0
- package/data/mississippi.tar.gz +0 -0
- package/data/missouri.tar.gz +0 -0
- package/data/montana.tar.gz +0 -0
- package/data/nebraska.tar.gz +0 -0
- package/data/nevada.tar.gz +0 -0
- package/data/new hampshire.tar.gz +0 -0
- package/data/new jersey.tar.gz +0 -0
- package/data/new mexico.tar.gz +0 -0
- package/data/new york.tar.gz +0 -0
- package/data/north carolina.tar.gz +0 -0
- package/data/north dakota.tar.gz +0 -0
- package/data/ohio.tar.gz +0 -0
- package/data/oklahoma.tar.gz +0 -0
- package/data/oregon.tar.gz +0 -0
- package/data/pennsylvania.tar.gz +0 -0
- package/data/puerto rico.tar.gz +0 -0
- package/data/rhode island.tar.gz +0 -0
- package/data/south carolina.tar.gz +0 -0
- package/data/south dakota.tar.gz +0 -0
- package/data/tennessee.tar.gz +0 -0
- package/data/texas.tar.gz +0 -0
- package/data/usvi.tar.gz +0 -0
- package/data/utah.tar.gz +0 -0
- package/data/vermont.tar.gz +0 -0
- package/data/virginia.tar.gz +0 -0
- package/data/washington.tar.gz +0 -0
- package/data/west virginia.tar.gz +0 -0
- package/data/wisconsin.tar.gz +0 -0
- package/data/wyoming.tar.gz +0 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +24 -0
- package/dist/index.js.map +1 -0
- package/dist/resources/DataLoad.d.ts +26 -0
- package/dist/resources/DataLoad.js +141 -0
- package/dist/resources/DataLoad.js.map +1 -0
- package/dist/resources/Geolookup.d.ts +44 -0
- package/dist/resources/Geolookup.js +105 -0
- package/dist/resources/Geolookup.js.map +1 -0
- package/dist/types.d.ts +11 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/package.json +57 -0
- package/schemas/README.md +11 -0
- package/schemas/schema.graphql +71 -0
package/config.yaml
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
# yaml-language-server: $schema=./node_modules/harperdb/config-app.schema.json
|
|
2
|
+
|
|
3
|
+
# This is the configuration file for the application.
|
|
4
|
+
# It specifies built-in Harper components that will load the specified feature and files.
|
|
5
|
+
# For more information, see https://docs.harperdb.io/docs/reference/components/built-in-extensions
|
|
6
|
+
|
|
7
|
+
# Load Environment Variables from the specified file
|
|
8
|
+
# loadEnv:
|
|
9
|
+
# files: '.env'
|
|
10
|
+
|
|
11
|
+
# This provides the HTTP REST interface for all exported resources
|
|
12
|
+
#rest: true
|
|
13
|
+
|
|
14
|
+
pluginModule: 'src/index.ts'
|
|
15
|
+
|
|
16
|
+
# This reads GraphQL schemas to define the schema of databases/tables/attributes.
|
|
17
|
+
graphqlSchema:
|
|
18
|
+
files: 'schemas/*.graphql'
|
|
19
|
+
|
|
20
|
+
# Loads JavaScript modules such that their exports are exported as resources
|
|
21
|
+
#jsResource:
|
|
22
|
+
# files: 'src/resources/*.ts'
|
|
23
|
+
|
|
24
|
+
# Load seed data from JSON files into tables
|
|
25
|
+
#dataLoader:
|
|
26
|
+
# files: 'data/**/*.json'
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/cnmi.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/dc.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/guam.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/iowa.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/ohio.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/data/usvi.tar.gz
ADDED
|
Binary file
|
package/data/utah.tar.gz
ADDED
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Geolookup } from './resources/Geolookup.ts';
|
|
2
|
+
import { DataLoad } from './resources/DataLoad.ts';
|
|
3
|
+
import { Scope } from 'harperdb';
|
|
4
|
+
export { Geolookup, DataLoad };
|
|
5
|
+
/**
|
|
6
|
+
* Plugin entry point called by Harper during startup.
|
|
7
|
+
*
|
|
8
|
+
* Reads configuration options from the consuming app's config.yaml and
|
|
9
|
+
* conditionally registers resource endpoints. Services are only exposed
|
|
10
|
+
* when their corresponding "expose" flag is set — this allows consuming
|
|
11
|
+
* apps to use the Geolookup and DataLoad classes programmatically without
|
|
12
|
+
* necessarily exposing them as REST endpoints.
|
|
13
|
+
*
|
|
14
|
+
* @param scope - Harper scope providing access to options and resource registration
|
|
15
|
+
*/
|
|
16
|
+
export declare function handleApplication(scope: Scope): void;
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { Geolookup } from "./resources/Geolookup.js";
|
|
2
|
+
import { DataLoad } from "./resources/DataLoad.js";
|
|
3
|
+
export { Geolookup, DataLoad };
|
|
4
|
+
/**
 * Plugin entry point invoked by Harper at startup.
 *
 * Pulls the plugin options out of the consuming app's config.yaml and
 * registers resource endpoints on demand. A service is only published when
 * its "expose" flag is truthy, so consuming apps may also use the Geolookup
 * and DataLoad classes directly without turning them into REST endpoints.
 *
 * @param scope - Harper scope providing access to options and resource registration
 */
export function handleApplication(scope) {
    const config = scope.options.getAll() || {};
    if (config.exposeGeoService) {
        scope.resources.set(config.geoServiceName, Geolookup);
    }
    if (config.exposeDataLoadService) {
        scope.resources.set(config.dataLoadServiceName, DataLoad);
    }
}
|
|
24
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,SAAS,EAAC,MAAM,0BAA0B,CAAC;AACnD,OAAO,EAAC,QAAQ,EAAC,MAAM,yBAAyB,CAAC;AAGjD,OAAO,EAAC,SAAS,EAAE,QAAQ,EAAC,CAAC;AAE7B;;;;;;;;;;GAUG;AACH,MAAM,UAAU,iBAAiB,CAAC,KAAY;IAC1C,MAAM,OAAO,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,CAAoB,CAAC;IAElE,IAAI,OAAO,CAAC,gBAAgB,EAAE,CAAC;QAC3B,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,cAAc,EAAE,SAAS,CAAC,CAAC;IAC3D,CAAC;IAED,IAAI,OAAO,CAAC,qBAAqB,EAAE,CAAC;QAChC,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,mBAAmB,EAAE,QAAQ,CAAC,CAAC;IAC/D,CAAC;AACL,CAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Async bulk loading endpoint for populating Location and Cell tables from
|
|
3
|
+
* pre-packaged state data files. Validates the request, creates a DataLoadJob
|
|
4
|
+
* record for tracking, and returns the job ID immediately. The actual data
|
|
5
|
+
* extraction and loading runs in the background — callers poll the DataLoadJob
|
|
6
|
+
* table to check progress.
|
|
7
|
+
*/
|
|
8
|
+
export declare class DataLoad extends Resource {
|
|
9
|
+
/**
|
|
10
|
+
* Handles GET requests to initiate a data load job.
|
|
11
|
+
*
|
|
12
|
+
* Validates the state parameter, checks that the corresponding .tar.gz file
|
|
13
|
+
* exists, creates a DataLoadJob record, and kicks off background processing.
|
|
14
|
+
* Returns the job ID immediately so the caller can poll for progress.
|
|
15
|
+
*
|
|
16
|
+
* @param target - Harper request target containing query parameters
|
|
17
|
+
* @returns Object with jobId on success, or an error object on validation failure
|
|
18
|
+
*/
|
|
19
|
+
get(target: any): Promise<{
|
|
20
|
+
error: string;
|
|
21
|
+
jobId?: undefined;
|
|
22
|
+
} | {
|
|
23
|
+
jobId: `${string}-${string}-${string}-${string}-${string}`;
|
|
24
|
+
error?: undefined;
|
|
25
|
+
}>;
|
|
26
|
+
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import { databases } from 'harperdb';
|
|
2
|
+
import { execFileSync } from 'node:child_process';
|
|
3
|
+
import { readFileSync, readdirSync, rmSync, existsSync } from 'node:fs';
|
|
4
|
+
import { join } from 'node:path';
|
|
5
|
+
import { randomUUID } from 'node:crypto';
|
|
6
|
+
const { Location, Cell, DataLoadJob } = databases.geolookup;
|
|
7
|
+
const DATA_DIR = new URL('../data/', import.meta.url).pathname;
|
|
8
|
+
/**
 * Loads every JSON file found in a directory into the given Harper table.
 * Each file is expected to hold an array of records; each file is written
 * inside its own transaction, and after every file the DataLoadJob record is
 * patched with the running total so callers polling the job see progress.
 *
 * @param dir - Absolute path to the directory containing JSON files
 * @param table - Harper table instance to load records into
 * @param idField - Name of the field to use as the record's primary key
 * @param jobId - UUID of the DataLoadJob record to update with progress
 * @param countField - Name of the count field to update on the job (e.g. 'location_count')
 * @returns Total number of records loaded across all files
 */
async function loadTableFiles(dir, table, idField, jobId, countField) {
    let total = 0;
    if (!existsSync(dir)) {
        return total;
    }
    for (const name of readdirSync(dir)) {
        if (!name.endsWith('.json')) {
            continue;
        }
        const records = JSON.parse(readFileSync(join(dir, name), 'utf-8'));
        await transaction(async (txn) => {
            for (const record of records) {
                await table.put(record[idField], record, txn);
                total++;
            }
        });
        await DataLoadJob.patch(jobId, { [countField]: total });
    }
    return total;
}
|
|
38
|
+
/**
 * Background worker for a data load job. Walks the DataLoadJob record through
 * the states: extracting → loading_locations → loading_cells → completed
 * (or error), persisting progress at each step.
 *
 * On success the job is stamped with final counts and duration; on failure
 * the job gets status "error" plus the error message. The extracted state
 * directory is removed in a finally block, so partial loads never leave
 * files behind.
 *
 * Locations are loaded before Cells because Cell records reference Location
 * IDs via their tier_1/tier_2/tier_3 foreign keys.
 *
 * @param jobId - UUID of the DataLoadJob record to update
 * @param state - Lowercase state name (matches the extracted directory name)
 * @param tarPath - Absolute path to the .tar.gz archive
 */
async function processDataLoad(jobId, state, tarPath) {
    const startedAt = Date.now();
    let locationCount = 0;
    let cellCount = 0;
    try {
        await DataLoadJob.patch(jobId, { status: 'extracting' });
        // execFileSync runs tar with an argument vector, not a shell string,
        // so the path cannot be interpreted as shell syntax
        execFileSync('tar', ['-xzf', tarPath, '-C', DATA_DIR]);
        const stateDir = join(DATA_DIR, state);
        if (!existsSync(stateDir)) {
            throw new Error(`Expected directory ${state} not found after extraction`);
        }
        try {
            await DataLoadJob.patch(jobId, { status: 'loading_locations' });
            locationCount = await loadTableFiles(join(stateDir, 'Location'), Location, 'id', jobId, 'location_count');
            await DataLoadJob.patch(jobId, { status: 'loading_cells' });
            cellCount = await loadTableFiles(join(stateDir, 'Cell'), Cell, 'h3_index', jobId, 'cell_count');
        }
        finally {
            // Remove the extracted folder even when loading fails partway through
            rmSync(stateDir, { recursive: true, force: true });
        }
        await DataLoadJob.patch(jobId, {
            status: 'completed',
            location_count: locationCount,
            cell_count: cellCount,
            completed_at: new Date().toISOString(),
            duration_ms: Date.now() - startedAt,
        });
    }
    catch (err) {
        await DataLoadJob.patch(jobId, {
            status: 'error',
            error_message: err.message,
            location_count: locationCount,
            cell_count: cellCount,
            completed_at: new Date().toISOString(),
            duration_ms: Date.now() - startedAt,
        });
    }
}
|
|
97
|
+
/**
 * Async bulk loading endpoint for populating Location and Cell tables from
 * pre-packaged state data files. Validates the request, creates a DataLoadJob
 * record for tracking, and returns the job ID immediately. The actual data
 * extraction and loading runs in the background — callers poll the DataLoadJob
 * table to check progress.
 */
export class DataLoad extends Resource {
    /**
     * Handles GET requests to initiate a data load job.
     *
     * Validates the state parameter, checks that the corresponding .tar.gz file
     * exists, creates a DataLoadJob record, and kicks off background processing.
     * Returns the job ID immediately so the caller can poll for progress.
     *
     * @param target - Harper request target containing query parameters
     * @returns Object with jobId on success, or an error object on validation failure
     */
    async get(target) {
        const state = target.get('state');
        if (!state) {
            return { error: 'state query parameter is required' };
        }
        // Normalize to lowercase to match the tar.gz filenames in the data directory
        const stateLower = state.toLowerCase();
        // Security: `state` comes from an untrusted query string and is joined
        // into a filesystem path that is later handed to tar. Restrict it to the
        // characters the packaged state names actually use (lowercase letters
        // and spaces) so a value like "../../x" cannot escape DATA_DIR.
        if (!/^[a-z ]+$/.test(stateLower)) {
            return { error: `No data file found for state: ${stateLower}` };
        }
        const tarPath = join(DATA_DIR, `${stateLower}.tar.gz`);
        if (!existsSync(tarPath)) {
            return { error: `No data file found for state: ${stateLower}` };
        }
        // Create the job record and return the ID to the caller immediately
        const jobId = randomUUID();
        await DataLoadJob.put(jobId, {
            state: stateLower,
            status: 'pending',
            location_count: 0,
            cell_count: 0,
            started_at: new Date().toISOString(),
        });
        // Fire and forget — errors are captured in the job record; .catch() prevents
        // an unhandled rejection if the DataLoadJob.patch in the error path fails
        processDataLoad(jobId, stateLower, tarPath).catch(() => { });
        return { jobId };
    }
}
|
|
141
|
+
//# sourceMappingURL=DataLoad.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"DataLoad.js","sourceRoot":"","sources":["../../src/resources/DataLoad.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACxE,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAEzC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC;AAC5D,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC;AAE/D;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,cAAc,CAAC,GAAW,EAAE,KAAU,EAAE,OAAe,EAAE,KAAa,EAAE,UAAkB;IACxG,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAC;IAEnC,MAAM,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;IAChE,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QAC1B,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;QACnE,MAAM,WAAW,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;YAC/B,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;gBAC9B,MAAM,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC;gBAC9C,KAAK,EAAE,CAAC;YACT,CAAC;QACF,CAAC,CAAC,CAAC;QACH,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE,EAAE,CAAC,UAAU,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;IACzD,CAAC;IACD,OAAO,KAAK,CAAC;AACd,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,KAAK,UAAU,eAAe,CAAC,KAAa,EAAE,KAAa,EAAE,OAAe;IAC3E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAC7B,IAAI,aAAa,GAAG,CAAC,CAAC;IACtB,IAAI,SAAS,GAAG,CAAC,CAAC;IAElB,IAAI,CAAC;QACJ,wDAAwD;QACxD,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC,CAAC;QACzD,YAAY,CAAC,KAAK,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QAEvD,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACvC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC3B,MAAM,IAAI,KAAK,CAAC,sBAAsB,KAAK,6BAA6B,CAAC,CAAC;QAC3E,CAAC;QAED,IAAI,CAAC;YACJ,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,mBAAmB,EAAE,CAAC,CAAC;YAChE,aAAa,GAAG,MAAM,cAAc,CAAC,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC,EAAE,Q
AAQ,EAAE,IAAI,EAAE,KAAK,EAAE,gBAAgB,CAAC,CAAC;YAE1G,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,eAAe,EAAE,CAAC,CAAC;YAC5D,SAAS,GAAG,MAAM,cAAc,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,YAAY,CAAC,CAAC;QACjG,CAAC;gBAAS,CAAC;YACV,8EAA8E;YAC9E,MAAM,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QACpD,CAAC;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAC1C,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE;YAC9B,MAAM,EAAE,WAAW;YACnB,cAAc,EAAE,aAAa;YAC7B,UAAU,EAAE,SAAS;YACrB,YAAY,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;YACtC,WAAW,EAAE,UAAU;SACvB,CAAC,CAAC;IACJ,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAC1C,MAAM,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE;YAC9B,MAAM,EAAE,OAAO;YACf,aAAa,EAAE,GAAG,CAAC,OAAO;YAC1B,cAAc,EAAE,aAAa;YAC7B,UAAU,EAAE,SAAS;YACrB,YAAY,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;YACtC,WAAW,EAAE,UAAU;SACvB,CAAC,CAAC;IACJ,CAAC;AACF,CAAC;AAED;;;;;;GAMG;AACH,MAAM,OAAO,QAAS,SAAQ,QAAQ;IACrC;;;;;;;;;OASG;IACH,KAAK,CAAC,GAAG,CAAC,MAAM;QACf,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QAClC,IAAI,CAAC,KAAK,EAAE,CAAC;YACZ,OAAO,EAAE,KAAK,EAAE,mCAAmC,EAAE,CAAC;QACvD,CAAC;QAED,6EAA6E;QAC7E,MAAM,UAAU,GAAG,KAAK,CAAC,WAAW,EAAE,CAAC;QACvC,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,EAAE,GAAG,UAAU,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC1B,OAAO,EAAE,KAAK,EAAE,iCAAiC,UAAU,EAAE,EAAE,CAAC;QACjE,CAAC;QAED,oEAAoE;QACpE,MAAM,KAAK,GAAG,UAAU,EAAE,CAAC;QAC3B,MAAM,WAAW,CAAC,GAAG,CAAC,KAAK,EAAE;YAC5B,KAAK,EAAE,UAAU;YACjB,MAAM,EAAE,SAAS;YACjB,cAAc,EAAE,CAAC;YACjB,UAAU,EAAE,CAAC;YACb,UAAU,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;SACpC,CAAC,CAAC;QAEH,6EAA6E;QAC7E,+EAA+E;QAC/E,eAAe,CAAC,KAAK,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QAE5D,OAAO,EAAE,KAAK,EAAE,CAAC;IAClB,CAAC;CACD"}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parses the "tiers" query param into a set of tier strings.
|
|
3
|
+
* Accepts comma-separated values ("1,3"), "all", or undefined (defaults to all tiers).
|
|
4
|
+
*
|
|
5
|
+
* @param tiersParam - Raw tiers query parameter value
|
|
6
|
+
* @returns A Set of valid tier strings, or an error object if any requested tier is invalid
|
|
7
|
+
*/
|
|
8
|
+
export declare function parseTiers(tiersParam: string | undefined): Set<string> | {
|
|
9
|
+
error: string;
|
|
10
|
+
};
|
|
11
|
+
/**
|
|
12
|
+
* Reverse geocoding resource. Given a lat/lon coordinate, finds the best matching
|
|
13
|
+
* Location at each requested tier by searching H3 cells across multiple resolutions.
|
|
14
|
+
*/
|
|
15
|
+
export declare class Geolookup extends Resource {
|
|
16
|
+
/**
|
|
17
|
+
* Handles GET requests. Validates lat/lon/tiers query params, then delegates
|
|
18
|
+
* to _lookup for the actual H3-based spatial search.
|
|
19
|
+
*
|
|
20
|
+
* @param target - Harper request target containing query parameters
|
|
21
|
+
* @returns Lookup results keyed by tier name, or an error object
|
|
22
|
+
*/
|
|
23
|
+
get(target: any): {
|
|
24
|
+
error: string;
|
|
25
|
+
} | Promise<Record<string, any>>;
|
|
26
|
+
/**
|
|
27
|
+
* Core reverse geocoding logic.
|
|
28
|
+
*
|
|
29
|
+
* 1. Converts lat/lon to an H3 cell at the finest resolution (9).
|
|
30
|
+
* 2. Builds a candidate set of H3 indexes: the resolution-9 cell plus its parent
|
|
31
|
+
* at each coarser resolution (8 down to 2). This is necessary because cells
|
|
32
|
+
* are stored in compact form — a match could be at any resolution level.
|
|
33
|
+
* 3. Searches the Cell table for any matching H3 index (OR across all candidates).
|
|
34
|
+
* Only joins to Location records for the tiers the caller requested.
|
|
35
|
+
* 4. Collects the first Location found for each requested tier. Stops early once
|
|
36
|
+
* all requested tiers have a match.
|
|
37
|
+
*
|
|
38
|
+
* @param lat - Latitude in decimal degrees
|
|
39
|
+
* @param lon - Longitude in decimal degrees
|
|
40
|
+
* @param tiers - Set of tier strings to search for ("1", "2", "3")
|
|
41
|
+
* @returns Object keyed by tier name (place, county_subdivision, county) with matched Locations
|
|
42
|
+
*/
|
|
43
|
+
_lookup(lat: number, lon: number, tiers: Set<string>): Promise<Record<string, any>>;
|
|
44
|
+
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { latLngToCell, cellToParent } from 'h3-js';
|
|
2
|
+
import { databases } from 'harperdb';
|
|
3
|
+
// Tables used by the lookup; provided by Harper's geolookup database
const { Location, Cell } = databases.geolookup;
// Tiers map to US administrative hierarchy levels:
// '1' = place (city/town), '2' = county_subdivision (township/MCD), '3' = county
const VALID_TIERS = new Set(['1', '2', '3']);
// Fields returned for each matched Location in the lookup response
const LOCATION_SELECT = ['id', 'tier', 'name', 'name_full', 'state_name', 'state_abbrev', 'h3_index', 'country_code', 'county_name'];
|
|
9
|
+
/**
 * Parses the "tiers" query param into a set of tier strings.
 * Accepts comma-separated values ("1,3"), "all", or undefined (defaults to all tiers).
 *
 * @param tiersParam - Raw tiers query parameter value
 * @returns A Set of valid tier strings, or an error object if any requested tier is invalid
 */
export function parseTiers(tiersParam) {
    if (!tiersParam || tiersParam === 'all') {
        return new Set(VALID_TIERS);
    }
    const wanted = [];
    const bad = [];
    for (const raw of tiersParam.split(',')) {
        const tier = raw.trim();
        (VALID_TIERS.has(tier) ? wanted : bad).push(tier);
    }
    if (bad.length > 0) {
        return { error: `Invalid tier(s): ${bad.join(', ')}. Valid values are 1, 2, 3, or all.` };
    }
    return new Set(wanted);
}
|
|
27
|
+
/**
 * Reverse geocoding resource. Given a lat/lon coordinate, finds the best matching
 * Location at each requested tier by searching H3 cells across multiple resolutions.
 */
export class Geolookup extends Resource {
    /**
     * Handles GET requests. Validates lat/lon/tiers query params, then delegates
     * to _lookup for the actual H3-based spatial search.
     *
     * @param target - Harper request target containing query parameters
     * @returns Lookup results keyed by tier name, or an error object
     */
    get(target) {
        const latitude = parseFloat(target.get('lat'));
        const longitude = parseFloat(target.get('lon'));
        if (isNaN(latitude) || isNaN(longitude)) {
            return { error: 'lat and lon query parameters are required' };
        }
        const tiers = parseTiers(target.get('tiers'));
        return 'error' in tiers ? tiers : this._lookup(latitude, longitude, tiers);
    }
    /**
     * Core reverse geocoding logic.
     *
     * 1. Converts lat/lon to an H3 cell at the finest resolution (9).
     * 2. Builds a candidate set of H3 indexes: the resolution-9 cell plus its
     *    parent at each coarser resolution (8 down to 2). Cells are stored in
     *    compact form, so a match may live at any resolution level.
     * 3. Searches the Cell table for any matching H3 index (OR across all
     *    candidates), joining to Location records only for the requested tiers.
     * 4. Keeps the first Location seen for each requested tier and stops
     *    scanning once every requested tier has a match.
     *
     * @param lat - Latitude in decimal degrees
     * @param lon - Longitude in decimal degrees
     * @param tiers - Set of tier strings to search for ("1", "2", "3")
     * @returns Object keyed by tier name (place, county_subdivision, county) with matched Locations
     */
    async _lookup(lat, lon, tiers) {
        // Candidate indexes: the finest-resolution cell plus each coarser parent
        const finest = latLngToCell(lat, lon, 9);
        const conditions = [{ attribute: 'h3_index', value: finest }];
        for (let resolution = 8; resolution >= 2; resolution--) {
            conditions.push({ attribute: 'h3_index', value: cellToParent(finest, resolution) });
        }
        // Only select relationship joins for the tiers the caller asked for
        const tierJoins = [
            ['1', 'place'],
            ['2', 'county_subdivision'],
            ['3', 'county'],
        ];
        const select = ['h3_index'];
        for (const [tier, name] of tierJoins) {
            if (tiers.has(tier)) {
                select.push({ name, select: LOCATION_SELECT });
            }
        }
        const result = {};
        for await (const cell of Cell.search({ select, conditions, operator: 'or' })) {
            for (const [tier, name] of tierJoins) {
                if (tiers.has(tier) && cell[name] && !result[name]) {
                    result[name] = cell[name];
                }
            }
            // Early exit: stop scanning once every requested tier has been found
            if (Object.keys(result).length === tiers.size) {
                break;
            }
        }
        return result;
    }
}
|
|
105
|
+
//# sourceMappingURL=Geolookup.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Geolookup.js","sourceRoot":"","sources":["../../src/resources/Geolookup.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,OAAO,CAAC;AACnD,OAAO,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrC,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC;AAE/C,mDAAmD;AACnD,mFAAmF;AACnF,MAAM,WAAW,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAE7C,mEAAmE;AACnE,MAAM,eAAe,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,YAAY,EAAE,cAAc,EAAE,UAAU,EAAE,cAAc,EAAE,aAAa,CAAC,CAAC;AAErI;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CAAC,UAA8B;IACxD,IAAI,CAAC,UAAU,IAAI,UAAU,KAAK,KAAK,EAAE,CAAC;QACzC,OAAO,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC;IAC7B,CAAC;IACD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC3D,MAAM,OAAO,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACxB,OAAO,EAAE,KAAK,EAAE,oBAAoB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,qCAAqC,EAAE,CAAC;IAC/F,CAAC;IACD,OAAO,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;AAC3B,CAAC;AAED;;;GAGG;AACH,MAAM,OAAO,SAAU,SAAQ,QAAQ;IACtC;;;;;;OAMG;IACH,GAAG,CAAC,MAAM;QACT,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;QAC1C,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;QAE1C,IAAI,KAAK,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC;YAC9B,OAAO,EAAE,KAAK,EAAE,2CAA2C,EAAE,CAAC;QAC/D,CAAC;QAED,MAAM,KAAK,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC;QAC9C,IAAI,OAAO,IAAI,KAAK,EAAE,CAAC;YACtB,OAAO,KAAK,CAAC;QACd,CAAC;QAED,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACtC,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACH,KAAK,CAAC,OAAO,CAAC,GAAW,EAAE,GAAW,EAAE,KAAkB;QACzD,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,CAAC,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC;QAC/D,KAAK,IAAI,GAAG,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,EAAE,GAAG,EAAE,EAAE,CAAC;YACnC,UAAU,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,YAAY,CAAC,OAAO
,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC;QAC/E,CAAC;QAED,6DAA6D;QAC7D,MAAM,MAAM,GAAU,CAAC,UAAU,CAAC,CAAC;QACnC,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC;YAAE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,eAAe,EAAE,CAAC,CAAC;QAC5E,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC;YAAE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,oBAAoB,EAAE,MAAM,EAAE,eAAe,EAAE,CAAC,CAAC;QACzF,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC;YAAE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,eAAe,EAAE,CAAC,CAAC;QAE7E,MAAM,MAAM,GAAwB,EAAE,CAAC;QACvC,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC;YACpC,MAAM;YACN,UAAU;YACV,QAAQ,EAAE,IAAI;SACd,CAAC,EAAE,CAAC;YACJ,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,KAAK,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;gBACnD,MAAM,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;YAC3B,CAAC;YACD,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,kBAAkB,IAAI,CAAC,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC7E,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAC;YACrD,CAAC;YACD,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;gBACrD,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC7B,CAAC;YACD,qEAAqE;YACrE,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,KAAK,KAAK,CAAC,IAAI,EAAE,CAAC;gBAC/C,MAAM;YACP,CAAC;QACF,CAAC;QACD,OAAO,MAAM,CAAC;IACf,CAAC;CACD"}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/** Configuration options for the Geolookup plugin, provided via `config.yaml` in the consuming application. */
|
|
2
|
+
export interface GeolookupConfig {
|
|
3
|
+
/** When truthy, registers the Geolookup resource as a REST endpoint at the path specified by `geoServiceName`. Defaults to `false`. */
|
|
4
|
+
exposeGeoService?: boolean;
|
|
5
|
+
/** The URL path segment for the Geolookup endpoint (e.g. `"geo"` exposes it at `/geo`). Required when `exposeGeoService` is `true`. */
|
|
6
|
+
geoServiceName?: string;
|
|
7
|
+
/** When truthy, registers the DataLoad resource as a REST endpoint at the path specified by `dataLoadServiceName`. Defaults to `false`. */
|
|
8
|
+
exposeDataLoadService?: boolean;
|
|
9
|
+
/** The URL path segment for the DataLoad endpoint (e.g. `"dataload"` exposes it at `/dataload`). Required when `exposeDataLoadService` is `true`. */
|
|
10
|
+
dataLoadServiceName?: string;
|
|
11
|
+
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
|
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "geolookup-plugin",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "A Harper plugin for fast, tier-based reverse geocoding of US coordinates using H3 spatial indexing.",
|
|
5
|
+
"license": "Apache-2.0",
|
|
6
|
+
"type": "module",
|
|
7
|
+
"keywords": [
|
|
8
|
+
"geocoding",
|
|
9
|
+
"h3",
|
|
10
|
+
"harper",
|
|
11
|
+
"reverse-geocoding",
|
|
12
|
+
"geolookup"
|
|
13
|
+
],
|
|
14
|
+
"author": {
|
|
15
|
+
"name": "Kyle Bernhardy"
|
|
16
|
+
},
|
|
17
|
+
"repository": {
|
|
18
|
+
"type": "git",
|
|
19
|
+
"url": "git+https://github.com/kylebernhardy/geolookup.git"
|
|
20
|
+
},
|
|
21
|
+
"engines": {
|
|
22
|
+
"node": ">=24.0.0",
|
|
23
|
+
"harperdb": ">=4.7.0"
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist/",
|
|
27
|
+
"schemas/",
|
|
28
|
+
"data/",
|
|
29
|
+
"config.yaml",
|
|
30
|
+
"LICENSE",
|
|
31
|
+
"README.md"
|
|
32
|
+
],
|
|
33
|
+
"scripts": {
|
|
34
|
+
"build": "tsc || true",
|
|
35
|
+
"agent:run": "npx -y @harperfast/agent@latest",
|
|
36
|
+
"agent:skills:update": "npx -y skills@latest add harperfast/skills --all --yes",
|
|
37
|
+
"start": "harperdb run .",
|
|
38
|
+
"dev": "harperdb dev .",
|
|
39
|
+
"lint": "eslint .",
|
|
40
|
+
"format": "prettier --write .",
|
|
41
|
+
"test": "node --test test/*.test.js",
|
|
42
|
+
"test:watch": "node --watch --test test/*.test.js",
|
|
43
|
+
"deploy": "dotenv -- npm run deploy:component",
|
|
44
|
+
"deploy:component": "harperdb deploy_component . restart=rolling replicated=true"
|
|
45
|
+
},
|
|
46
|
+
"devDependencies": {
|
|
47
|
+
"@eslint/js": "^10.0.1",
|
|
48
|
+
"dotenv-cli": "^11.0.0",
|
|
49
|
+
"eslint": "^10.0.2",
|
|
50
|
+
"globals": "^17.4.0",
|
|
51
|
+
"harperdb": "^4.7.20",
|
|
52
|
+
"prettier": "^3.8.1"
|
|
53
|
+
},
|
|
54
|
+
"dependencies": {
|
|
55
|
+
"h3-js": "^4.4.0"
|
|
56
|
+
}
|
|
57
|
+
}
|