@reearth/core 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core.js +12487 -9418
- package/dist/core.umd.cjs +76 -73
- package/dist/index.d.ts +1 -0
- package/package.json +4 -2
- package/src/.DS_Store +0 -0
- package/src/Map/utils.ts +8 -2
- package/src/engines/Cesium/.DS_Store +0 -0
- package/src/engines/Cesium/Feature/.DS_Store +0 -0
- package/src/engines/Cesium/core/.DS_Store +0 -0
- package/src/engines/Cesium/hooks.ts +6 -0
- package/src/engines/Cesium/type.d.ts +0 -1
- package/src/engines/Cesium/utils/polygon.ts +2 -2
- package/src/mantle/atoms/compute.ts +2 -2
- package/src/mantle/data/gpx.ts +1 -1
- package/src/mantle/data/shapefile/index.ts +51 -0
- package/src/mantle/data/shapefile/parseDbf.ts +85 -0
- package/src/mantle/data/shapefile/parseShp.ts +459 -0
- package/src/mantle/data/shapefile/parseZip.ts +64 -0
- package/src/mantle/evaluator/simple/expression/variableReplacer.ts +0 -2
- package/src/mantle/evaluator/simple/index.ts +0 -5
- package/src/mantle/evaluator/simple/utils.ts +0 -1
- package/src/test/utils.tsx +0 -2
- package/src/utils/.DS_Store +0 -0
- package/src/mantle/data/shapefile.ts +0 -232
package/dist/index.d.ts
CHANGED
@@ -148,6 +148,7 @@ export declare type Cesium3DTilesAppearance = {
   imageBasedLightIntensity?: number;
   showWireframe?: boolean;
   showBoundingVolume?: boolean;
+  cacheBytes?: number;
 };
 
 export declare type ClassificationType = "both" | "terrain" | "3dtiles";
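The only change here is the new optional `cacheBytes` field on `Cesium3DTilesAppearance`. A minimal sketch of setting it, assuming the type is imported from the package entry point; the value and the assumption that it is forwarded to the engine's tileset cache size are illustrative, not taken from this diff:

```ts
import type { Cesium3DTilesAppearance } from "@reearth/core";

const appearance: Cesium3DTilesAppearance = {
  showWireframe: false,
  showBoundingVolume: false,
  // Assumed example value; presumably passed through to the 3D Tiles cache.
  cacheBytes: 256 * 1024 * 1024,
};
```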
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@reearth/core",
-  "version": "0.0.2",
+  "version": "0.0.4",
   "author": "Re:Earth contributors <community@reearth.io>",
   "license": "Apache-2.0",
   "description": "A library that abstracts a map engine as one common API.",
@@ -15,7 +15,7 @@
   "scripts": {
     "dev": "vite",
     "build": "tsc && vite build",
-    "lint": "eslint . --ext ts,tsx --report-unused-disable-directives
+    "lint": "eslint . --ext ts,tsx --report-unused-disable-directives",
     "preview": "vite preview",
     "test": "vitest",
     "storybook": "storybook dev -p 6007",
@@ -34,6 +34,7 @@
     "@seznam/compose-react-refs": "1.0.6",
     "@turf/invariant": "6.5.0",
     "@turf/turf": "6.5.0",
+    "@types/proj4": "2.5.5",
     "@ungap/event-target": "0.2.4",
     "@xstate/react": "3.2.1",
     "cesium-dnd": "1.1.0",
@@ -50,6 +51,7 @@
     "lodash-es": "4.17.21",
     "lru-cache": "8.0.4",
     "pbf": "3.2.1",
+    "proj4": "2.11.0",
     "protomaps": "1.23.1",
     "react-dnd": "16.0.1",
     "react-dnd-html5-backend": "16.0.1",
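Two dependency additions stand out: `proj4` 2.11.0 and its type package. The diff does not show where proj4 is used, but it pairs naturally with the new shapefile parser, whose `.prj` files describe projected coordinate systems. A hedged, self-contained sketch of proj4's basic API (the coordinates are arbitrary):

```ts
import proj4 from "proj4";

// Project a WGS84 lon/lat pair (EPSG:4326) into Web Mercator (EPSG:3857).
// Both CRSs ship with proj4 by default; other codes need proj4.defs(...).
const [x, y] = proj4("EPSG:4326", "EPSG:3857", [139.767, 35.681]);
console.log(x, y); // meters in Web Mercator
```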
package/src/.DS_Store
ADDED
Binary file
package/src/Map/utils.ts
CHANGED
@@ -20,8 +20,14 @@ export function mergeProperty(a: any, b: any) {
   return mergeWith(
     a2,
     b,
-    (
-
+    (
+      s: any,
+      v: any,
+      _k: string | number | symbol,
+      _obj: any,
+      _src: any,
+      stack: { size: number },
+    ) => (stack.size > 0 || Array.isArray(v) ? v ?? s : undefined),
   );
 }
 
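For context, lodash's `mergeWith` invokes the customizer for every key: returning a value short-circuits the default deep merge for that key, while returning `undefined` falls back to it. A small standalone sketch of the array-replacement behavior the customizer above implements (the `stack.size` guard is omitted for brevity):

```ts
import { mergeWith } from "lodash-es";

const merged = mergeWith(
  { tags: ["a"], nested: { x: 1 } },
  { tags: ["b"], nested: { y: 2 } },
  // Replace arrays outright instead of merging them index by index.
  (s: unknown, v: unknown) => (Array.isArray(v) ? v ?? s : undefined),
);

console.log(merged); // { tags: ["b"], nested: { x: 1, y: 2 } }
```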
package/src/engines/Cesium/.DS_Store
Binary file

package/src/engines/Cesium/Feature/.DS_Store
Binary file

package/src/engines/Cesium/core/.DS_Store
Binary file

package/src/engines/Cesium/hooks.ts
CHANGED
@@ -571,6 +571,12 @@ export default ({
     const viewer = cesium.current?.cesiumElement;
     if (!viewer || viewer.isDestroyed()) return;
 
+    if (!target || typeof target === "undefined" || !("id" in target && target.id)) {
+      viewer.selectedEntity = undefined;
+      onLayerSelect?.();
+      return;
+    }
+
     const entity =
       findEntity(viewer, undefined, selectedLayerId?.featureId) ||
       findEntity(viewer, selectedLayerId?.layerId);
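The added guard clears Cesium's selection when the picked target has no usable `id`. A standalone sketch of the same pattern; `target` and `onLayerSelect` mirror the diff, while the wrapper function and its name are illustrative:

```ts
import type { Viewer } from "cesium";

// Clear the viewer's selected entity when nothing selectable was picked,
// and notify listeners that the selection is now empty.
function clearSelectionIfUnselectable(
  viewer: Viewer,
  target: { id?: string } | undefined,
  onLayerSelect?: () => void,
): boolean {
  if (!target?.id) {
    viewer.selectedEntity = undefined;
    onLayerSelect?.();
    return true;
  }
  return false;
}
```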
package/src/engines/Cesium/type.d.ts
CHANGED
@@ -11,7 +11,6 @@ declare module "@cesium/engine" {
   // (...args: Parameter<Listener>[]).
   // This cannot be fixed by augmentation but by overloading.
   export interface Event<Listener extends (...args: any[]) => void = (...args: any[]) => void> {
-    // eslint-disable-next-line @typescript-eslint/method-signature-style
     raiseEvent(...arguments: Parameters<Listener>): void;
   }
 
package/src/engines/Cesium/utils/polygon.ts
CHANGED
@@ -21,8 +21,8 @@ export function convertGeometryToPositionsArray(
     geometry.type === "LineString"
       ? coordinatesToPositionsArray([geometry.coordinates])
       : geometry.type === "Polygon"
-
-
+      ? coordinatesToPositionsArray(geometry.coordinates)
+      : geometry.coordinates.flatMap(coordinates => coordinatesToPositionsArray(coordinates))
   ).filter(({ length }) => length > 0);
 }
 
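The corrected branches follow the GeoJSON spec: `Polygon.coordinates` is an array of rings, while `MultiPolygon.coordinates` is an array of polygon coordinate sets, hence the extra `flatMap`. A minimal illustration using only GeoJSON types; the converter below is a stand-in, not the package's `coordinatesToPositionsArray`:

```ts
import type { MultiPolygon, Polygon } from "geojson";

const polygon: Polygon = {
  type: "Polygon",
  coordinates: [[[0, 0], [1, 0], [1, 1], [0, 0]]], // one outer ring
};

const multiPolygon: MultiPolygon = {
  type: "MultiPolygon",
  coordinates: [polygon.coordinates, polygon.coordinates], // two polygons
};

// Stand-in converter: one flat positions array per ring.
const toPositionsArray = (rings: number[][][]): number[][] => rings.map(ring => ring.flat());

console.log(toPositionsArray(polygon.coordinates).length); // 1
console.log(multiPolygon.coordinates.flatMap(c => toPositionsArray(c)).length); // 2
```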
package/src/mantle/atoms/compute.ts
CHANGED
@@ -178,8 +178,8 @@ export function computeAtom(cache?: typeof globalDataFeaturesCache) {
           ...("properties" in value
             ? { properties: value.properties }
             : l && "properties" in l
-
-
+            ? { properties: l.properties }
+            : {}),
         }
       : undefined,
   );
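The restored lines are a chained conditional spread: use the value's own `properties`, otherwise fall back to the layer's, otherwise spread nothing. A compact standalone sketch of that pattern with illustrative object names:

```ts
type WithProps = { properties?: Record<string, unknown> };

const value: WithProps = {}; // no properties of its own
const l: WithProps = { properties: { name: "layer" } };

const computed = {
  id: "f1",
  ...("properties" in value
    ? { properties: value.properties }
    : l && "properties" in l
    ? { properties: l.properties }
    : {}),
};

console.log(computed); // { id: "f1", properties: { name: "layer" } }
```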
package/src/mantle/data/gpx.ts
CHANGED
@@ -24,7 +24,7 @@ const handler = (xmlDataStr: string) => {
 const parseGPX = (gpxSource: string) => {
   const parseMethod = (gpxSource: string): Document | null => {
     // Verify that we are in a browser
-    if (typeof document
+    if (typeof document === "undefined") return null;
 
     const domParser = new window.DOMParser();
     return domParser.parseFromString(gpxSource, "text/xml");
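The fixed guard returns early before any DOM API is touched outside a browser. A minimal sketch of the same check with an illustrative function name:

```ts
// Returns null in non-browser environments (Node, SSR) where `document`
// and DOMParser are unavailable; otherwise parses the XML string.
function parseXmlInBrowser(source: string): Document | null {
  if (typeof document === "undefined") return null;
  return new window.DOMParser().parseFromString(source, "text/xml");
}
```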
package/src/mantle/data/shapefile/index.ts
ADDED
@@ -0,0 +1,51 @@
+import type { GeometryObject, Feature as GeoJSONFeature, FeatureCollection } from "geojson";
+
+import type { Data, DataRange, Feature } from "../../types";
+import { processGeoJSON } from "../geojson";
+import { f, FetchOptions, generateRandomString } from "../utils";
+
+import { parseZip } from "./parseZip";
+
+export async function combine(
+  shp: GeoJSONFeature<GeometryObject>[],
+  dbf?: any[],
+): Promise<FeatureCollection> {
+  const out: FeatureCollection = {
+    type: "FeatureCollection",
+    features: [],
+  };
+  const len = shp.length;
+  dbf = dbf || [];
+  for (let i = 0; i < len; i++) {
+    out.features.push({
+      type: "Feature",
+      geometry: shp[i].geometry,
+      id: generateRandomString(12),
+      properties: dbf[i] || {},
+    });
+  }
+  return out;
+}
+
+export async function fetchShapefile(
+  data: Data,
+  range?: DataRange,
+  options?: FetchOptions,
+): Promise<Feature[] | void> {
+  const arrayBuffer = data.url ? await (await f(data.url, options)).arrayBuffer() : data.value;
+
+  if (!arrayBuffer) {
+    console.error("No data provided");
+  }
+
+  const geojson = await parseZip(arrayBuffer);
+  if (Array.isArray(geojson)) {
+    const combinedFeatureCollection: FeatureCollection = {
+      type: "FeatureCollection",
+      features: geojson.flatMap(layer => (layer as FeatureCollection).features),
+    };
+    return processGeoJSON(combinedFeatureCollection, range);
+  } else {
+    return processGeoJSON(geojson as FeatureCollection, range);
+  }
+}
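`combine` pairs each shapefile geometry with the attribute record parsed from the `.dbf` table at the same index. A self-contained sketch of that pairing using only public GeoJSON types; a counter id stands in for the internal `generateRandomString`:

```ts
import type { Feature, FeatureCollection, GeometryObject } from "geojson";

// Pair each geometry with the attribute record at the same index;
// missing records become empty properties.
function combineSketch(
  shp: Feature<GeometryObject>[],
  dbf: Record<string, unknown>[] = [],
): FeatureCollection {
  return {
    type: "FeatureCollection",
    features: shp.map((feature, i) => ({
      type: "Feature",
      geometry: feature.geometry,
      id: `feature-${i}`, // stand-in for generateRandomString(12)
      properties: dbf[i] ?? {},
    })),
  };
}
```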
package/src/mantle/data/shapefile/parseDbf.ts
ADDED
@@ -0,0 +1,85 @@
+export function parseDbf(dbf: ArrayBuffer, cpg?: string): Record<string, any>[] {
+  const header = parseHeader(dbf);
+  const records = parseRecords(dbf, header, cpg);
+  return records;
+}
+
+export function parseHeader(dbf: ArrayBuffer): {
+  version: number;
+  dateUpdated: Date;
+  recordCount: number;
+  recordSize: number;
+  fields: {
+    name: string;
+    type: string;
+    size: number;
+    decimals: number;
+  }[];
+} {
+  const view = new DataView(dbf);
+  const version = view.getUint8(0);
+  const dateUpdated = new Date(1900 + view.getUint8(1), view.getUint8(2) - 1, view.getUint8(3));
+  const recordCount = view.getInt32(4, true);
+  const headerSize = view.getInt16(8, true);
+  const recordSize = view.getInt16(10, true);
+  const fields = parseFields(new DataView(dbf, 32, headerSize - 32));
+  return { version, dateUpdated, recordCount, recordSize, fields };
+}
+
+function parseFields(fieldData: DataView): {
+  name: string;
+  type: string;
+  size: number;
+  decimals: number;
+}[] {
+  const fields = [];
+  let offset = 0;
+  while (offset < fieldData.byteLength && fieldData.getUint8(offset) !== 0x0d) {
+    const name = new TextDecoder()
+      .decode(new Uint8Array(fieldData.buffer, fieldData.byteOffset + offset, 11))
+      .replace(/\0.*/g, "");
+    const type = String.fromCharCode(fieldData.getUint8(offset + 11));
+    const size = fieldData.getUint8(offset + 16);
+    const decimals = fieldData.getUint8(offset + 17);
+    fields.push({ name, type, size, decimals });
+    offset += 32;
+  }
+  return fields;
+}
+
+function parseRecords(
+  dbf: ArrayBuffer,
+  header: ReturnType<typeof parseHeader>,
+  cpg?: string,
+): Record<string, any>[] {
+  const records = [];
+  const decoder = cpg ? new TextDecoder(cpg) : new TextDecoder();
+  const fields = header.fields;
+  for (let i = 0; i < header.recordCount; i++) {
+    const record: Record<string, any> = {};
+    const offset = header.recordSize * i + 1;
+    for (const field of fields) {
+      const value = new TextDecoder().decode(new Uint8Array(dbf, offset, field.size)).trim();
+      switch (field.type) {
+        case "N":
+        case "F":
+          record[field.name] = parseFloat(value);
+          break;
+        case "D":
+          record[field.name] = new Date(
+            parseInt(value.slice(0, 4), 10),
+            parseInt(value.slice(4, 6), 10) - 1,
+            parseInt(value.slice(6, 8), 10),
+          );
+          break;
+        case "L":
+          record[field.name] = value.toLowerCase() === "y" || value.toLowerCase() === "t";
+          break;
+        default:
+          record[field.name] = decoder.decode(new Uint8Array(dbf, offset, field.size)).trim();
+      }
+    }
+    records.push(record);
+  }
+  return records;
+}