matterviz 0.3.0 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (286)
  1. package/dist/FilePicker.svelte +37 -20
  2. package/dist/Icon.svelte +2 -2
  3. package/dist/MillerIndexInput.svelte +60 -0
  4. package/dist/MillerIndexInput.svelte.d.ts +7 -0
  5. package/dist/app.css +38 -2
  6. package/dist/brillouin/BrillouinZone.svelte +20 -62
  7. package/dist/brillouin/BrillouinZone.svelte.d.ts +1 -1
  8. package/dist/brillouin/BrillouinZoneExportPane.svelte +12 -20
  9. package/dist/brillouin/BrillouinZoneScene.svelte +2 -2
  10. package/dist/brillouin/BrillouinZoneScene.svelte.d.ts +1 -1
  11. package/dist/chempot-diagram/ChemPotDiagram.svelte +192 -0
  12. package/dist/chempot-diagram/ChemPotDiagram.svelte.d.ts +13 -0
  13. package/dist/chempot-diagram/ChemPotDiagram2D.svelte +677 -0
  14. package/dist/chempot-diagram/ChemPotDiagram2D.svelte.d.ts +16 -0
  15. package/dist/chempot-diagram/ChemPotDiagram3D.svelte +2688 -0
  16. package/dist/chempot-diagram/ChemPotDiagram3D.svelte.d.ts +16 -0
  17. package/dist/chempot-diagram/ChemPotScene3D.svelte +8 -0
  18. package/dist/chempot-diagram/ChemPotScene3D.svelte.d.ts +7 -0
  19. package/dist/chempot-diagram/color.d.ts +10 -0
  20. package/dist/chempot-diagram/color.js +33 -0
  21. package/dist/chempot-diagram/compute.d.ts +38 -0
  22. package/dist/chempot-diagram/compute.js +650 -0
  23. package/dist/chempot-diagram/index.d.ts +5 -0
  24. package/dist/chempot-diagram/index.js +5 -0
  25. package/dist/chempot-diagram/pointer.d.ts +16 -0
  26. package/dist/chempot-diagram/pointer.js +40 -0
  27. package/dist/chempot-diagram/temperature.d.ts +15 -0
  28. package/dist/chempot-diagram/temperature.js +37 -0
  29. package/dist/chempot-diagram/types.d.ts +83 -0
  30. package/dist/chempot-diagram/types.js +27 -0
  31. package/dist/colors/index.d.ts +3 -1
  32. package/dist/colors/index.js +4 -0
  33. package/dist/composition/BarChart.svelte +13 -22
  34. package/dist/composition/BubbleChart.svelte +5 -3
  35. package/dist/composition/FormulaFilter.svelte +770 -90
  36. package/dist/composition/FormulaFilter.svelte.d.ts +37 -1
  37. package/dist/composition/PieChart.svelte +43 -18
  38. package/dist/composition/PieChart.svelte.d.ts +1 -1
  39. package/dist/constants.d.ts +1 -0
  40. package/dist/constants.js +2 -0
  41. package/dist/convex-hull/ConvexHull.svelte +14 -1
  42. package/dist/convex-hull/ConvexHull.svelte.d.ts +1 -1
  43. package/dist/convex-hull/ConvexHull2D.svelte +14 -45
  44. package/dist/convex-hull/ConvexHull2D.svelte.d.ts +1 -1
  45. package/dist/convex-hull/ConvexHull3D.svelte +396 -134
  46. package/dist/convex-hull/ConvexHull3D.svelte.d.ts +1 -1
  47. package/dist/convex-hull/ConvexHull4D.svelte +93 -42
  48. package/dist/convex-hull/ConvexHull4D.svelte.d.ts +1 -1
  49. package/dist/convex-hull/ConvexHullControls.svelte +94 -31
  50. package/dist/convex-hull/ConvexHullControls.svelte.d.ts +4 -2
  51. package/dist/convex-hull/ConvexHullStats.svelte +697 -128
  52. package/dist/convex-hull/ConvexHullStats.svelte.d.ts +6 -1
  53. package/dist/convex-hull/ConvexHullTooltip.svelte +1 -0
  54. package/dist/convex-hull/GasPressureControls.svelte +72 -38
  55. package/dist/convex-hull/GasPressureControls.svelte.d.ts +2 -1
  56. package/dist/convex-hull/TemperatureSlider.svelte +46 -19
  57. package/dist/convex-hull/TemperatureSlider.svelte.d.ts +2 -1
  58. package/dist/convex-hull/demo-temperature.d.ts +6 -0
  59. package/dist/convex-hull/demo-temperature.js +36 -0
  60. package/dist/convex-hull/gas-thermodynamics.js +16 -5
  61. package/dist/convex-hull/helpers.d.ts +7 -1
  62. package/dist/convex-hull/helpers.js +45 -15
  63. package/dist/convex-hull/index.d.ts +15 -1
  64. package/dist/convex-hull/index.js +1 -0
  65. package/dist/convex-hull/thermodynamics.d.ts +8 -21
  66. package/dist/convex-hull/thermodynamics.js +106 -17
  67. package/dist/convex-hull/types.d.ts +7 -0
  68. package/dist/convex-hull/types.js +11 -0
  69. package/dist/coordination/CoordinationBarPlot.svelte +29 -46
  70. package/dist/element/BohrAtom.svelte +1 -1
  71. package/dist/element/data.js +2 -14
  72. package/dist/element/data.json.gz +0 -0
  73. package/dist/element/index.d.ts +1 -1
  74. package/dist/element/index.js +1 -0
  75. package/dist/element/types.d.ts +1 -0
  76. package/dist/fermi-surface/FermiSurface.svelte +21 -65
  77. package/dist/fermi-surface/FermiSurface.svelte.d.ts +1 -1
  78. package/dist/fermi-surface/FermiSurfaceControls.svelte.d.ts +1 -1
  79. package/dist/fermi-surface/FermiSurfaceScene.svelte +1 -1
  80. package/dist/fermi-surface/FermiSurfaceScene.svelte.d.ts +1 -1
  81. package/dist/fermi-surface/compute.js +1 -21
  82. package/dist/fermi-surface/marching-cubes.d.ts +2 -13
  83. package/dist/fermi-surface/marching-cubes.js +2 -519
  84. package/dist/fermi-surface/parse.js +17 -23
  85. package/dist/heatmap-matrix/HeatmapMatrix.svelte +1273 -0
  86. package/dist/heatmap-matrix/HeatmapMatrix.svelte.d.ts +110 -0
  87. package/dist/heatmap-matrix/HeatmapMatrixControls.svelte +171 -0
  88. package/dist/heatmap-matrix/HeatmapMatrixControls.svelte.d.ts +31 -0
  89. package/dist/heatmap-matrix/index.d.ts +53 -0
  90. package/dist/heatmap-matrix/index.js +100 -0
  91. package/dist/heatmap-matrix/shared.d.ts +2 -0
  92. package/dist/heatmap-matrix/shared.js +4 -0
  93. package/dist/icons.d.ts +119 -0
  94. package/dist/icons.js +119 -0
  95. package/dist/index.d.ts +6 -1
  96. package/dist/index.js +6 -1
  97. package/dist/io/export.js +15 -3
  98. package/dist/io/file-drop.d.ts +7 -0
  99. package/dist/io/file-drop.js +43 -0
  100. package/dist/io/index.d.ts +2 -2
  101. package/dist/io/index.js +2 -112
  102. package/dist/io/types.d.ts +1 -0
  103. package/dist/io/url-drop.d.ts +2 -0
  104. package/dist/io/url-drop.js +118 -0
  105. package/dist/isosurface/Isosurface.svelte +231 -0
  106. package/dist/isosurface/Isosurface.svelte.d.ts +8 -0
  107. package/dist/isosurface/IsosurfaceControls.svelte +273 -0
  108. package/dist/isosurface/IsosurfaceControls.svelte.d.ts +9 -0
  109. package/dist/isosurface/index.d.ts +5 -0
  110. package/dist/isosurface/index.js +6 -0
  111. package/dist/isosurface/parse.d.ts +6 -0
  112. package/dist/isosurface/parse.js +548 -0
  113. package/dist/isosurface/slice.d.ts +11 -0
  114. package/dist/isosurface/slice.js +145 -0
  115. package/dist/isosurface/types.d.ts +55 -0
  116. package/dist/isosurface/types.js +178 -0
  117. package/dist/labels.d.ts +2 -1
  118. package/dist/labels.js +1 -0
  119. package/dist/layout/InfoTag.svelte +62 -62
  120. package/dist/layout/SubpageGrid.svelte +74 -0
  121. package/dist/layout/SubpageGrid.svelte.d.ts +14 -0
  122. package/dist/layout/index.d.ts +1 -0
  123. package/dist/layout/index.js +1 -0
  124. package/dist/layout/json-tree/JsonNode.svelte +226 -53
  125. package/dist/layout/json-tree/JsonTree.svelte +425 -51
  126. package/dist/layout/json-tree/JsonTree.svelte.d.ts +1 -1
  127. package/dist/layout/json-tree/JsonValue.svelte +218 -97
  128. package/dist/layout/json-tree/types.d.ts +27 -2
  129. package/dist/layout/json-tree/utils.d.ts +14 -1
  130. package/dist/layout/json-tree/utils.js +254 -0
  131. package/dist/marching-cubes.d.ts +14 -0
  132. package/dist/marching-cubes.js +519 -0
  133. package/dist/math.d.ts +8 -0
  134. package/dist/math.js +374 -7
  135. package/dist/overlays/ContextMenu.svelte +3 -2
  136. package/dist/overlays/DraggablePane.svelte +163 -58
  137. package/dist/overlays/DraggablePane.svelte.d.ts +2 -0
  138. package/dist/phase-diagram/IsobaricBinaryPhaseDiagram.svelte +232 -77
  139. package/dist/phase-diagram/IsobaricBinaryPhaseDiagram.svelte.d.ts +6 -2
  140. package/dist/phase-diagram/PhaseDiagramControls.svelte +32 -11
  141. package/dist/phase-diagram/PhaseDiagramControls.svelte.d.ts +3 -2
  142. package/dist/phase-diagram/PhaseDiagramEditorPane.svelte +103 -0
  143. package/dist/phase-diagram/PhaseDiagramEditorPane.svelte.d.ts +15 -0
  144. package/dist/phase-diagram/PhaseDiagramExportPane.svelte +102 -95
  145. package/dist/phase-diagram/PhaseDiagramExportPane.svelte.d.ts +7 -0
  146. package/dist/phase-diagram/PhaseDiagramTooltip.svelte +100 -26
  147. package/dist/phase-diagram/PhaseDiagramTooltip.svelte.d.ts +6 -3
  148. package/dist/phase-diagram/index.d.ts +2 -0
  149. package/dist/phase-diagram/index.js +2 -0
  150. package/dist/phase-diagram/svg-to-diagram.d.ts +2 -0
  151. package/dist/phase-diagram/svg-to-diagram.js +865 -0
  152. package/dist/phase-diagram/types.d.ts +10 -0
  153. package/dist/phase-diagram/utils.d.ts +7 -4
  154. package/dist/phase-diagram/utils.js +149 -59
  155. package/dist/plot/AxisLabel.svelte +26 -0
  156. package/dist/plot/AxisLabel.svelte.d.ts +16 -0
  157. package/dist/plot/BarPlot.svelte +473 -228
  158. package/dist/plot/BarPlot.svelte.d.ts +3 -3
  159. package/dist/plot/BarPlotControls.svelte +3 -2
  160. package/dist/plot/BarPlotControls.svelte.d.ts +1 -1
  161. package/dist/plot/ColorBar.svelte +54 -54
  162. package/dist/plot/ColorBar.svelte.d.ts +1 -1
  163. package/dist/plot/ElementScatter.svelte +4 -3
  164. package/dist/plot/FillArea.svelte +4 -1
  165. package/dist/plot/Histogram.svelte +320 -230
  166. package/dist/plot/Histogram.svelte.d.ts +2 -2
  167. package/dist/plot/HistogramControls.svelte +29 -10
  168. package/dist/plot/HistogramControls.svelte.d.ts +6 -2
  169. package/dist/plot/InteractiveAxisLabel.svelte.d.ts +2 -2
  170. package/dist/plot/PlotControls.svelte +109 -27
  171. package/dist/plot/PlotControls.svelte.d.ts +1 -1
  172. package/dist/plot/PlotLegend.svelte +1 -1
  173. package/dist/plot/PortalSelect.svelte +2 -1
  174. package/dist/plot/ReferenceLine.svelte +2 -1
  175. package/dist/plot/ReferenceLine.svelte.d.ts +1 -0
  176. package/dist/plot/ReferencePlane.svelte +1 -3
  177. package/dist/plot/ScatterPlot.svelte +343 -209
  178. package/dist/plot/ScatterPlot.svelte.d.ts +3 -3
  179. package/dist/plot/ScatterPlot3D.svelte.d.ts +2 -2
  180. package/dist/plot/ScatterPlot3DControls.svelte +203 -250
  181. package/dist/plot/ScatterPlot3DScene.svelte +4 -7
  182. package/dist/plot/ScatterPlot3DScene.svelte.d.ts +2 -2
  183. package/dist/plot/ScatterPlotControls.svelte +95 -55
  184. package/dist/plot/ScatterPlotControls.svelte.d.ts +1 -1
  185. package/dist/plot/ZeroLines.svelte +44 -0
  186. package/dist/plot/ZeroLines.svelte.d.ts +32 -0
  187. package/dist/plot/ZoomRect.svelte +21 -0
  188. package/dist/plot/ZoomRect.svelte.d.ts +8 -0
  189. package/dist/plot/axis-utils.d.ts +1 -1
  190. package/dist/plot/data-cleaning.js +1 -5
  191. package/dist/plot/index.d.ts +6 -2
  192. package/dist/plot/index.js +6 -2
  193. package/dist/plot/interactions.d.ts +8 -10
  194. package/dist/plot/interactions.js +10 -19
  195. package/dist/plot/layout.d.ts +7 -1
  196. package/dist/plot/layout.js +12 -4
  197. package/dist/plot/reference-line.d.ts +4 -21
  198. package/dist/plot/reference-line.js +7 -81
  199. package/dist/plot/types.d.ts +42 -17
  200. package/dist/plot/types.js +10 -0
  201. package/dist/plot/utils/label-placement.js +14 -11
  202. package/dist/plot/utils.d.ts +1 -0
  203. package/dist/plot/utils.js +14 -0
  204. package/dist/rdf/RdfPlot.svelte +55 -66
  205. package/dist/rdf/RdfPlot.svelte.d.ts +1 -1
  206. package/dist/rdf/index.d.ts +1 -1
  207. package/dist/rdf/index.js +1 -1
  208. package/dist/settings.d.ts +5 -0
  209. package/dist/settings.js +37 -3
  210. package/dist/spectral/Bands.svelte +515 -143
  211. package/dist/spectral/Bands.svelte.d.ts +22 -2
  212. package/dist/spectral/helpers.d.ts +23 -1
  213. package/dist/spectral/helpers.js +65 -9
  214. package/dist/spectral/types.d.ts +2 -0
  215. package/dist/structure/AtomLegend.svelte +31 -10
  216. package/dist/structure/AtomLegend.svelte.d.ts +1 -1
  217. package/dist/structure/CellSelect.svelte +92 -22
  218. package/dist/structure/Lattice.svelte +2 -0
  219. package/dist/structure/Structure.svelte +716 -173
  220. package/dist/structure/Structure.svelte.d.ts +7 -2
  221. package/dist/structure/StructureControls.svelte +26 -14
  222. package/dist/structure/StructureControls.svelte.d.ts +5 -1
  223. package/dist/structure/StructureInfoPane.svelte +7 -1
  224. package/dist/structure/StructureScene.svelte +386 -95
  225. package/dist/structure/StructureScene.svelte.d.ts +15 -4
  226. package/dist/structure/atom-properties.d.ts +6 -2
  227. package/dist/structure/atom-properties.js +38 -25
  228. package/dist/structure/export.js +10 -7
  229. package/dist/structure/ferrox-wasm-types.d.ts +3 -2
  230. package/dist/structure/ferrox-wasm-types.js +0 -3
  231. package/dist/structure/ferrox-wasm.d.ts +3 -2
  232. package/dist/structure/ferrox-wasm.js +1 -2
  233. package/dist/structure/index.d.ts +7 -0
  234. package/dist/structure/index.js +22 -0
  235. package/dist/structure/parse.js +19 -16
  236. package/dist/structure/partial-occupancy.d.ts +25 -0
  237. package/dist/structure/partial-occupancy.js +102 -0
  238. package/dist/structure/validation.js +6 -3
  239. package/dist/symmetry/SymmetryStats.svelte +18 -4
  240. package/dist/symmetry/WyckoffTable.svelte +18 -10
  241. package/dist/symmetry/index.d.ts +7 -4
  242. package/dist/symmetry/index.js +83 -18
  243. package/dist/table/HeatmapTable.svelte +468 -69
  244. package/dist/table/HeatmapTable.svelte.d.ts +13 -1
  245. package/dist/table/ToggleMenu.svelte +291 -44
  246. package/dist/table/ToggleMenu.svelte.d.ts +4 -1
  247. package/dist/table/index.d.ts +3 -0
  248. package/dist/tooltip/index.d.ts +1 -1
  249. package/dist/tooltip/index.js +1 -0
  250. package/dist/trajectory/Trajectory.svelte +147 -145
  251. package/dist/trajectory/TrajectoryExportPane.svelte +13 -9
  252. package/dist/trajectory/TrajectoryExportPane.svelte.d.ts +1 -1
  253. package/dist/trajectory/constants.d.ts +6 -0
  254. package/dist/trajectory/constants.js +7 -0
  255. package/dist/trajectory/extract.js +3 -5
  256. package/dist/trajectory/format-detect.d.ts +9 -0
  257. package/dist/trajectory/format-detect.js +76 -0
  258. package/dist/trajectory/frame-reader.d.ts +17 -0
  259. package/dist/trajectory/frame-reader.js +339 -0
  260. package/dist/trajectory/helpers.d.ts +15 -0
  261. package/dist/trajectory/helpers.js +187 -0
  262. package/dist/trajectory/index.d.ts +1 -0
  263. package/dist/trajectory/index.js +11 -4
  264. package/dist/trajectory/parse/ase.d.ts +2 -0
  265. package/dist/trajectory/parse/ase.js +76 -0
  266. package/dist/trajectory/parse/hdf5.d.ts +2 -0
  267. package/dist/trajectory/parse/hdf5.js +121 -0
  268. package/dist/trajectory/parse/index.d.ts +12 -0
  269. package/dist/trajectory/parse/index.js +304 -0
  270. package/dist/trajectory/parse/lammps.d.ts +5 -0
  271. package/dist/trajectory/parse/lammps.js +169 -0
  272. package/dist/trajectory/parse/vasp.d.ts +2 -0
  273. package/dist/trajectory/parse/vasp.js +65 -0
  274. package/dist/trajectory/parse/xyz.d.ts +2 -0
  275. package/dist/trajectory/parse/xyz.js +109 -0
  276. package/dist/trajectory/types.d.ts +11 -0
  277. package/dist/trajectory/types.js +1 -0
  278. package/dist/utils.d.ts +2 -0
  279. package/dist/utils.js +4 -0
  280. package/dist/xrd/XrdPlot.svelte +6 -4
  281. package/dist/xrd/calc-xrd.js +0 -1
  282. package/package.json +33 -23
  283. package/readme.md +4 -4
  284. package/dist/trajectory/parse.d.ts +0 -42
  285. package/dist/trajectory/parse.js +0 -1267
  286. /package/dist/element/{data.json.d.ts → data.json.gz.d.ts} +0 -0
@@ -1,1267 +0,0 @@
1
- // Parsing functions for trajectory data from various formats
2
- import { ATOMIC_NUMBER_TO_SYMBOL } from '../composition/parse';
3
- import { COMPRESSION_EXTENSIONS_REGEX, CONFIG_DIRS_REGEX, MD_SIM_EXCLUDE_REGEX, TRAJ_EXTENSIONS_REGEX, TRAJ_FALLBACK_EXTENSIONS_REGEX, TRAJ_KEYWORDS_SIMPLE_REGEX, XDATCAR_REGEX, } from '../constants';
4
- import { is_binary } from '../io/is-binary';
5
- import { ELEM_SYMBOLS } from '../labels';
6
- import * as math from '../math';
7
- import { parse_xyz } from '../structure/parse';
8
- import * as h5wasm from 'h5wasm';
9
- // Constants for large file handling
10
- export const MAX_SAFE_STRING_LENGTH = 0x1fffffe8 * 0.5; // 50% of JS max string length as safety
11
- export const MAX_METADATA_SIZE = 50 * 1024 * 1024; // 50MB limit for metadata
12
- export const LARGE_FILE_THRESHOLD = 400 * 1024 * 1024; // 400MB
13
- export const INDEX_SAMPLE_RATE = 100; // Default sample rate for frame indexing
14
- export const MAX_BIN_FILE_SIZE = 100 * 1024 * 1024; // 100MB default for ArrayBuffer files
15
- export const MAX_TEXT_FILE_SIZE = 50 * 1024 * 1024; // 50MB default for string files
16
- // Unified format detection
17
- const FORMAT_PATTERNS = {
18
- ase: (data, filename) => {
19
- if (!filename?.toLowerCase().endsWith(`.traj`) || !(data instanceof ArrayBuffer)) {
20
- return false;
21
- }
22
- const view = new Uint8Array(data.slice(0, 24));
23
- return [0x2d, 0x20, 0x6f, 0x66, 0x20, 0x55, 0x6c, 0x6d].every((byte, idx) => view[idx] === byte);
24
- },
25
- hdf5: (data, filename) => {
26
- const has_ext = filename?.toLowerCase().match(/\.(h5|hdf5)$/);
27
- if (!has_ext || !(data instanceof ArrayBuffer) || data.byteLength < 8)
28
- return false;
29
- const signature = new Uint8Array(data.slice(0, 8));
30
- return [0x89, 0x48, 0x44, 0x46, 0x0d, 0x0a, 0x1a, 0x0a].every((b, idx) => signature[idx] === b);
31
- },
32
- vasp: (data, filename) => {
33
- const basename = filename?.toLowerCase().split(`/`).pop() || ``;
34
- if (basename === `xdatcar` || basename.startsWith(`xdatcar`))
35
- return true;
36
- const lines = data.trim().split(/\r?\n/);
37
- return lines.length >= 10 &&
38
- lines.some((line) => line.includes(`Direct configuration=`)) &&
39
- !isNaN(parseFloat(lines[1])) &&
40
- lines.slice(2, 5).every((line) => line.trim().split(/\s+/).length === 3);
41
- },
42
- xyz_multi: (data, filename) => {
43
- const lower = filename?.toLowerCase() ?? ``;
44
- const base = lower.replace(COMPRESSION_EXTENSIONS_REGEX, ``);
45
- if (!/\.(xyz|extxyz)$/.test(base))
46
- return false;
47
- return count_xyz_frames(data) >= 2;
48
- },
49
- lammpstrj: (data, filename) => {
50
- const lower = filename?.toLowerCase() ?? ``;
51
- const base = lower.replace(COMPRESSION_EXTENSIONS_REGEX, ``);
52
- if (!/\.lammpstrj$/.test(base))
53
- return false;
54
- // Check for LAMMPS trajectory header pattern
55
- return data.includes(`ITEM: TIMESTEP`) && data.includes(`ITEM: ATOMS`);
56
- },
57
- };
58
- // Validate that data is a proper 3x3 matrix
59
- // Accepts both regular arrays and typed arrays (Float32Array, Float64Array, etc.)
60
- function validate_3x3_matrix(data) {
61
- if (!Array.isArray(data) || data.length !== 3) {
62
- throw new Error(`Expected 3x3 matrix, got array of length ${Array.isArray(data) ? data.length : `non-array`}`);
63
- }
64
- // Allow both regular arrays and typed arrays (Float32Array, Float64Array, etc.)
65
- const is_valid_row = (row) => (Array.isArray(row) || ArrayBuffer.isView(row)) &&
66
- row.length === 3;
67
- if (!data.every(is_valid_row)) {
68
- throw new Error(`Invalid 3x3 matrix structure`);
69
- }
70
- return data;
71
- }
72
- // Check if file is a trajectory (supports both filename-only and content-based detection)
73
- export function is_trajectory_file(filename, content) {
74
- if (CONFIG_DIRS_REGEX.test(filename))
75
- return false;
76
- let base_name = filename.toLowerCase();
77
- while (COMPRESSION_EXTENSIONS_REGEX.test(base_name)) {
78
- base_name = base_name.replace(COMPRESSION_EXTENSIONS_REGEX, ``);
79
- }
80
- // For xyz/extxyz files, use content-based detection if available
81
- if (/\.(xyz|extxyz)$/i.test(base_name)) {
82
- if (content)
83
- return count_xyz_frames(content) >= 2;
84
- // Use filename-based detection for auto-render (compressed or not)
85
- return TRAJ_KEYWORDS_SIMPLE_REGEX.test(base_name);
86
- }
87
- // Always detect these specific trajectory formats
88
- if (TRAJ_EXTENSIONS_REGEX.test(base_name) || XDATCAR_REGEX.test(base_name))
89
- return true;
90
- // Special exclusion for generic md_simulation pattern with certain extensions
91
- if (MD_SIM_EXCLUDE_REGEX.test(base_name))
92
- return false;
93
- // For .h5/.hdf5 files, require trajectory keywords
94
- if (/\.(h5|hdf5)$/i.test(base_name)) {
95
- return TRAJ_KEYWORDS_SIMPLE_REGEX.test(base_name);
96
- }
97
- // For other extensions, require both keywords and specific extensions
98
- return TRAJ_KEYWORDS_SIMPLE_REGEX.test(base_name) &&
99
- TRAJ_FALLBACK_EXTENSIONS_REGEX.test(base_name);
100
- }
101
- // Cache inverse matrices by original matrix reference for performance
102
- // IMPORTANT: This cache assumes lattice matrices are immutable. Mutating a cached
103
- // matrix in place yields incorrect inverses. Always create new matrix instances
104
- // if modifications are needed.
105
- const matrix_cache = new WeakMap();
106
- const get_inverse_matrix = (matrix) => {
107
- const cached = matrix_cache.get(matrix);
108
- if (cached)
109
- return cached;
110
- const inverse = math.matrix_inverse_3x3(matrix);
111
- matrix_cache.set(matrix, inverse);
112
- return inverse;
113
- };
114
- // Unified utilities
115
- const convert_atomic_numbers = (numbers) => numbers.map((num) => ATOMIC_NUMBER_TO_SYMBOL[num] || `X`);
116
- const create_structure = (positions, elements, lattice_matrix, pbc, force_data) => {
117
- const inv_matrix = lattice_matrix ? get_inverse_matrix(lattice_matrix) : null;
118
- const sites = positions.map((pos, idx) => {
119
- const xyz = pos;
120
- const abc = inv_matrix
121
- ? math.mat3x3_vec3_multiply(inv_matrix, xyz)
122
- : [0, 0, 0];
123
- const properties = force_data?.[idx] ? { force: force_data[idx] } : {};
124
- return {
125
- species: [{ element: elements[idx], occu: 1, oxidation_state: 0 }],
126
- abc,
127
- xyz,
128
- label: `${elements[idx]}${idx + 1}`,
129
- properties,
130
- };
131
- });
132
- return lattice_matrix
133
- ? {
134
- sites,
135
- lattice: {
136
- matrix: lattice_matrix,
137
- ...math.calc_lattice_params(lattice_matrix),
138
- pbc: pbc || [true, true, true],
139
- },
140
- }
141
- : { sites };
142
- };
143
- const create_trajectory_frame = (positions, elements, lattice_matrix, pbc, step, metadata = {}) => ({
144
- structure: create_structure(positions, elements, lattice_matrix, pbc),
145
- step,
146
- metadata,
147
- });
148
- // Shared utility to read ndarray data from binary format
149
- const read_ndarray_from_view = (view, ref) => {
150
- const [shape, dtype, array_offset] = ref.ndarray;
151
- const total = shape.reduce((a, b) => a * b, 1);
152
- const data = [];
153
- let pos = array_offset;
154
- const readers = {
155
- int64: () => {
156
- const v = Number(view.getBigInt64(pos, true));
157
- pos += 8;
158
- return v;
159
- },
160
- int32: () => {
161
- const v = view.getInt32(pos, true);
162
- pos += 4;
163
- return v;
164
- },
165
- float64: () => {
166
- const v = view.getFloat64(pos, true);
167
- pos += 8;
168
- return v;
169
- },
170
- float32: () => {
171
- const v = view.getFloat32(pos, true);
172
- pos += 4;
173
- return v;
174
- },
175
- };
176
- const reader = readers[dtype];
177
- if (!reader)
178
- throw new Error(`Unsupported dtype: ${dtype}`);
179
- for (let i = 0; i < total; i++)
180
- data.push(reader());
181
- return shape.length === 1
182
- ? [data]
183
- : shape.length === 2
184
- ? Array.from({ length: shape[0] }, (_, idx) => data.slice(idx * shape[1], (idx + 1) * shape[1]))
185
- : (() => {
186
- throw new Error(`Unsupported shape`);
187
- })();
188
- };
189
- // Unified frame counting for XYZ
190
- function count_xyz_frames(data) {
191
- if (!data || typeof data !== `string`)
192
- return 0;
193
- const lines = data.trim().split(/\r?\n/);
194
- let frame_count = 0;
195
- let line_idx = 0;
196
- while (line_idx < lines.length) {
197
- if (!lines[line_idx]?.trim()) {
198
- line_idx++;
199
- continue;
200
- }
201
- const num_atoms = parseInt(lines[line_idx].trim(), 10);
202
- if (isNaN(num_atoms) || num_atoms <= 0 || line_idx + num_atoms + 1 >= lines.length) {
203
- line_idx++;
204
- continue;
205
- }
206
- // Quick validation of first few atom lines
207
- let valid_coords = 0;
208
- for (let idx = 0; idx < Math.min(num_atoms, 3); idx++) {
209
- const parts = lines[line_idx + 2 + idx]?.trim().split(/\s+/);
210
- if (parts?.length >= 4 && isNaN(parseInt(parts[0])) && parts[0].length <= 3) {
211
- if (parts.slice(1, 4).every((coord) => !isNaN(parseFloat(coord))))
212
- valid_coords++;
213
- }
214
- }
215
- if (valid_coords >= Math.min(num_atoms, 3)) {
216
- frame_count++;
217
- line_idx += 2 + num_atoms;
218
- }
219
- else {
220
- line_idx++;
221
- }
222
- }
223
- return frame_count;
224
- }
225
- // HDF5 utilities - consolidated type guards and helpers
226
- const is_hdf5_dataset = (entity) => entity !== null && (`to_array` in entity || entity instanceof h5wasm.Dataset);
227
- const is_hdf5_group = (entity) => entity !== null && (`keys` in entity && entity instanceof h5wasm.Group);
228
- // Specialized parsers - consolidated and optimized
229
- const parse_torch_sim_hdf5 = async (buffer, filename) => {
230
- await h5wasm.ready;
231
- const { FS } = await h5wasm.ready;
232
- const temp_filename = filename || `temp.h5`;
233
- FS.writeFile(temp_filename, new Uint8Array(buffer));
234
- const h5_file = new h5wasm.File(temp_filename, `r`);
235
- try {
236
- // Unified dataset discovery with path tracking
237
- const found_paths = {};
238
- const find_dataset = (names) => {
239
- const discover = (parent, path = ``) => {
240
- for (const name of parent.keys()) {
241
- const item = parent.get(name);
242
- const full_path = path ? `${path}/${name}` : `/${name}`;
243
- if (names.includes(name) && is_hdf5_dataset(item)) {
244
- // Track which name was found and its path
245
- const found_name = names.find((n) => n === name);
246
- if (found_name)
247
- found_paths[found_name] = full_path;
248
- return item;
249
- }
250
- if (is_hdf5_group(item)) {
251
- const result = discover(item, full_path);
252
- if (result)
253
- return result;
254
- }
255
- }
256
- return null;
257
- };
258
- return discover(h5_file);
259
- };
260
- const positions_data = find_dataset([`positions`])?.to_array();
261
- const atomic_numbers_data = find_dataset([`atomic_numbers`, `numbers`, `Z`, `species`])?.to_array();
262
- const cells_data = find_dataset([`cell`, `cells`, `lattice`])?.to_array();
263
- const energies_data = find_dataset([`potential_energy`, `energy`])?.to_array();
264
- if (!positions_data || !atomic_numbers_data) {
265
- const missing_datasets = [];
266
- if (!positions_data) {
267
- missing_datasets.push(`positions (tried: positions, coords, coordinates)`);
268
- }
269
- if (!atomic_numbers_data) {
270
- missing_datasets.push(`atomic numbers (tried: atomic_numbers, numbers, Z, species)`);
271
- }
272
- const missing_str = missing_datasets.join(`, `);
273
- const available_str = Array.from(h5_file.keys()).join(`, `);
274
- throw new Error(`Missing required dataset(s) in HDF5 file: ${missing_str}. Available datasets: ${available_str}`);
275
- }
276
- const positions = Array.isArray(positions_data[0]?.[0])
277
- ? positions_data
278
- : [positions_data];
279
- const atomic_numbers = Array.isArray(atomic_numbers_data[0])
280
- ? atomic_numbers_data
281
- : [atomic_numbers_data];
282
- const elements = convert_atomic_numbers(atomic_numbers[0]);
283
- const frames = positions.map((frame_pos, idx) => {
284
- const cell = cells_data?.[idx];
285
- const lattice_mat = cell
286
- ? math.transpose_3x3_matrix(validate_3x3_matrix(cell))
287
- : undefined;
288
- const energy = energies_data?.[idx]?.[0];
289
- const metadata = {};
290
- if (energy !== undefined)
291
- metadata.energy = energy;
292
- if (lattice_mat) {
293
- metadata.volume = math.calc_lattice_params(lattice_mat).volume;
294
- }
295
- const pbc = lattice_mat ? [true, true, true] : [false, false, false];
296
- return create_trajectory_frame(frame_pos, elements, lattice_mat, pbc, idx, metadata);
297
- });
298
- return {
299
- frames,
300
- metadata: {
301
- source_format: `hdf5_trajectory`,
302
- frame_count: frames.length,
303
- num_atoms: elements.length,
304
- periodic_boundary_conditions: cells_data
305
- ? [true, true, true]
306
- : [false, false, false],
307
- element_counts: elements.reduce((counts, element) => {
308
- counts[element] = (counts[element] || 0) + 1;
309
- return counts;
310
- }, {}),
311
- discovered_datasets: {
312
- positions: found_paths.positions || `positions`,
313
- atomic_numbers: found_paths.atomic_numbers || found_paths.numbers ||
314
- found_paths.Z || found_paths.species || `unknown`,
315
- cells: found_paths.cell || found_paths.cells || found_paths.lattice,
316
- energies: found_paths.potential_energy || found_paths.energy,
317
- },
318
- total_groups_found: 1, // Simplified for now, could be enhanced
319
- has_cell_info: Boolean(cells_data),
320
- },
321
- };
322
- }
323
- finally {
324
- h5_file.close();
325
- try {
326
- FS.unlink(temp_filename);
327
- }
328
- catch { /* temp file cleanup is best-effort */ }
329
- }
330
- };
331
- const parse_vasp_xdatcar = (content, filename) => {
332
- const lines = content.trim().split(/\r?\n/);
333
- if (lines.length < 10)
334
- throw new Error(`XDATCAR file too short`);
335
- const scale = parseFloat(lines[1]);
336
- if (isNaN(scale))
337
- throw new Error(`Invalid scale factor`);
338
- const lattice_matrix = validate_3x3_matrix(lines.slice(2, 5).map((line) => line.trim().split(/\s+/).map((x) => parseFloat(x) * scale)));
339
- const element_names = lines[5].trim().split(/\s+/);
340
- const element_counts = lines[6].trim().split(/\s+/).map(Number);
341
- const elements = element_names.flatMap((name, idx) => Array(element_counts[idx]).fill(name));
342
- const frames = [];
343
- let line_idx = 7;
344
- const frac_to_cart = math.create_frac_to_cart(lattice_matrix);
345
- while (line_idx < lines.length) {
346
- const config_line = lines.find((line, idx) => idx >= line_idx && line.includes(`Direct configuration=`));
347
- if (!config_line)
348
- break;
349
- line_idx = lines.indexOf(config_line) + 1;
350
- const step_match = config_line.match(/configuration=\s*(\d+)/);
351
- const step = step_match ? parseInt(step_match[1]) : frames.length + 1;
352
- const positions = [];
353
- for (let idx = 0; idx < elements.length && line_idx < lines.length; idx++) {
354
- const coords = lines[line_idx].trim().split(/\s+/).slice(0, 3).map(Number);
355
- if (coords.length === 3 && !coords.some(isNaN)) {
356
- positions.push(frac_to_cart(coords));
357
- }
358
- line_idx++;
359
- }
360
- if (positions.length === elements.length) {
361
- const pbc = [true, true, true];
362
- const { volume } = math.calc_lattice_params(lattice_matrix);
363
- frames.push(create_trajectory_frame(positions, elements, lattice_matrix, pbc, step, {
364
- volume,
365
- }));
366
- }
367
- }
368
- return {
369
- frames,
370
- metadata: {
371
- filename,
372
- source_format: `vasp_xdatcar`,
373
- frame_count: frames.length,
374
- total_atoms: elements.length,
375
- periodic_boundary_conditions: [true, true, true],
376
- elements: element_names,
377
- element_counts,
378
- },
379
- };
380
- };
381
// Parse LAMMPS box bounds → lattice matrix. Handles orthogonal and triclinic boxes.
// Triclinic: converts bounding box to actual dims per https://docs.lammps.org/Howto_triclinic.html
// Lattice vectors: a=(lx,0,0), b=(xy,ly,0), c=(xz,yz,lz)
const parse_lammps_box = (box_lines, is_triclinic) => {
  if (box_lines.length !== 3) return null;
  const rows = box_lines.map((line) => line.split(/\s+/).map(Number));
  // Orthogonal rows are "lo hi"; triclinic rows are "lo hi tilt".
  const needed_cols = is_triclinic ? 3 : 2;
  const row_is_invalid = (row) =>
    row.length < needed_cols || row.slice(0, needed_cols).some(isNaN);
  if (rows.some(row_is_invalid)) return null;
  if (!is_triclinic) {
    // Orthogonal: each row is [lo, hi]; lattice is diagonal.
    const [[x_lo, x_hi], [y_lo, y_hi], [z_lo, z_hi]] = rows;
    return [[x_hi - x_lo, 0, 0], [0, y_hi - y_lo, 0], [0, 0, z_hi - z_lo]];
  }
  // Triclinic: rows are [lo_bound, hi_bound, tilt] with tilts xy, xz, yz.
  // Recover the true box lengths from the bounding-box extents.
  const [[xlo, xhi, xy], [ylo, yhi, xz], [zlo, zhi, yz]] = rows;
  const lx = (xhi - Math.max(0, xy, xz, xy + xz)) -
    (xlo - Math.min(0, xy, xz, xy + xz));
  const ly = (yhi - Math.max(0, yz)) - (ylo - Math.min(0, yz));
  return [[lx, 0, 0], [xy, ly, 0], [xz, yz, zhi - zlo]];
};
405
// Parse LAMMPS trajectory (.lammpstrj). Atom types mapped to elements via atom_type_mapping
// or by default: 1→H, 2→He, etc. Supports orthogonal and triclinic simulation boxes.
const parse_lammps_trajectory = (content, filename, atom_type_mapping) => {
  const lines = content.trim().split(/\r?\n/);
  const frames = [];
  const atom_types_found = new Set();
  let cursor = 0;
  // Line-cursor helpers: consume one line, peek at the current one,
  // and scan forward until a line starting with `prefix` is found.
  const take_line = () => lines[cursor++]?.trim() ?? ``;
  const current_line = () => lines[cursor]?.trim() ?? ``;
  const advance_to = (prefix) => {
    while (cursor < lines.length && !current_line().startsWith(prefix)) cursor++;
    return cursor < lines.length;
  };
  // Resolve an integer atom type to an element symbol (custom mapping wins).
  const element_for = (atom_type) => {
    const mapped = atom_type_mapping?.[atom_type];
    if (mapped) return mapped;
    return ELEM_SYMBOLS[Math.max(0, atom_type - 1) % ELEM_SYMBOLS.length];
  };
  while (cursor < lines.length) {
    if (!advance_to(`ITEM: TIMESTEP`)) break;
    cursor++;
    const timestep = parseInt(take_line(), 10) || 0;
    if (!advance_to(`ITEM: NUMBER OF ATOMS`)) break;
    cursor++;
    const num_atoms = parseInt(take_line(), 10);
    if (!num_atoms || num_atoms <= 0) continue;
    // BOX BOUNDS: orthogonal="pp pp pp", triclinic="xy xz yz pp pp pp"
    if (!advance_to(`ITEM: BOX BOUNDS`)) break;
    const box_header = take_line();
    const is_triclinic = /BOX BOUNDS\s+xy\s+xz\s+yz/i.test(box_header);
    // Last three header tokens are the per-axis boundary flags ("p" = periodic).
    const flags = box_header.replace(`ITEM: BOX BOUNDS`, ``).trim().split(/\s+/).slice(-3);
    const periodic = (flag) => flag.toLowerCase().startsWith(`p`);
    const pbc = flags.length === 3
      ? [periodic(flags[0]), periodic(flags[1]), periodic(flags[2])]
      : [true, true, true];
    const lattice_matrix = parse_lammps_box([take_line(), take_line(), take_line()], is_triclinic);
    if (!lattice_matrix) continue;
    // Find ITEM: ATOMS and build a column-name → index lookup.
    if (!advance_to(`ITEM: ATOMS`)) break;
    const headers = take_line().replace(`ITEM: ATOMS`, ``).trim().toLowerCase().split(/\s+/);
    const col = Object.fromEntries(headers.map((name, col_idx) => [name, col_idx]));
    // Position columns: prefer unwrapped (xu/yu/zu) > scaled (xs/ys/zs) > regular (x/y/z).
    const has_all = (keys) => keys.every((key) => key in col);
    const pos_keys = has_all([`xu`, `yu`, `zu`])
      ? [`xu`, `yu`, `zu`]
      : has_all([`xs`, `ys`, `zs`])
      ? [`xs`, `ys`, `zs`]
      : [`x`, `y`, `z`];
    const pos_cols = pos_keys.map((key) => col[key]);
    // Atom type column: prefer type > element > id (fallback treats id as atomic number).
    // NOTE(review): if only an `element` column of symbols exists, parseInt yields NaN
    // and every atom falls back to type 1 — presumably intentional default; verify upstream.
    const type_col = col.type ?? col.element ?? col.id ?? 0;
    const use_scaled = pos_keys[0] === `xs`;
    if (pos_cols.some((col_idx) => col_idx === undefined)) continue;
    const positions = [];
    const elements = [];
    // Scaled (fractional) coordinates need the lattice to go Cartesian.
    const frac_to_cart = use_scaled ? math.create_frac_to_cart(lattice_matrix) : null;
    for (let atom = 0; atom < num_atoms && cursor < lines.length; atom++) {
      const fields = take_line().split(/\s+/);
      const coords = pos_cols.map((col_idx) => parseFloat(fields[col_idx]));
      if (coords.some(isNaN) || fields.length <= Math.max(...pos_cols, type_col)) continue;
      positions.push(frac_to_cart ? frac_to_cart(coords) : coords);
      const atom_type = parseInt(fields[type_col], 10) || 1;
      atom_types_found.add(atom_type);
      elements.push(element_for(atom_type));
    }
    // Only keep complete frames where every atom line parsed successfully.
    if (positions.length === num_atoms) {
      const { volume } = math.calc_lattice_params(lattice_matrix);
      frames.push(create_trajectory_frame(positions, elements, lattice_matrix, pbc, timestep, { volume, timestep }));
    }
  }
  if (frames.length === 0) {
    throw new Error(`No valid frames found in LAMMPS trajectory`);
  }
  const first_frame = frames[0];
  // Tally element occurrences from the first frame's sites.
  const element_counts = {};
  for (const site of first_frame.structure.sites) {
    const elem = site.species[0].element;
    element_counts[elem] = (element_counts[elem] || 0) + 1;
  }
  return {
    frames,
    metadata: {
      filename,
      source_format: `lammps_trajectory`,
      frame_count: frames.length,
      total_atoms: first_frame.structure.sites.length,
      periodic_boundary_conditions: (`lattice` in first_frame.structure)
        ? first_frame.structure.lattice.pbc
        : [true, true, true],
      atom_types: Array.from(atom_types_found).sort((a, b) => a - b),
      element_counts,
    },
  };
};
512
// Parse a multi-frame (ext)XYZ trajectory: repeated blocks of
// "<natoms>\n<comment>\n<natoms atom lines>", with optional per-frame
// properties and a Lattice="..." matrix embedded in the comment line.
const parse_xyz_trajectory = (content) => {
  const lines = content.trim().split(/\r?\n/);
  const frames = [];
  let line_idx = 0;
  while (line_idx < lines.length) {
    // Skip blank separator lines between frames.
    if (!lines[line_idx]?.trim()) {
      line_idx++;
      continue;
    }
    const num_atoms = parseInt(lines[line_idx].trim(), 10);
    if (isNaN(num_atoms) || num_atoms <= 0 || line_idx + num_atoms + 1 >= lines.length) {
      line_idx++;
      continue;
    }
    const comment = lines[++line_idx] || ``;
    const metadata = {};
    // Regex extractors for scalar properties embedded in the comment line.
    const extractors = {
      step: /(?:step|frame|ionic_step)\s*[=:]?\s*(\d+)/i,
      energy: /(?:energy|E|etot|total_energy)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      volume: /(?:volume|vol|V)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      pressure: /(?:pressure|press|P)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      temperature: /(?:temperature|temp|T)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      force_max: /(?:max_force|force_max|fmax)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      bandgap: /(?:bandgap|E_gap|gap)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
    };
    const step_match = extractors.step.exec(comment);
    const step = step_match?.[1] ? parseInt(step_match[1]) : frames.length;
    for (const [key, pattern] of Object.entries(extractors)) {
      if (key === `step`) continue;
      const match = pattern.exec(comment);
      if (match) metadata[key] = parseFloat(match[1]);
    }
    // Extended-XYZ lattice: 9 numbers in row-major order.
    const lattice_match = comment.match(/Lattice\s*=\s*"([^"]+)"/i);
    let lattice_matrix;
    if (lattice_match) {
      const nums = lattice_match[1].split(/\s+/).map(Number);
      if (nums.length === 9) {
        lattice_matrix = [nums.slice(0, 3), nums.slice(3, 6), nums.slice(6, 9)];
        // Cell volume overrides any volume parsed from the comment text.
        metadata.volume = math.calc_lattice_params(lattice_matrix).volume;
      }
    }
    // Parse the per-atom lines: "symbol x y z [fx fy fz]".
    const positions = [];
    const elements = [];
    const forces = [];
    const has_forces = comment.includes(`forces:R:3`);
    for (let atom = 0; atom < num_atoms; atom++) {
      line_idx++;
      if (line_idx >= lines.length) break;
      const parts = lines[line_idx].trim().split(/\s+/);
      if (parts.length < 4) continue;
      elements.push(parts[0]);
      positions.push([parseFloat(parts[1]), parseFloat(parts[2]), parseFloat(parts[3])]);
      if (has_forces && parts.length >= 7) {
        forces.push([parseFloat(parts[4]), parseFloat(parts[5]), parseFloat(parts[6])]);
      }
    }
    if (forces.length > 0) {
      metadata.forces = forces;
      const magnitudes = forces.map((force) => Math.hypot(...force));
      metadata.force_max = Math.max(...magnitudes);
      // force_norm = RMS (root mean square) of the per-atom force magnitudes.
      const sq_sum = magnitudes.reduce((sum, mag) => sum + mag ** 2, 0);
      metadata.force_norm = Math.sqrt(sq_sum / magnitudes.length);
    }
    frames.push(create_trajectory_frame(positions, elements, lattice_matrix, lattice_matrix ? [true, true, true] : undefined, step, metadata));
    line_idx++;
  }
  return {
    frames,
    metadata: {
      source_format: `xyz_trajectory`,
      frame_count: frames.length,
      total_atoms: frames[0]?.structure.sites.length || 0,
    },
  };
};
595
// Parse an ASE binary trajectory (.traj, Ulm container format):
// fixed header with frame count and an offset table, then one JSON blob
// (plus optional out-of-band ndarrays) per frame.
const parse_ase_trajectory = (buffer, filename) => {
  const view = new DataView(buffer);
  // Ulm files open with the literal 8-byte signature "- of Ulm".
  const signature = new TextDecoder().decode(new Uint8Array(buffer, 0, 8));
  if (signature !== `- of Ulm`) throw new Error(`Invalid ASE trajectory`);
  // Header layout (little-endian int64): version @24, n_items @32, offsets @40.
  const _version = Number(view.getBigInt64(24, true));
  const n_items = Number(view.getBigInt64(32, true));
  const offsets_pos = Number(view.getBigInt64(40, true));
  if (n_items <= 0) throw new Error(`Invalid frame count`);
  const frame_offsets = Array.from(
    { length: n_items },
    (_, idx) => Number(view.getBigInt64(offsets_pos + idx * 8, true)),
  );
  const frames = [];
  // Atomic numbers are often stored only on the first frame; cache them.
  let global_numbers;
  for (let idx = 0; idx < n_items; idx++) {
    try {
      let offset = frame_offsets[idx];
      const json_length = Number(view.getBigInt64(offset, true));
      offset += 8;
      if (json_length > MAX_SAFE_STRING_LENGTH) {
        console.warn(`Skipping frame ${idx + 1}/${n_items}: too large`);
        continue;
      }
      const frame_data = JSON.parse(new TextDecoder().decode(new Uint8Array(buffer, offset, json_length)));
      // Keys ending in "." reference on-disk ndarrays; plain keys hold inline JSON.
      const positions_ref = frame_data[`positions.`] || frame_data.positions;
      const positions = positions_ref?.ndarray
        ? read_ndarray_from_view(view, positions_ref)
        : positions_ref;
      const numbers_ref = frame_data[`numbers.`] || frame_data.numbers || global_numbers;
      const numbers = numbers_ref?.ndarray
        ? read_ndarray_from_view(view, numbers_ref).flat()
        : numbers_ref;
      if (numbers) global_numbers = numbers;
      if (!numbers || !positions) continue;
      const elements = convert_atomic_numbers(numbers);
      const metadata = {
        step: idx,
        ...(frame_data.calculator || {}),
        ...(frame_data.info || {}),
      };
      frames.push(create_trajectory_frame(
        positions,
        elements,
        frame_data.cell ? validate_3x3_matrix(frame_data.cell) : undefined,
        frame_data.pbc || [true, true, true],
        idx,
        metadata,
      ));
    } catch (error) {
      // A corrupt frame shouldn't abort the whole trajectory.
      console.warn(`Error processing frame ${idx + 1}/${n_items}:`, error);
    }
  }
  if (frames.length === 0) throw new Error(`No valid frames found`);
  return {
    frames,
    metadata: {
      filename,
      source_format: `ase_trajectory`,
      frame_count: frames.length,
      total_atoms: global_numbers?.length || 0,
      periodic_boundary_conditions: [true, true, true],
    },
  };
};
661
// Unified Frame Loader - replaces separate XYZ and ASE loaders
export class TrajFrameReader {
  format;
  global_numbers; // For ASE trajectories: cached atomic numbers shared across frames
  constructor(filename) {
    // Only .traj is treated as binary ASE; everything else is text XYZ.
    this.format = filename.toLowerCase().endsWith(`.traj`) ? `ase` : `xyz`;
  }
  // async needed to satisfy FrameLoader interface
  // deno-lint-ignore require-await
  async get_total_frames(data) {
    if (this.format === `xyz`) {
      if (data instanceof ArrayBuffer) throw new Error(`XYZ loader requires text data`);
      return count_xyz_frames(data);
    }
    if (!(data instanceof ArrayBuffer)) {
      throw new Error(`ASE loader requires binary data`);
    }
    // n_items lives at byte 32 of the Ulm header.
    return Number(new DataView(data).getBigInt64(32, true));
  }
  // Build a sampled index of byte offsets so frames can be loaded on demand.
  async build_frame_index(data, sample_rate, on_progress) {
    const total_frames = await this.get_total_frames(data);
    const frame_index = [];
    if (this.format === `xyz`) {
      const text = data;
      const lines = text.trim().split(/\r?\n/);
      const encoder = new TextEncoder(); // Reuse single encoder instance
      // Byte offsets must account for the file's actual newline flavor.
      const newline = text.includes(`\r\n`) ? `\r\n` : `\n`;
      const newline_bytes = encoder.encode(newline).length;
      let current_frame = 0;
      let line_idx = 0;
      let byte_offset = 0;
      while (line_idx < lines.length && current_frame < total_frames) {
        const line = lines[line_idx];
        const num_atoms = parseInt(line.trim(), 10);
        // Blank or malformed header lines just advance the byte cursor.
        if (isNaN(num_atoms) || num_atoms <= 0 || line_idx + num_atoms + 1 >= lines.length) {
          byte_offset += encoder.encode(line).length + newline_bytes;
          line_idx++;
          continue;
        }
        const sampled = current_frame % sample_rate === 0;
        if (sampled) {
          frame_index.push({
            frame_number: current_frame,
            byte_offset,
            estimated_size: 0,
          });
        }
        // Sum the exact byte length of this frame (count + comment + atom lines).
        const frame_end = line_idx + 2 + num_atoms;
        let frame_size = 0;
        for (let i = line_idx; i < frame_end; i++) {
          frame_size += encoder.encode(lines[i]).length + newline_bytes;
        }
        line_idx = frame_end;
        if (sampled) {
          frame_index[frame_index.length - 1].estimated_size = frame_size;
        }
        byte_offset += frame_size;
        current_frame++;
        if (on_progress && current_frame % 1000 === 0) {
          on_progress({
            current: (current_frame / total_frames) * 100,
            total: 100,
            stage: `Indexing: ${current_frame}`,
          });
        }
      }
    } else {
      // ASE: the header's offset table gives every frame position directly.
      const view = new DataView(data);
      const offsets_pos = Number(view.getBigInt64(40, true));
      for (let i = 0; i < total_frames; i += sample_rate) {
        frame_index.push({
          frame_number: i,
          byte_offset: Number(view.getBigInt64(offsets_pos + i * 8, true)),
          estimated_size: 0,
        });
        if (on_progress && i % 10000 === 0) {
          on_progress({
            current: (i / total_frames) * 100,
            total: 100,
            stage: `Indexing ASE: ${i}`,
          });
        }
      }
    }
    return frame_index;
  }
  // async needed to satisfy FrameLoader interface
  // deno-lint-ignore require-await
  async load_frame(data, frame_number) {
    return this.format === `xyz`
      ? this.load_xyz_frame(data, frame_number)
      : this.load_ase_frame(data, frame_number);
  }
  // Lightweight scan collecting per-frame scalar properties for plotting,
  // without materializing full structures.
  async extract_plot_metadata(data, options, on_progress) {
    const { sample_rate = 1, properties } = options || {};
    const metadata_list = [];
    const total_frames = await this.get_total_frames(data);
    // Restrict a frame's properties to the requested keys (if any).
    const filter_props = (frame_metadata) => {
      if (properties) {
        frame_metadata.properties = Object.fromEntries(
          Object.entries(frame_metadata.properties).filter(([key]) => properties.includes(key)),
        );
      }
      return frame_metadata;
    };
    if (this.format === `xyz`) {
      const lines = data.trim().split(/\r?\n/);
      let current_frame = 0;
      let line_idx = 0;
      while (line_idx < lines.length && current_frame < total_frames) {
        const num_atoms = parseInt(lines[line_idx]?.trim() ?? ``, 10);
        if (isNaN(num_atoms) || num_atoms <= 0 || line_idx + num_atoms + 1 >= lines.length) {
          line_idx++;
          continue;
        }
        if (current_frame % sample_rate === 0) {
          const comment = lines[line_idx + 1] || ``;
          metadata_list.push(filter_props(this.parse_xyz_metadata(comment, current_frame)));
        }
        line_idx += 2 + num_atoms;
        current_frame++;
        if (on_progress && current_frame % 5000 === 0) {
          on_progress({
            current: (current_frame / total_frames) * 100,
            total: 100,
            stage: `Extracting: ${current_frame}`,
          });
        }
      }
    } else if (this.format === `ase`) {
      // ASE: decode each sampled frame's JSON blob and mine it for scalars.
      const view = new DataView(data);
      const n_items = Number(view.getBigInt64(32, true));
      const offsets_pos = Number(view.getBigInt64(40, true));
      for (let i = 0; i < n_items; i += sample_rate) {
        try {
          const frame_offset = Number(view.getBigInt64(offsets_pos + i * 8, true));
          const json_length = Number(view.getBigInt64(frame_offset, true));
          if (json_length > MAX_METADATA_SIZE) {
            console.warn(`Skipping large frame ${i}: ${Math.round(json_length / 1024 / 1024)}MB`);
            continue;
          }
          const frame_data = JSON.parse(new TextDecoder().decode(new Uint8Array(data, frame_offset + 8, json_length)));
          metadata_list.push(filter_props(this.parse_ase_metadata(frame_data, i)));
          if (on_progress && i % 5000 === 0) {
            on_progress({
              current: (i / n_items) * 100,
              total: 100,
              stage: `Extracting ASE: ${i}/${n_items}`,
            });
          }
        } catch (error) {
          console.warn(`Failed to extract metadata from ASE frame ${i}:`, error);
          continue;
        }
      }
    }
    return metadata_list;
  }
  load_xyz_frame(data, frame_number) {
    const lines = data.trim().split(/\r?\n/);
    let current_frame = 0;
    let line_idx = 0;
    // Seek: hop over whole frames until the requested one is reached.
    while (line_idx < lines.length && current_frame < frame_number) {
      const count = parseInt(lines[line_idx]?.trim() ?? ``, 10);
      if (isNaN(count) || count <= 0) {
        line_idx++;
        continue;
      }
      line_idx += 2 + count;
      current_frame++;
    }
    // Parse the target frame, bailing out on truncated input.
    if (line_idx >= lines.length) return null;
    const num_atoms = parseInt(lines[line_idx].trim(), 10);
    if (isNaN(num_atoms) || line_idx + num_atoms + 1 >= lines.length) return null;
    const comment = lines[line_idx + 1] || ``;
    const positions = [];
    const elements = [];
    for (let i = 0; i < num_atoms; i++) {
      const parts = lines[line_idx + 2 + i]?.trim().split(/\s+/);
      if (parts && parts.length >= 4) {
        elements.push(parts[0]);
        positions.push([parseFloat(parts[1]), parseFloat(parts[2]), parseFloat(parts[3])]);
      }
    }
    const metadata = this.parse_xyz_metadata(comment, frame_number);
    return create_trajectory_frame(positions, elements, undefined, undefined, frame_number, metadata.properties);
  }
  load_ase_frame(data, frame_number) {
    // ASE frame loading with proper ndarray support.
    try {
      const view = new DataView(data);
      const n_items = Number(view.getBigInt64(32, true));
      const offsets_pos = Number(view.getBigInt64(40, true));
      if (frame_number >= n_items) return null;
      const frame_offset = Number(view.getBigInt64(offsets_pos + frame_number * 8, true));
      const json_length = Number(view.getBigInt64(frame_offset, true));
      const frame_data = JSON.parse(new TextDecoder().decode(new Uint8Array(data, frame_offset + 8, json_length)));
      // Keys ending in "." reference on-disk ndarrays; plain keys hold inline JSON.
      const positions_ref = frame_data[`positions.`] || frame_data.positions;
      const positions = positions_ref?.ndarray
        ? read_ndarray_from_view(view, positions_ref)
        : positions_ref;
      const numbers_ref = frame_data[`numbers.`] || frame_data.numbers || this.global_numbers;
      const numbers = numbers_ref?.ndarray
        ? read_ndarray_from_view(view, numbers_ref).flat()
        : numbers_ref;
      if (numbers) this.global_numbers = numbers;
      if (!numbers || !positions) throw new Error(`Missing atomic numbers or positions`);
      const cell = frame_data.cell ? validate_3x3_matrix(frame_data.cell) : undefined;
      const metadata = {
        step: frame_number,
        ...(frame_data.calculator || {}),
        ...(frame_data.info || {}),
      };
      // Derive volume from the cell when one is present.
      if (cell) {
        try {
          metadata.volume = Math.abs(math.det_3x3(cell));
        } catch (error) {
          console.warn(`Failed to calculate volume for frame ${frame_number}:`, error);
        }
      }
      return create_trajectory_frame(positions, convert_atomic_numbers(numbers), cell, frame_data.pbc || [true, true, true], frame_number, metadata);
    } catch (error) {
      console.warn(`Failed to load ASE frame ${frame_number}:`, error);
      return null;
    }
  }
  // Pull scalar properties out of an XYZ comment line via regex.
  parse_xyz_metadata(comment, frame_number) {
    const properties = {};
    const patterns = {
      energy: /(?:energy|E|etot)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      volume: /(?:volume|vol|V)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      pressure: /(?:pressure|press|P)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
      force_max: /(?:max_force|fmax)\s*[=:]?\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)/i,
    };
    for (const [key, pattern] of Object.entries(patterns)) {
      const match = pattern.exec(comment);
      if (match) properties[key] = parseFloat(match[1]);
    }
    const step_match = comment.match(/(?:step|frame)\s*[=:]?\s*(\d+)/i);
    const step = step_match ? parseInt(step_match[1]) : frame_number;
    return { frame_number, step, properties };
  }
  // Pull known scalar properties out of a decoded ASE frame JSON blob.
  parse_ase_metadata(frame_data, frame_number) {
    const properties = {};
    const step = frame_number;
    // Calculator section: energies.
    if (frame_data.calculator && typeof frame_data.calculator === `object`) {
      const calculator = frame_data.calculator;
      for (const prop of [`energy`, `potential_energy`, `kinetic_energy`, `total_energy`]) {
        if (prop in calculator && typeof calculator[prop] === `number`) {
          properties[prop] = calculator[prop];
        }
      }
    }
    // Info section: forces, stress, thermodynamic scalars.
    if (frame_data.info && typeof frame_data.info === `object`) {
      const info = frame_data.info;
      for (
        const prop of [
          `force_max`,
          `force_norm`,
          `stress_max`,
          `stress_frobenius`,
          `pressure`,
          `temperature`,
        ]
      ) {
        if (prop in info && typeof info[prop] === `number`) {
          properties[prop] = info[prop];
        }
      }
    }
    // Volume from the cell matrix, when present and valid.
    if (frame_data.cell && Array.isArray(frame_data.cell)) {
      try {
        const validated_cell = validate_3x3_matrix(frame_data.cell);
        properties.volume = Math.abs(math.det_3x3(validated_cell));
      } catch (error) {
        console.warn(`Failed to calculate volume for ASE frame ${frame_number}:`, error);
      }
    }
    return { frame_number, step, properties };
  }
}
990
// Main parsing entry point - simplified
/**
 * Parse trajectory data from any supported source.
 * Dispatches on payload type: binary ArrayBuffer (ASE .traj, torch-sim HDF5),
 * text (multi-frame XYZ, VASP XDATCAR, LAMMPS .lammpstrj, single XYZ, JSON),
 * or an already-parsed JSON object/array.
 * @param {ArrayBuffer|string|object} data - Raw file contents or pre-parsed JSON.
 * @param {string} [filename] - Used for format detection and echoed into metadata.
 * @param {Record<number, string>} [atom_type_mapping] - LAMMPS atom type → element symbol.
 * @returns {Promise<{frames: object[], metadata: object}>}
 * @throws {Error} When no supported format matches the payload.
 */
export async function parse_trajectory_data(data, filename, atom_type_mapping) {
  // --- Binary payloads ---
  if (data instanceof ArrayBuffer) {
    if (FORMAT_PATTERNS.ase(data, filename))
      return parse_ase_trajectory(data, filename);
    if (FORMAT_PATTERNS.hdf5(data, filename)) {
      return await parse_torch_sim_hdf5(data, filename);
    }
    // Fix: interpolate the actual filename (message previously contained a
    // literal `$(unknown)` placeholder instead of the template expression).
    throw new Error(`Unsupported binary format${filename ? `: ${filename}` : ``}`);
  }
  // --- Text payloads ---
  if (typeof data === `string`) {
    const content = data.trim();
    if (FORMAT_PATTERNS.xyz_multi(content, filename))
      return parse_xyz_trajectory(content);
    if (FORMAT_PATTERNS.vasp(content, filename)) {
      return parse_vasp_xdatcar(content, filename);
    }
    if (FORMAT_PATTERNS.lammpstrj(content, filename)) {
      return parse_lammps_trajectory(content, filename, atom_type_mapping);
    }
    // Single XYZ fallback
    if (filename?.toLowerCase().match(/\.(?:xyz|extxyz)$/)) {
      try {
        const structure = parse_xyz(content);
        if (structure) {
          return {
            frames: [{ structure, step: 0, metadata: {} }],
            metadata: { source_format: `single_xyz`, frame_count: 1 },
          };
        }
      } catch { /* single-frame XYZ parsing failed, continue to JSON parsing */ }
    }
    // Last resort for text: treat it as JSON and fall through to the object handlers.
    try {
      data = JSON.parse(content);
    } catch {
      throw new Error(`Unsupported text format`);
    }
  }
  if (!data || typeof data !== `object`)
    throw new Error(`Invalid data format`);
  // --- JSON array: list of frames (or bare structures) ---
  if (Array.isArray(data)) {
    const frames = data.map((frame_data, idx) => {
      const frame_obj = frame_data;
      return {
        structure: (frame_obj.structure || frame_obj),
        step: frame_obj.step || idx,
        metadata: frame_obj.metadata || {},
      };
    });
    return { frames, metadata: { source_format: `array`, frame_count: frames.length } };
  }
  const obj = data;
  // --- Pymatgen Trajectory JSON: fractional coords + one shared lattice ---
  if (obj[`@class`] === `Trajectory` && obj.species && obj.coords && obj.lattice) {
    const species = obj.species;
    const coords = obj.coords;
    const matrix = validate_3x3_matrix(obj.lattice);
    const frame_properties = obj.frame_properties || [];
    const frac_to_cart = math.create_frac_to_cart(matrix);
    const frames = coords.map((frame_coords, idx) => {
      const positions = frame_coords.map((abc) => frac_to_cart(abc));
      // Unwrap serialized numpy arrays ({"@class": "array", data: [...]})
      // and derive summary statistics for forces / stress.
      const raw_properties = frame_properties[idx] || {};
      const processed_properties = {};
      Object.entries(raw_properties).forEach(([key, value]) => {
        if (value && typeof value === `object` && value[`@class`] === `array`) {
          const array_obj = value;
          processed_properties[key] = array_obj.data;
          if (key === `forces` && Array.isArray(array_obj.data)) {
            const forces = array_obj.data;
            const force_magnitudes = forces.map((force) => Math.hypot(...force));
            processed_properties.force_max = Math.max(...force_magnitudes);
            // RMS of per-atom force magnitudes
            processed_properties.force_norm = Math.sqrt(
              force_magnitudes.reduce((sum, f) => sum + f ** 2, 0) /
                force_magnitudes.length,
            );
          }
          if (key === `stress` && Array.isArray(array_obj.data)) {
            const stress_tensor = array_obj.data;
            if (!math.is_square_matrix(stress_tensor, 3)) {
              console.warn(`Invalid stress tensor structure in frame ${idx}`);
            } else {
              // Diagonal elements are the normal stresses.
              const normal_stresses = [
                stress_tensor[0][0],
                stress_tensor[1][1],
                stress_tensor[2][2],
              ];
              processed_properties.stress_max = Math.max(...normal_stresses.map(Math.abs));
              // Hydrostatic pressure = negative mean of the normal stresses.
              processed_properties.pressure =
                -(normal_stresses[0] + normal_stresses[1] + normal_stresses[2]) / 3;
            }
          }
        } else {
          processed_properties[key] = value;
        }
      });
      return create_trajectory_frame(positions, species.map((specie) => specie.element), matrix, [true, true, true], idx, processed_properties);
    });
    return {
      frames,
      metadata: {
        filename,
        source_format: `pymatgen_trajectory`,
        frame_count: frames.length,
        species_list: [...new Set(species.map((specie) => specie.element))],
        periodic_boundary_conditions: [true, true, true],
      },
    };
  }
  // --- Pre-assembled { frames, metadata } object ---
  if (obj.frames && Array.isArray(obj.frames)) {
    return {
      frames: obj.frames,
      metadata: {
        ...obj.metadata,
        source_format: `object_with_frames`,
      },
    };
  }
  // --- Bare single structure (has sites) ---
  if (obj.sites) {
    return {
      frames: [{ structure: obj, step: 0, metadata: {} }],
      metadata: { source_format: `single_structure`, frame_count: 1 },
    };
  }
  throw new Error(`Unrecognized trajectory format`);
}
1127
/**
 * Build a user-facing message for formats this parser cannot handle.
 * Checks unsupported compression extensions first, then known binary
 * trajectory formats, and finally falls back to a generic binary-content
 * warning (or null when the content looks like parseable text).
 * @param {string} filename - Name used for extension-based detection.
 * @param {string|ArrayBuffer} content - Payload checked by `is_binary` as a last resort.
 * @returns {string|null} Message to show the user, or null if the format may be supported.
 */
export function get_unsupported_format_message(filename, content) {
  const lower = filename.toLowerCase();
  // Check for unsupported compression formats first
  const unsupported_compression = [
    { ext: `.bz2`, name: `BZ2` },
    { ext: `.xz`, name: `XZ` },
    { ext: `.zip`, name: `ZIP` },
  ];
  for (const { ext, name } of unsupported_compression) {
    if (lower.endsWith(ext)) {
      return `🚫 ${name} compression not supported in browser\nPlease decompress the file first`;
    }
  }
  // .dump files are LAMMPS binary dumps which require external tools to parse.
  // .lammpstrj files are LAMMPS text-based trajectory files supported by parse_lammps_trajectory().
  const formats = [
    { extensions: [`.dump`], name: `LAMMPS binary dump`, tool: `pymatgen` },
    { extensions: [`.nc`, `.netcdf`], name: `NetCDF`, tool: `MDAnalysis` },
    { extensions: [`.dcd`], name: `DCD`, tool: `MDAnalysis` },
  ];
  for (const { extensions, name, tool } of formats) {
    if (extensions.some((ext) => lower.endsWith(ext))) {
      return `🚫 ${name} format not supported\nConvert with ${tool} first`;
    }
  }
  // Fix: interpolate the actual filename (message previously contained a
  // literal `$(unknown)` placeholder instead of the template expression).
  return is_binary(content)
    ? `🚫 Binary format not supported${filename ? `: ${filename}` : ``}`
    : null;
}
1156
// Unified async parser with streaming support
export async function parse_trajectory_async(data, filename, on_progress, options = {}) {
  const {
    use_indexing,
    index_sample_rate = INDEX_SAMPLE_RATE,
    extract_plot_metadata = true,
    atom_type_mapping,
  } = options;
  // Small helper so progress reporting stays one-liners below.
  const report = (current, stage) => on_progress?.({ current, total: 100, stage });
  try {
    report(0, `Detecting format...`);
    const data_size = data instanceof ArrayBuffer ? data.byteLength : data.length;
    const is_large_file = data_size > LARGE_FILE_THRESHOLD;
    // Caller override wins; otherwise index only large files.
    const should_use_indexing = use_indexing ?? is_large_file;
    if (is_large_file) {
      report(5, `Large file detected (${Math.round(data_size / 1024 / 1024)}MB)`);
    }
    // Indexed loading is only implemented for XYZ/extXYZ text and ASE .traj.
    if (should_use_indexing && filename.toLowerCase().match(/\.(xyz|extxyz|traj)$/)) {
      const loader_options = { index_sample_rate, extract_plot_metadata };
      return await parse_with_unified_loader(data, filename, loader_options, on_progress);
    }
    // Fallback to direct (non-streaming) parsing.
    report(10, `Parsing trajectory...`);
    const result = await parse_trajectory_data(data, filename, atom_type_mapping);
    report(100, `Complete`);
    return result;
  } catch (error) {
    const error_message = error instanceof Error ? error.message : `Unknown error`;
    report(100, `Error: ${error_message}`);
    throw error;
  }
}
1187
// Unified frame loading using new TrajFrameReader
async function parse_with_unified_loader(data, filename, options, on_progress) {
    const { index_sample_rate, extract_plot_metadata } = options;
    const reader = new TrajFrameReader(filename);
    // All progress callbacks funnel through here with a fixed total of 100.
    const report = (current, stage) => on_progress?.({ current, total: 100, stage });
    report(10, `Counting frames...`);
    const total_frames = await reader.get_total_frames(data);
    report(20, `Building frame index...`);
    // Sub-task progress (0-100) is remapped into the 20-50 band.
    const frame_index = await reader.build_frame_index(data, index_sample_rate, (sub) => {
        report(20 + (sub.current / 100) * 30, `Building index: ${sub.stage}`);
    });
    report(50, `Loading initial frames...`);
    // Eagerly load up to the first 10 frames in parallel; drop any that came back null.
    const first_batch = Math.min(10, total_frames);
    const loaded = await Promise.all(
        Array.from({ length: first_batch }, (_, idx) => reader.load_frame(data, idx)),
    );
    const frames = loaded.filter((frame) => frame !== null);
    let plot_metadata;
    if (extract_plot_metadata) {
        report(70, `Extracting plot metadata...`);
        try {
            // Sub-task progress (0-100) is remapped into the 70-90 band.
            plot_metadata = await reader.extract_plot_metadata(data, { sample_rate: 1 }, (sub) => {
                report(70 + (sub.current / 100) * 20, `Extracting: ${sub.stage}`);
            });
        }
        catch (error) {
            // Plot metadata is optional; continue without it on failure.
            console.warn(`Failed to extract plot metadata:`, error);
        }
    }
    report(100, `Ready: ${total_frames} frames indexed`);
    const source_format = filename.toLowerCase().endsWith(`.traj`)
        ? `ase_trajectory`
        : `xyz_trajectory`;
    return {
        frames,
        metadata: { source_format, frame_count: total_frames },
        total_frames,
        indexed_frames: frame_index,
        plot_metadata,
        is_indexed: true,
    };
}
// Factory function for frame loader (simplified).
// Only formats TrajFrameReader can index frame-by-frame are accepted.
export function create_frame_loader(filename) {
    if (!filename.toLowerCase().match(/\.(xyz|extxyz|traj)$/)) {
        // Fix: interpolate the offending filename — the message previously
        // contained the literal text `$(unknown)` (broken interpolation).
        throw new Error(`Unsupported format for frame loading: ${filename}`);
    }
    return new TrajFrameReader(filename);
}
// Backward compatibility exports
// The legacy per-format loaders were merged into the unified TrajFrameReader;
// these aliases keep old import sites resolving to the same class.
export const XYZFrameLoader = TrajFrameReader;
export const ASEFrameLoader = TrajFrameReader;
/**
 * Load a fetch Response body as binary, optionally falling back to text.
 * @param {Response} resp - response to read; a clone is consumed first so the
 *   original body remains readable for the text fallback
 * @param {string} type - human-readable label used in warnings and errors
 * @param {boolean} [fallback=false] - when true, retry as text if the binary read fails
 * @returns {Promise<ArrayBuffer|string>} the binary payload, or text when the fallback succeeds
 * @throws {Error} when the binary read fails and no text fallback succeeds;
 *   the original failure is attached as `cause`
 */
export async function load_binary_traj(resp, type, fallback = false) {
    try {
        // Read binary from a clone so the original can be used for text fallback
        return await resp.clone().arrayBuffer();
    }
    catch (err1) {
        if (fallback) {
            console.warn(`Binary load failed for ${type}, using text:`, err1);
            try {
                return await resp.text();
            }
            catch (err2) {
                console.error(`Fallback to text also failed for ${type}:`, err2);
            }
        }
        // Fix: preserve the original error object for callers via `cause`
        // (the interpolated `${err1}` only keeps its string form).
        throw new Error(`Failed to load ${type} as binary: ${err1}`, { cause: err1 });
    }
}