expo-tiddlywiki-filesystem-android-external-storage 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +123 -0
- package/android/build.gradle +19 -0
- package/android/src/main/AndroidManifest.xml +2 -0
- package/android/src/main/java/expo/modules/externalstorage/ExternalStorageModule.kt +823 -0
- package/app.plugin.js +25 -0
- package/build/index.d.ts +130 -0
- package/build/index.d.ts.map +1 -0
- package/build/index.js +45 -0
- package/build/index.js.map +1 -0
- package/expo-module.config.json +6 -0
- package/package.json +50 -0
- package/plugin.js +25 -0
- package/src/index.ts +196 -0
|
@@ -0,0 +1,823 @@
|
|
|
1
|
+
package expo.modules.externalstorage
|
|
2
|
+
|
|
3
|
+
import android.os.Build
|
|
4
|
+
import android.os.Environment
|
|
5
|
+
import android.util.Base64
|
|
6
|
+
import expo.modules.kotlin.modules.Module
|
|
7
|
+
import expo.modules.kotlin.modules.ModuleDefinition
|
|
8
|
+
import okhttp3.Headers.Companion.toHeaders
|
|
9
|
+
import okhttp3.MediaType.Companion.toMediaType
|
|
10
|
+
import okhttp3.OkHttpClient
|
|
11
|
+
import okhttp3.Request
|
|
12
|
+
import okhttp3.RequestBody.Companion.toRequestBody
|
|
13
|
+
import org.json.JSONArray
|
|
14
|
+
import org.json.JSONObject
|
|
15
|
+
import java.io.BufferedInputStream
|
|
16
|
+
import java.io.File
|
|
17
|
+
import java.io.FileInputStream
|
|
18
|
+
import java.io.FileOutputStream
|
|
19
|
+
import java.io.RandomAccessFile
|
|
20
|
+
import java.util.concurrent.TimeUnit
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Expo native module that performs raw java.io.File I/O on external storage.
|
|
24
|
+
*
|
|
25
|
+
* Expo's built-in FileSystem module restricts writes to its own directory
|
|
26
|
+
* whitelist, blocking access to shared storage even when MANAGE_EXTERNAL_STORAGE
|
|
27
|
+
* is granted. This module bypasses that restriction.
|
|
28
|
+
*
|
|
29
|
+
* All paths are plain filesystem paths (no file:// prefix).
|
|
30
|
+
*/
|
|
31
|
+
class ExternalStorageModule : Module() {
|
|
32
|
+
override fun definition() = ModuleDefinition {
    Name("ExternalStorage")

    /** Create the parent directories of [file] if they do not exist yet. */
    fun ensureParentDirs(file: File) {
        file.parentFile?.let { parent ->
            if (!parent.exists()) parent.mkdirs()
        }
    }

    /**
     * Copy [input] to [output] in bounded 64 KB chunks so large bodies
     * never need to fit in the JVM heap at once. Returns bytes copied.
     */
    fun copyStream(input: java.io.InputStream, output: java.io.OutputStream): Long {
        val buffer = ByteArray(64 * 1024)
        var total = 0L
        while (true) {
            val read = input.read(buffer)
            if (read == -1) break
            output.write(buffer, 0, read)
            total += read
        }
        return total
    }

    /**
     * True when [candidate] resolves inside [canonicalDest].
     *
     * A plain startsWith() check is unsafe: "/dest-evil" starts with
     * "/dest". Require exact equality or a path-separator boundary.
     */
    fun isInsideDest(candidate: File, canonicalDest: String): Boolean {
        val canonical = candidate.canonicalPath
        return canonical == canonicalDest ||
            canonical.startsWith(canonicalDest + File.separator)
    }

    /** skip() may skip fewer bytes than requested; loop until done or EOF. */
    fun skipFully(stream: BufferedInputStream, count: Long) {
        var remaining = count
        while (remaining > 0) {
            val n = stream.skip(remaining)
            if (n <= 0) break
            remaining -= n
        }
    }

    // --- Basic queries ---

    AsyncFunction("exists") { path: String ->
        File(path).exists()
    }

    AsyncFunction("getInfo") { path: String ->
        val file = File(path)
        if (!file.exists()) {
            return@AsyncFunction mapOf(
                "exists" to false,
                "isDirectory" to false,
                "size" to 0L,
                "modificationTime" to 0L,
            )
        }
        mapOf(
            "exists" to true,
            "isDirectory" to file.isDirectory,
            "size" to file.length(),
            "modificationTime" to file.lastModified(),
        )
    }

    // --- Directory operations ---

    AsyncFunction("mkdir") { path: String ->
        val dir = File(path)
        if (!dir.exists()) {
            // mkdirs() can return false if a concurrent caller created the
            // directory first, so re-check existence before failing.
            val ok = dir.mkdirs()
            if (!ok && !dir.exists()) {
                throw Exception("Failed to create directory: $path")
            }
        }
    }

    AsyncFunction("readDir") { path: String ->
        val dir = File(path)
        if (!dir.exists() || !dir.isDirectory) {
            throw Exception("ENOENT: no such directory: $path")
        }
        dir.list()?.toList() ?: emptyList<String>()
    }

    // Recursively list all files under a directory, returning paths relative to `path`.
    // Skips .git, node_modules, .DS_Store, output directories.
    AsyncFunction("readDirRecursive") { path: String ->
        val root = File(path)
        if (!root.exists() || !root.isDirectory) {
            throw Exception("ENOENT: no such directory: $path")
        }
        val skipNames = setOf(".git", "node_modules", ".DS_Store", "output")
        val result = mutableListOf<String>()
        fun walk(dir: File, prefix: String) {
            val children = dir.listFiles() ?: return
            for (child in children) {
                val relativePath = if (prefix.isEmpty()) child.name else "$prefix/${child.name}"
                if (child.isDirectory) {
                    if (child.name !in skipNames) {
                        walk(child, relativePath)
                    }
                } else {
                    result.add(relativePath)
                }
            }
        }
        walk(root, "")
        result
    }

    AsyncFunction("rmdir") { path: String ->
        val dir = File(path)
        if (dir.exists()) {
            dir.deleteRecursively()
        }
    }

    // --- File read/write ---

    AsyncFunction("readFileUtf8") { path: String ->
        val file = File(path)
        if (!file.exists()) {
            throw Exception("ENOENT: no such file: $path")
        }
        file.readText(Charsets.UTF_8)
    }

    AsyncFunction("readFileBase64") { path: String ->
        val file = File(path)
        if (!file.exists()) {
            throw Exception("ENOENT: no such file: $path")
        }
        Base64.encodeToString(file.readBytes(), Base64.NO_WRAP)
    }

    AsyncFunction("writeFileUtf8") { path: String, content: String ->
        val file = File(path)
        ensureParentDirs(file)
        file.writeText(content, Charsets.UTF_8)
    }

    AsyncFunction("writeFileBase64") { path: String, base64Content: String ->
        val file = File(path)
        ensureParentDirs(file)
        file.writeBytes(Base64.decode(base64Content, Base64.DEFAULT))
    }

    /**
     * Append a Base64-encoded chunk to a file, optionally truncating it first.
     *
     * Designed for streaming large writes from JS in bounded-memory chunks
     * (e.g. 512 KB per call) so the JVM never allocates the full content at
     * once, avoiding OOM on 50+ MB git pack files.
     *
     * @param path Plain filesystem path
     * @param base64Content Chunk of data encoded as Base64
     * @param truncateFirst If true the file is created / truncated before
     *        writing; pass true for the first chunk only.
     */
    AsyncFunction("appendFileBase64") { path: String, base64Content: String, truncateFirst: Boolean ->
        val file = File(path)
        ensureParentDirs(file)
        val bytes = Base64.decode(base64Content, Base64.DEFAULT)
        // truncateFirst=true → overwrite (append=false); false → append.
        FileOutputStream(file, !truncateFirst).use { fos ->
            fos.write(bytes)
        }
    }

    AsyncFunction("writeFilesBase64") { paths: List<String>, base64Contents: List<String> ->
        if (paths.size != base64Contents.size) {
            throw Exception("paths/base64Contents length mismatch: ${paths.size} vs ${base64Contents.size}")
        }

        for (index in paths.indices) {
            val file = File(paths[index])
            ensureParentDirs(file)
            file.writeBytes(Base64.decode(base64Contents[index], Base64.DEFAULT))
        }

        mapOf("writtenCount" to paths.size)
    }

    AsyncFunction("deleteFile") { path: String ->
        val file = File(path)
        if (file.exists()) {
            file.delete()
        }
    }

    // --- Helper: check if external storage is available and MANAGE permission effective ---

    AsyncFunction("isExternalStorageWritable") {
        Environment.getExternalStorageState() == Environment.MEDIA_MOUNTED
    }

    AsyncFunction("getExternalStorageDirectory") {
        Environment.getExternalStorageDirectory()?.absolutePath ?: ""
    }

    /**
     * Check if this app has MANAGE_EXTERNAL_STORAGE ("All files access") granted.
     * On Android < 11 (API 30), returns true (not needed).
     */
    AsyncFunction("isExternalStorageManager") {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
            Environment.isExternalStorageManager()
        } else {
            true
        }
    }

    // --- Streaming HTTP → disk ---

    /**
     * Make an HTTP POST request and stream the response body directly to a
     * file on disk, never buffering the full body in JVM heap.
     *
     * Critical for git-upload-pack responses which can be 100+ MB: React
     * Native's fetch() buffers the entire response before handing it to
     * Hermes, causing OOM.
     *
     * @param url Target URL
     * @param headersMap HTTP headers as { key: value }
     * @param bodyBase64 Request body encoded as Base64 (git protocol binary data)
     * @param destPath Plain filesystem path for the response file
     * @param contentType MIME type for the request body
     * @return Map with "statusCode", "headers" (Map<String,String>), "bytesWritten"
     */
    AsyncFunction("httpPostToFile") { url: String, headersMap: Map<String, String>, bodyBase64: String, destPath: String, contentType: String ->
        val client = OkHttpClient.Builder()
            .connectTimeout(30, TimeUnit.SECONDS)
            .readTimeout(5, TimeUnit.MINUTES) // large packs take time
            .writeTimeout(30, TimeUnit.SECONDS)
            .build()

        val requestBody = Base64.decode(bodyBase64, Base64.DEFAULT)
            .toRequestBody(contentType.toMediaType())

        val request = Request.Builder()
            .url(url)
            .post(requestBody)
            .headers(headersMap.toHeaders())
            .build()

        val destFile = File(destPath)
        ensureParentDirs(destFile)

        // use {} closes the Response even on exceptions, releasing the
        // pooled connection (previously leaked on error / null-body paths).
        client.newCall(request).execute().use { response ->
            var bytesWritten = 0L
            response.body?.let { body ->
                body.byteStream().use { inputStream ->
                    FileOutputStream(destFile).use { outputStream ->
                        bytesWritten = copyStream(inputStream, outputStream)
                    }
                }
            }

            val responseHeaders = mutableMapOf<String, String>()
            for (i in 0 until response.headers.size) {
                responseHeaders[response.headers.name(i)] = response.headers.value(i)
            }

            mapOf(
                "statusCode" to response.code,
                "headers" to responseHeaders,
                "bytesWritten" to bytesWritten,
            )
        }
    }

    // --- Chunked file reading ---

    /**
     * Read a chunk of a file as Base64, starting at `offset` for up to
     * `length` bytes. Returns `{ data: string, bytesRead: number }`.
     *
     * Lets JS consume a large temp file in bounded-memory chunks without
     * ever holding the full content in the Hermes heap.
     */
    AsyncFunction("readFileChunk") { path: String, offset: Long, length: Int ->
        RandomAccessFile(path, "r").use { raf ->
            val fileLength = raf.length()
            if (offset >= fileLength) {
                return@AsyncFunction mapOf(
                    "data" to "",
                    "bytesRead" to 0,
                )
            }
            raf.seek(offset)
            val toRead = minOf(length.toLong(), fileLength - offset).toInt()
            val buffer = ByteArray(toRead)
            val bytesRead = raf.read(buffer, 0, toRead)
            if (bytesRead <= 0) {
                return@AsyncFunction mapOf(
                    "data" to "",
                    "bytesRead" to 0,
                )
            }
            // A short read returns a trimmed copy so no stale bytes leak in.
            val actual = if (bytesRead < toRead) buffer.copyOf(bytesRead) else buffer
            mapOf(
                "data" to Base64.encodeToString(actual, Base64.NO_WRAP),
                "bytesRead" to bytesRead,
            )
        }
    }

    // --- Resumable HTTP download → disk ---

    /**
     * Download a file via HTTP GET with support for resumable downloads.
     *
     * If `destPath` already exists, sends a `Range: bytes=<existingSize>-`
     * header to resume the download from where it left off. The server must
     * respond with 206 Partial Content for resume to work; on 200 (or any
     * other status) the file is overwritten from the start.
     *
     * @param url Target URL
     * @param headersMap Extra HTTP headers (e.g. Authorization, ETag)
     * @param destPath Plain filesystem path for the downloaded file
     * @return Map with "statusCode", "totalBytes" (final file size), "resumed" (boolean)
     */
    AsyncFunction("downloadFileResumable") { url: String, headersMap: Map<String, String>, destPath: String ->
        val destFile = File(destPath)
        ensureParentDirs(destFile)

        val existingBytes = if (destFile.exists()) destFile.length() else 0L

        val client = OkHttpClient.Builder()
            .connectTimeout(30, TimeUnit.SECONDS)
            .readTimeout(10, TimeUnit.MINUTES) // large archives
            .writeTimeout(30, TimeUnit.SECONDS)
            .build()

        val requestBuilder = Request.Builder()
            .url(url)
            .headers(headersMap.toHeaders())

        // Request resume if we have partial data
        if (existingBytes > 0) {
            requestBuilder.addHeader("Range", "bytes=$existingBytes-")
        }

        // use {} guarantees the Response (and its connection) is released
        // even when the body copy throws mid-stream.
        client.newCall(requestBuilder.build()).execute().use { response ->
            val statusCode = response.code
            // 206 Partial Content → server honored Range: append.
            // Anything else → full download: overwrite from the start.
            val resumed = statusCode == 206

            response.body?.let { body ->
                FileOutputStream(destFile, resumed).use { outputStream ->
                    body.byteStream().use { inputStream ->
                        copyStream(inputStream, outputStream)
                    }
                }
            }

            mapOf(
                "statusCode" to statusCode,
                "totalBytes" to destFile.length(),
                "resumed" to resumed,
            )
        }
    }

    // --- Tar extraction ---

    /**
     * Extract a tar archive (uncompressed) to a destination directory.
     *
     * Uses a minimal tar parser: reads 512-byte headers, extracts file name
     * and size, writes content. Supports POSIX ustar prefixed paths and the
     * GNU "L" (LongLink) long-name extension. Avoids any third-party
     * dependency while handling tars produced by `git archive` + system tar.
     *
     * Entries that resolve outside `destDir` are rejected (path traversal).
     *
     * @param tarPath Path to the .tar file
     * @param destDir Destination directory (will be created if needed)
     * @return Map with "filesExtracted" count
     */
    AsyncFunction("extractTar") { tarPath: String, destDir: String ->
        val tarFile = File(tarPath)
        if (!tarFile.exists()) {
            throw Exception("ENOENT: tar file not found: $tarPath")
        }

        val dest = File(destDir)
        if (!dest.exists()) dest.mkdirs()

        // Canonical destination anchors the traversal guard below.
        val canonicalDest = dest.canonicalPath

        var filesExtracted = 0
        var longName: String? = null

        BufferedInputStream(FileInputStream(tarFile), 256 * 1024).use { bis ->
            val headerBuf = ByteArray(512)

            while (true) {
                // Each entry starts with a 512-byte header block.
                val headerRead = readFully(bis, headerBuf)
                if (headerRead < 512) break

                // End of archive is marked by an all-zero block.
                if (headerBuf.all { it == 0.toByte() }) break

                // name: bytes 0-99, NUL-terminated
                val rawName = extractString(headerBuf, 0, 100)
                // size: bytes 124-135, octal
                val sizeStr = extractString(headerBuf, 124, 12).trim()
                val fileSize = if (sizeStr.isEmpty()) 0L else sizeStr.toLong(8)
                // type flag: byte 156
                val typeFlag = headerBuf[156].toInt().toChar()
                // ustar prefix: bytes 345-499
                val prefix = extractString(headerBuf, 345, 155)

                // GNU long-name extension: this entry's data is the real
                // name of the NEXT entry.
                if (typeFlag == 'L') {
                    val nameBuf = ByteArray(fileSize.toInt())
                    readFully(bis, nameBuf)
                    longName = String(nameBuf, Charsets.UTF_8).trimEnd('\u0000')
                    // Skip padding to the 512-byte boundary (looped skip:
                    // a single skip() call may skip fewer bytes).
                    skipFully(bis, (512 - (fileSize % 512)) % 512)
                    continue
                }

                // Determine the file name
                val fileName = longName ?: if (prefix.isNotEmpty()) "$prefix/$rawName" else rawName
                longName = null

                if (fileName.isEmpty()) {
                    skipDataBlocks(bis, fileSize)
                    continue
                }

                // Type '5' = directory, '0' or NUL = regular file
                when (typeFlag) {
                    '5' -> {
                        val dir = File(dest, fileName)
                        if (!isInsideDest(dir, canonicalDest)) {
                            throw Exception("Path traversal detected: $fileName")
                        }
                        dir.mkdirs()
                        skipDataBlocks(bis, fileSize)
                    }
                    '0', '\u0000' -> {
                        val outFile = File(dest, fileName)
                        if (!isInsideDest(outFile, canonicalDest)) {
                            throw Exception("Path traversal detected: $fileName")
                        }
                        ensureParentDirs(outFile)

                        FileOutputStream(outFile).use { fos ->
                            var remaining = fileSize
                            val buf = ByteArray(64 * 1024)
                            while (remaining > 0) {
                                val toRead = minOf(remaining, buf.size.toLong()).toInt()
                                val read = bis.read(buf, 0, toRead)
                                if (read <= 0) break
                                fos.write(buf, 0, read)
                                remaining -= read
                            }
                        }

                        // Skip padding to the 512-byte boundary.
                        skipFully(bis, (512 - (fileSize % 512)) % 512)

                        filesExtracted++
                    }
                    else -> {
                        // Skip unknown entry types (symlinks, pax headers, etc.)
                        skipDataBlocks(bis, fileSize)
                    }
                }
            }
        }

        mapOf("filesExtracted" to filesExtracted)
    }

    // ─── TiddlyWiki batch file parsing ─────────────────────────────────

    /**
     * Parse a batch of TiddlyWiki tiddler files entirely in Kotlin.
     *
     * One native call parses the whole batch instead of 100+ JS→Native
     * bridge calls (one per file), and returns a ready-to-inject JSON
     * array string.
     *
     * Supports .tid (header + body, with skinny mode), .json (single
     * tiddler / array / plugin bundle), and .meta companion files.
     *
     * @param filePaths Array of absolute file paths to parse
     * @param quickLoadMode If true, always return skinny tiddlers (no text)
     * @return JSON string: array of tiddler objects
     */
    AsyncFunction("batchParseTidFiles") { filePaths: List<String>, quickLoadMode: Boolean ->
        // AsyncFunction already runs off the main thread; parallelStream
        // fans the per-file I/O out over the common ForkJoinPool.
        val results = filePaths.parallelStream().map { path ->
            try {
                parseTiddlerFile(path, quickLoadMode)
            } catch (e: Exception) {
                // Unparseable files are silently dropped from the batch.
                null
            }
        }.toList()

        // Build the JSON array string natively — avoids JS-side stringify.
        val jsonArray = JSONArray()
        for (result in results) {
            when (result) {
                is JSONObject -> jsonArray.put(result)
                is JSONArray -> {
                    for (i in 0 until result.length()) {
                        jsonArray.put(result.getJSONObject(i))
                    }
                }
            }
        }
        jsonArray.toString()
    }
}
|
|
569
|
+
|
|
570
|
+
// ─── TiddlyWiki file parsing helpers ───────────────────────────────
|
|
571
|
+
|
|
572
|
+
/**
 * Parse one tiddler file, dispatching on its extension.
 *
 * @return a [JSONObject] for a single tiddler, a [JSONArray] for .json
 *         files holding several tiddlers, or null when the file is
 *         missing or has an unsupported extension.
 */
private fun parseTiddlerFile(path: String, quickLoadMode: Boolean): Any? {
    val file = File(path)
    if (!file.exists()) return null

    return when (file.extension) {
        "tid" -> parseDotTid(file, quickLoadMode)
        "json" -> parseDotJson(file, quickLoadMode)
        "meta" -> parseDotMeta(file, quickLoadMode)
        else -> null
    }
}
|
|
588
|
+
|
|
589
|
+
/**
 * Parse a .tid file (TiddlyWiki native format): `key: value` header
 * lines, a blank line, then the body text. CRLF line endings are
 * tolerated throughout.
 */
private fun parseDotTid(file: File, quickLoadMode: Boolean): JSONObject? {
    val raw = file.readText(Charsets.UTF_8)
    val tiddler = JSONObject()

    // Headers end at the first blank line; everything after it is body.
    val separator = Regex("\r?\n\r?\n").find(raw)
    val headerSection = if (separator != null) raw.substring(0, separator.range.first) else raw
    val bodyStart = separator?.range?.let { it.last + 1 } ?: -1
    val bodyLength = if (bodyStart >= 0) raw.length - bodyStart else 0

    // Each header line is `field: value`; lines without a colon are ignored.
    headerSection.split(Regex("\r?\n")).forEach { line ->
        val colon = line.indexOf(':')
        if (colon != -1) {
            val key = line.substring(0, colon).trim()
            val value = line.substring(colon + 1).trim()
            if (key.isNotEmpty()) {
                tiddler.put(key, value)
            }
        }
    }

    // Fall back to the filename when no explicit title field is present.
    if (!tiddler.has("title")) {
        tiddler.put("title", getTitleFromFilename(file.name))
    }

    val title = tiddler.optString("title", "")
    val mime = tiddler.optString("type", "")
    val isModule = tiddler.has("module-type")
    val isPlugin = tiddler.has("plugin-type")

    // Quick load still needs full text for boot-critical tiddlers.
    val keepText = if (quickLoadMode) {
        shouldPreserveFullTextInQuickLoad(title, mime, isModule, isPlugin)
    } else {
        shouldSaveFullTiddler(title, mime, isModule, isPlugin, bodyLength)
    }

    if (keepText && bodyStart >= 0 && bodyLength > 0) {
        tiddler.put("text", raw.substring(bodyStart))
    } else if (!keepText) {
        // Skinny tiddler — text omitted, marked for lazy loading.
        tiddler.remove("text")
        tiddler.put("_is_skinny", "yes")
    }

    return tiddler
}
|
|
643
|
+
|
|
644
|
+
/**
 * Parse a .json tiddler file.
 *
 * Handles: a single tiddler `{title: ...}`, an array of tiddlers, or a
 * plugin bundle `{tiddlers: {...}}` (returned as null — such bundles are
 * loaded via their .meta companion). Anything unparseable becomes a
 * standalone application/json tiddler whose text is the raw content.
 */
private fun parseDotJson(file: File, quickLoadMode: Boolean): Any? {
    val content = file.readText(Charsets.UTF_8)
    val fallbackTitle = getTitleFromFilename(file.name)

    return try {
        if (content.trimStart().startsWith("[")) {
            // Array form: keep only entries that carry a title.
            val parsed = JSONArray(content)
            val titled = JSONArray()
            for (i in 0 until parsed.length()) {
                val entry = parsed.optJSONObject(i)
                if (entry != null && entry.has("title")) {
                    titled.put(entry)
                }
            }
            if (titled.length() > 0) {
                titled
            } else {
                createStandaloneJsonTiddler(fallbackTitle, content, quickLoadMode)
            }
        } else {
            val obj = JSONObject(content)
            when {
                obj.has("title") -> {
                    if (!obj.has("type")) {
                        obj.put("type", "application/json")
                    }
                    obj
                }
                // Plugin bundle format {tiddlers: {...}} — skip here,
                // it's loaded via .meta companion file.
                obj.has("tiddlers") -> null
                else -> createStandaloneJsonTiddler(fallbackTitle, content, quickLoadMode)
            }
        }
    } catch (_: Exception) {
        createStandaloneJsonTiddler(fallbackTitle, content, quickLoadMode)
    }
}
|
|
687
|
+
|
|
688
|
+
/**
 * Wrap raw JSON content in a standalone application/json tiddler.
 * In quick-load mode the text is omitted and the tiddler marked skinny.
 */
private fun createStandaloneJsonTiddler(title: String, content: String, quickLoadMode: Boolean): JSONObject =
    JSONObject().apply {
        put("title", title)
        put("type", "application/json")
        if (quickLoadMode) {
            put("_is_skinny", "yes")
        } else {
            put("text", content)
        }
    }
|
|
699
|
+
|
|
700
|
+
/**
 * Parse a .meta companion file. The .meta holds only field definitions;
 * the actual content lives in the companion file (same name without the
 * .meta suffix).
 */
private fun parseDotMeta(metaFile: File, quickLoadMode: Boolean): JSONObject? {
    val tiddler = JSONObject()

    // .meta files are plain `field: value` lines; lines without a colon
    // are ignored.
    metaFile.readText(Charsets.UTF_8).split(Regex("\r?\n")).forEach { line ->
        val colon = line.indexOf(':')
        if (colon != -1) {
            val key = line.substring(0, colon).trim()
            val value = line.substring(colon + 1).trim()
            if (key.isNotEmpty()) {
                tiddler.put(key, value)
            }
        }
    }

    // Fall back to the companion filename for the title.
    if (!tiddler.has("title")) {
        tiddler.put("title", getTitleFromFilename(metaFile.name.removeSuffix(".meta")))
    }

    val companionPath = metaFile.absolutePath.removeSuffix(".meta")
    val companion = File(companionPath)

    if (companion.exists()) {
        if (companionPath.endsWith(".json")) {
            // .meta + .json pair: the .json IS the text content (e.g.
            // plugin bundles), so the text must normally be inlined.
            val includeText = if (quickLoadMode) {
                shouldPreserveFullTextInQuickLoad(
                    tiddler.optString("title", ""),
                    tiddler.optString("type", ""),
                    tiddler.has("module-type"),
                    tiddler.has("plugin-type"),
                )
            } else {
                true
            }
            if (includeText) {
                tiddler.put("text", companion.readText(Charsets.UTF_8))
            } else {
                tiddler.put("_is_skinny", "yes")
            }
        }
        // For non-JSON companions (images, etc.), we don't set
        // _canonical_uri here — that requires knowing the workspace base
        // path. JS side handles it.
    }

    return if (tiddler.has("title")) tiddler else null
}
|
|
756
|
+
|
|
757
|
+
/**
 * Decide whether a tiddler's full text should be included in the boot
 * store. Mirrors the JS `shouldSaveFullTiddler()` logic: boot-critical
 * tiddlers always keep their text; otherwise only small ones (< 10 KB).
 */
private fun shouldSaveFullTiddler(
    title: String,
    type: String,
    hasModuleType: Boolean,
    hasPluginType: Boolean,
    estimatedTextLength: Int,
): Boolean =
    shouldPreserveFullTextInQuickLoad(title, type, hasModuleType, hasPluginType) ||
        estimatedTextLength < 10000
|
|
773
|
+
|
|
774
|
+
/**
 * True for tiddlers whose text must survive even in quick-load (skinny)
 * mode: system tiddlers ($:/...), plugin bundles, and module tiddlers.
 */
private fun shouldPreserveFullTextInQuickLoad(
    title: String,
    type: String,
    hasModuleType: Boolean,
    hasPluginType: Boolean,
): Boolean = when {
    // System tiddlers are needed during boot.
    title.startsWith("\$:/") -> true
    // Plugin bundles ship as application/json with a plugin-type field.
    type == "application/json" && hasPluginType -> true
    // Tiddlers defining modules must be loadable immediately.
    hasModuleType -> true
    else -> false
}
|
|
785
|
+
|
|
786
|
+
/**
 * Derive a tiddler title from a filename by stripping known tiddler
 * extensions. Note the suffixes are stripped in sequence, so e.g.
 * "a.json.tid" collapses to "a".
 */
private fun getTitleFromFilename(filename: String): String =
    filename
        .removeSuffix(".tid")
        .removeSuffix(".json")
        .removeSuffix(".meta")
|
|
792
|
+
|
|
793
|
+
// --- Tar helper functions ---
|
|
794
|
+
|
|
795
|
+
/**
 * Fill [buf] from [stream], looping over short reads.
 *
 * @return the number of bytes actually read — equal to buf.size unless
 *         the stream ended first.
 */
private fun readFully(stream: BufferedInputStream, buf: ByteArray): Int {
    var filled = 0
    while (filled < buf.size) {
        val n = stream.read(buf, filled, buf.size - filled)
        // n <= 0 means EOF (or a zero-length read); stop with what we have.
        if (n <= 0) break
        filled += n
    }
    return filled
}
|
|
804
|
+
|
|
805
|
+
/**
 * Decode a fixed-width, NUL-terminated tar header field as UTF-8.
 * Reads from [offset] up to the first NUL byte, or the full [maxLen]
 * slot when no NUL is found.
 */
private fun extractString(header: ByteArray, offset: Int, maxLen: Int): String {
    var nulIndex = -1
    for (i in offset until minOf(offset + maxLen, header.size)) {
        if (header[i] == 0.toByte()) {
            nulIndex = i
            break
        }
    }
    val end = if (nulIndex >= 0) nulIndex else offset + maxLen
    return String(header, offset, end - offset, Charsets.UTF_8)
}
|
|
811
|
+
|
|
812
|
+
/**
 * Skip a tar entry's data region: entry data is padded out to whole
 * 512-byte blocks, so ceil(fileSize / 512) * 512 bytes are consumed.
 */
private fun skipDataBlocks(stream: BufferedInputStream, fileSize: Long) {
    if (fileSize <= 0) return
    val padded = ((fileSize + 511) / 512) * 512
    var remaining = padded
    // skip() may skip fewer bytes than asked; loop until done or EOF.
    while (remaining > 0) {
        val n = stream.skip(remaining)
        if (n <= 0) break
        remaining -= n
    }
}
|
|
823
|
+
}
|