@pol-studios/powersync 1.0.30 → 1.0.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. package/dist/{CacheSettingsManager-uz-kbnRH.d.ts → CacheSettingsManager-0H_7thHW.d.ts} +21 -3
  2. package/dist/attachments/index.d.ts +30 -30
  3. package/dist/attachments/index.js +13 -4
  4. package/dist/{background-sync-CVR3PkFi.d.ts → background-sync-BujnI3IR.d.ts} +1 -1
  5. package/dist/{chunk-RE5HWLCB.js → chunk-2RDWLXJW.js} +322 -103
  6. package/dist/chunk-2RDWLXJW.js.map +1 -0
  7. package/dist/{chunk-P4HZA6ZT.js → chunk-4665ZSE5.js} +2 -2
  8. package/dist/chunk-4665ZSE5.js.map +1 -0
  9. package/dist/{chunk-XOY2CJ67.js → chunk-4F5B5CZ7.js} +3 -3
  10. package/dist/chunk-5WRI5ZAA.js +31 -0
  11. package/dist/{chunk-BC2SRII2.js → chunk-65A3SYJZ.js} +14 -1
  12. package/dist/chunk-65A3SYJZ.js.map +1 -0
  13. package/dist/chunk-6SZ64KCZ.js +755 -0
  14. package/dist/chunk-6SZ64KCZ.js.map +1 -0
  15. package/dist/{chunk-C2ACBYBZ.js → chunk-74TBHWJ4.js} +10 -96
  16. package/dist/{chunk-C2ACBYBZ.js.map → chunk-74TBHWJ4.js.map} +1 -1
  17. package/dist/chunk-ANXWYQEJ.js +1 -0
  18. package/dist/chunk-ANXWYQEJ.js.map +1 -0
  19. package/dist/{chunk-CAB26E6F.js → chunk-C4J4MLER.js} +29 -24
  20. package/dist/chunk-C4J4MLER.js.map +1 -0
  21. package/dist/{chunk-C5ODS3XH.js → chunk-EOW7JK7Q.js} +9 -16
  22. package/dist/chunk-EOW7JK7Q.js.map +1 -0
  23. package/dist/chunk-HRAVPIAZ.js +220 -0
  24. package/dist/chunk-HRAVPIAZ.js.map +1 -0
  25. package/dist/{chunk-XAEII4ZX.js → chunk-NUGQOTEM.js} +32 -4
  26. package/dist/chunk-NUGQOTEM.js.map +1 -0
  27. package/dist/chunk-OGUFUZSY.js +5415 -0
  28. package/dist/chunk-OGUFUZSY.js.map +1 -0
  29. package/dist/{chunk-JCGOZVWL.js → chunk-P4D6BQ4X.js} +115 -576
  30. package/dist/chunk-P4D6BQ4X.js.map +1 -0
  31. package/dist/{chunk-CACKC6XG.js → chunk-PGEDE6IM.js} +136 -89
  32. package/dist/chunk-PGEDE6IM.js.map +1 -0
  33. package/dist/{chunk-A4IBBWGO.js → chunk-RALHHPTU.js} +1 -1
  34. package/dist/chunk-RIDSPLE5.js +42 -0
  35. package/dist/chunk-RIDSPLE5.js.map +1 -0
  36. package/dist/{chunk-Z6VOBGTU.js → chunk-UOMHWUHV.js} +2 -12
  37. package/dist/chunk-UOMHWUHV.js.map +1 -0
  38. package/dist/{chunk-QREWE3NR.js → chunk-YONQYTVH.js} +2 -2
  39. package/dist/chunk-ZAN22NGL.js +13 -0
  40. package/dist/chunk-ZAN22NGL.js.map +1 -0
  41. package/dist/config/index.d.ts +200 -0
  42. package/dist/config/index.js +23 -0
  43. package/dist/config/index.js.map +1 -0
  44. package/dist/connector/index.d.ts +23 -5
  45. package/dist/connector/index.js +4 -1
  46. package/dist/core/index.d.ts +2 -2
  47. package/dist/core/index.js +1 -0
  48. package/dist/error/index.js +1 -0
  49. package/dist/generator/index.js +2 -0
  50. package/dist/generator/index.js.map +1 -1
  51. package/dist/index.d.ts +19 -16
  52. package/dist/index.js +68 -36
  53. package/dist/index.native.d.ts +18 -14
  54. package/dist/index.native.js +73 -34
  55. package/dist/index.web.d.ts +17 -14
  56. package/dist/index.web.js +68 -36
  57. package/dist/maintenance/index.d.ts +2 -2
  58. package/dist/maintenance/index.js +3 -2
  59. package/dist/platform/index.d.ts +1 -1
  60. package/dist/platform/index.js +2 -0
  61. package/dist/platform/index.js.map +1 -1
  62. package/dist/platform/index.native.d.ts +1 -1
  63. package/dist/platform/index.native.js +1 -0
  64. package/dist/platform/index.web.d.ts +1 -1
  65. package/dist/platform/index.web.js +1 -0
  66. package/dist/pol-attachment-queue-DqBvLAEY.d.ts +255 -0
  67. package/dist/provider/index.d.ts +149 -114
  68. package/dist/provider/index.js +9 -14
  69. package/dist/provider/index.native.d.ts +108 -0
  70. package/dist/provider/index.native.js +121 -0
  71. package/dist/provider/index.native.js.map +1 -0
  72. package/dist/provider/index.web.d.ts +16 -0
  73. package/dist/provider/index.web.js +112 -0
  74. package/dist/provider/index.web.js.map +1 -0
  75. package/dist/react/index.d.ts +16 -65
  76. package/dist/react/index.js +2 -9
  77. package/dist/storage/index.d.ts +5 -4
  78. package/dist/storage/index.js +12 -9
  79. package/dist/storage/index.native.d.ts +5 -4
  80. package/dist/storage/index.native.js +8 -5
  81. package/dist/storage/index.web.d.ts +5 -4
  82. package/dist/storage/index.web.js +11 -8
  83. package/dist/storage/upload/index.d.ts +4 -3
  84. package/dist/storage/upload/index.js +4 -2
  85. package/dist/storage/upload/index.native.d.ts +4 -3
  86. package/dist/storage/upload/index.native.js +4 -2
  87. package/dist/storage/upload/index.web.d.ts +2 -1
  88. package/dist/storage/upload/index.web.js +4 -2
  89. package/dist/{supabase-connector-C4YpH_l3.d.ts → supabase-connector-HMxBA9Kg.d.ts} +2 -2
  90. package/dist/sync/index.d.ts +155 -20
  91. package/dist/sync/index.js +13 -3
  92. package/dist/{types-CyvBaAl8.d.ts → types-6QHGELuY.d.ts} +4 -1
  93. package/dist/{types-Dv1uf0LZ.d.ts → types-B9MptP7E.d.ts} +7 -10
  94. package/dist/types-BhAEsJj-.d.ts +330 -0
  95. package/dist/{types-D0WcHrq6.d.ts → types-CGMibJKD.d.ts} +8 -0
  96. package/dist/{types-CpM2_LhU.d.ts → types-DqJnP50o.d.ts} +6 -1
  97. package/dist/{pol-attachment-queue-BE2HU3Us.d.ts → types-JCEhw2Lf.d.ts} +139 -346
  98. package/package.json +18 -4
  99. package/dist/chunk-654ERHA7.js +0 -1
  100. package/dist/chunk-BC2SRII2.js.map +0 -1
  101. package/dist/chunk-C5ODS3XH.js.map +0 -1
  102. package/dist/chunk-CAB26E6F.js.map +0 -1
  103. package/dist/chunk-CACKC6XG.js.map +0 -1
  104. package/dist/chunk-FNYQFILT.js +0 -44
  105. package/dist/chunk-FNYQFILT.js.map +0 -1
  106. package/dist/chunk-JCGOZVWL.js.map +0 -1
  107. package/dist/chunk-P4HZA6ZT.js.map +0 -1
  108. package/dist/chunk-RBPWEOIV.js +0 -358
  109. package/dist/chunk-RBPWEOIV.js.map +0 -1
  110. package/dist/chunk-RE5HWLCB.js.map +0 -1
  111. package/dist/chunk-XAEII4ZX.js.map +0 -1
  112. package/dist/chunk-Z6VOBGTU.js.map +0 -1
  113. /package/dist/{chunk-XOY2CJ67.js.map → chunk-4F5B5CZ7.js.map} +0 -0
  114. /package/dist/{chunk-654ERHA7.js.map → chunk-5WRI5ZAA.js.map} +0 -0
  115. /package/dist/{chunk-A4IBBWGO.js.map → chunk-RALHHPTU.js.map} +0 -0
  116. /package/dist/{chunk-QREWE3NR.js.map → chunk-YONQYTVH.js.map} +0 -0
@@ -1,21 +1,16 @@
  import {
- DEFAULT_UPLOAD_NOTIFICATION,
+ DEFAULT_UPLOAD_NOTIFICATION
+ } from "./chunk-ZAN22NGL.js";
+ import {
  resolveBucket
- } from "./chunk-Z6VOBGTU.js";
+ } from "./chunk-UOMHWUHV.js";
  import {
  AbortError
  } from "./chunk-FV2HXEIY.js";

  // src/storage/upload/SupabaseUploadHandler.native.ts
  import { Platform } from "react-native";
- var Upload = null;
- async function loadUploadModule() {
- if (!Upload) {
- const module = await import("react-native-background-upload");
- Upload = module.default;
- }
- return Upload;
- }
+ import Upload from "react-native-background-upload";
  var SupabaseUploadHandler = class {
  supabase;
  defaultBucket;
@@ -40,22 +35,30 @@ var SupabaseUploadHandler = class {
  if (signal?.aborted) {
  throw new AbortError();
  }
+ if (!this.supabase?.storage?.from) {
+ throw new Error(`Supabase client is not properly initialized: storage=${!!this.supabase?.storage}, from=${typeof this.supabase?.storage?.from}`);
+ }
  const bucket = this.resolveBucket(storagePath);
- const {
- data,
- error
- } = await this.supabase.storage.from(bucket).createSignedUploadUrl(storagePath);
+ let data = null;
+ let error = null;
+ try {
+ const result = await this.supabase.storage.from(bucket).createSignedUploadUrl(storagePath);
+ data = result.data;
+ error = result.error;
+ } catch (e) {
+ const err = e instanceof Error ? e : new Error(String(e));
+ throw new Error(`createSignedUploadUrl threw unexpectedly for ${storagePath}: ${err.message} (stack: ${err.stack?.split("\n").slice(0, 3).join(" | ")})`);
+ }
  if (error || !data?.signedUrl) {
  throw new Error(error?.message || "Failed to create signed upload URL");
  }
  if (signal?.aborted) {
  throw new AbortError();
  }
- const UploadModule = await loadUploadModule();
  return new Promise((resolve, reject) => {
- const uploadId = `upload_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+ const uploadId = `upload_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  let isSettled = false;
- const completedSub = UploadModule.addListener("completed", uploadId, (eventData) => {
+ const completedSub = Upload.addListener("completed", uploadId, (eventData) => {
  if (isSettled) return;
  isSettled = true;
  cleanup();
@@ -65,13 +68,13 @@
  reject(new Error(`Upload failed with status ${eventData.responseCode}`));
  }
  });
- const errorSub = UploadModule.addListener("error", uploadId, (eventData) => {
+ const errorSub = Upload.addListener("error", uploadId, (eventData) => {
  if (isSettled) return;
  isSettled = true;
  cleanup();
- reject(new Error(eventData.error));
+ reject(new Error(`Upload error event for ${storagePath}: ${eventData.error}`));
  });
- const cancelledSub = UploadModule.addListener("cancelled", uploadId, () => {
+ const cancelledSub = Upload.addListener("cancelled", uploadId, () => {
  if (isSettled) return;
  isSettled = true;
  cleanup();
@@ -87,7 +90,7 @@
  };
  const abortHandler = () => {
  if (isSettled) return;
- UploadModule.cancelUpload(uploadId).catch(() => {
+ Upload.cancelUpload(uploadId).catch(() => {
  });
  };
  if (signal) {
@@ -96,7 +99,7 @@
  });
  }
  const isAndroid = Platform.OS === "android";
- UploadModule.startUpload({
+ Upload.startUpload({
  url: data.signedUrl,
  path: localFileUri,
  method: "PUT",
@@ -113,7 +116,9 @@
  if (isSettled) return;
  isSettled = true;
  cleanup();
- reject(err);
+ const contextualError = new Error(`Upload startUpload failed for ${storagePath}: ${err.message}`);
+ contextualError.stack = err.stack;
+ reject(contextualError);
  });
  });
  }
@@ -139,4 +144,4 @@ export {
  SupabaseUploadHandler,
  createSupabaseUploadHandler
  };
- //# sourceMappingURL=chunk-CAB26E6F.js.map
+ //# sourceMappingURL=chunk-C4J4MLER.js.map
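The hunks above replace the lazy loadUploadModule() indirection with a static import of react-native-background-upload, add a guard against a half-initialized Supabase client, swap the deprecated substr for slice when generating upload IDs, and attach the storagePath to every failure path. From the consumer side this surfaces roughly as in the following sketch; the subpath import, credentials, and bucket names are assumptions for illustration, not anything this diff guarantees:

import { createClient } from "@supabase/supabase-js";
// Subpath export assumed from the dist/storage/upload layout in the file list above.
import { createSupabaseUploadHandler } from "@pol-studios/powersync/storage/upload";

const supabase = createClient("https://example.supabase.co", "public-anon-key"); // placeholder project + key

// Factory signature in this version: (supabaseClient, bucketConfig, notificationConfig?)
const handler = createSupabaseUploadHandler(supabase, {
  defaultBucket: "attachments",
  bucketMap: new Map([["avatars/", "user-avatars"]]),
});

async function uploadWithTimeout(storagePath: string, fileUri: string): Promise<void> {
  const controller = new AbortController();
  // Aborting rejects uploadFile with AbortError and cancels the native upload.
  const timer = setTimeout(() => controller.abort(), 60_000);
  try {
    await handler.uploadFile(storagePath, fileUri, "image/jpeg", controller.signal);
  } finally {
    clearTimeout(timer);
  }
}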
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/storage/upload/SupabaseUploadHandler.native.ts"], ...} (generated source map for chunk-C4J4MLER.js; minified contents omitted)
@@ -59,11 +59,7 @@ function validateWhereClause(whereClause) {
  }
  }
  function resolvePathColumn(config) {
- const column = config.pathColumn ?? config.idColumn;
- if (!column) {
- throw new Error("WatchConfig requires either pathColumn or idColumn. pathColumn is preferred; idColumn is deprecated.");
- }
- return column;
+ return config.pathColumn;
  }
  function buildWatchQuery(config) {
  const pathColumn = resolvePathColumn(config);
@@ -140,8 +136,6 @@ function watchConfigToSourceConfig(watchConfig) {
  return {
  table: watchConfig.table,
  pathColumn,
- idColumn: pathColumn,
- // For backwards compatibility
  orderByColumn: watchConfig.orderBy?.column ?? null
  };
  }
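Note the quiet breaking change in resolvePathColumn: the config.pathColumn ?? config.idColumn fallback is gone, so a WatchConfig that still uses the deprecated idColumn key no longer resolves at all. Since buildWatchQuery validates pathColumn as a SQL identifier, the legacy shape now fails with an invalid-identifier error rather than the old descriptive message. A before/after sketch of the config shape; the table and column names are illustrative only:

// 1.0.30: either key was accepted, with idColumn as a deprecated fallback.
const legacyConfig = { table: "EquipmentUnitMediaContent", idColumn: "storagePath" };

// 1.0.33: resolvePathColumn returns config.pathColumn directly, so the legacy shape
// leaves pathColumn undefined and buildWatchQuery rejects it during identifier
// validation. Rename the key:
const currentConfig = { table: "EquipmentUnitMediaContent", pathColumn: "storagePath" };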
@@ -159,12 +153,12 @@ function extractIdsFromRows(results) {
  }
  return ids;
  }
- function createWatchIds(table, pathColumn) {
+ function createWatchPaths(table, pathColumn) {
  const sql = buildIdOnlyWatchQuery({
  table,
  pathColumn
  });
- return (db, onUpdate) => {
+ return (db, _supabase, onUpdate) => {
  const abortController = new AbortController();
  db.watch(sql, [], {
  onResult: (results) => onUpdate(extractIdsFromRows(results))
@@ -185,14 +179,13 @@ var STATE_MAPPING = /* @__PURE__ */ new Map([
  [AttachmentState.SYNCED, 3 /* SYNCED */],
  [AttachmentState.ARCHIVED, 4 /* ARCHIVED */],
  // POL extension states (identity mapping)
- [5 /* FAILED_PERMANENT */, 5 /* FAILED_PERMANENT */],
- [6 /* DOWNLOAD_SKIPPED */, 6 /* DOWNLOAD_SKIPPED */]
+ [5 /* FAILED_PERMANENT */, 5 /* FAILED_PERMANENT */]
  ]);
- var STATE_NAMES = /* @__PURE__ */ new Map([[0 /* QUEUED_SYNC */, "QUEUED_SYNC"], [1 /* QUEUED_UPLOAD */, "QUEUED_UPLOAD"], [2 /* QUEUED_DOWNLOAD */, "QUEUED_DOWNLOAD"], [3 /* SYNCED */, "SYNCED"], [4 /* ARCHIVED */, "ARCHIVED"], [5 /* FAILED_PERMANENT */, "FAILED_PERMANENT"], [6 /* DOWNLOAD_SKIPPED */, "DOWNLOAD_SKIPPED"]]);
- var VALID_STATES = /* @__PURE__ */ new Set([0 /* QUEUED_SYNC */, 1 /* QUEUED_UPLOAD */, 2 /* QUEUED_DOWNLOAD */, 3 /* SYNCED */, 4 /* ARCHIVED */, 5 /* FAILED_PERMANENT */, 6 /* DOWNLOAD_SKIPPED */]);
+ var STATE_NAMES = /* @__PURE__ */ new Map([[0 /* QUEUED_SYNC */, "QUEUED_SYNC"], [1 /* QUEUED_UPLOAD */, "QUEUED_UPLOAD"], [2 /* QUEUED_DOWNLOAD */, "QUEUED_DOWNLOAD"], [3 /* SYNCED */, "SYNCED"], [4 /* ARCHIVED */, "ARCHIVED"], [5 /* FAILED_PERMANENT */, "FAILED_PERMANENT"]]);
+ var VALID_STATES = /* @__PURE__ */ new Set([0 /* QUEUED_SYNC */, 1 /* QUEUED_UPLOAD */, 2 /* QUEUED_DOWNLOAD */, 3 /* SYNCED */, 4 /* ARCHIVED */, 5 /* FAILED_PERMANENT */]);
  var UPLOAD_WORKFLOW_STATES = /* @__PURE__ */ new Set([1 /* QUEUED_UPLOAD */, 5 /* FAILED_PERMANENT */]);
  var DOWNLOAD_WORKFLOW_STATES = /* @__PURE__ */ new Set([2 /* QUEUED_DOWNLOAD */, 0 /* QUEUED_SYNC */]);
- var TERMINAL_STATES = /* @__PURE__ */ new Set([3 /* SYNCED */, 4 /* ARCHIVED */, 6 /* DOWNLOAD_SKIPPED */]);
+ var TERMINAL_STATES = /* @__PURE__ */ new Set([3 /* SYNCED */, 4 /* ARCHIVED */]);
  function migrateAttachmentState(oldState) {
  const newState = STATE_MAPPING.get(oldState);
  if (newState === void 0) {
@@ -252,7 +245,7 @@ export {
  buildRecordFetchQuery,
  watchConfigToSourceConfig,
  extractIdsFromRows,
- createWatchIds,
+ createWatchPaths,
  STATE_MAPPING,
  STATE_NAMES,
  VALID_STATES,
@@ -270,4 +263,4 @@ export {
  recordMigration,
  formatMigrationStats
  };
- //# sourceMappingURL=chunk-C5ODS3XH.js.map
+ //# sourceMappingURL=chunk-EOW7JK7Q.js.map
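Besides the rename from createWatchIds, the generated callback gains a middle Supabase-client parameter, so a hand-written watchPaths callback must now accept three arguments even when the client goes unused. A sketch of both styles against the signatures above; the attachments subpath and the table/column names are assumptions:

// Subpath export assumed from the dist/attachments layout in the file list above.
import { createWatchPaths, extractIdsFromRows } from "@pol-studios/powersync/attachments";

// Generated callback: identifiers are validated and the SQL is built once, at creation time.
const watchPaths = createWatchPaths("EquipmentUnitMediaContent", "storagePath");

// A hand-written equivalent must now match (db, supabase, onUpdate) => cleanup:
const customWatchPaths = (db, _supabase, onUpdate) => {
  const controller = new AbortController();
  db.watch(
    "SELECT storagePath AS id FROM EquipmentUnitMediaContent WHERE storagePath IS NOT NULL AND storagePath != ''",
    [],
    { onResult: (results) => onUpdate(extractIdsFromRows(results)) },
    { signal: controller.signal }
  );
  return () => controller.abort();
};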
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/attachments/query-builder.ts","../src/attachments/migration.ts"], ...} (generated source map for chunk-EOW7JK7Q.js; minified contents omitted)
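The state-table hunk above drops DOWNLOAD_SKIPPED (6) from STATE_MAPPING, STATE_NAMES, VALID_STATES, and TERMINAL_STATES, which changes runtime behavior for any rows persisted under 1.0.30 with that state. Expected results against the migration helpers this chunk exports (the subpath import is an assumption):

import {
  migrateAttachmentState,
  migrateAttachmentStateSafe,
} from "@pol-studios/powersync/attachments"; // assumed subpath export

migrateAttachmentState(5);     // -> 5; FAILED_PERMANENT still maps 1:1
migrateAttachmentStateSafe(6); // -> 0; the safe variant falls back to QUEUED_SYNC
try {
  migrateAttachmentState(6);   // state 6 is no longer in STATE_MAPPING
} catch (e) {
  console.warn(e);             // "Invalid attachment state: 6. Valid states are: ..."
}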
@@ -0,0 +1,220 @@
1
+ import {
2
+ LOCAL_ONLY_TABLES
3
+ } from "./chunk-OGUFUZSY.js";
4
+
5
+ // src/config/schema-generator.ts
6
+ import { column, Schema, Table } from "@powersync/react-native";
7
+ function normalizeTableSpec(spec) {
8
+ if (typeof spec === "string") {
9
+ if (spec.includes(".")) {
10
+ const [schemaName2, tableName] = spec.split(".");
11
+ return {
12
+ tableName,
13
+ schemaName: schemaName2,
14
+ trackMetadata: false,
15
+ alias: toPascalCase(schemaName2) + tableName
16
+ };
17
+ }
18
+ return {
19
+ tableName: spec,
20
+ schemaName: "public",
21
+ trackMetadata: false,
22
+ alias: spec
23
+ };
24
+ }
25
+ const schemaName = spec.schema || "public";
26
+ const alias = schemaName === "public" ? spec.table : toPascalCase(schemaName) + spec.table;
27
+ return {
28
+ tableName: spec.table,
29
+ schemaName,
30
+ trackMetadata: spec.trackMetadata ?? false,
31
+ alias
32
+ };
33
+ }
34
+ function toPascalCase(str) {
35
+ return str.charAt(0).toUpperCase() + str.slice(1);
36
+ }
37
+ function mapColumnType(col) {
38
+ const type = col.type;
39
+ if (type === "json") {
40
+ return null;
41
+ }
42
+ switch (type) {
43
+ case "number":
44
+ return column.real;
45
+ case "boolean":
46
+ return column.integer;
47
+ case "string":
48
+ case "date":
49
+ case "enum":
50
+ return column.text;
51
+ default:
52
+ return column.text;
53
+ }
54
+ }
55
+ function shouldSkipColumn(col) {
56
+ if (col.name === "id") {
57
+ return true;
58
+ }
59
+ if (col.type === "json") {
60
+ return true;
61
+ }
62
+ return false;
63
+ }
64
+ function getTableFromSchema(databaseSchema, schemaName, tableName) {
65
+ const schema = databaseSchema.schemas[schemaName];
66
+ if (!schema) {
67
+ console.warn(`[PolConfig] Schema "${schemaName}" not found in databaseSchema`);
68
+ return null;
69
+ }
70
+ const table = schema.tables[tableName];
71
+ if (!table) {
72
+ const view = schema.views?.[tableName];
73
+ if (view) {
74
+ return view;
75
+ }
76
+ console.warn(`[PolConfig] Table "${schemaName}.${tableName}" not found in databaseSchema`);
77
+ return null;
78
+ }
79
+ return table;
80
+ }
81
+ function generatePowerSyncSchema(databaseSchema, tables) {
82
+ const warnings = [];
83
+ const tableDefinitions = {};
84
+ const tableMap = /* @__PURE__ */ new Map();
85
+ for (const spec of tables) {
86
+     const normalized = normalizeTableSpec(spec);
+     const {
+       tableName,
+       schemaName,
+       alias
+     } = normalized;
+     const tableDef = getTableFromSchema(databaseSchema, schemaName, tableName);
+     if (!tableDef) {
+       warnings.push(`Table "${schemaName}.${tableName}" not found, skipping`);
+       continue;
+     }
+     const columns = {};
+     let columnCount = 0;
+     for (const col of tableDef.columns) {
+       if (shouldSkipColumn(col)) {
+         continue;
+       }
+       const psType = mapColumnType(col);
+       if (psType === null) {
+         warnings.push(`Column "${tableName}.${col.name}" has unsupported type "${col.type}", skipping`);
+         continue;
+       }
+       columns[col.name] = psType;
+       columnCount++;
+     }
+     if (columnCount > 0) {
+       if (tableDefinitions[alias]) {
+         warnings.push(`Duplicate table "${alias}" - later definition wins`);
+       }
+       tableDefinitions[alias] = normalized.trackMetadata ? new Table(columns, {
+         trackMetadata: true
+       }) : new Table(columns);
+       tableMap.set(alias, normalized);
+     } else {
+       warnings.push(`Table "${schemaName}.${tableName}" has no valid columns after filtering, skipping`);
+     }
+   }
+   const allTableDefinitions = {
+     ...tableDefinitions,
+     ...LOCAL_ONLY_TABLES
+   };
+   const schema = new Schema(allTableDefinitions);
+   return {
+     schema,
+     warnings,
+     tableMap
+   };
+ }
+ function createSchemaRouter(tableMap) {
+   return (table) => {
+     const spec = tableMap.get(table);
+     if (spec) {
+       return spec.schemaName;
+     }
+     if (table.includes(".")) {
+       const [schema] = table.split(".");
+       return schema;
+     }
+     return "public";
+   };
+ }
+
+ // src/config/index.ts
+ function definePolConfig(config) {
+   if (!config.databaseSchema) {
+     throw new Error("[definePolConfig] databaseSchema is required");
+   }
+   if (!config.powersync || config.powersync.length === 0) {
+     throw new Error("[definePolConfig] powersync array must contain at least one table");
+   }
+   const {
+     schema,
+     warnings,
+     tableMap
+   } = generatePowerSyncSchema(config.databaseSchema, config.powersync);
+   if (warnings.length > 0 && typeof __DEV__ !== "undefined" && __DEV__) {
+     console.warn("[definePolConfig] Schema generation warnings:", warnings);
+   }
+   if (tableMap.size === 0) {
+     throw new Error(`[definePolConfig] No valid tables found in powersync array. Check that the tables exist in databaseSchema. Warnings: ${warnings.join(", ")}`);
+   }
+   const tableStrategies = {};
+   const powersyncTables = [];
+   for (const [alias, spec] of tableMap.entries()) {
+     tableStrategies[alias] = {
+       strategy: "powersync",
+       schema: spec.schemaName,
+       table: spec.tableName,
+       alias
+       // PowerSync SQLite table name
+     };
+     const qualifiedName = spec.schemaName === "public" ? spec.tableName : `${spec.schemaName}.${spec.tableName}`;
+     if (qualifiedName !== alias) {
+       tableStrategies[qualifiedName] = {
+         strategy: "powersync",
+         schema: spec.schemaName,
+         table: spec.tableName,
+         alias
+         // PowerSync SQLite table name (same alias for both keys)
+       };
+     }
+     powersyncTables.push(qualifiedName);
+   }
+   const schemaRouter = createSchemaRouter(tableMap);
+   const processedConfig = {
+     ...config,
+     __generatedSchema: schema,
+     __tableStrategies: tableStrategies,
+     __schemaRouter: schemaRouter,
+     __powersyncTables: powersyncTables
+   };
+   return processedConfig;
+ }
+ function isProcessedPolConfig(config) {
+   return typeof config === "object" && config !== null && "__generatedSchema" in config && "__tableStrategies" in config;
+ }
+ function getTableStrategy(config, tableName) {
+   return config.__tableStrategies[tableName];
+ }
+ function isTablePowerSync(config, tableName) {
+   const strategy = getTableStrategy(config, tableName);
+   return strategy?.strategy === "powersync";
+ }
+
+ export {
+   normalizeTableSpec,
+   getTableFromSchema,
+   generatePowerSyncSchema,
+   createSchemaRouter,
+   definePolConfig,
+   isProcessedPolConfig,
+   getTableStrategy,
+   isTablePowerSync
+ };
+ //# sourceMappingURL=chunk-HRAVPIAZ.js.map
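For orientation, a minimal usage sketch of the new config entry point above, in TypeScript. The table names and the databaseSchema import mirror the examples embedded in this chunk's source and are illustrative, not part of this diff:

import { definePolConfig, isTablePowerSync } from '@pol-studios/powersync/config';
import { databaseSchema } from './databaseSchema'; // assumed generated-types module

const polConfig = definePolConfig({
  databaseSchema,
  powersync: ['Project', { table: 'Profile', schema: 'core', trackMetadata: true }],
});

// Cross-schema tables are keyed twice: by PascalCase alias ("CoreProfile")
// and by qualified name ("core.Profile"); both entries carry the same alias.
polConfig.__tableStrategies['CoreProfile'];  // { strategy: 'powersync', schema: 'core', table: 'Profile', alias: 'CoreProfile' }
polConfig.__tableStrategies['core.Profile']; // same strategy, same alias
isTablePowerSync(polConfig, 'Project');      // true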
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/config/schema-generator.ts","../src/config/index.ts"],"sourcesContent":["/**\n * Schema Generator for @pol-studios/powersync\n *\n * Generates PowerSync Schema at runtime from the Supabase database schema object.\n * This eliminates the need to manually maintain a separate PowerSync schema file.\n *\n * Type Mapping:\n * - number -> column.real (ALL numbers use real, no integer/real distinction)\n * - boolean -> column.integer (SQLite stores booleans as 0/1)\n * - string -> column.text\n * - date -> column.text (ISO strings)\n * - enum -> column.text\n * - json -> SKIPPED (not supported in PowerSync)\n * - arrays -> SKIPPED (not supported in PowerSync)\n *\n * Skipped columns:\n * - `id` (PowerSync handles this automatically)\n * - JSON types (complex objects not supported)\n * - Array types (not supported)\n */\n\nimport { column, Schema, Table } from '@powersync/react-native';\nimport type { DatabaseSchemaObject, DatabaseTableDef, DatabaseColumnDef, TableSpec } from './types';\nimport { LOCAL_ONLY_TABLES } from '../sync/local-tables';\n\n// Type alias for PowerSync column types (same as what column.real/integer/text return)\ntype PowerSyncColumn = typeof column.real | typeof column.integer | typeof column.text;\n\n// ─── Table Spec Normalization ─────────────────────────────────────────────────\n\n/**\n * Normalized table specification.\n */\ninterface NormalizedTableSpec {\n /** Table name (without schema prefix) */\n tableName: string;\n /** Schema name */\n schemaName: string;\n /** Whether to track sync metadata */\n trackMetadata: boolean;\n /**\n * Alias for PowerSync table name.\n * For cross-schema tables, this is SchemaTable (e.g., \"CoreProfile\").\n * For public schema, this is just the table name.\n */\n alias: string;\n}\n\n/**\n * Normalize a TableSpec to a consistent format.\n *\n * @param spec - The table specification (string or object)\n * @returns Normalized specification with all fields filled\n */\nexport function normalizeTableSpec(spec: TableSpec): NormalizedTableSpec {\n if (typeof spec === 'string') {\n // Check for schema prefix (e.g., \"core.Profile\")\n if (spec.includes('.')) {\n const [schemaName, tableName] = spec.split('.');\n return {\n tableName,\n schemaName,\n trackMetadata: false,\n alias: toPascalCase(schemaName) + tableName\n };\n }\n // Simple table name - public schema\n return {\n tableName: spec,\n schemaName: 'public',\n trackMetadata: false,\n alias: spec\n };\n }\n\n // Object format\n const schemaName = spec.schema || 'public';\n const alias = schemaName === 'public' ? spec.table : toPascalCase(schemaName) + spec.table;\n return {\n tableName: spec.table,\n schemaName,\n trackMetadata: spec.trackMetadata ?? 
false,\n alias\n };\n}\n\n/**\n * Convert a string to PascalCase.\n */\nfunction toPascalCase(str: string): string {\n return str.charAt(0).toUpperCase() + str.slice(1);\n}\n\n// ─── Type Mapping ─────────────────────────────────────────────────────────────\n\n/**\n * Map a database column type to a PowerSync column type.\n *\n * @param col - The column definition\n * @returns PowerSync column type or null if should be skipped\n */\nfunction mapColumnType(col: DatabaseColumnDef): PowerSyncColumn | null {\n const type = col.type;\n\n // Skip JSON types - not supported in PowerSync\n if (type === 'json') {\n return null;\n }\n\n // Map based on type\n switch (type) {\n case 'number':\n // ALL numbers use column.real - no heuristics\n return column.real;\n case 'boolean':\n // SQLite stores booleans as integers (0/1)\n return column.integer;\n case 'string':\n case 'date':\n case 'enum':\n return column.text;\n default:\n // Unknown types default to text\n return column.text;\n }\n}\n\n/**\n * Check if a column should be skipped.\n *\n * @param col - The column definition\n * @returns true if the column should be skipped\n */\nfunction shouldSkipColumn(col: DatabaseColumnDef): boolean {\n // Skip `id` - PowerSync handles this automatically\n if (col.name === 'id') {\n return true;\n }\n\n // Skip JSON types\n if (col.type === 'json') {\n return true;\n }\n\n // Skip array-like columns (columns ending with 's' that are JSON)\n // This is a heuristic - actual array detection would need type info\n // For now, we rely on the type being 'json' which is already skipped\n\n return false;\n}\n\n// ─── Table Lookup ─────────────────────────────────────────────────────────────\n\n/**\n * Get a table definition from the database schema.\n *\n * @param databaseSchema - The full database schema object\n * @param schemaName - The schema name (e.g., \"public\", \"core\")\n * @param tableName - The table name\n * @returns The table definition or null if not found\n */\nexport function getTableFromSchema(databaseSchema: DatabaseSchemaObject, schemaName: string, tableName: string): DatabaseTableDef | null {\n const schema = databaseSchema.schemas[schemaName];\n if (!schema) {\n console.warn(`[PolConfig] Schema \"${schemaName}\" not found in databaseSchema`);\n return null;\n }\n const table = schema.tables[tableName];\n if (!table) {\n // Also check views\n const view = schema.views?.[tableName];\n if (view) {\n return view;\n }\n console.warn(`[PolConfig] Table \"${schemaName}.${tableName}\" not found in databaseSchema`);\n return null;\n }\n return table;\n}\n\n// ─── Schema Generation ────────────────────────────────────────────────────────\n\n/**\n * Result of schema generation.\n */\nexport interface SchemaGenerationResult {\n /** The generated PowerSync Schema */\n schema: Schema;\n /** Warnings encountered during generation */\n warnings: string[];\n /** Map of table alias to normalized spec */\n tableMap: Map<string, NormalizedTableSpec>;\n}\n\n/**\n * Generate a PowerSync Schema from the database schema and table specs.\n *\n * @param databaseSchema - The database schema object (from generated types)\n * @param tables - Array of table specifications to sync\n * @returns The generated PowerSync Schema and metadata\n *\n * @example\n * ```typescript\n * const { schema, warnings } = generatePowerSyncSchema(databaseSchema, [\n * \"Project\",\n * \"Task\",\n * { table: \"Profile\", schema: \"core\" },\n * ]);\n *\n * // Use the schema with PowerSyncProvider\n * <PowerSyncProvider config={{ schema, ... 
}} />\n * ```\n */\nexport function generatePowerSyncSchema(databaseSchema: DatabaseSchemaObject, tables: TableSpec[]): SchemaGenerationResult {\n const warnings: string[] = [];\n const tableDefinitions: Record<string, Table> = {};\n const tableMap = new Map<string, NormalizedTableSpec>();\n for (const spec of tables) {\n const normalized = normalizeTableSpec(spec);\n const {\n tableName,\n schemaName,\n alias\n } = normalized;\n\n // Get table definition from schema\n const tableDef = getTableFromSchema(databaseSchema, schemaName, tableName);\n if (!tableDef) {\n warnings.push(`Table \"${schemaName}.${tableName}\" not found, skipping`);\n continue;\n }\n\n // Build PowerSync columns\n const columns: Record<string, PowerSyncColumn> = {};\n let columnCount = 0;\n for (const col of tableDef.columns) {\n // Skip certain columns\n if (shouldSkipColumn(col)) {\n continue;\n }\n\n // Map the column type\n const psType = mapColumnType(col);\n if (psType === null) {\n // Column type not supported, skip with warning\n warnings.push(`Column \"${tableName}.${col.name}\" has unsupported type \"${col.type}\", skipping`);\n continue;\n }\n columns[col.name] = psType;\n columnCount++;\n }\n\n // Create PowerSync Table\n if (columnCount > 0) {\n // Check for duplicate tables\n if (tableDefinitions[alias]) {\n warnings.push(`Duplicate table \"${alias}\" - later definition wins`);\n }\n\n // Pass trackMetadata option if specified\n tableDefinitions[alias] = normalized.trackMetadata ? new Table(columns, {\n trackMetadata: true\n }) : new Table(columns);\n tableMap.set(alias, normalized);\n } else {\n warnings.push(`Table \"${schemaName}.${tableName}\" has no valid columns after filtering, skipping`);\n }\n }\n\n // Add local-only tables for sync status persistence\n const allTableDefinitions = {\n ...tableDefinitions,\n ...LOCAL_ONLY_TABLES\n };\n\n // Create the Schema\n const schema = new Schema(allTableDefinitions);\n return {\n schema,\n warnings,\n tableMap\n };\n}\n\n// ─── Schema Router ────────────────────────────────────────────────────────────\n\n/**\n * Create a schema router function from the table map.\n *\n * @param tableMap - Map of table alias to normalized spec\n * @returns A function that returns the schema for a given table\n */\nexport function createSchemaRouter(tableMap: Map<string, NormalizedTableSpec>): (table: string) => string {\n return (table: string): string => {\n // Check if it's a known PowerSync table\n const spec = tableMap.get(table);\n if (spec) {\n return spec.schemaName;\n }\n\n // Check for schema-prefixed format (e.g., \"core.Profile\")\n if (table.includes('.')) {\n const [schema] = table.split('.');\n return schema;\n }\n\n // Default to public\n return 'public';\n };\n}","/**\n * Unified Configuration for @pol-studios/powersync\n *\n * This module provides the `definePolConfig` function for creating\n * a unified configuration that automatically generates:\n * - PowerSync Schema from the database schema\n * - Table routing strategies for DataLayer\n * - Attachment queue configuration\n *\n * @example\n * ```typescript\n * import { definePolConfig } from '@pol-studios/powersync/config';\n * import { databaseSchema } from './databaseSchema';\n *\n * export const polConfig = definePolConfig({\n * powerSyncUrl: process.env.EXPO_PUBLIC_POWERSYNC_URL,\n * databaseSchema,\n * powersync: [\"Project\", \"Task\", { table: \"Profile\", schema: \"core\" }],\n * attachments: {\n * source: { type: 'supabase-bucket', bucket: 'photos' },\n * watchPaths: (db, supabase, onUpdate) => 
{\n * const abort = new AbortController();\n * db.watch(\n * `SELECT storagePath FROM photos WHERE storagePath IS NOT NULL`,\n * [],\n * { onResult: (r) => onUpdate(r.rows._array.map(x => x.storagePath)) },\n * { signal: abort.signal }\n * );\n * return () => abort.abort();\n * },\n * },\n * });\n *\n * // Use with OfflineDataProvider\n * <OfflineDataProvider polConfig={polConfig}>\n * <App />\n * </OfflineDataProvider>\n * ```\n */\n\nimport type { PolConfig, ProcessedPolConfig, ProcessedTableStrategy, TableSpec } from './types';\nimport { generatePowerSyncSchema, normalizeTableSpec, createSchemaRouter } from './schema-generator';\n\n// Re-export types\nexport type { PolConfig, ProcessedPolConfig, TableSpec, ProcessedTableStrategy, PolConfigAttachments, PolConfigConnector, PolConfigSync, DatabaseSchemaObject, DatabaseTableDef, DatabaseColumnDef,\n// Alias for attachment source type from config perspective\nPolAttachmentSource } from './types';\n\n// Re-export attachment source types from attachments module\n// (these are the canonical definitions)\nexport type { AttachmentSource, SupabaseBucketSource, CustomAttachmentSource } from '../attachments/types';\n\n// Re-export schema generator utilities\nexport { generatePowerSyncSchema, normalizeTableSpec, createSchemaRouter, getTableFromSchema } from './schema-generator';\n\n// ─── Configuration Processing ─────────────────────────────────────────────────\n\n/**\n * Create a unified PowerSync configuration.\n *\n * This function processes the configuration to generate:\n * - `__generatedSchema`: PowerSync Schema ready for use\n * - `__tableStrategies`: Lookup map for routing queries\n * - `__schemaRouter`: Function to resolve table schemas\n *\n * @param config - The unified configuration\n * @returns Processed configuration with generated artifacts\n *\n * @example\n * ```typescript\n * const polConfig = definePolConfig({\n * powerSyncUrl: 'https://your-instance.powersync.journeyapps.com',\n * databaseSchema,\n * powersync: [\n * \"Project\",\n * \"Task\",\n * { table: \"Profile\", schema: \"core\" },\n * ],\n * });\n *\n * // Access generated schema\n * console.log(polConfig.__generatedSchema);\n *\n * // Check if a table uses PowerSync\n * const strategy = polConfig.__tableStrategies[\"Project\"];\n * console.log(strategy.strategy); // \"powersync\"\n * ```\n */\nexport function definePolConfig(config: PolConfig): ProcessedPolConfig {\n // Validate required fields\n if (!config.databaseSchema) {\n throw new Error('[definePolConfig] databaseSchema is required');\n }\n if (!config.powersync || config.powersync.length === 0) {\n throw new Error('[definePolConfig] powersync array must contain at least one table');\n }\n\n // Generate PowerSync schema\n const {\n schema,\n warnings,\n tableMap\n } = generatePowerSyncSchema(config.databaseSchema, config.powersync);\n\n // Log warnings in development (safe check for non-React-Native environments)\n if (warnings.length > 0 && typeof __DEV__ !== 'undefined' && __DEV__) {\n console.warn('[definePolConfig] Schema generation warnings:', warnings);\n }\n\n // Validate that at least one table was successfully generated\n if (tableMap.size === 0) {\n throw new Error('[definePolConfig] No valid tables found in powersync array. ' + `Check that the tables exist in databaseSchema. 
Warnings: ${warnings.join(', ')}`);\n }\n\n // Build table strategies lookup\n const tableStrategies: Record<string, ProcessedTableStrategy> = {};\n const powersyncTables: string[] = [];\n for (const [alias, spec] of tableMap.entries()) {\n // Add to strategies map with alias as key\n tableStrategies[alias] = {\n strategy: 'powersync',\n schema: spec.schemaName,\n table: spec.tableName,\n alias: alias // PowerSync SQLite table name\n };\n\n // Also add with full qualified name for lookup\n const qualifiedName = spec.schemaName === 'public' ? spec.tableName : `${spec.schemaName}.${spec.tableName}`;\n if (qualifiedName !== alias) {\n tableStrategies[qualifiedName] = {\n strategy: 'powersync',\n schema: spec.schemaName,\n table: spec.tableName,\n alias: alias // PowerSync SQLite table name (same alias for both keys)\n };\n }\n powersyncTables.push(qualifiedName);\n }\n\n // Create schema router\n const schemaRouter = createSchemaRouter(tableMap);\n\n // Return processed config\n const processedConfig: ProcessedPolConfig = {\n ...config,\n __generatedSchema: schema,\n __tableStrategies: tableStrategies,\n __schemaRouter: schemaRouter,\n __powersyncTables: powersyncTables\n };\n return processedConfig;\n}\n\n// ─── Helper Functions ─────────────────────────────────────────────────────────\n\n/**\n * Check if a configuration is a processed PolConfig.\n *\n * @param config - The configuration to check\n * @returns true if the config has been processed by definePolConfig\n */\nexport function isProcessedPolConfig(config: unknown): config is ProcessedPolConfig {\n return typeof config === 'object' && config !== null && '__generatedSchema' in config && '__tableStrategies' in config;\n}\n\n/**\n * Get the strategy for a table from a processed config.\n *\n * @param config - The processed configuration\n * @param tableName - Table name (can be \"Table\" or \"schema.Table\")\n * @returns The strategy or undefined if not found\n */\nexport function getTableStrategy(config: ProcessedPolConfig, tableName: string): ProcessedTableStrategy | undefined {\n return config.__tableStrategies[tableName];\n}\n\n/**\n * Check if a table uses PowerSync for sync.\n *\n * @param config - The processed configuration\n * @param tableName - Table name\n * @returns true if the table is synced via PowerSync\n */\nexport function isTablePowerSync(config: ProcessedPolConfig, tableName: string): boolean {\n const strategy = getTableStrategy(config, tableName);\n return strategy?.strategy === 'powersync';\n}\n\n// ─── Global Declaration ───────────────────────────────────────────────────────\n\n// Declare __DEV__ global for React Native\ndeclare const __DEV__: 
boolean;"],"mappings":";;;;;AAqBA,SAAS,QAAQ,QAAQ,aAAa;AAiC/B,SAAS,mBAAmB,MAAsC;AACvE,MAAI,OAAO,SAAS,UAAU;AAE5B,QAAI,KAAK,SAAS,GAAG,GAAG;AACtB,YAAM,CAACA,aAAY,SAAS,IAAI,KAAK,MAAM,GAAG;AAC9C,aAAO;AAAA,QACL;AAAA,QACA,YAAAA;AAAA,QACA,eAAe;AAAA,QACf,OAAO,aAAaA,WAAU,IAAI;AAAA,MACpC;AAAA,IACF;AAEA,WAAO;AAAA,MACL,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,eAAe;AAAA,MACf,OAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,aAAa,KAAK,UAAU;AAClC,QAAM,QAAQ,eAAe,WAAW,KAAK,QAAQ,aAAa,UAAU,IAAI,KAAK;AACrF,SAAO;AAAA,IACL,WAAW,KAAK;AAAA,IAChB;AAAA,IACA,eAAe,KAAK,iBAAiB;AAAA,IACrC;AAAA,EACF;AACF;AAKA,SAAS,aAAa,KAAqB;AACzC,SAAO,IAAI,OAAO,CAAC,EAAE,YAAY,IAAI,IAAI,MAAM,CAAC;AAClD;AAUA,SAAS,cAAc,KAAgD;AACrE,QAAM,OAAO,IAAI;AAGjB,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,EACT;AAGA,UAAQ,MAAM;AAAA,IACZ,KAAK;AAEH,aAAO,OAAO;AAAA,IAChB,KAAK;AAEH,aAAO,OAAO;AAAA,IAChB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,OAAO;AAAA,IAChB;AAEE,aAAO,OAAO;AAAA,EAClB;AACF;AAQA,SAAS,iBAAiB,KAAiC;AAEzD,MAAI,IAAI,SAAS,MAAM;AACrB,WAAO;AAAA,EACT;AAGA,MAAI,IAAI,SAAS,QAAQ;AACvB,WAAO;AAAA,EACT;AAMA,SAAO;AACT;AAYO,SAAS,mBAAmB,gBAAsC,YAAoB,WAA4C;AACvI,QAAM,SAAS,eAAe,QAAQ,UAAU;AAChD,MAAI,CAAC,QAAQ;AACX,YAAQ,KAAK,uBAAuB,UAAU,+BAA+B;AAC7E,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,OAAO,OAAO,SAAS;AACrC,MAAI,CAAC,OAAO;AAEV,UAAM,OAAO,OAAO,QAAQ,SAAS;AACrC,QAAI,MAAM;AACR,aAAO;AAAA,IACT;AACA,YAAQ,KAAK,sBAAsB,UAAU,IAAI,SAAS,+BAA+B;AACzF,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAmCO,SAAS,wBAAwB,gBAAsC,QAA6C;AACzH,QAAM,WAAqB,CAAC;AAC5B,QAAM,mBAA0C,CAAC;AACjD,QAAM,WAAW,oBAAI,IAAiC;AACtD,aAAW,QAAQ,QAAQ;AACzB,UAAM,aAAa,mBAAmB,IAAI;AAC1C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAGJ,UAAM,WAAW,mBAAmB,gBAAgB,YAAY,SAAS;AACzE,QAAI,CAAC,UAAU;AACb,eAAS,KAAK,UAAU,UAAU,IAAI,SAAS,uBAAuB;AACtE;AAAA,IACF;AAGA,UAAM,UAA2C,CAAC;AAClD,QAAI,cAAc;AAClB,eAAW,OAAO,SAAS,SAAS;AAElC,UAAI,iBAAiB,GAAG,GAAG;AACzB;AAAA,MACF;AAGA,YAAM,SAAS,cAAc,GAAG;AAChC,UAAI,WAAW,MAAM;AAEnB,iBAAS,KAAK,WAAW,SAAS,IAAI,IAAI,IAAI,2BAA2B,IAAI,IAAI,aAAa;AAC9F;AAAA,MACF;AACA,cAAQ,IAAI,IAAI,IAAI;AACpB;AAAA,IACF;AAGA,QAAI,cAAc,GAAG;AAEnB,UAAI,iBAAiB,KAAK,GAAG;AAC3B,iBAAS,KAAK,oBAAoB,KAAK,2BAA2B;AAAA,MACpE;AAGA,uBAAiB,KAAK,IAAI,WAAW,gBAAgB,IAAI,MAAM,SAAS;AAAA,QACtE,eAAe;AAAA,MACjB,CAAC,IAAI,IAAI,MAAM,OAAO;AACtB,eAAS,IAAI,OAAO,UAAU;AAAA,IAChC,OAAO;AACL,eAAS,KAAK,UAAU,UAAU,IAAI,SAAS,kDAAkD;AAAA,IACnG;AAAA,EACF;AAGA,QAAM,sBAAsB;AAAA,IAC1B,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAGA,QAAM,SAAS,IAAI,OAAO,mBAAmB;AAC7C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAUO,SAAS,mBAAmB,UAAuE;AACxG,SAAO,CAAC,UAA0B;AAEhC,UAAM,OAAO,SAAS,IAAI,KAAK;AAC/B,QAAI,MAAM;AACR,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,MAAM,SAAS,GAAG,GAAG;AACvB,YAAM,CAAC,MAAM,IAAI,MAAM,MAAM,GAAG;AAChC,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AACF;;;AC7NO,SAAS,gBAAgB,QAAuC;AAErE,MAAI,CAAC,OAAO,gBAAgB;AAC1B,UAAM,IAAI,MAAM,8CAA8C;AAAA,EAChE;AACA,MAAI,CAAC,OAAO,aAAa,OAAO,UAAU,WAAW,GAAG;AACtD,UAAM,IAAI,MAAM,mEAAmE;AAAA,EACrF;AAGA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,wBAAwB,OAAO,gBAAgB,OAAO,SAAS;AAGnE,MAAI,SAAS,SAAS,KAAK,OAAO,YAAY,eAAe,SAAS;AACpE,YAAQ,KAAK,iDAAiD,QAAQ;AAAA,EACxE;AAGA,MAAI,SAAS,SAAS,GAAG;AACvB,UAAM,IAAI,MAAM,wHAA6H,SAAS,KAAK,IAAI,CAAC,EAAE;AAAA,EACpK;AAGA,QAAM,kBAA0D,CAAC;AACjE,QAAM,kBAA4B,CAAC;AACnC,aAAW,CAAC,OAAO,IAAI,KAAK,SAAS,QAAQ,GAAG;AAE9C,oBAAgB,KAAK,IAAI;AAAA,MACvB,UAAU;AAAA,MACV,QAAQ,KAAK;AAAA,MACb,OAAO,KAAK;AAAA,MACZ;AAAA;AAAA,IACF;AAGA,UAAM,gBAAgB,KAAK,eAAe,WAAW,KAAK,YAAY,GAAG,KAAK,UAAU,IAAI,KAAK,SAAS;AAC1G,QAAI,kBAAkB,OAAO;AAC3B,sBAAgB,aAAa,IAAI;AAAA,QAC/B,UAAU;AAAA,QACV,QAAQ,KAAK;AAAA,QACb,OAAO,KAAK;AAAA,QACZ;AAAA;AAAA,MACF;AAAA,IACF;AACA,oBAAgB,KAAK,aAAa;AAAA,EACpC;AAGA,QAAM,eAAe,mBAAmB,QAAQ;AAGh
D,QAAM,kBAAsC;AAAA,IAC1C,GAAG;AAAA,IACH,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,gBAAgB;AAAA,IAChB,mBAAmB;AAAA,EACrB;AACA,SAAO;AACT;AAUO,SAAS,qBAAqB,QAA+C;AAClF,SAAO,OAAO,WAAW,YAAY,WAAW,QAAQ,uBAAuB,UAAU,uBAAuB;AAClH;AASO,SAAS,iBAAiB,QAA4B,WAAuD;AAClH,SAAO,OAAO,kBAAkB,SAAS;AAC3C;AASO,SAAS,iBAAiB,QAA4B,WAA4B;AACvF,QAAM,WAAW,iBAAiB,QAAQ,SAAS;AACnD,SAAO,UAAU,aAAa;AAChC;","names":["schemaName"]}
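The source embedded in the map above also documents the router returned by createSchemaRouter; a short sketch of its resolution order (inputs illustrative):

const route = createSchemaRouter(tableMap);
route('CoreProfile'); // 'core'   - known alias, schema taken from the table map
route('other.Thing'); // 'other'  - unknown table, schema parsed from the dot prefix
route('Orphan');      // 'public' - default when neither rule matches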
@@ -1,6 +1,6 @@
  import {
    resolveBucket
- } from "./chunk-Z6VOBGTU.js";
+ } from "./chunk-UOMHWUHV.js";
  import {
    AbortError
  } from "./chunk-FV2HXEIY.js";
@@ -93,6 +93,8 @@ var SupabaseStorageAdapter = class {
  fileSystem;
  logger;
  imageTransform;
+ useSignedUrls;
+ customUrlResolver;
  constructor(options, fileSystem) {
    this.client = options.client;
    this.defaultBucket = options.defaultBucket;
@@ -102,6 +104,8 @@ var SupabaseStorageAdapter = class {
    this.fileSystem = fileSystem;
    this.logger = options.logger;
    this.imageTransform = options.imageTransform;
+   this.useSignedUrls = options.useSignedUrls ?? true;
+   this.customUrlResolver = options.customUrlResolver;
  }
  /**
   * Update image transform options (can be set after construction).
@@ -250,15 +254,39 @@ var SupabaseStorageAdapter = class {
    });
  }
  /**
-  * Get a signed download URL for a remote file.
+  * Get a download URL for a remote file.
   *
-  * Includes session refresh logic to handle stale tokens during background downloads.
+  * Supports three modes:
+  * 1. Custom URL resolver (for non-Supabase backends)
+  * 2. Public URLs (for public Supabase buckets, when useSignedUrls=false)
+  * 3. Signed URLs (default, for private Supabase buckets)
+  *
+  * Includes session refresh logic for signed URLs to handle stale tokens during background downloads.
   * If the initial request fails with an auth error (400/401/403), refreshes the session
   * and retries once.
   */
  async getDownloadUrl(remotePath) {
    const normalizedPath = normalizeStoragePath(remotePath);
+   if (this.customUrlResolver) {
+     this.logger?.debug?.(`[SupabaseStorageAdapter] Using custom URL resolver`, {
+       path: normalizedPath
+     });
+     return this.customUrlResolver(normalizedPath);
+   }
    const bucket = this.resolveBucket(normalizedPath);
+   if (!this.useSignedUrls) {
+     this.logger?.debug?.(`[SupabaseStorageAdapter] Creating public URL`, {
+       bucket,
+       path: normalizedPath
+     });
+     const {
+       data: data2
+     } = this.client.storage.from(bucket).getPublicUrl(normalizedPath);
+     if (!data2?.publicUrl) {
+       throw new Error(`Failed to create public URL for '${normalizedPath}' in bucket '${bucket}': No URL returned`);
+     }
+     return data2.publicUrl;
+   }
    const useTransform = this.imageTransform?.enabled !== false && this.isTransformableImage(normalizedPath);
    const signedUrlOptions = useTransform && this.imageTransform ? {
      transform: {
@@ -453,4 +481,4 @@ export {
    SupabaseStorageAdapter,
    createSupabaseStorageAdapter
  };
- //# sourceMappingURL=chunk-XAEII4ZX.js.map
+ //# sourceMappingURL=chunk-NUGQOTEM.js.map
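The two new SupabaseStorageAdapter options above change how getDownloadUrl resolves URLs: a custom resolver wins first, then public URLs when useSignedUrls is false, else the existing signed-URL path. A hedged construction sketch; the supabase client and fileSystem values are assumed to be in scope, and the bucket name is a placeholder:

// Public bucket: getDownloadUrl() calls storage.getPublicUrl() instead of signing
const publicAdapter = new SupabaseStorageAdapter(
  { client: supabase, defaultBucket: 'photos', useSignedUrls: false },
  fileSystem
);

// Non-Supabase CDN: bypass Supabase storage entirely for downloads
const cdnAdapter = new SupabaseStorageAdapter(
  {
    client: supabase,
    defaultBucket: 'photos',
    customUrlResolver: async (path) => `https://cdn.example.com/${path}`,
  },
  fileSystem
);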