@dotdo/pglite 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. package/README.md +233 -0
  2. package/dist/chunk-FY2WAP66.js +2 -0
  3. package/dist/chunk-FY2WAP66.js.map +1 -0
  4. package/dist/chunk-NS5M47NH.js +9 -0
  5. package/dist/chunk-NS5M47NH.js.map +1 -0
  6. package/dist/chunk-OO3CQQYD.js +2 -0
  7. package/dist/chunk-OO3CQQYD.js.map +1 -0
  8. package/dist/chunk-VCBR6USK.js +2 -0
  9. package/dist/chunk-VCBR6USK.js.map +1 -0
  10. package/dist/chunk-XGK36GMM.js +2 -0
  11. package/dist/chunk-XGK36GMM.js.map +1 -0
  12. package/dist/contrib/amcheck.cjs +2 -0
  13. package/dist/contrib/amcheck.cjs.map +1 -0
  14. package/dist/contrib/amcheck.js +2 -0
  15. package/dist/contrib/amcheck.js.map +1 -0
  16. package/dist/contrib/auto_explain.cjs +2 -0
  17. package/dist/contrib/auto_explain.cjs.map +1 -0
  18. package/dist/contrib/auto_explain.js +2 -0
  19. package/dist/contrib/auto_explain.js.map +1 -0
  20. package/dist/contrib/bloom.cjs +2 -0
  21. package/dist/contrib/bloom.cjs.map +1 -0
  22. package/dist/contrib/bloom.js +2 -0
  23. package/dist/contrib/bloom.js.map +1 -0
  24. package/dist/contrib/btree_gin.cjs +2 -0
  25. package/dist/contrib/btree_gin.cjs.map +1 -0
  26. package/dist/contrib/btree_gin.js +2 -0
  27. package/dist/contrib/btree_gin.js.map +1 -0
  28. package/dist/contrib/btree_gist.cjs +2 -0
  29. package/dist/contrib/btree_gist.cjs.map +1 -0
  30. package/dist/contrib/btree_gist.js +2 -0
  31. package/dist/contrib/btree_gist.js.map +1 -0
  32. package/dist/contrib/citext.cjs +2 -0
  33. package/dist/contrib/citext.cjs.map +1 -0
  34. package/dist/contrib/citext.js +2 -0
  35. package/dist/contrib/citext.js.map +1 -0
  36. package/dist/contrib/cube.cjs +2 -0
  37. package/dist/contrib/cube.cjs.map +1 -0
  38. package/dist/contrib/cube.js +2 -0
  39. package/dist/contrib/cube.js.map +1 -0
  40. package/dist/contrib/dict_int.cjs +2 -0
  41. package/dist/contrib/dict_int.cjs.map +1 -0
  42. package/dist/contrib/dict_int.js +2 -0
  43. package/dist/contrib/dict_int.js.map +1 -0
  44. package/dist/contrib/dict_xsyn.cjs +2 -0
  45. package/dist/contrib/dict_xsyn.cjs.map +1 -0
  46. package/dist/contrib/dict_xsyn.js +2 -0
  47. package/dist/contrib/dict_xsyn.js.map +1 -0
  48. package/dist/contrib/earthdistance.cjs +2 -0
  49. package/dist/contrib/earthdistance.cjs.map +1 -0
  50. package/dist/contrib/earthdistance.js +2 -0
  51. package/dist/contrib/earthdistance.js.map +1 -0
  52. package/dist/contrib/file_fdw.cjs +2 -0
  53. package/dist/contrib/file_fdw.cjs.map +1 -0
  54. package/dist/contrib/file_fdw.js +2 -0
  55. package/dist/contrib/file_fdw.js.map +1 -0
  56. package/dist/contrib/fuzzystrmatch.cjs +2 -0
  57. package/dist/contrib/fuzzystrmatch.cjs.map +1 -0
  58. package/dist/contrib/fuzzystrmatch.js +2 -0
  59. package/dist/contrib/fuzzystrmatch.js.map +1 -0
  60. package/dist/contrib/hstore.cjs +2 -0
  61. package/dist/contrib/hstore.cjs.map +1 -0
  62. package/dist/contrib/hstore.js +2 -0
  63. package/dist/contrib/hstore.js.map +1 -0
  64. package/dist/contrib/intarray.cjs +2 -0
  65. package/dist/contrib/intarray.cjs.map +1 -0
  66. package/dist/contrib/intarray.js +2 -0
  67. package/dist/contrib/intarray.js.map +1 -0
  68. package/dist/contrib/isn.cjs +2 -0
  69. package/dist/contrib/isn.cjs.map +1 -0
  70. package/dist/contrib/isn.js +2 -0
  71. package/dist/contrib/isn.js.map +1 -0
  72. package/dist/contrib/lo.cjs +2 -0
  73. package/dist/contrib/lo.cjs.map +1 -0
  74. package/dist/contrib/lo.js +2 -0
  75. package/dist/contrib/lo.js.map +1 -0
  76. package/dist/contrib/ltree.cjs +2 -0
  77. package/dist/contrib/ltree.cjs.map +1 -0
  78. package/dist/contrib/ltree.js +2 -0
  79. package/dist/contrib/ltree.js.map +1 -0
  80. package/dist/contrib/pageinspect.cjs +2 -0
  81. package/dist/contrib/pageinspect.cjs.map +1 -0
  82. package/dist/contrib/pageinspect.js +2 -0
  83. package/dist/contrib/pageinspect.js.map +1 -0
  84. package/dist/contrib/pg_buffercache.cjs +2 -0
  85. package/dist/contrib/pg_buffercache.cjs.map +1 -0
  86. package/dist/contrib/pg_buffercache.js +2 -0
  87. package/dist/contrib/pg_buffercache.js.map +1 -0
  88. package/dist/contrib/pg_freespacemap.cjs +2 -0
  89. package/dist/contrib/pg_freespacemap.cjs.map +1 -0
  90. package/dist/contrib/pg_freespacemap.js +2 -0
  91. package/dist/contrib/pg_freespacemap.js.map +1 -0
  92. package/dist/contrib/pg_surgery.cjs +2 -0
  93. package/dist/contrib/pg_surgery.cjs.map +1 -0
  94. package/dist/contrib/pg_surgery.js +2 -0
  95. package/dist/contrib/pg_surgery.js.map +1 -0
  96. package/dist/contrib/pg_trgm.cjs +2 -0
  97. package/dist/contrib/pg_trgm.cjs.map +1 -0
  98. package/dist/contrib/pg_trgm.js +2 -0
  99. package/dist/contrib/pg_trgm.js.map +1 -0
  100. package/dist/contrib/pg_visibility.cjs +2 -0
  101. package/dist/contrib/pg_visibility.cjs.map +1 -0
  102. package/dist/contrib/pg_visibility.js +2 -0
  103. package/dist/contrib/pg_visibility.js.map +1 -0
  104. package/dist/contrib/pg_walinspect.cjs +2 -0
  105. package/dist/contrib/pg_walinspect.cjs.map +1 -0
  106. package/dist/contrib/pg_walinspect.js +2 -0
  107. package/dist/contrib/pg_walinspect.js.map +1 -0
  108. package/dist/contrib/pgcrypto.cjs +2 -0
  109. package/dist/contrib/pgcrypto.cjs.map +1 -0
  110. package/dist/contrib/pgcrypto.js +2 -0
  111. package/dist/contrib/pgcrypto.js.map +1 -0
  112. package/dist/contrib/seg.cjs +2 -0
  113. package/dist/contrib/seg.cjs.map +1 -0
  114. package/dist/contrib/seg.js +2 -0
  115. package/dist/contrib/seg.js.map +1 -0
  116. package/dist/contrib/tablefunc.cjs +2 -0
  117. package/dist/contrib/tablefunc.cjs.map +1 -0
  118. package/dist/contrib/tablefunc.js +2 -0
  119. package/dist/contrib/tablefunc.js.map +1 -0
  120. package/dist/contrib/tcn.cjs +2 -0
  121. package/dist/contrib/tcn.cjs.map +1 -0
  122. package/dist/contrib/tcn.js +2 -0
  123. package/dist/contrib/tcn.js.map +1 -0
  124. package/dist/contrib/tsm_system_rows.cjs +2 -0
  125. package/dist/contrib/tsm_system_rows.cjs.map +1 -0
  126. package/dist/contrib/tsm_system_rows.js +2 -0
  127. package/dist/contrib/tsm_system_rows.js.map +1 -0
  128. package/dist/contrib/tsm_system_time.cjs +2 -0
  129. package/dist/contrib/tsm_system_time.cjs.map +1 -0
  130. package/dist/contrib/tsm_system_time.js +2 -0
  131. package/dist/contrib/tsm_system_time.js.map +1 -0
  132. package/dist/contrib/unaccent.cjs +2 -0
  133. package/dist/contrib/unaccent.cjs.map +1 -0
  134. package/dist/contrib/unaccent.js +2 -0
  135. package/dist/contrib/unaccent.js.map +1 -0
  136. package/dist/contrib/uuid_ossp.cjs +2 -0
  137. package/dist/contrib/uuid_ossp.cjs.map +1 -0
  138. package/dist/contrib/uuid_ossp.js +2 -0
  139. package/dist/contrib/uuid_ossp.js.map +1 -0
  140. package/dist/fs/base.cjs +2 -0
  141. package/dist/fs/base.cjs.map +1 -0
  142. package/dist/fs/base.js +2 -0
  143. package/dist/fs/base.js.map +1 -0
  144. package/dist/fs/nodefs.cjs +2 -0
  145. package/dist/fs/nodefs.cjs.map +1 -0
  146. package/dist/fs/nodefs.js +2 -0
  147. package/dist/fs/nodefs.js.map +1 -0
  148. package/dist/fs/opfs-ahp.cjs +4 -0
  149. package/dist/fs/opfs-ahp.cjs.map +1 -0
  150. package/dist/fs/opfs-ahp.js +4 -0
  151. package/dist/fs/opfs-ahp.js.map +1 -0
  152. package/dist/index.cjs +23 -0
  153. package/dist/index.cjs.map +1 -0
  154. package/dist/index.js +14 -0
  155. package/dist/index.js.map +1 -0
  156. package/dist/live/index.cjs +133 -0
  157. package/dist/live/index.cjs.map +1 -0
  158. package/dist/live/index.js +133 -0
  159. package/dist/live/index.js.map +1 -0
  160. package/dist/pg_hashids/index.cjs +2 -0
  161. package/dist/pg_hashids/index.cjs.map +1 -0
  162. package/dist/pg_hashids/index.js +2 -0
  163. package/dist/pg_hashids/index.js.map +1 -0
  164. package/dist/pg_ivm/index.cjs +2 -0
  165. package/dist/pg_ivm/index.cjs.map +1 -0
  166. package/dist/pg_ivm/index.js +2 -0
  167. package/dist/pg_ivm/index.js.map +1 -0
  168. package/dist/pg_uuidv7/index.cjs +2 -0
  169. package/dist/pg_uuidv7/index.cjs.map +1 -0
  170. package/dist/pg_uuidv7/index.js +2 -0
  171. package/dist/pg_uuidv7/index.js.map +1 -0
  172. package/dist/pglite.cjs +3 -0
  173. package/dist/pgtap/index.cjs +2 -0
  174. package/dist/pgtap/index.cjs.map +1 -0
  175. package/dist/pgtap/index.js +2 -0
  176. package/dist/pgtap/index.js.map +1 -0
  177. package/dist/templating.cjs +2 -0
  178. package/dist/templating.cjs.map +1 -0
  179. package/dist/templating.js +2 -0
  180. package/dist/templating.js.map +1 -0
  181. package/dist/vector/index.cjs +2 -0
  182. package/dist/vector/index.cjs.map +1 -0
  183. package/dist/vector/index.js +2 -0
  184. package/dist/vector/index.js.map +1 -0
  185. package/dist/worker/index.cjs +9 -0
  186. package/dist/worker/index.cjs.map +1 -0
  187. package/dist/worker/index.js +2 -0
  188. package/dist/worker/index.js.map +1 -0
  189. package/package.json +222 -0
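The dist layout above mirrors the upstream @electric-sql/pglite package: a main entry point, a live-query extension, a web-worker wrapper, pgvector, filesystem backends, and the contrib/* Postgres extensions, each shipped as paired .js/.cjs bundles with source maps. A minimal consumption sketch follows; it assumes the package exposes these bundles via subpath exports in package/package.json (not expanded in this diff), so the import paths and table name are illustrative only.

  // Hypothetical usage, assuming @dotdo/pglite mirrors the upstream
  // @electric-sql/pglite API and maps dist/live, dist/vector, etc. to
  // subpath exports in its package.json.
  import { PGlite } from '@dotdo/pglite'
  import { live } from '@dotdo/pglite/live'      // dist/live/index.js
  import { vector } from '@dotdo/pglite/vector'  // dist/vector/index.js

  const pg = await PGlite.create({ extensions: { live, vector } })

  await pg.exec('CREATE TABLE IF NOT EXISTS todos (id serial PRIMARY KEY, task text)')
  const res = await pg.query('SELECT * FROM todos')
  console.log(res.rows)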
package/dist/live/index.cjs
@@ -0,0 +1,133 @@
+ "use strict";var J=Object.defineProperty;var me=Object.getOwnPropertyDescriptor;var ye=Object.getOwnPropertyNames;var he=Object.prototype.hasOwnProperty;var re=e=>{throw TypeError(e)};var ge=(e,t)=>{for(var n in t)J(e,n,{get:t[n],enumerable:!0})},be=(e,t,n,s)=>{if(t&&typeof t=="object"||typeof t=="function")for(let r of ye(t))!he.call(e,r)&&r!==n&&J(e,r,{get:()=>t[r],enumerable:!(s=me(t,r))||s.enumerable});return e};var _e=e=>be(J({},"__esModule",{value:!0}),e);var Y=(e,t,n)=>t.has(e)||re("Cannot "+n);var u=(e,t,n)=>(Y(e,t,"read from private field"),n?n.call(e):t.get(e)),M=(e,t,n)=>t.has(e)?re("Cannot add the same private member more than once"):t instanceof WeakSet?t.add(e):t.set(e,n),C=(e,t,n,s)=>(Y(e,t,"write to private field"),s?s.call(e,n):t.set(e,n),n),v=(e,t,n)=>(Y(e,t,"access private method"),n);var K=(e,t,n,s)=>({set _(r){C(e,t,r,n)},get _(){return u(e,t,s)}});var ct={};ge(ct,{live:()=>ut});module.exports=_e(ct);function U(e){let t=e.length;for(let n=e.length-1;n>=0;n--){let s=e.charCodeAt(n);s>127&&s<=2047?t++:s>2047&&s<=65535&&(t+=2),s>=56320&&s<=57343&&n--}return t}var A,T,k,j,V,S,W,F,se,P=class{constructor(t=256){this.size=t;M(this,S);M(this,A);M(this,T,5);M(this,k,!1);M(this,j,new TextEncoder);M(this,V,0);C(this,A,v(this,S,W).call(this,t))}addInt32(t){return v(this,S,F).call(this,4),u(this,A).setInt32(u(this,T),t,u(this,k)),C(this,T,u(this,T)+4),this}addInt16(t){return v(this,S,F).call(this,2),u(this,A).setInt16(u(this,T),t,u(this,k)),C(this,T,u(this,T)+2),this}addCString(t){return t&&this.addString(t),v(this,S,F).call(this,1),u(this,A).setUint8(u(this,T),0),K(this,T)._++,this}addString(t=""){let n=U(t);return v(this,S,F).call(this,n),u(this,j).encodeInto(t,new Uint8Array(u(this,A).buffer,u(this,T))),C(this,T,u(this,T)+n),this}add(t){return v(this,S,F).call(this,t.byteLength),new Uint8Array(u(this,A).buffer).set(new Uint8Array(t),u(this,T)),C(this,T,u(this,T)+t.byteLength),this}flush(t){let n=v(this,S,se).call(this,t);return C(this,T,5),C(this,A,v(this,S,W).call(this,this.size)),new Uint8Array(n)}};A=new WeakMap,T=new WeakMap,k=new WeakMap,j=new WeakMap,V=new WeakMap,S=new WeakSet,W=function(t){return new DataView(new ArrayBuffer(t))},F=function(t){if(u(this,A).byteLength-u(this,T)<t){let s=u(this,A).buffer,r=s.byteLength+(s.byteLength>>1)+t;C(this,A,v(this,S,W).call(this,r)),new Uint8Array(u(this,A).buffer).set(new Uint8Array(s))}},se=function(t){if(t){u(this,A).setUint8(u(this,V),t);let n=u(this,T)-(u(this,V)+1);u(this,A).setInt32(u(this,V)+1,n,u(this,k))}return u(this,A).buffer.slice(t?0:5,u(this,T))};var g=new P,Ee=e=>{g.addInt16(3).addInt16(0);for(let s of Object.keys(e))g.addCString(s).addCString(e[s]);g.addCString("client_encoding").addCString("UTF8");let t=g.addCString("").flush(),n=t.byteLength+4;return new P().addInt32(n).add(t.buffer).flush()},we=()=>{let e=new DataView(new ArrayBuffer(8));return e.setInt32(0,8,!1),e.setInt32(4,80877103,!1),new Uint8Array(e.buffer)},Ae=e=>g.addCString(e).flush(112),Te=(e,t)=>(g.addCString(e).addInt32(U(t)).addString(t),g.flush(112)),Re=e=>g.addString(e).flush(112),Se=e=>g.addCString(e).flush(81),Ie=[],Ce=e=>{let t=e.name??"";t.length>63&&(console.error("Warning! 
Postgres only supports 63 characters for query names."),console.error("You supplied %s (%s)",t,t.length),console.error("This can cause conflicts and silent errors executing queries"));let n=g.addCString(t).addCString(e.text).addInt16(e.types?.length??0);return e.types?.forEach(s=>n.addInt32(s)),g.flush(80)},G=new P;var Le=(e,t)=>{for(let n=0;n<e.length;n++){let s=t?t(e[n],n):e[n];if(s===null)g.addInt16(0),G.addInt32(-1);else if(s instanceof ArrayBuffer||ArrayBuffer.isView(s)){let r=ArrayBuffer.isView(s)?s.buffer.slice(s.byteOffset,s.byteOffset+s.byteLength):s;g.addInt16(1),G.addInt32(r.byteLength),G.add(r)}else g.addInt16(0),G.addInt32(U(s)),G.addString(s)}},ve=(e={})=>{let t=e.portal??"",n=e.statement??"",s=e.binary??!1,r=e.values??Ie,l=r.length;return g.addCString(t).addCString(n),g.addInt16(l),Le(r,e.valueMapper),g.addInt16(l),g.add(G.flush().buffer),g.addInt16(s?1:0),g.flush(66)},Ne=new Uint8Array([69,0,0,0,9,0,0,0,0,0]),De=e=>{if(!e||!e.portal&&!e.rows)return Ne;let t=e.portal??"",n=e.rows??0,s=U(t),r=4+s+1+4,l=new DataView(new ArrayBuffer(1+r));return l.setUint8(0,69),l.setInt32(1,r,!1),new TextEncoder().encodeInto(t,new Uint8Array(l.buffer,5)),l.setUint8(s+5,0),l.setUint32(l.byteLength-4,n,!1),new Uint8Array(l.buffer)},Me=(e,t)=>{let n=new DataView(new ArrayBuffer(16));return n.setInt32(0,16,!1),n.setInt16(4,1234,!1),n.setInt16(6,5678,!1),n.setInt32(8,e,!1),n.setInt32(12,t,!1),new Uint8Array(n.buffer)},Z=(e,t)=>{let n=new P;return n.addCString(t),n.flush(e)},Oe=g.addCString("P").flush(68),Be=g.addCString("S").flush(68),xe=e=>e.name?Z(68,`${e.type}${e.name??""}`):e.type==="P"?Oe:Be,Pe=e=>{let t=`${e.type}${e.name??""}`;return Z(67,t)},$e=e=>g.add(e).flush(100),Ue=e=>Z(102,e),Q=e=>new Uint8Array([e,0,0,0,4]),Fe=Q(72),ke=Q(83),Ve=Q(88),Ge=Q(99),q={startup:Ee,password:Ae,requestSsl:we,sendSASLInitialResponseMessage:Te,sendSCRAMClientFinalMessage:Re,query:Se,parse:Ce,bind:ve,execute:De,describe:xe,close:Pe,flush:()=>Fe,sync:()=>ke,end:()=>Ve,copyData:$e,copyDone:()=>Ge,copyFail:Ue,cancel:Me};var St=new ArrayBuffer(0);var We=1,je=4,pn=We+je,fn=new ArrayBuffer(0);var Qe=globalThis.JSON.parse,He=globalThis.JSON.stringify,ie=16,ae=17;var oe=20,ze=21,Xe=23;var H=25,Je=26;var ue=114;var Ye=700,Ke=701;var Ze=1042,et=1043,tt=1082;var nt=1114,le=1184;var rt=3802;var st={string:{to:H,from:[H,et,Ze],serialize:e=>{if(typeof e=="string")return e;if(typeof e=="number")return e.toString();throw new Error("Invalid input for string type")},parse:e=>e},number:{to:0,from:[ze,Xe,Je,Ye,Ke],serialize:e=>e.toString(),parse:e=>+e},bigint:{to:oe,from:[oe],serialize:e=>e.toString(),parse:e=>{let t=BigInt(e);return t<Number.MIN_SAFE_INTEGER||t>Number.MAX_SAFE_INTEGER?t:Number(t)}},json:{to:ue,from:[ue,rt],serialize:e=>typeof e=="string"?e:He(e),parse:e=>Qe(e)},boolean:{to:ie,from:[ie],serialize:e=>{if(typeof e!="boolean")throw new Error("Invalid input for boolean type");return e?"t":"f"},parse:e=>e==="t"},date:{to:le,from:[tt,nt,le],serialize:e=>{if(typeof e=="string")return e;if(typeof e=="number")return new Date(e).toISOString();if(e instanceof Date)return e.toISOString();throw new Error("Invalid input for date type")},parse:e=>new Date(e)},bytea:{to:ae,from:[ae],serialize:e=>{if(!(e instanceof Uint8Array))throw new Error("Invalid input for bytea type");return"\\x"+Array.from(e).map(t=>t.toString(16).padStart(2,"0")).join("")},parse:e=>{let t=e.slice(2);return Uint8Array.from({length:t.length/2},(n,s)=>parseInt(t.substring(s*2,(s+1)*2),16))}}},ce=it(st),An=ce.parsers,Tn=ce.serializers;function it(e){return 
Object.keys(e).reduce(({parsers:t,serializers:n},s)=>{let{to:r,from:l,serialize:i,parse:b}=e[s];return n[r]=i,n[s]=i,t[s]=b,Array.isArray(l)?l.forEach(y=>{t[y]=b,n[y]=i}):(t[l]=b,n[l]=i),{parsers:t,serializers:n}},{parsers:{},serializers:{}})}function de(e){let t=e.find(n=>n.name==="parameterDescription");return t?t.dataTypeIDs:[]}var On=typeof process=="object"&&typeof process.versions=="object"&&typeof process.versions.node=="string";var ee=()=>{if(globalThis.crypto?.randomUUID)return globalThis.crypto.randomUUID();let e=new Uint8Array(16);if(globalThis.crypto?.getRandomValues)globalThis.crypto.getRandomValues(e);else for(let n=0;n<e.length;n++)e[n]=Math.floor(Math.random()*256);e[6]=e[6]&15|64,e[8]=e[8]&63|128;let t=[];return e.forEach(n=>{t.push(n.toString(16).padStart(2,"0"))}),t.slice(0,4).join("")+"-"+t.slice(4,6).join("")+"-"+t.slice(6,8).join("")+"-"+t.slice(8,10).join("")+"-"+t.slice(10).join("")};async function te(e,t,n,s){if(!n||n.length===0)return t;s=s??e;let r=[];try{await e.execProtocol(q.parse({text:t}),{syncToFs:!1}),r.push(...(await e.execProtocol(q.describe({type:"S"}),{syncToFs:!1})).messages)}finally{r.push(...(await e.execProtocol(q.sync(),{syncToFs:!1})).messages)}let l=de(r),i=t.replace(/\$([0-9]+)/g,(y,c)=>"%"+c+"L");return(await s.query(`SELECT format($1, ${n.map((y,c)=>`$${c+2}`).join(", ")}) as query`,[i,...n],{paramTypes:[H,...l]})).rows[0].query}function ne(e){let t,n=!1,s=async()=>{if(!t){n=!1;return}n=!0;let{args:r,resolve:l,reject:i}=t;t=void 0;try{let b=await e(...r);l(b)}catch(b){i(b)}finally{s()}};return async(...r)=>{t&&t.resolve(void 0);let l=new Promise((i,b)=>{t={args:r,resolve:i,reject:b}});return n||s(),l}}var at=5,ot=async(e,t)=>{let n=new Set,s={async query(r,l,i){let b,y,c;if(typeof r!="string"&&(b=r.signal,l=r.params,i=r.callback,y=r.offset,c=r.limit,r=r.query),y===void 0!=(c===void 0))throw new Error("offset and limit must be provided together");let a=y!==void 0&&c!==void 0,R;if(a&&(typeof y!="number"||isNaN(y)||typeof c!="number"||isNaN(c)))throw new Error("offset and limit must be numbers");let _=i?[i]:[],m=ee().replace(/-/g,""),D=!1,I,O,$=async()=>{await e.transaction(async o=>{let d=l&&l.length>0?await te(e,r,l,o):r;await o.exec(`CREATE OR REPLACE TEMP VIEW live_query_${m}_view AS ${d}`);let E=await pe(o,`live_query_${m}_view`);await fe(o,E,n),a?(await o.exec(`
+ PREPARE live_query_${m}_get(int, int) AS
+ SELECT * FROM live_query_${m}_view
+ LIMIT $1 OFFSET $2;
+ `),await o.exec(`
+ PREPARE live_query_${m}_get_total_count AS
+ SELECT COUNT(*) FROM live_query_${m}_view;
+ `),R=(await o.query(`EXECUTE live_query_${m}_get_total_count;`)).rows[0].count,I={...await o.query(`EXECUTE live_query_${m}_get(${c}, ${y});`),offset:y,limit:c,totalCount:R}):(await o.exec(`
+ PREPARE live_query_${m}_get AS
+ SELECT * FROM live_query_${m}_view;
+ `),I=await o.query(`EXECUTE live_query_${m}_get;`)),O=await Promise.all(E.map(w=>o.listen(`"table_change__${w.schema_oid}__${w.table_oid}"`,async()=>{L()})))})};await $();let L=ne(async({offset:o,limit:d}={})=>{if(!a&&(o!==void 0||d!==void 0))throw new Error("offset and limit cannot be provided for non-windowed queries");if(o&&(typeof o!="number"||isNaN(o))||d&&(typeof d!="number"||isNaN(d)))throw new Error("offset and limit must be numbers");y=o??y,c=d??c;let E=async(w=0)=>{if(_.length!==0){try{a?I={...await e.query(`EXECUTE live_query_${m}_get(${c}, ${y});`),offset:y,limit:c,totalCount:R}:I=await e.query(`EXECUTE live_query_${m}_get;`)}catch(h){let p=h.message;if(p.startsWith(`prepared statement "live_query_${m}`)&&p.endsWith("does not exist")){if(w>at)throw h;await $(),E(w+1)}else throw h}if(z(_,I),a){let h=(await e.query(`EXECUTE live_query_${m}_get_total_count;`)).rows[0].count;h!==R&&(R=h,L())}}};await E()}),B=o=>{if(D)throw new Error("Live query is no longer active and cannot be subscribed to");_.push(o)},f=async o=>{o?_=_.filter(d=>d!==d):_=[],_.length===0&&!D&&(D=!0,await e.transaction(async d=>{await Promise.all(O.map(E=>E(d))),await d.exec(`
+ DROP VIEW IF EXISTS live_query_${m}_view;
+ DEALLOCATE live_query_${m}_get;
+ `)}))};return b?.aborted?await f():b?.addEventListener("abort",()=>{f()},{once:!0}),z(_,I),{initialResults:I,subscribe:B,unsubscribe:f,refresh:L}},async changes(r,l,i,b){let y;if(typeof r!="string"&&(y=r.signal,l=r.params,i=r.key,b=r.callback,r=r.query),!i)throw new Error("key is required for changes queries");let c=b?[b]:[],a=ee().replace(/-/g,""),R=!1,_=1,m,D,I=async()=>{await e.transaction(async f=>{let o=await te(e,r,l,f);await f.query(`CREATE OR REPLACE TEMP VIEW live_query_${a}_view AS ${o}`);let d=await pe(f,`live_query_${a}_view`);await fe(f,d,n);let E=[...(await f.query(`
+ SELECT column_name, data_type, udt_name
+ FROM information_schema.columns
+ WHERE table_name = 'live_query_${a}_view'
+ `)).rows,{column_name:"__after__",data_type:"integer"}];await f.exec(`
+ CREATE TEMP TABLE live_query_${a}_state1 (LIKE live_query_${a}_view INCLUDING ALL);
+ CREATE TEMP TABLE live_query_${a}_state2 (LIKE live_query_${a}_view INCLUDING ALL);
+ `);for(let w of[1,2]){let h=w===1?2:1;await f.exec(`
+ PREPARE live_query_${a}_diff${w} AS
+ WITH
+ prev AS (SELECT LAG("${i}") OVER () as __after__, * FROM live_query_${a}_state${h}),
+ curr AS (SELECT LAG("${i}") OVER () as __after__, * FROM live_query_${a}_state${w}),
+ data_diff AS (
+ -- INSERT operations: Include all columns
+ SELECT
+ 'INSERT' AS __op__,
+ ${E.map(({column_name:p})=>`curr."${p}" AS "${p}"`).join(`,
+ `)},
+ ARRAY[]::text[] AS __changed_columns__
+ FROM curr
+ LEFT JOIN prev ON curr.${i} = prev.${i}
+ WHERE prev.${i} IS NULL
+ UNION ALL
+ -- DELETE operations: Include only the primary key
+ SELECT
+ 'DELETE' AS __op__,
+ ${E.map(({column_name:p,data_type:x,udt_name:X})=>p===i?`prev."${p}" AS "${p}"`:`NULL${x==="USER-DEFINED"?`::${X}`:""} AS "${p}"`).join(`,
+ `)},
+ ARRAY[]::text[] AS __changed_columns__
+ FROM prev
+ LEFT JOIN curr ON prev.${i} = curr.${i}
+ WHERE curr.${i} IS NULL
+ UNION ALL
+ -- UPDATE operations: Include only changed columns
+ SELECT
+ 'UPDATE' AS __op__,
+ ${E.map(({column_name:p,data_type:x,udt_name:X})=>p===i?`curr."${p}" AS "${p}"`:`CASE
+ WHEN curr."${p}" IS DISTINCT FROM prev."${p}"
+ THEN curr."${p}"
+ ELSE NULL${x==="USER-DEFINED"?`::${X}`:""}
+ END AS "${p}"`).join(`,
+ `)},
+ ARRAY(SELECT unnest FROM unnest(ARRAY[${E.filter(({column_name:p})=>p!==i).map(({column_name:p})=>`CASE
+ WHEN curr."${p}" IS DISTINCT FROM prev."${p}"
+ THEN '${p}'
+ ELSE NULL
+ END`).join(", ")}]) WHERE unnest IS NOT NULL) AS __changed_columns__
+ FROM curr
+ INNER JOIN prev ON curr.${i} = prev.${i}
+ WHERE NOT (curr IS NOT DISTINCT FROM prev)
+ )
+ SELECT * FROM data_diff;
+ `)}D=await Promise.all(d.map(w=>f.listen(`"table_change__${w.schema_oid}__${w.table_oid}"`,async()=>{O()})))})};await I();let O=ne(async()=>{if(c.length===0&&m)return;let f=!1;for(let o=0;o<5;o++)try{await e.transaction(async d=>{await d.exec(`
+ INSERT INTO live_query_${a}_state${_}
+ SELECT * FROM live_query_${a}_view;
+ `),m=await d.query(`EXECUTE live_query_${a}_diff${_};`),_=_===1?2:1,await d.exec(`
+ TRUNCATE live_query_${a}_state${_};
+ `)});break}catch(d){if(d.message===`relation "live_query_${a}_state${_}" does not exist`){f=!0,await I();continue}else throw d}lt(c,[...f?[{__op__:"RESET"}]:[],...m.rows])}),$=f=>{if(R)throw new Error("Live query is no longer active and cannot be subscribed to");c.push(f)},L=async f=>{f?c=c.filter(o=>o!==o):c=[],c.length===0&&!R&&(R=!0,await e.transaction(async o=>{await Promise.all(D.map(d=>d(o))),await o.exec(`
+ DROP VIEW IF EXISTS live_query_${a}_view;
+ DROP TABLE IF EXISTS live_query_${a}_state1;
+ DROP TABLE IF EXISTS live_query_${a}_state2;
+ DEALLOCATE live_query_${a}_diff1;
+ DEALLOCATE live_query_${a}_diff2;
+ `)}))};return y?.aborted?await L():y?.addEventListener("abort",()=>{L()},{once:!0}),await O(),{fields:m.fields.filter(f=>!["__after__","__op__","__changed_columns__"].includes(f.name)),initialChanges:m.rows,subscribe:$,unsubscribe:L,refresh:O}},async incrementalQuery(r,l,i,b){let y;if(typeof r!="string"&&(y=r.signal,l=r.params,i=r.key,b=r.callback,r=r.query),!i)throw new Error("key is required for incremental queries");let c=b?[b]:[],a=new Map,R=new Map,_=[],m=!0,{fields:D,unsubscribe:I,refresh:O}=await s.changes(r,l,i,B=>{for(let d of B){let{__op__:E,__changed_columns__:w,...h}=d;switch(E){case"RESET":a.clear(),R.clear();break;case"INSERT":a.set(h[i],h),R.set(h.__after__,h[i]);break;case"DELETE":{let p=a.get(h[i]);a.delete(h[i]),p.__after__!==null&&R.delete(p.__after__);break}case"UPDATE":{let p={...a.get(h[i])??{}};for(let x of w)p[x]=h[x],x==="__after__"&&R.set(h.__after__,h[i]);a.set(h[i],p);break}}}let f=[],o=null;for(let d=0;d<a.size;d++){let E=R.get(o),w=a.get(E);if(!w)break;let h={...w};delete h.__after__,f.push(h),o=E}_=f,m||z(c,{rows:f,fields:D})});m=!1,z(c,{rows:_,fields:D});let $=B=>{c.push(B)},L=async B=>{B?c=c.filter(f=>f!==f):c=[],c.length===0&&await I()};return y?.aborted?await L():y?.addEventListener("abort",()=>{L()},{once:!0}),{initialResults:{rows:_,fields:D},subscribe:$,unsubscribe:L,refresh:O}}};return{namespaceObj:s}},ut={name:"Live Queries",setup:ot};async function pe(e,t){return(await e.query(`
+ WITH RECURSIVE view_dependencies AS (
+ -- Base case: Get the initial view's dependencies
+ SELECT DISTINCT
+ cl.relname AS dependent_name,
+ n.nspname AS schema_name,
+ cl.oid AS dependent_oid,
+ n.oid AS schema_oid,
+ cl.relkind = 'v' AS is_view
+ FROM pg_rewrite r
+ JOIN pg_depend d ON r.oid = d.objid
+ JOIN pg_class cl ON d.refobjid = cl.oid
+ JOIN pg_namespace n ON cl.relnamespace = n.oid
+ WHERE
+ r.ev_class = (
+ SELECT oid FROM pg_class WHERE relname = $1 AND relkind = 'v'
+ )
+ AND d.deptype = 'n'
+
+ UNION ALL
+
+ -- Recursive case: Traverse dependencies for views
+ SELECT DISTINCT
+ cl.relname AS dependent_name,
+ n.nspname AS schema_name,
+ cl.oid AS dependent_oid,
+ n.oid AS schema_oid,
+ cl.relkind = 'v' AS is_view
+ FROM view_dependencies vd
+ JOIN pg_rewrite r ON vd.dependent_name = (
+ SELECT relname FROM pg_class WHERE oid = r.ev_class AND relkind = 'v'
+ )
+ JOIN pg_depend d ON r.oid = d.objid
+ JOIN pg_class cl ON d.refobjid = cl.oid
+ JOIN pg_namespace n ON cl.relnamespace = n.oid
+ WHERE d.deptype = 'n'
+ )
+ SELECT DISTINCT
+ dependent_name AS table_name,
+ schema_name,
+ dependent_oid AS table_oid,
+ schema_oid
+ FROM view_dependencies
+ WHERE NOT is_view; -- Exclude intermediate views
+ `,[t])).rows.map(s=>({table_name:s.table_name,schema_name:s.schema_name,table_oid:s.table_oid,schema_oid:s.schema_oid}))}async function fe(e,t,n){let s=t.filter(r=>!n.has(`${r.schema_oid}_${r.table_oid}`)).map(r=>`
+ CREATE OR REPLACE FUNCTION "_notify_${r.schema_oid}_${r.table_oid}"() RETURNS TRIGGER AS $$
+ BEGIN
+ PERFORM pg_notify('table_change__${r.schema_oid}__${r.table_oid}', '');
+ RETURN NULL;
+ END;
+ $$ LANGUAGE plpgsql;
+ CREATE OR REPLACE TRIGGER "_notify_trigger_${r.schema_oid}_${r.table_oid}"
+ AFTER INSERT OR UPDATE OR DELETE ON "${r.schema_name}"."${r.table_name}"
+ FOR EACH STATEMENT EXECUTE FUNCTION "_notify_${r.schema_oid}_${r.table_oid}"();
+ `).join(`
+ `);s.trim()!==""&&await e.exec(s),t.map(r=>n.add(`${r.schema_oid}_${r.table_oid}`))}var z=(e,t)=>{for(let n of e)n(t)},lt=(e,t)=>{for(let n of e)n(t)};0&&(module.exports={live});
+ //# sourceMappingURL=index.cjs.map
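The bundle above is the live-query extension: for each query it creates a temporary view, attaches statement-level notify triggers to the underlying tables, and re-runs a prepared statement whenever one of them fires (the readable TypeScript source is embedded in the source map that follows). A minimal usage sketch, assuming the extension is registered as in upstream @electric-sql/pglite and using a placeholder todos table:

  import { PGlite } from '@dotdo/pglite'
  import { live } from '@dotdo/pglite/live'

  const pg = await PGlite.create({ extensions: { live } })

  // live.query(query, params, callback) -> { initialResults, subscribe, unsubscribe, refresh }
  const lq = await pg.live.query(
    'SELECT * FROM todos ORDER BY id',
    [],
    (results) => console.log(results.rows), // invoked again whenever a source table changes
  )
  console.log(lq.initialResults.rows)

  // live.changes() additionally needs a key column and reports row-level diffs
  // tagged with __op__ (INSERT | UPDATE | DELETE, plus RESET after an internal re-init)
  const ch = await pg.live.changes('SELECT * FROM todos', [], 'id', (diff) => {
    console.log(diff)
  })

  // Unsubscribing drops the temp views/state tables and deallocates the prepared statements
  await lq.unsubscribe()
  await ch.unsubscribe()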
package/dist/live/index.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/live/index.ts","../../../pg-protocol/src/string-utils.ts","../../../pg-protocol/src/buffer-writer.ts","../../../pg-protocol/src/serializer.ts","../../../pg-protocol/src/buffer-reader.ts","../../../pg-protocol/src/parser.ts","../../src/types.ts","../../src/parse.ts","../../src/utils.ts"],"sourcesContent":["import type {\n Extension,\n PGliteInterface,\n Results,\n Transaction,\n} from '../interface'\nimport type {\n LiveQueryOptions,\n LiveIncrementalQueryOptions,\n LiveChangesOptions,\n LiveNamespace,\n LiveQuery,\n LiveChanges,\n Change,\n LiveQueryResults,\n} from './interface'\nimport { uuid, formatQuery, debounceMutex } from '../utils.js'\n\nexport type {\n LiveNamespace,\n LiveQuery,\n LiveChanges,\n Change,\n LiveQueryResults,\n} from './interface.js'\n\nconst MAX_RETRIES = 5\n\nconst setup = async (pg: PGliteInterface, _emscriptenOpts: any) => {\n // The notify triggers are only ever added and never removed\n // Keep track of which triggers have been added to avoid adding them multiple times\n const tableNotifyTriggersAdded = new Set<string>()\n\n const namespaceObj: LiveNamespace = {\n async query<T>(\n query: string | LiveQueryOptions<T>,\n params?: any[] | null,\n callback?: (results: Results<T>) => void,\n ) {\n let signal: AbortSignal | undefined\n let offset: number | undefined\n let limit: number | undefined\n if (typeof query !== 'string') {\n signal = query.signal\n params = query.params\n callback = query.callback\n offset = query.offset\n limit = query.limit\n query = query.query\n }\n\n // Offset and limit must be provided together\n if ((offset === undefined) !== (limit === undefined)) {\n throw new Error('offset and limit must be provided together')\n }\n\n const isWindowed = offset !== undefined && limit !== undefined\n let totalCount: number | undefined = undefined\n\n if (\n isWindowed &&\n (typeof offset !== 'number' ||\n isNaN(offset) ||\n typeof limit !== 'number' ||\n isNaN(limit))\n ) {\n throw new Error('offset and limit must be numbers')\n }\n\n let callbacks: Array<(results: Results<T>) => void> = callback\n ? [callback]\n : []\n const id = uuid().replace(/-/g, '')\n let dead = false\n\n let results: LiveQueryResults<T>\n\n let unsubList: Array<(tx?: Transaction) => Promise<void>>\n const init = async () => {\n await pg.transaction(async (tx) => {\n // Create a temporary view with the query\n const formattedQuery =\n params && params.length > 0\n ? 
await formatQuery(pg, query, params, tx)\n : query\n await tx.exec(\n `CREATE OR REPLACE TEMP VIEW live_query_${id}_view AS ${formattedQuery}`,\n )\n\n // Get the tables used in the view and add triggers to notify when they change\n const tables = await getTablesForView(tx, `live_query_${id}_view`)\n await addNotifyTriggersToTables(tx, tables, tableNotifyTriggersAdded)\n\n if (isWindowed) {\n await tx.exec(`\n PREPARE live_query_${id}_get(int, int) AS\n SELECT * FROM live_query_${id}_view\n LIMIT $1 OFFSET $2;\n `)\n await tx.exec(`\n PREPARE live_query_${id}_get_total_count AS\n SELECT COUNT(*) FROM live_query_${id}_view;\n `)\n totalCount = (\n await tx.query<{ count: number }>(\n `EXECUTE live_query_${id}_get_total_count;`,\n )\n ).rows[0].count\n results = {\n ...(await tx.query<T>(\n `EXECUTE live_query_${id}_get(${limit}, ${offset});`,\n )),\n offset,\n limit,\n totalCount,\n }\n } else {\n await tx.exec(`\n PREPARE live_query_${id}_get AS\n SELECT * FROM live_query_${id}_view;\n `)\n results = await tx.query<T>(`EXECUTE live_query_${id}_get;`)\n }\n // Setup the listeners\n unsubList = await Promise.all(\n tables!.map((table) =>\n tx.listen(\n `\"table_change__${table.schema_oid}__${table.table_oid}\"`,\n async () => {\n refresh()\n },\n ),\n ),\n )\n })\n }\n await init()\n\n // Function to refresh the query\n const refresh = debounceMutex(\n async ({\n offset: newOffset,\n limit: newLimit,\n }: {\n offset?: number\n limit?: number\n } = {}) => {\n // We can optionally provide new offset and limit values to refresh with\n if (\n !isWindowed &&\n (newOffset !== undefined || newLimit !== undefined)\n ) {\n throw new Error(\n 'offset and limit cannot be provided for non-windowed queries',\n )\n }\n if (\n (newOffset &&\n (typeof newOffset !== 'number' || isNaN(newOffset))) ||\n (newLimit && (typeof newLimit !== 'number' || isNaN(newLimit)))\n ) {\n throw new Error('offset and limit must be numbers')\n }\n offset = newOffset ?? offset\n limit = newLimit ?? limit\n\n const run = async (count = 0) => {\n if (callbacks.length === 0) {\n return\n }\n try {\n if (isWindowed) {\n // For a windowed query we defer the refresh of the total count until\n // after we have returned the results with the old total count. 
This\n // is due to a count(*) being a fairly slow query and we want to update\n // the rows on screen as quickly as possible.\n results = {\n ...(await pg.query<T>(\n `EXECUTE live_query_${id}_get(${limit}, ${offset});`,\n )),\n offset,\n limit,\n totalCount, // This is the old total count\n }\n } else {\n results = await pg.query<T>(`EXECUTE live_query_${id}_get;`)\n }\n } catch (e) {\n const msg = (e as Error).message\n if (\n msg.startsWith(`prepared statement \"live_query_${id}`) &&\n msg.endsWith('does not exist')\n ) {\n // If the prepared statement does not exist, reset and try again\n // This can happen if using the multi-tab worker\n if (count > MAX_RETRIES) {\n throw e\n }\n await init()\n run(count + 1)\n } else {\n throw e\n }\n }\n\n runResultCallbacks(callbacks, results)\n\n // Update the total count\n // If the total count has changed, refresh the query\n if (isWindowed) {\n const newTotalCount = (\n await pg.query<{ count: number }>(\n `EXECUTE live_query_${id}_get_total_count;`,\n )\n ).rows[0].count\n if (newTotalCount !== totalCount) {\n // The total count has changed, refresh the query\n totalCount = newTotalCount\n refresh()\n }\n }\n }\n await run()\n },\n )\n\n // Function to subscribe to the query\n const subscribe = (callback: (results: Results<T>) => void) => {\n if (dead) {\n throw new Error(\n 'Live query is no longer active and cannot be subscribed to',\n )\n }\n callbacks.push(callback)\n }\n\n // Function to unsubscribe from the query\n // If no function is provided, unsubscribe all callbacks\n // If there are no callbacks, unsubscribe from the notify triggers\n const unsubscribe = async (callback?: (results: Results<T>) => void) => {\n if (callback) {\n callbacks = callbacks.filter((callback) => callback !== callback)\n } else {\n callbacks = []\n }\n if (callbacks.length === 0 && !dead) {\n dead = true\n await pg.transaction(async (tx) => {\n await Promise.all(unsubList.map((unsub) => unsub(tx)))\n await tx.exec(`\n DROP VIEW IF EXISTS live_query_${id}_view;\n DEALLOCATE live_query_${id}_get;\n `)\n })\n }\n }\n\n // If the signal has already been aborted, unsubscribe\n if (signal?.aborted) {\n await unsubscribe()\n } else {\n // Add an event listener to unsubscribe if the signal is aborted\n signal?.addEventListener(\n 'abort',\n () => {\n unsubscribe()\n },\n { once: true },\n )\n }\n\n // Run the callback with the initial results\n runResultCallbacks(callbacks, results!)\n\n // Return the initial results\n return {\n initialResults: results!,\n subscribe,\n unsubscribe,\n refresh,\n } satisfies LiveQuery<T>\n },\n\n async changes<T>(\n query: string | LiveChangesOptions<T>,\n params?: any[] | null,\n key?: string,\n callback?: (changes: Array<Change<T>>) => void,\n ) {\n let signal: AbortSignal | undefined\n if (typeof query !== 'string') {\n signal = query.signal\n params = query.params\n key = query.key\n callback = query.callback\n query = query.query\n }\n if (!key) {\n throw new Error('key is required for changes queries')\n }\n let callbacks: Array<(changes: Array<Change<T>>) => void> = callback\n ? 
[callback]\n : []\n const id = uuid().replace(/-/g, '')\n let dead = false\n\n let stateSwitch: 1 | 2 = 1\n let changes: Results<Change<T>>\n\n let unsubList: Array<(tx?: Transaction) => Promise<void>>\n\n const init = async () => {\n await pg.transaction(async (tx) => {\n // Create a temporary view with the query\n const formattedQuery = await formatQuery(pg, query, params, tx)\n await tx.query(\n `CREATE OR REPLACE TEMP VIEW live_query_${id}_view AS ${formattedQuery}`,\n )\n\n // Get the tables used in the view and add triggers to notify when they change\n const tables = await getTablesForView(tx, `live_query_${id}_view`)\n await addNotifyTriggersToTables(tx, tables, tableNotifyTriggersAdded)\n\n // Get the columns of the view\n const columns = [\n ...(\n await tx.query<any>(`\n SELECT column_name, data_type, udt_name\n FROM information_schema.columns \n WHERE table_name = 'live_query_${id}_view'\n `)\n ).rows,\n { column_name: '__after__', data_type: 'integer' },\n ]\n\n // Init state tables as empty temp table\n await tx.exec(`\n CREATE TEMP TABLE live_query_${id}_state1 (LIKE live_query_${id}_view INCLUDING ALL);\n CREATE TEMP TABLE live_query_${id}_state2 (LIKE live_query_${id}_view INCLUDING ALL);\n `)\n\n // Create Diff views and prepared statements\n for (const curr of [1, 2]) {\n const prev = curr === 1 ? 2 : 1\n await tx.exec(`\n PREPARE live_query_${id}_diff${curr} AS\n WITH\n prev AS (SELECT LAG(\"${key}\") OVER () as __after__, * FROM live_query_${id}_state${prev}),\n curr AS (SELECT LAG(\"${key}\") OVER () as __after__, * FROM live_query_${id}_state${curr}),\n data_diff AS (\n -- INSERT operations: Include all columns\n SELECT \n 'INSERT' AS __op__,\n ${columns\n .map(\n ({ column_name }) =>\n `curr.\"${column_name}\" AS \"${column_name}\"`,\n )\n .join(',\\n')},\n ARRAY[]::text[] AS __changed_columns__\n FROM curr\n LEFT JOIN prev ON curr.${key} = prev.${key}\n WHERE prev.${key} IS NULL\n UNION ALL\n -- DELETE operations: Include only the primary key\n SELECT \n 'DELETE' AS __op__,\n ${columns\n .map(({ column_name, data_type, udt_name }) => {\n if (column_name === key) {\n return `prev.\"${column_name}\" AS \"${column_name}\"`\n } else {\n return `NULL${data_type === 'USER-DEFINED' ? `::${udt_name}` : ``} AS \"${column_name}\"`\n }\n })\n .join(',\\n')},\n ARRAY[]::text[] AS __changed_columns__\n FROM prev\n LEFT JOIN curr ON prev.${key} = curr.${key}\n WHERE curr.${key} IS NULL\n UNION ALL\n -- UPDATE operations: Include only changed columns\n SELECT \n 'UPDATE' AS __op__,\n ${columns\n .map(({ column_name, data_type, udt_name }) =>\n column_name === key\n ? `curr.\"${column_name}\" AS \"${column_name}\"`\n : `CASE \n WHEN curr.\"${column_name}\" IS DISTINCT FROM prev.\"${column_name}\" \n THEN curr.\"${column_name}\"\n ELSE NULL${data_type === 'USER-DEFINED' ? 
`::${udt_name}` : ``}\n END AS \"${column_name}\"`,\n )\n .join(',\\n')},\n ARRAY(SELECT unnest FROM unnest(ARRAY[${columns\n .filter(({ column_name }) => column_name !== key)\n .map(\n ({ column_name }) =>\n `CASE\n WHEN curr.\"${column_name}\" IS DISTINCT FROM prev.\"${column_name}\" \n THEN '${column_name}' \n ELSE NULL \n END`,\n )\n .join(\n ', ',\n )}]) WHERE unnest IS NOT NULL) AS __changed_columns__\n FROM curr\n INNER JOIN prev ON curr.${key} = prev.${key}\n WHERE NOT (curr IS NOT DISTINCT FROM prev)\n )\n SELECT * FROM data_diff;\n `)\n }\n\n // Setup the listeners\n unsubList = await Promise.all(\n tables!.map((table) =>\n tx.listen(\n `\"table_change__${table.schema_oid}__${table.table_oid}\"`,\n async () => {\n refresh()\n },\n ),\n ),\n )\n })\n }\n\n await init()\n\n const refresh = debounceMutex(async () => {\n if (callbacks.length === 0 && changes) {\n return\n }\n let reset = false\n for (let i = 0; i < 5; i++) {\n try {\n await pg.transaction(async (tx) => {\n // Populate the state table\n await tx.exec(`\n INSERT INTO live_query_${id}_state${stateSwitch} \n SELECT * FROM live_query_${id}_view;\n `)\n\n // Get the changes\n changes = await tx.query<any>(\n `EXECUTE live_query_${id}_diff${stateSwitch};`,\n )\n\n // Switch state\n stateSwitch = stateSwitch === 1 ? 2 : 1\n\n // Truncate the old state table\n await tx.exec(`\n TRUNCATE live_query_${id}_state${stateSwitch};\n `)\n })\n break\n } catch (e) {\n const msg = (e as Error).message\n if (\n msg ===\n `relation \"live_query_${id}_state${stateSwitch}\" does not exist`\n ) {\n // If the state table does not exist, reset and try again\n // This can happen if using the multi-tab worker\n reset = true\n await init()\n continue\n } else {\n throw e\n }\n }\n }\n\n runChangeCallbacks(callbacks, [\n ...(reset\n ? 
[\n {\n __op__: 'RESET' as const,\n },\n ]\n : []),\n ...changes!.rows,\n ])\n })\n\n // Function to subscribe to the query\n const subscribe = (callback: (changes: Array<Change<T>>) => void) => {\n if (dead) {\n throw new Error(\n 'Live query is no longer active and cannot be subscribed to',\n )\n }\n callbacks.push(callback)\n }\n\n // Function to unsubscribe from the query\n const unsubscribe = async (\n callback?: (changes: Array<Change<T>>) => void,\n ) => {\n if (callback) {\n callbacks = callbacks.filter((callback) => callback !== callback)\n } else {\n callbacks = []\n }\n if (callbacks.length === 0 && !dead) {\n dead = true\n await pg.transaction(async (tx) => {\n await Promise.all(unsubList.map((unsub) => unsub(tx)))\n await tx.exec(`\n DROP VIEW IF EXISTS live_query_${id}_view;\n DROP TABLE IF EXISTS live_query_${id}_state1;\n DROP TABLE IF EXISTS live_query_${id}_state2;\n DEALLOCATE live_query_${id}_diff1;\n DEALLOCATE live_query_${id}_diff2;\n `)\n })\n }\n }\n\n // If the signal has already been aborted, unsubscribe\n if (signal?.aborted) {\n await unsubscribe()\n } else {\n // Add an event listener to unsubscribe if the signal is aborted\n signal?.addEventListener(\n 'abort',\n () => {\n unsubscribe()\n },\n { once: true },\n )\n }\n\n // Run the callback with the initial changes\n await refresh()\n\n // Fields\n const fields = changes!.fields.filter(\n (field) =>\n !['__after__', '__op__', '__changed_columns__'].includes(field.name),\n )\n\n // Return the initial results\n return {\n fields,\n initialChanges: changes!.rows,\n subscribe,\n unsubscribe,\n refresh,\n } satisfies LiveChanges<T>\n },\n\n async incrementalQuery<T>(\n query: string | LiveIncrementalQueryOptions<T>,\n params?: any[] | null,\n key?: string,\n callback?: (results: Results<T>) => void,\n ) {\n let signal: AbortSignal | undefined\n if (typeof query !== 'string') {\n signal = query.signal\n params = query.params\n key = query.key\n callback = query.callback\n query = query.query\n }\n if (!key) {\n throw new Error('key is required for incremental queries')\n }\n let callbacks: Array<(results: Results<T>) => void> = callback\n ? [callback]\n : []\n const rowsMap: Map<any, any> = new Map()\n const afterMap: Map<any, any> = new Map()\n let lastRows: T[] = []\n let firstRun = true\n\n const {\n fields,\n unsubscribe: unsubscribeChanges,\n refresh,\n } = await namespaceObj.changes<T>(query, params, key, (changes) => {\n // Process the changes\n for (const change of changes) {\n const {\n __op__: op,\n __changed_columns__: changedColumns,\n ...obj\n } = change as typeof change & { [key: string]: any }\n switch (op) {\n case 'RESET':\n rowsMap.clear()\n afterMap.clear()\n break\n case 'INSERT':\n rowsMap.set(obj[key], obj)\n afterMap.set(obj.__after__, obj[key])\n break\n case 'DELETE': {\n const oldObj = rowsMap.get(obj[key])\n rowsMap.delete(obj[key])\n // null is the starting point, we don't delete it as another insert\n // may have happened thats replacing it\n if (oldObj.__after__ !== null) {\n afterMap.delete(oldObj.__after__)\n }\n break\n }\n case 'UPDATE': {\n const newObj = { ...(rowsMap.get(obj[key]) ?? 
{}) }\n for (const columnName of changedColumns) {\n newObj[columnName] = obj[columnName]\n if (columnName === '__after__') {\n afterMap.set(obj.__after__, obj[key])\n }\n }\n rowsMap.set(obj[key], newObj)\n break\n }\n }\n }\n\n // Get the rows in order\n const rows: T[] = []\n let lastKey: any = null\n for (let i = 0; i < rowsMap.size; i++) {\n const nextKey = afterMap.get(lastKey)\n const obj = rowsMap.get(nextKey)\n if (!obj) {\n break\n }\n // Remove the __after__ key from the exposed row\n const cleanObj = { ...obj }\n delete cleanObj.__after__\n rows.push(cleanObj)\n lastKey = nextKey\n }\n lastRows = rows\n\n // Run the callbacks\n if (!firstRun) {\n runResultCallbacks(callbacks, {\n rows,\n fields,\n })\n }\n })\n\n firstRun = false\n runResultCallbacks(callbacks, {\n rows: lastRows,\n fields,\n })\n\n const subscribe = (callback: (results: Results<T>) => void) => {\n callbacks.push(callback)\n }\n\n const unsubscribe = async (callback?: (results: Results<T>) => void) => {\n if (callback) {\n callbacks = callbacks.filter((callback) => callback !== callback)\n } else {\n callbacks = []\n }\n if (callbacks.length === 0) {\n await unsubscribeChanges()\n }\n }\n\n if (signal?.aborted) {\n await unsubscribe()\n } else {\n signal?.addEventListener(\n 'abort',\n () => {\n unsubscribe()\n },\n { once: true },\n )\n }\n\n return {\n initialResults: {\n rows: lastRows,\n fields,\n },\n subscribe,\n unsubscribe,\n refresh,\n } satisfies LiveQuery<T>\n },\n }\n\n return {\n namespaceObj,\n }\n}\n\nexport const live = {\n name: 'Live Queries',\n setup,\n} satisfies Extension\n\nexport type PGliteWithLive = PGliteInterface & {\n live: LiveNamespace\n}\n\n/**\n * Get a list of all the tables used in a view, recursively\n * @param tx a transaction or PGlite instance\n * @param viewName the name of the view\n * @returns list of tables used in the view\n */\nasync function getTablesForView(\n tx: Transaction | PGliteInterface,\n viewName: string,\n): Promise<\n {\n table_name: string\n schema_name: string\n table_oid: number\n schema_oid: number\n }[]\n> {\n const result = await tx.query<{\n table_name: string\n schema_name: string\n table_oid: number\n schema_oid: number\n }>(\n `\n WITH RECURSIVE view_dependencies AS (\n -- Base case: Get the initial view's dependencies\n SELECT DISTINCT\n cl.relname AS dependent_name,\n n.nspname AS schema_name,\n cl.oid AS dependent_oid,\n n.oid AS schema_oid,\n cl.relkind = 'v' AS is_view\n FROM pg_rewrite r\n JOIN pg_depend d ON r.oid = d.objid\n JOIN pg_class cl ON d.refobjid = cl.oid\n JOIN pg_namespace n ON cl.relnamespace = n.oid\n WHERE\n r.ev_class = (\n SELECT oid FROM pg_class WHERE relname = $1 AND relkind = 'v'\n )\n AND d.deptype = 'n'\n\n UNION ALL\n\n -- Recursive case: Traverse dependencies for views\n SELECT DISTINCT\n cl.relname AS dependent_name,\n n.nspname AS schema_name,\n cl.oid AS dependent_oid,\n n.oid AS schema_oid,\n cl.relkind = 'v' AS is_view\n FROM view_dependencies vd\n JOIN pg_rewrite r ON vd.dependent_name = (\n SELECT relname FROM pg_class WHERE oid = r.ev_class AND relkind = 'v'\n )\n JOIN pg_depend d ON r.oid = d.objid\n JOIN pg_class cl ON d.refobjid = cl.oid\n JOIN pg_namespace n ON cl.relnamespace = n.oid\n WHERE d.deptype = 'n'\n )\n SELECT DISTINCT\n dependent_name AS table_name,\n schema_name,\n dependent_oid AS table_oid,\n schema_oid\n FROM view_dependencies\n WHERE NOT is_view; -- Exclude intermediate views\n `,\n [viewName],\n )\n\n return result.rows.map((row) => ({\n table_name: row.table_name,\n schema_name: 
row.schema_name,\n table_oid: row.table_oid,\n schema_oid: row.schema_oid,\n }))\n}\n\n/**\n * Add triggers to tables to notify when they change\n * @param tx a transaction or PGlite instance\n * @param tables list of tables to add triggers to\n */\nasync function addNotifyTriggersToTables(\n tx: Transaction | PGliteInterface,\n tables: {\n table_name: string\n table_oid: number\n schema_name: string\n schema_oid: number\n }[],\n tableNotifyTriggersAdded: Set<string>,\n) {\n const triggers = tables\n .filter(\n (table) =>\n !tableNotifyTriggersAdded.has(`${table.schema_oid}_${table.table_oid}`),\n )\n .map((table) => {\n return `\n CREATE OR REPLACE FUNCTION \"_notify_${table.schema_oid}_${table.table_oid}\"() RETURNS TRIGGER AS $$\n BEGIN\n PERFORM pg_notify('table_change__${table.schema_oid}__${table.table_oid}', '');\n RETURN NULL;\n END;\n $$ LANGUAGE plpgsql;\n CREATE OR REPLACE TRIGGER \"_notify_trigger_${table.schema_oid}_${table.table_oid}\"\n AFTER INSERT OR UPDATE OR DELETE ON \"${table.schema_name}\".\"${table.table_name}\"\n FOR EACH STATEMENT EXECUTE FUNCTION \"_notify_${table.schema_oid}_${table.table_oid}\"();\n `\n })\n .join('\\n')\n if (triggers.trim() !== '') {\n await tx.exec(triggers)\n }\n tables.map((table) =>\n tableNotifyTriggersAdded.add(`${table.schema_oid}_${table.table_oid}`),\n )\n}\n\nconst runResultCallbacks = <T>(\n callbacks: Array<(results: Results<T>) => void>,\n results: Results<T>,\n) => {\n for (const callback of callbacks) {\n callback(results)\n }\n}\n\nconst runChangeCallbacks = <T>(\n callbacks: Array<(changes: Array<Change<T>>) => void>,\n changes: Array<Change<T>>,\n) => {\n for (const callback of callbacks) {\n callback(changes)\n }\n}\n","/**\n * Calculates the byte length of a UTF-8 encoded string\n * Adapted from https://stackoverflow.com/a/23329386\n * @param str - UTF-8 encoded string\n * @returns byte length of string\n */\nfunction byteLengthUtf8(str: string): number {\n let byteLength = str.length\n for (let i = str.length - 1; i >= 0; i--) {\n const code = str.charCodeAt(i)\n if (code > 0x7f && code <= 0x7ff) byteLength++\n else if (code > 0x7ff && code <= 0xffff) byteLength += 2\n if (code >= 0xdc00 && code <= 0xdfff) i-- // trail surrogate\n }\n return byteLength\n}\n\nexport { byteLengthUtf8 }\n","import { byteLengthUtf8 } from './string-utils'\n\nexport class Writer {\n #bufferView: DataView\n #offset: number = 5\n\n readonly #littleEndian = false as const\n readonly #encoder = new TextEncoder()\n readonly #headerPosition: number = 0\n constructor(private size = 256) {\n this.#bufferView = this.#allocateBuffer(size)\n }\n\n #allocateBuffer(size: number): DataView {\n return new DataView(new ArrayBuffer(size))\n }\n\n #ensure(size: number): void {\n const remaining = this.#bufferView.byteLength - this.#offset\n if (remaining < size) {\n const oldBuffer = this.#bufferView.buffer\n // exponential growth factor of around ~ 1.5\n // https://stackoverflow.com/questions/2269063/buffer-growth-strategy\n const newSize = oldBuffer.byteLength + (oldBuffer.byteLength >> 1) + size\n this.#bufferView = this.#allocateBuffer(newSize)\n new Uint8Array(this.#bufferView.buffer).set(new Uint8Array(oldBuffer))\n }\n }\n\n public addInt32(num: number): Writer {\n this.#ensure(4)\n this.#bufferView.setInt32(this.#offset, num, this.#littleEndian)\n this.#offset += 4\n return this\n }\n\n public addInt16(num: number): Writer {\n this.#ensure(2)\n this.#bufferView.setInt16(this.#offset, num, this.#littleEndian)\n this.#offset += 2\n return this\n }\n\n 
public addCString(string: string): Writer {\n if (string) {\n // TODO(msfstef): might be faster to extract `addString` code and\n // ensure length + 1 once rather than length and then +1?\n this.addString(string)\n }\n\n // set null terminator\n this.#ensure(1)\n this.#bufferView.setUint8(this.#offset, 0)\n this.#offset++\n return this\n }\n\n public addString(string: string = ''): Writer {\n const length = byteLengthUtf8(string)\n this.#ensure(length)\n this.#encoder.encodeInto(\n string,\n new Uint8Array(this.#bufferView.buffer, this.#offset),\n )\n this.#offset += length\n return this\n }\n\n public add(otherBuffer: ArrayBuffer): Writer {\n this.#ensure(otherBuffer.byteLength)\n new Uint8Array(this.#bufferView.buffer).set(\n new Uint8Array(otherBuffer),\n this.#offset,\n )\n\n this.#offset += otherBuffer.byteLength\n return this\n }\n\n #join(code?: number): ArrayBuffer {\n if (code) {\n this.#bufferView.setUint8(this.#headerPosition, code)\n // length is everything in this packet minus the code\n const length = this.#offset - (this.#headerPosition + 1)\n this.#bufferView.setInt32(\n this.#headerPosition + 1,\n length,\n this.#littleEndian,\n )\n }\n return this.#bufferView.buffer.slice(code ? 0 : 5, this.#offset) as ArrayBuffer\n }\n\n public flush(code?: number): Uint8Array {\n const result = this.#join(code)\n this.#offset = 5\n this.#bufferView = this.#allocateBuffer(this.size)\n return new Uint8Array(result)\n }\n}\n","import { Writer } from './buffer-writer'\nimport { byteLengthUtf8 } from './string-utils'\n\nconst enum code {\n startup = 0x70,\n query = 0x51,\n parse = 0x50,\n bind = 0x42,\n execute = 0x45,\n flush = 0x48,\n sync = 0x53,\n end = 0x58,\n close = 0x43,\n describe = 0x44,\n copyFromChunk = 0x64,\n copyDone = 0x63,\n copyFail = 0x66,\n}\n\ntype LegalValue = string | ArrayBuffer | ArrayBufferView | null\n\nconst writer = new Writer()\n\nconst startup = (opts: Record<string, string>): Uint8Array => {\n // protocol version\n writer.addInt16(3).addInt16(0)\n for (const key of Object.keys(opts)) {\n writer.addCString(key).addCString(opts[key])\n }\n\n writer.addCString('client_encoding').addCString('UTF8')\n\n const bodyBuffer = writer.addCString('').flush()\n // this message is sent without a code\n\n const length = bodyBuffer.byteLength + 4\n\n return new Writer().addInt32(length).add(bodyBuffer.buffer as ArrayBuffer).flush()\n}\n\nconst requestSsl = (): Uint8Array => {\n const bufferView = new DataView(new ArrayBuffer(8))\n bufferView.setInt32(0, 8, false)\n bufferView.setInt32(4, 80877103, false)\n return new Uint8Array(bufferView.buffer)\n}\n\nconst password = (password: string): Uint8Array => {\n return writer.addCString(password).flush(code.startup)\n}\n\nconst sendSASLInitialResponseMessage = (\n mechanism: string,\n initialResponse: string,\n): Uint8Array => {\n // 0x70 = 'p'\n writer\n .addCString(mechanism)\n .addInt32(byteLengthUtf8(initialResponse))\n .addString(initialResponse)\n\n return writer.flush(code.startup)\n}\n\nconst sendSCRAMClientFinalMessage = (additionalData: string): Uint8Array => {\n return writer.addString(additionalData).flush(code.startup)\n}\n\nconst query = (text: string): Uint8Array => {\n return writer.addCString(text).flush(code.query)\n}\n\ntype ParseOpts = {\n name?: string\n types?: number[]\n text: string\n}\n\nconst emptyValueArray: LegalValue[] = []\n\nconst parse = (query: ParseOpts): Uint8Array => {\n // expect something like this:\n // { name: 'queryName',\n // text: 'select * from blah',\n // types: ['int8', 'bool'] }\n\n // 
normalize missing query names to allow for null\n const name = query.name ?? ''\n if (name.length > 63) {\n /* eslint-disable no-console */\n console.error(\n 'Warning! Postgres only supports 63 characters for query names.',\n )\n console.error('You supplied %s (%s)', name, name.length)\n console.error(\n 'This can cause conflicts and silent errors executing queries',\n )\n /* eslint-enable no-console */\n }\n\n const buffer = writer\n .addCString(name) // name of query\n .addCString(query.text) // actual query text\n .addInt16(query.types?.length ?? 0)\n\n query.types?.forEach((type) => buffer.addInt32(type))\n\n return writer.flush(code.parse)\n}\n\ntype ValueMapper = (param: unknown, index: number) => LegalValue\n\ntype BindOpts = {\n portal?: string\n binary?: boolean\n statement?: string\n values?: LegalValue[]\n // optional map from JS value to postgres value per parameter\n valueMapper?: ValueMapper\n}\n\nconst paramWriter = new Writer()\n\n// make this a const enum so typescript will inline the value\nconst enum ParamType {\n STRING = 0,\n BINARY = 1,\n}\n\nconst writeValues = (values: LegalValue[], valueMapper?: ValueMapper): void => {\n for (let i = 0; i < values.length; i++) {\n const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]\n if (mappedVal === null) {\n // add the param type (string) to the writer\n writer.addInt16(ParamType.STRING)\n // write -1 to the param writer to indicate null\n paramWriter.addInt32(-1)\n } else if (\n mappedVal instanceof ArrayBuffer ||\n ArrayBuffer.isView(mappedVal)\n ) {\n const buffer = ArrayBuffer.isView(mappedVal)\n ? (mappedVal.buffer.slice(\n mappedVal.byteOffset,\n mappedVal.byteOffset + mappedVal.byteLength,\n ) as ArrayBuffer)\n : mappedVal\n // add the param type (binary) to the writer\n writer.addInt16(ParamType.BINARY)\n // add the buffer to the param writer\n paramWriter.addInt32(buffer.byteLength)\n paramWriter.add(buffer)\n } else {\n // add the param type (string) to the writer\n writer.addInt16(ParamType.STRING)\n paramWriter.addInt32(byteLengthUtf8(mappedVal))\n paramWriter.addString(mappedVal)\n }\n }\n}\n\nconst bind = (config: BindOpts = {}): Uint8Array => {\n // normalize config\n const portal = config.portal ?? ''\n const statement = config.statement ?? ''\n const binary = config.binary ?? false\n const values = config.values ?? emptyValueArray\n const len = values.length\n\n writer.addCString(portal).addCString(statement)\n writer.addInt16(len)\n\n writeValues(values, config.valueMapper)\n\n writer.addInt16(len)\n writer.add(paramWriter.flush().buffer as ArrayBuffer)\n\n // format code\n writer.addInt16(binary ? ParamType.BINARY : ParamType.STRING)\n return writer.flush(code.bind)\n}\n\ntype ExecOpts = {\n portal?: string\n rows?: number\n}\n\nconst emptyExecute = new Uint8Array([\n code.execute,\n 0x00,\n 0x00,\n 0x00,\n 0x09,\n 0x00,\n 0x00,\n 0x00,\n 0x00,\n 0x00,\n])\n\nconst execute = (config?: ExecOpts): Uint8Array => {\n // this is the happy path for most queries\n if (!config || (!config.portal && !config.rows)) {\n return emptyExecute\n }\n\n const portal = config.portal ?? ''\n const rows = config.rows ?? 
0\n\n const portalLength = byteLengthUtf8(portal)\n const len = 4 + portalLength + 1 + 4\n // one extra bit for code\n const bufferView = new DataView(new ArrayBuffer(1 + len))\n bufferView.setUint8(0, code.execute)\n bufferView.setInt32(1, len, false)\n new TextEncoder().encodeInto(portal, new Uint8Array(bufferView.buffer, 5))\n bufferView.setUint8(portalLength + 5, 0) // null terminate portal cString\n bufferView.setUint32(bufferView.byteLength - 4, rows, false)\n return new Uint8Array(bufferView.buffer)\n}\n\nconst cancel = (processID: number, secretKey: number): Uint8Array => {\n const bufferView = new DataView(new ArrayBuffer(16))\n bufferView.setInt32(0, 16, false)\n bufferView.setInt16(4, 1234, false)\n bufferView.setInt16(6, 5678, false)\n bufferView.setInt32(8, processID, false)\n bufferView.setInt32(12, secretKey, false)\n return new Uint8Array(bufferView.buffer)\n}\n\ntype PortalOpts = {\n type: 'S' | 'P'\n name?: string\n}\n\nconst cstringMessage = (code: code, string: string): Uint8Array => {\n const writer = new Writer()\n writer.addCString(string)\n return writer.flush(code)\n}\n\nconst emptyDescribePortal = writer.addCString('P').flush(code.describe)\nconst emptyDescribeStatement = writer.addCString('S').flush(code.describe)\n\nconst describe = (msg: PortalOpts): Uint8Array => {\n return msg.name\n ? cstringMessage(code.describe, `${msg.type}${msg.name ?? ''}`)\n : msg.type === 'P'\n ? emptyDescribePortal\n : emptyDescribeStatement\n}\n\nconst close = (msg: PortalOpts): Uint8Array => {\n const text = `${msg.type}${msg.name ?? ''}`\n return cstringMessage(code.close, text)\n}\n\nconst copyData = (chunk: ArrayBuffer): Uint8Array => {\n return writer.add(chunk).flush(code.copyFromChunk)\n}\n\nconst copyFail = (message: string): Uint8Array => {\n return cstringMessage(code.copyFail, message)\n}\n\nconst codeOnlyBuffer = (code: code): Uint8Array =>\n new Uint8Array([code, 0x00, 0x00, 0x00, 0x04])\n\nconst flushBuffer = codeOnlyBuffer(code.flush)\nconst syncBuffer = codeOnlyBuffer(code.sync)\nconst endBuffer = codeOnlyBuffer(code.end)\nconst copyDoneBuffer = codeOnlyBuffer(code.copyDone)\n\nconst serialize = {\n startup,\n password,\n requestSsl,\n sendSASLInitialResponseMessage,\n sendSCRAMClientFinalMessage,\n query,\n parse,\n bind,\n execute,\n describe,\n close,\n flush: () => flushBuffer,\n sync: () => syncBuffer,\n end: () => endBuffer,\n copyData,\n copyDone: () => copyDoneBuffer,\n copyFail,\n cancel,\n}\n\nexport { serialize }\n","const emptyBuffer = new ArrayBuffer(0)\n\nexport class BufferReader {\n #bufferView: DataView = new DataView(emptyBuffer)\n #offset: number\n\n // TODO(bmc): support non-utf8 encoding?\n readonly #encoding: string = 'utf-8' as const\n readonly #decoder = new TextDecoder(this.#encoding)\n readonly #littleEndian: boolean = false as const\n\n constructor(offset: number = 0) {\n this.#offset = offset\n }\n\n public setBuffer(offset: number, buffer: ArrayBuffer): void {\n this.#offset = offset\n this.#bufferView = new DataView(buffer)\n }\n\n public int16(): number {\n // const result = this.buffer.readInt16BE(this.#offset)\n const result = this.#bufferView.getInt16(this.#offset, this.#littleEndian)\n this.#offset += 2\n return result\n }\n\n public byte(): number {\n // const result = this.bufferView[this.#offset]\n const result = this.#bufferView.getUint8(this.#offset)\n this.#offset++\n return result\n }\n\n public int32(): number {\n // const result = this.buffer.readInt32BE(this.#offset)\n const result = this.#bufferView.getInt32(this.#offset, 
this.#littleEndian)\n this.#offset += 4\n return result\n }\n\n public string(length: number): string {\n // const result = this.#bufferView.toString(\n // this.#encoding,\n // this.#offset,\n // this.#offset + length,\n // )\n // this.#offset += length\n\n const result = this.#decoder.decode(this.bytes(length))\n return result\n }\n\n public cstring(): string {\n // const start = this.#offset\n // let end = start\n // while (this.#bufferView[end++] !== 0) {}\n\n const start = this.#offset\n let end = start\n while (this.#bufferView.getUint8(end++) !== 0) {\n // no-op - increment until terminator reached\n }\n const result = this.string(end - start - 1)\n this.#offset = end\n return result\n }\n\n public bytes(length: number): Uint8Array {\n // const result = this.buffer.slice(this.#offset, this.#offset + length)\n const result = this.#bufferView.buffer.slice(\n this.#offset,\n this.#offset + length,\n )\n this.#offset += length\n return new Uint8Array(result)\n }\n}\n","import {\n bindComplete,\n parseComplete,\n closeComplete,\n noData,\n portalSuspended,\n copyDone,\n replicationStart,\n emptyQuery,\n ReadyForQueryMessage,\n CommandCompleteMessage,\n CopyDataMessage,\n CopyResponse,\n NotificationResponseMessage,\n RowDescriptionMessage,\n ParameterDescriptionMessage,\n Field,\n DataRowMessage,\n ParameterStatusMessage,\n BackendKeyDataMessage,\n DatabaseError,\n BackendMessage,\n MessageName,\n NoticeMessage,\n AuthenticationMessage,\n AuthenticationOk,\n AuthenticationCleartextPassword,\n AuthenticationMD5Password,\n AuthenticationSASL,\n AuthenticationSASLContinue,\n AuthenticationSASLFinal,\n} from './messages'\nimport { BufferParameter, Modes } from './types'\nimport { BufferReader } from './buffer-reader'\n\n// every message is prefixed with a single bye\nconst CODE_LENGTH = 1 as const\n// every message has an int32 length which includes itself but does\n// NOT include the code in the length\nconst LEN_LENGTH = 4 as const\n\nconst HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH\n\nexport type Packet = {\n code: number\n packet: ArrayBuffer\n}\n\nconst emptyBuffer = new ArrayBuffer(0)\n\nconst enum MessageCodes {\n DataRow = 0x44, // D\n ParseComplete = 0x31, // 1\n BindComplete = 0x32, // 2\n CloseComplete = 0x33, // 3\n CommandComplete = 0x43, // C\n ReadyForQuery = 0x5a, // Z\n NoData = 0x6e, // n\n NotificationResponse = 0x41, // A\n AuthenticationResponse = 0x52, // R\n ParameterStatus = 0x53, // S\n BackendKeyData = 0x4b, // K\n ErrorMessage = 0x45, // E\n NoticeMessage = 0x4e, // N\n RowDescriptionMessage = 0x54, // T\n ParameterDescriptionMessage = 0x74, // t\n PortalSuspended = 0x73, // s\n ReplicationStart = 0x57, // W\n EmptyQuery = 0x49, // I\n CopyIn = 0x47, // G\n CopyOut = 0x48, // H\n CopyDone = 0x63, // c\n CopyData = 0x64, // d\n}\n\nexport type MessageCallback = (msg: BackendMessage) => void\n\nexport class Parser {\n #bufferView: DataView = new DataView(emptyBuffer)\n #bufferRemainingLength: number = 0\n #bufferOffset: number = 0\n #reader = new BufferReader()\n\n public parse(buffer: BufferParameter, callback: MessageCallback) {\n this.#mergeBuffer(\n ArrayBuffer.isView(buffer)\n ? 
(buffer.buffer.slice(\n buffer.byteOffset,\n buffer.byteOffset + buffer.byteLength,\n ) as ArrayBuffer)\n : buffer,\n )\n const bufferFullLength = this.#bufferOffset + this.#bufferRemainingLength\n let offset = this.#bufferOffset\n while (offset + HEADER_LENGTH <= bufferFullLength) {\n // code is 1 byte long - it identifies the message type\n const code = this.#bufferView.getUint8(offset)\n // length is 1 Uint32BE - it is the length of the message EXCLUDING the code\n const length = this.#bufferView.getUint32(offset + CODE_LENGTH, false)\n const fullMessageLength = CODE_LENGTH + length\n if (fullMessageLength + offset <= bufferFullLength && length > 0) {\n const message = this.#handlePacket(\n offset + HEADER_LENGTH,\n code,\n length,\n this.#bufferView.buffer as ArrayBuffer,\n )\n callback(message)\n offset += fullMessageLength\n } else {\n break\n }\n }\n if (offset === bufferFullLength) {\n // No more use for the buffer\n this.#bufferView = new DataView(emptyBuffer)\n this.#bufferRemainingLength = 0\n this.#bufferOffset = 0\n } else {\n // Adjust the cursors of remainingBuffer\n this.#bufferRemainingLength = bufferFullLength - offset\n this.#bufferOffset = offset\n }\n }\n\n #mergeBuffer(buffer: ArrayBuffer): void {\n if (this.#bufferRemainingLength > 0) {\n const newLength = this.#bufferRemainingLength + buffer.byteLength\n const newFullLength = newLength + this.#bufferOffset\n if (newFullLength > this.#bufferView.byteLength) {\n // We can't concat the new buffer with the remaining one\n let newBuffer: ArrayBuffer\n if (\n newLength <= this.#bufferView.byteLength &&\n this.#bufferOffset >= this.#bufferRemainingLength\n ) {\n // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer\n newBuffer = this.#bufferView.buffer as ArrayBuffer\n } else {\n // Allocate a new larger buffer\n let newBufferLength = this.#bufferView.byteLength * 2\n while (newLength >= newBufferLength) {\n newBufferLength *= 2\n }\n newBuffer = new ArrayBuffer(newBufferLength)\n }\n // Move the remaining buffer to the new one\n new Uint8Array(newBuffer).set(\n new Uint8Array(\n this.#bufferView.buffer,\n this.#bufferOffset,\n this.#bufferRemainingLength,\n ),\n )\n this.#bufferView = new DataView(newBuffer)\n this.#bufferOffset = 0\n }\n\n // Concat the new buffer with the remaining one\n new Uint8Array(this.#bufferView.buffer).set(\n new Uint8Array(buffer),\n this.#bufferOffset + this.#bufferRemainingLength,\n )\n this.#bufferRemainingLength = newLength\n } else {\n this.#bufferView = new DataView(buffer)\n this.#bufferOffset = 0\n this.#bufferRemainingLength = buffer.byteLength\n }\n }\n\n #handlePacket(\n offset: number,\n code: number,\n length: number,\n bytes: ArrayBuffer,\n ): BackendMessage {\n switch (code) {\n case MessageCodes.BindComplete:\n return bindComplete\n case MessageCodes.ParseComplete:\n return parseComplete\n case MessageCodes.CloseComplete:\n return closeComplete\n case MessageCodes.NoData:\n return noData\n case MessageCodes.PortalSuspended:\n return portalSuspended\n case MessageCodes.CopyDone:\n return copyDone\n case MessageCodes.ReplicationStart:\n return replicationStart\n case MessageCodes.EmptyQuery:\n return emptyQuery\n case MessageCodes.DataRow:\n return this.#parseDataRowMessage(offset, length, bytes)\n case MessageCodes.CommandComplete:\n return this.#parseCommandCompleteMessage(offset, length, bytes)\n case MessageCodes.ReadyForQuery:\n return this.#parseReadyForQueryMessage(offset, length, bytes)\n case 
MessageCodes.NotificationResponse:\n return this.#parseNotificationMessage(offset, length, bytes)\n case MessageCodes.AuthenticationResponse:\n return this.#parseAuthenticationResponse(offset, length, bytes)\n case MessageCodes.ParameterStatus:\n return this.#parseParameterStatusMessage(offset, length, bytes)\n case MessageCodes.BackendKeyData:\n return this.#parseBackendKeyData(offset, length, bytes)\n case MessageCodes.ErrorMessage:\n return this.#parseErrorMessage(offset, length, bytes, 'error')\n case MessageCodes.NoticeMessage:\n return this.#parseErrorMessage(offset, length, bytes, 'notice')\n case MessageCodes.RowDescriptionMessage:\n return this.#parseRowDescriptionMessage(offset, length, bytes)\n case MessageCodes.ParameterDescriptionMessage:\n return this.#parseParameterDescriptionMessage(offset, length, bytes)\n case MessageCodes.CopyIn:\n return this.#parseCopyInMessage(offset, length, bytes)\n case MessageCodes.CopyOut:\n return this.#parseCopyOutMessage(offset, length, bytes)\n case MessageCodes.CopyData:\n return this.#parseCopyData(offset, length, bytes)\n default:\n return new DatabaseError(\n 'received invalid response: ' + code.toString(16),\n length,\n 'error',\n )\n }\n }\n\n #parseReadyForQueryMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const status = this.#reader.string(1)\n return new ReadyForQueryMessage(length, status)\n }\n\n #parseCommandCompleteMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const text = this.#reader.cstring()\n return new CommandCompleteMessage(length, text)\n }\n\n #parseCopyData(offset: number, length: number, bytes: ArrayBuffer) {\n const chunk = bytes.slice(offset, offset + (length - 4))\n return new CopyDataMessage(length, new Uint8Array(chunk))\n }\n\n #parseCopyInMessage(offset: number, length: number, bytes: ArrayBuffer) {\n return this.#parseCopyMessage(offset, length, bytes, 'copyInResponse')\n }\n\n #parseCopyOutMessage(offset: number, length: number, bytes: ArrayBuffer) {\n return this.#parseCopyMessage(offset, length, bytes, 'copyOutResponse')\n }\n\n #parseCopyMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n messageName: MessageName,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const isBinary = this.#reader.byte() !== 0\n const columnCount = this.#reader.int16()\n const message = new CopyResponse(length, messageName, isBinary, columnCount)\n for (let i = 0; i < columnCount; i++) {\n message.columnTypes[i] = this.#reader.int16()\n }\n return message\n }\n\n #parseNotificationMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const processId = this.#reader.int32()\n const channel = this.#reader.cstring()\n const payload = this.#reader.cstring()\n return new NotificationResponseMessage(length, processId, channel, payload)\n }\n\n #parseRowDescriptionMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const fieldCount = this.#reader.int16()\n const message = new RowDescriptionMessage(length, fieldCount)\n for (let i = 0; i < fieldCount; i++) {\n message.fields[i] = this.#parseField()\n }\n return message\n }\n\n #parseField(): Field {\n const name = this.#reader.cstring()\n const tableID = this.#reader.int32()\n const columnID = this.#reader.int16()\n const dataTypeID = this.#reader.int32()\n const dataTypeSize = this.#reader.int16()\n const 
dataTypeModifier = this.#reader.int32()\n const mode = this.#reader.int16() === 0 ? Modes.text : Modes.binary\n return new Field(\n name,\n tableID,\n columnID,\n dataTypeID,\n dataTypeSize,\n dataTypeModifier,\n mode,\n )\n }\n\n #parseParameterDescriptionMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const parameterCount = this.#reader.int16()\n const message = new ParameterDescriptionMessage(length, parameterCount)\n for (let i = 0; i < parameterCount; i++) {\n message.dataTypeIDs[i] = this.#reader.int32()\n }\n return message\n }\n\n #parseDataRowMessage(offset: number, length: number, bytes: ArrayBuffer) {\n this.#reader.setBuffer(offset, bytes)\n const fieldCount = this.#reader.int16()\n const fields: (string | null)[] = new Array(fieldCount)\n for (let i = 0; i < fieldCount; i++) {\n const len = this.#reader.int32()\n // a -1 for length means the value of the field is null\n fields[i] = len === -1 ? null : this.#reader.string(len)\n }\n return new DataRowMessage(length, fields)\n }\n\n #parseParameterStatusMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const name = this.#reader.cstring()\n const value = this.#reader.cstring()\n return new ParameterStatusMessage(length, name, value)\n }\n\n #parseBackendKeyData(offset: number, length: number, bytes: ArrayBuffer) {\n this.#reader.setBuffer(offset, bytes)\n const processID = this.#reader.int32()\n const secretKey = this.#reader.int32()\n return new BackendKeyDataMessage(length, processID, secretKey)\n }\n\n #parseAuthenticationResponse(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n ): AuthenticationMessage {\n this.#reader.setBuffer(offset, bytes)\n const code = this.#reader.int32()\n switch (code) {\n case 0:\n return new AuthenticationOk(length)\n case 3:\n return new AuthenticationCleartextPassword(length)\n\n case 5:\n return new AuthenticationMD5Password(length, this.#reader.bytes(4))\n\n case 10: {\n const mechanisms: string[] = []\n while (true) {\n const mechanism = this.#reader.cstring()\n if (mechanism.length === 0) {\n return new AuthenticationSASL(length, mechanisms)\n }\n mechanisms.push(mechanism)\n }\n }\n case 11:\n return new AuthenticationSASLContinue(\n length,\n this.#reader.string(length - 8),\n )\n\n case 12:\n return new AuthenticationSASLFinal(\n length,\n this.#reader.string(length - 8),\n )\n\n default:\n throw new Error('Unknown authenticationOk message type ' + code)\n }\n }\n\n #parseErrorMessage(\n offset: number,\n length: number,\n bytes: ArrayBuffer,\n name: MessageName,\n ) {\n this.#reader.setBuffer(offset, bytes)\n const fields: Record<string, string> = {}\n let fieldType = this.#reader.string(1)\n while (fieldType !== '\\0') {\n fields[fieldType] = this.#reader.cstring()\n fieldType = this.#reader.string(1)\n }\n\n const messageValue = fields.M\n\n const message =\n name === 'notice'\n ? 
new NoticeMessage(length, messageValue)\n : new DatabaseError(messageValue, length, name)\n\n message.severity = fields.S\n message.code = fields.C\n message.detail = fields.D\n message.hint = fields.H\n message.position = fields.P\n message.internalPosition = fields.p\n message.internalQuery = fields.q\n message.where = fields.W\n message.schema = fields.s\n message.table = fields.t\n message.column = fields.c\n message.dataType = fields.d\n message.constraint = fields.n\n message.file = fields.F\n message.line = fields.L\n message.routine = fields.R\n return message\n }\n}\n","/*\nBased on postgres.js types.js\nhttps://github.com/porsager/postgres/blob/master/src/types.js\nPublished under the Unlicense:\nhttps://github.com/porsager/postgres/blob/master/UNLICENSE \n*/\n\nimport type { ParserOptions } from './interface.js'\n\nconst JSON_parse = globalThis.JSON.parse\nconst JSON_stringify = globalThis.JSON.stringify\n\nexport const BOOL = 16,\n BYTEA = 17,\n CHAR = 18,\n INT8 = 20,\n INT2 = 21,\n INT4 = 23,\n REGPROC = 24,\n TEXT = 25,\n OID = 26,\n TID = 27,\n XID = 28,\n CID = 29,\n JSON = 114,\n XML = 142,\n PG_NODE_TREE = 194,\n SMGR = 210,\n PATH = 602,\n POLYGON = 604,\n CIDR = 650,\n FLOAT4 = 700,\n FLOAT8 = 701,\n ABSTIME = 702,\n RELTIME = 703,\n TINTERVAL = 704,\n CIRCLE = 718,\n MACADDR8 = 774,\n MONEY = 790,\n MACADDR = 829,\n INET = 869,\n ACLITEM = 1033,\n BPCHAR = 1042,\n VARCHAR = 1043,\n DATE = 1082,\n TIME = 1083,\n TIMESTAMP = 1114,\n TIMESTAMPTZ = 1184,\n INTERVAL = 1186,\n TIMETZ = 1266,\n BIT = 1560,\n VARBIT = 1562,\n NUMERIC = 1700,\n REFCURSOR = 1790,\n REGPROCEDURE = 2202,\n REGOPER = 2203,\n REGOPERATOR = 2204,\n REGCLASS = 2205,\n REGTYPE = 2206,\n UUID = 2950,\n TXID_SNAPSHOT = 2970,\n PG_LSN = 3220,\n PG_NDISTINCT = 3361,\n PG_DEPENDENCIES = 3402,\n TSVECTOR = 3614,\n TSQUERY = 3615,\n GTSVECTOR = 3642,\n REGCONFIG = 3734,\n REGDICTIONARY = 3769,\n JSONB = 3802,\n REGNAMESPACE = 4089,\n REGROLE = 4096\n\nexport const types = {\n string: {\n to: TEXT,\n from: [TEXT, VARCHAR, BPCHAR],\n serialize: (x: string | number) => {\n if (typeof x === 'string') {\n return x\n } else if (typeof x === 'number') {\n return x.toString()\n } else {\n throw new Error('Invalid input for string type')\n }\n },\n parse: (x: string) => x,\n },\n number: {\n to: 0,\n from: [INT2, INT4, OID, FLOAT4, FLOAT8],\n serialize: (x: number) => x.toString(),\n parse: (x: string) => +x,\n },\n bigint: {\n to: INT8,\n from: [INT8],\n serialize: (x: bigint) => x.toString(),\n parse: (x: string) => {\n const n = BigInt(x)\n if (n < Number.MIN_SAFE_INTEGER || n > Number.MAX_SAFE_INTEGER) {\n return n // return BigInt\n } else {\n return Number(n) // in range of standard JS numbers so return number\n }\n },\n },\n json: {\n to: JSON,\n from: [JSON, JSONB],\n serialize: (x: any) => {\n if (typeof x === 'string') {\n return x\n } else {\n return JSON_stringify(x)\n }\n },\n parse: (x: string) => JSON_parse(x),\n },\n boolean: {\n to: BOOL,\n from: [BOOL],\n serialize: (x: boolean) => {\n if (typeof x !== 'boolean') {\n throw new Error('Invalid input for boolean type')\n }\n return x ? 
't' : 'f'\n },\n parse: (x: string) => x === 't',\n },\n date: {\n to: TIMESTAMPTZ,\n from: [DATE, TIMESTAMP, TIMESTAMPTZ],\n serialize: (x: Date | string | number) => {\n if (typeof x === 'string') {\n return x\n } else if (typeof x === 'number') {\n return new Date(x).toISOString()\n } else if (x instanceof Date) {\n return x.toISOString()\n } else {\n throw new Error('Invalid input for date type')\n }\n },\n parse: (x: string | number) => new Date(x),\n },\n bytea: {\n to: BYTEA,\n from: [BYTEA],\n serialize: (x: Uint8Array) => {\n if (!(x instanceof Uint8Array)) {\n throw new Error('Invalid input for bytea type')\n }\n return (\n '\\\\x' +\n Array.from(x)\n .map((byte) => byte.toString(16).padStart(2, '0'))\n .join('')\n )\n },\n parse: (x: string): Uint8Array => {\n const hexString = x.slice(2)\n return Uint8Array.from({ length: hexString.length / 2 }, (_, idx) =>\n parseInt(hexString.substring(idx * 2, (idx + 1) * 2), 16),\n )\n },\n },\n} satisfies TypeHandlers\n\nexport type Parser = (x: string, typeId?: number) => any\nexport type Serializer = (x: any) => string\n\nexport type TypeHandler = {\n to: number\n from: number | number[]\n serialize: Serializer\n parse: Parser\n}\n\nexport type TypeHandlers = {\n [key: string]: TypeHandler\n}\n\nconst defaultHandlers = typeHandlers(types)\n\nexport const parsers = defaultHandlers.parsers\nexport const serializers = defaultHandlers.serializers\n\nexport function parseType(\n x: string | null,\n type: number,\n parsers?: ParserOptions,\n): any {\n if (x === null) {\n return null\n }\n const handler = parsers?.[type] ?? defaultHandlers.parsers[type]\n if (handler) {\n return handler(x, type)\n } else {\n return x\n }\n}\n\nfunction typeHandlers(types: TypeHandlers) {\n return Object.keys(types).reduce(\n ({ parsers, serializers }, k) => {\n const { to, from, serialize, parse } = types[k]\n serializers[to] = serialize\n serializers[k] = serialize\n parsers[k] = parse\n if (Array.isArray(from)) {\n from.forEach((f) => {\n parsers[f] = parse\n serializers[f] = serialize\n })\n } else {\n parsers[from] = parse\n serializers[from] = serialize\n }\n return { parsers, serializers }\n },\n {\n parsers: {} as {\n [key: number | string]: (x: string, typeId?: number) => any\n },\n serializers: {} as {\n [key: number | string]: Serializer\n },\n },\n )\n}\n\nconst escapeBackslash = /\\\\/g\nconst escapeQuote = /\"/g\n\nfunction arrayEscape(x: string) {\n return x.replace(escapeBackslash, '\\\\\\\\').replace(escapeQuote, '\\\\\"')\n}\n\nexport function arraySerializer(\n xs: any,\n serializer: Serializer | undefined,\n typarray: number,\n): string {\n if (Array.isArray(xs) === false) return xs\n\n if (!xs.length) return '{}'\n\n const first = xs[0]\n // Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter\n const delimiter = typarray === 1020 ? ';' : ','\n\n if (Array.isArray(first)) {\n return `{${xs.map((x) => arraySerializer(x, serializer, typarray)).join(delimiter)}}`\n } else {\n return `{${xs\n .map((x) => {\n if (x === undefined) {\n x = null\n // TODO: Add an option to specify how to handle undefined values\n }\n return x === null\n ? 'null'\n : '\"' + arrayEscape(serializer ? 
serializer(x) : x.toString()) + '\"'\n })\n .join(delimiter)}}`\n }\n}\n\nconst arrayParserState = {\n i: 0,\n char: null as string | null,\n str: '',\n quoted: false,\n last: 0,\n p: null as string | null,\n}\n\nexport function arrayParser(x: string, parser: Parser, typarray: number) {\n arrayParserState.i = arrayParserState.last = 0\n return arrayParserLoop(arrayParserState, x, parser, typarray)[0]\n}\n\nfunction arrayParserLoop(\n s: typeof arrayParserState,\n x: string,\n parser: Parser | undefined,\n typarray: number,\n): any[] {\n const xs = []\n // Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter\n const delimiter = typarray === 1020 ? ';' : ','\n for (; s.i < x.length; s.i++) {\n s.char = x[s.i]\n if (s.quoted) {\n if (s.char === '\\\\') {\n s.str += x[++s.i]\n } else if (s.char === '\"') {\n xs.push(parser ? parser(s.str) : s.str)\n s.str = ''\n s.quoted = x[s.i + 1] === '\"'\n s.last = s.i + 2\n } else {\n s.str += s.char\n }\n } else if (s.char === '\"') {\n s.quoted = true\n } else if (s.char === '{') {\n s.last = ++s.i\n xs.push(arrayParserLoop(s, x, parser, typarray))\n } else if (s.char === '}') {\n s.quoted = false\n s.last < s.i &&\n xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))\n s.last = s.i + 1\n break\n } else if (s.char === delimiter && s.p !== '}' && s.p !== '\"') {\n xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))\n s.last = s.i + 1\n }\n s.p = s.char\n }\n s.last < s.i &&\n xs.push(\n parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1),\n )\n return xs\n}\n","import {\n BackendMessage,\n RowDescriptionMessage,\n DataRowMessage,\n CommandCompleteMessage,\n ParameterDescriptionMessage,\n} from '@electric-sql/pg-protocol/messages'\nimport type { Results, QueryOptions } from './interface.js'\nimport { parseType, type Parser } from './types.js'\n\n/**\n * This function is used to parse the results of either a simple or extended query.\n * https://www.postgresql.org/docs/current/protocol-flow.html#PROTOCOL-FLOW-SIMPLE-QUERY\n */\nexport function parseResults(\n messages: Array<BackendMessage>,\n defaultParsers: Record<number | string, Parser>,\n options?: QueryOptions,\n blob?: Blob,\n): Array<Results> {\n const resultSets: Results[] = []\n let currentResultSet: Results = { rows: [], fields: [] }\n let affectedRows = 0\n const parsers = { ...defaultParsers, ...options?.parsers }\n\n messages.forEach((message) => {\n switch (message.name) {\n case 'rowDescription': {\n const msg = message as RowDescriptionMessage\n currentResultSet.fields = msg.fields.map((field) => ({\n name: field.name,\n dataTypeID: field.dataTypeID,\n }))\n break\n }\n case 'dataRow': {\n if (!currentResultSet) break\n const msg = message as DataRowMessage\n if (options?.rowMode === 'array') {\n currentResultSet.rows.push(\n msg.fields.map((field, i) =>\n parseType(field, currentResultSet!.fields[i].dataTypeID, parsers),\n ),\n )\n } else {\n // rowMode === \"object\"\n currentResultSet.rows.push(\n Object.fromEntries(\n msg.fields.map((field, i) => [\n currentResultSet!.fields[i].name,\n parseType(\n field,\n currentResultSet!.fields[i].dataTypeID,\n parsers,\n ),\n ]),\n ),\n )\n }\n break\n }\n case 'commandComplete': {\n const msg = message as CommandCompleteMessage\n affectedRows += retrieveRowCount(msg)\n\n resultSets.push({\n ...currentResultSet,\n affectedRows,\n ...(blob ? 
{ blob } : {}),\n })\n\n currentResultSet = { rows: [], fields: [] }\n break\n }\n }\n })\n\n if (resultSets.length === 0) {\n resultSets.push({\n affectedRows: 0,\n rows: [],\n fields: [],\n })\n }\n\n return resultSets\n}\n\nfunction retrieveRowCount(msg: CommandCompleteMessage): number {\n const parts = msg.text.split(' ')\n switch (parts[0]) {\n case 'INSERT':\n return parseInt(parts[2], 10)\n case 'UPDATE':\n case 'DELETE':\n case 'COPY':\n case 'MERGE':\n return parseInt(parts[1], 10)\n default:\n return 0\n }\n}\n\n/** Get the dataTypeIDs from a list of messages, if it's available. */\nexport function parseDescribeStatementResults(\n messages: Array<BackendMessage>,\n): number[] {\n const message = messages.find(\n (msg): msg is ParameterDescriptionMessage =>\n msg.name === 'parameterDescription',\n )\n\n if (message) {\n return message.dataTypeIDs\n }\n\n return []\n}\n","import type { PGliteInterface, Transaction } from './interface.js'\nimport { serialize as serializeProtocol } from '@electric-sql/pg-protocol'\nimport { parseDescribeStatementResults } from './parse.js'\nimport { TEXT } from './types.js'\n\nexport const IN_NODE =\n typeof process === 'object' &&\n typeof process.versions === 'object' &&\n typeof process.versions.node === 'string'\n\nlet wasmDownloadPromise: Promise<Response> | undefined\n\n// Helper to safely get wasm URL - deferred to avoid import.meta.url issues in Workers\nfunction getWasmUrl(): URL {\n try {\n return new URL('../release/pglite.wasm', import.meta.url)\n } catch {\n throw new Error(\n 'Cannot resolve WASM URL. In Cloudflare Workers, you must provide wasmModule option.',\n )\n }\n}\n\n// Helper to safely get fsBundle URL - deferred to avoid import.meta.url issues in Workers\nfunction getFsBundleUrl(): URL {\n try {\n return new URL('../release/pglite.data', import.meta.url)\n } catch {\n throw new Error(\n 'Cannot resolve fsBundle URL. 
In Cloudflare Workers, you must provide fsBundle option.',\n )\n }\n}\n\nexport async function startWasmDownload() {\n if (IN_NODE || wasmDownloadPromise) {\n return\n }\n const moduleUrl = getWasmUrl()\n wasmDownloadPromise = fetch(moduleUrl)\n}\n\n// This is a global cache of the PGlite Wasm module to avoid having to re-download or\n// compile it on subsequent calls.\nlet cachedWasmModule: WebAssembly.Module | undefined\n\nexport async function instantiateWasm(\n imports: WebAssembly.Imports,\n module?: WebAssembly.Module,\n): Promise<{\n instance: WebAssembly.Instance\n module: WebAssembly.Module\n}> {\n // Check for provided module FIRST before trying to resolve URLs\n if (module || cachedWasmModule) {\n return {\n instance: await WebAssembly.instantiate(\n module || cachedWasmModule!,\n imports,\n ),\n module: module || cachedWasmModule!,\n }\n }\n // Only resolve URL if no module provided - this may throw in Workers\n const moduleUrl = getWasmUrl()\n if (IN_NODE) {\n const fs = await import('fs/promises')\n const buffer = await fs.readFile(moduleUrl)\n const { module: newModule, instance } = await WebAssembly.instantiate(\n buffer,\n imports,\n )\n cachedWasmModule = newModule\n return {\n instance,\n module: newModule,\n }\n } else {\n if (!wasmDownloadPromise) {\n wasmDownloadPromise = fetch(moduleUrl)\n }\n const response = await wasmDownloadPromise\n const { module: newModule, instance } =\n await WebAssembly.instantiateStreaming(response, imports)\n cachedWasmModule = newModule\n return {\n instance,\n module: newModule,\n }\n }\n}\n\nexport async function getFsBundle(): Promise<ArrayBuffer> {\n // Only resolve URL when called - this may throw in Workers\n // Callers should provide fsBundle option to avoid this\n const fsBundleUrl = getFsBundleUrl()\n if (IN_NODE) {\n const fs = await import('fs/promises')\n const fileData = await fs.readFile(fsBundleUrl)\n return fileData.buffer\n } else {\n const response = await fetch(fsBundleUrl)\n return response.arrayBuffer()\n }\n}\n\nexport const uuid = (): string => {\n // best case, `crypto.randomUUID` is available\n if (globalThis.crypto?.randomUUID) {\n return globalThis.crypto.randomUUID()\n }\n\n const bytes = new Uint8Array(16)\n\n if (globalThis.crypto?.getRandomValues) {\n // `crypto.getRandomValues` is available even in non-secure contexts\n globalThis.crypto.getRandomValues(bytes)\n } else {\n // fallback to Math.random, if the Crypto API is completely missing\n for (let i = 0; i < bytes.length; i++) {\n bytes[i] = Math.floor(Math.random() * 256)\n }\n }\n\n bytes[6] = (bytes[6] & 0x0f) | 0x40 // Set the 4 most significant bits to 0100\n bytes[8] = (bytes[8] & 0x3f) | 0x80 // Set the 2 most significant bits to 10\n\n const hexValues: string[] = []\n bytes.forEach((byte) => {\n hexValues.push(byte.toString(16).padStart(2, '0'))\n })\n\n return (\n hexValues.slice(0, 4).join('') +\n '-' +\n hexValues.slice(4, 6).join('') +\n '-' +\n hexValues.slice(6, 8).join('') +\n '-' +\n hexValues.slice(8, 10).join('') +\n '-' +\n hexValues.slice(10).join('')\n )\n}\n\n/**\n * Formats a query with parameters\n * Expects that any tables/relations referenced in the query exist in the database\n * due to requiring them to be present to describe the parameters types.\n * `tx` is optional, and to be used when formatQuery is called during a transaction.\n * @param pg - The PGlite instance\n * @param query - The query to format\n * @param params - The parameters to format the query with\n * @param tx - The transaction to use, defaults to the PGlite 
instance\n * @returns The formatted query\n */\nexport async function formatQuery(\n pg: PGliteInterface,\n query: string,\n params?: any[] | null,\n tx?: Transaction | PGliteInterface,\n) {\n if (!params || params.length === 0) {\n // no params so no formatting needed\n return query\n }\n\n tx = tx ?? pg\n\n // Get the types of the parameters\n const messages = []\n try {\n await pg.execProtocol(serializeProtocol.parse({ text: query }), {\n syncToFs: false,\n })\n\n messages.push(\n ...(\n await pg.execProtocol(serializeProtocol.describe({ type: 'S' }), {\n syncToFs: false,\n })\n ).messages,\n )\n } finally {\n messages.push(\n ...(await pg.execProtocol(serializeProtocol.sync(), { syncToFs: false }))\n .messages,\n )\n }\n\n const dataTypeIDs = parseDescribeStatementResults(messages)\n\n // replace $1, $2, etc with %1L, %2L, etc\n const subbedQuery = query.replace(/\\$([0-9]+)/g, (_, num) => {\n return '%' + num + 'L'\n })\n\n const ret = await tx.query<{\n query: string\n }>(\n `SELECT format($1, ${params.map((_, i) => `$${i + 2}`).join(', ')}) as query`,\n [subbedQuery, ...params],\n { paramTypes: [TEXT, ...dataTypeIDs] },\n )\n return ret.rows[0].query\n}\n\n/**\n * Debounce a function to ensure that only one instance of the function is running at\n * a time.\n * - If the function is called while an instance is already running, the new\n * call is scheduled to run after the current instance completes.\n * - If there is already a scheduled call, it is replaced with the new call.\n * @param fn - The function to debounce\n * @returns A debounced version of the function\n */\nexport function debounceMutex<A extends any[], R>(\n fn: (...args: A) => Promise<R>,\n): (...args: A) => Promise<R | void> {\n let next:\n | {\n args: A\n resolve: (value: R | void) => void\n reject: (reason?: any) => void\n }\n | undefined = undefined\n\n let isRunning = false\n const processNext = async () => {\n if (!next) {\n isRunning = false\n return\n }\n isRunning = true\n const { args, resolve, reject } = next\n next = undefined\n try {\n const ret = await fn(...args)\n resolve(ret)\n } catch (e) {\n reject(e)\n } finally {\n processNext()\n }\n }\n return async (...args: A) => {\n if (next) {\n next.resolve(undefined)\n }\n const promise = new Promise<R | void>((resolve, reject) => {\n next = { args, resolve, reject }\n })\n if (!isRunning) {\n processNext()\n }\n return promise\n }\n}\n\n/**\n * Postgresql handles quoted names as CaseSensitive and unquoted as lower case.\n * If input is quoted, returns an unquoted string (same casing)\n * If input is unquoted, returns a lower-case string\n */\nexport function toPostgresName(input: string): string {\n let output\n if (input.startsWith('\"') && input.endsWith('\"')) {\n // Postgres sensitive case\n output = input.substring(1, input.length - 1)\n } else {\n // Postgres case insensitive - all to lower\n output = input.toLowerCase()\n }\n return 
output\n}\n"],"mappings":"k3BAAA,IAAAA,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,eAAAC,GAAAH,ICMA,SAASI,EAAeC,EAAqB,CAC3C,IAAIC,EAAaD,EAAI,OACrB,QAASE,EAAIF,EAAI,OAAS,EAAGE,GAAK,EAAGA,IAAK,CACxC,IAAMC,EAAOH,EAAI,WAAWE,CAAC,EACzBC,EAAO,KAAQA,GAAQ,KAAOF,IACzBE,EAAO,MAASA,GAAQ,QAAQF,GAAc,GACnDE,GAAQ,OAAUA,GAAQ,OAAQD,GACxC,CACA,OAAOD,CACT,CCfA,IAAAG,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,GAEaC,EAAN,KAAa,CAOlB,YAAoBC,EAAO,IAAK,CAAZ,UAAAA,EAPfC,EAAA,KAAAN,GACLM,EAAA,KAAAX,GACAW,EAAA,KAAAV,EAAkB,GAElBU,EAAA,KAAST,EAAgB,IACzBS,EAAA,KAASR,EAAW,IAAI,aACxBQ,EAAA,KAASP,EAA0B,GAEjCQ,EAAA,KAAKZ,EAAca,EAAA,KAAKR,EAAAC,GAAL,UAAqBI,GAC1C,CAkBO,SAASI,EAAqB,CACnC,OAAAD,EAAA,KAAKR,EAAAE,GAAL,UAAa,GACbQ,EAAA,KAAKf,GAAY,SAASe,EAAA,KAAKd,GAASa,EAAKC,EAAA,KAAKb,EAAa,EAC/DU,EAAA,KAAKX,EAALc,EAAA,KAAKd,GAAW,GACT,IACT,CAEO,SAASa,EAAqB,CACnC,OAAAD,EAAA,KAAKR,EAAAE,GAAL,UAAa,GACbQ,EAAA,KAAKf,GAAY,SAASe,EAAA,KAAKd,GAASa,EAAKC,EAAA,KAAKb,EAAa,EAC/DU,EAAA,KAAKX,EAALc,EAAA,KAAKd,GAAW,GACT,IACT,CAEO,WAAWe,EAAwB,CACxC,OAAIA,GAGF,KAAK,UAAUA,CAAM,EAIvBH,EAAA,KAAKR,EAAAE,GAAL,UAAa,GACbQ,EAAA,KAAKf,GAAY,SAASe,EAAA,KAAKd,GAAS,CAAC,EACzCgB,EAAA,KAAKhB,GAAL,IACO,IACT,CAEO,UAAUe,EAAiB,GAAY,CAC5C,IAAME,EAASC,EAAeH,CAAM,EACpC,OAAAH,EAAA,KAAKR,EAAAE,GAAL,UAAaW,GACbH,EAAA,KAAKZ,GAAS,WACZa,EACA,IAAI,WAAWD,EAAA,KAAKf,GAAY,OAAQe,EAAA,KAAKd,EAAO,CACtD,EACAW,EAAA,KAAKX,EAALc,EAAA,KAAKd,GAAWiB,GACT,IACT,CAEO,IAAIE,EAAkC,CAC3C,OAAAP,EAAA,KAAKR,EAAAE,GAAL,UAAaa,EAAY,YACzB,IAAI,WAAWL,EAAA,KAAKf,GAAY,MAAM,EAAE,IACtC,IAAI,WAAWoB,CAAW,EAC1BL,EAAA,KAAKd,EACP,EAEAW,EAAA,KAAKX,EAALc,EAAA,KAAKd,GAAWmB,EAAY,YACrB,IACT,CAgBO,MAAMC,EAA2B,CACtC,IAAMC,EAAST,EAAA,KAAKR,EAAAG,IAAL,UAAWa,GAC1B,OAAAT,EAAA,KAAKX,EAAU,GACfW,EAAA,KAAKZ,EAAca,EAAA,KAAKR,EAAAC,GAAL,UAAqB,KAAK,OACtC,IAAI,WAAWgB,CAAM,CAC9B,CACF,EAhGEtB,EAAA,YACAC,EAAA,YAESC,EAAA,YACAC,EAAA,YACAC,EAAA,YANJC,EAAA,YAWLC,EAAe,SAACI,EAAwB,CACtC,OAAO,IAAI,SAAS,IAAI,YAAYA,CAAI,CAAC,CAC3C,EAEAH,EAAO,SAACG,EAAoB,CAE1B,GADkBK,EAAA,KAAKf,GAAY,WAAae,EAAA,KAAKd,GACrCS,EAAM,CACpB,IAAMa,EAAYR,EAAA,KAAKf,GAAY,OAG7BwB,EAAUD,EAAU,YAAcA,EAAU,YAAc,GAAKb,EACrEE,EAAA,KAAKZ,EAAca,EAAA,KAAKR,EAAAC,GAAL,UAAqBkB,IACxC,IAAI,WAAWT,EAAA,KAAKf,GAAY,MAAM,EAAE,IAAI,IAAI,WAAWuB,CAAS,CAAC,CACvE,CACF,EAoDAf,GAAK,SAACa,EAA4B,CAChC,GAAIA,EAAM,CACRN,EAAA,KAAKf,GAAY,SAASe,EAAA,KAAKX,GAAiBiB,CAAI,EAEpD,IAAMH,EAASH,EAAA,KAAKd,IAAWc,EAAA,KAAKX,GAAkB,GACtDW,EAAA,KAAKf,GAAY,SACfe,EAAA,KAAKX,GAAkB,EACvBc,EACAH,EAAA,KAAKb,EACP,CACF,CACA,OAAOa,EAAA,KAAKf,GAAY,OAAO,MAAMqB,EAAO,EAAI,EAAGN,EAAA,KAAKd,EAAO,CACjE,ECtEF,IAAMwB,EAAS,IAAIC,EAEbC,GAAWC,GAA6C,CAE5DH,EAAO,SAAS,CAAC,EAAE,SAAS,CAAC,EAC7B,QAAWI,KAAO,OAAO,KAAKD,CAAI,EAChCH,EAAO,WAAWI,CAAG,EAAE,WAAWD,EAAKC,CAAG,CAAC,EAG7CJ,EAAO,WAAW,iBAAiB,EAAE,WAAW,MAAM,EAEtD,IAAMK,EAAaL,EAAO,WAAW,EAAE,EAAE,MAAM,EAGzCM,EAASD,EAAW,WAAa,EAEvC,OAAO,IAAIJ,EAAO,EAAE,SAASK,CAAM,EAAE,IAAID,EAAW,MAAqB,EAAE,MAAM,CACnF,EAEME,GAAa,IAAkB,CACnC,IAAMC,EAAa,IAAI,SAAS,IAAI,YAAY,CAAC,CAAC,EAClD,OAAAA,EAAW,SAAS,EAAG,EAAG,EAAK,EAC/BA,EAAW,SAAS,EAAG,SAAU,EAAK,EAC/B,IAAI,WAAWA,EAAW,MAAM,CACzC,EAEMC,GAAYA,GACTT,EAAO,WAAWS,CAAQ,EAAE,MAAM,GAAY,EAGjDC,GAAiC,CACrCC,EACAC,KAGAZ,EACG,WAAWW,CAAS,EACpB,SAASE,EAAeD,CAAe,CAAC,EACxC,UAAUA,CAAe,EAErBZ,EAAO,MAAM,GAAY,GAG5Bc,GAA+BC,GAC5Bf,EAAO,UAAUe,CAAc,EAAE,MAAM,GAAY,EAGtDC,GAASC,GACNjB,EAAO,WAAWiB,CAAI,EAAE,MAAM,EAAU,EAS3CC,GAAgC,CAAC,EAEjCC,GAASH,GAAiC,CAO9C,IAAMI,EAAOJ,EAAM,MAAQ,GACvBI,EAAK,OAAS,KAEhB,QAAQ,MACN,gEACF,EACA,QAAQ,MAAM,uBAAwBA,EAAMA,EAAK,MAAM,EACvD,QAAQ,MACN,8DACF,GAIF,IAAMC,EAASrB,EACZ,WAAWoB,CAAI,EACf,WAAWJ,EAAM,IAAI,EACrB,SAASA,EAAM,OAAO,QAAU,CAAC,EAEpC,OAAAA,EAAM,OAAO,QAASM,G
AASD,EAAO,SAASC,CAAI,CAAC,EAE7CtB,EAAO,MAAM,EAAU,CAChC,EAaMuB,EAAc,IAAItB,EAQxB,IAAMuB,GAAc,CAACC,EAAsBC,IAAoC,CAC7E,QAASC,EAAI,EAAGA,EAAIF,EAAO,OAAQE,IAAK,CACtC,IAAMC,EAAYF,EAAcA,EAAYD,EAAOE,CAAC,EAAGA,CAAC,EAAIF,EAAOE,CAAC,EACpE,GAAIC,IAAc,KAEhBC,EAAO,SAAS,CAAgB,EAEhCC,EAAY,SAAS,EAAE,UAEvBF,aAAqB,aACrB,YAAY,OAAOA,CAAS,EAC5B,CACA,IAAMG,EAAS,YAAY,OAAOH,CAAS,EACtCA,EAAU,OAAO,MAChBA,EAAU,WACVA,EAAU,WAAaA,EAAU,UACnC,EACAA,EAEJC,EAAO,SAAS,CAAgB,EAEhCC,EAAY,SAASC,EAAO,UAAU,EACtCD,EAAY,IAAIC,CAAM,CACxB,MAEEF,EAAO,SAAS,CAAgB,EAChCC,EAAY,SAASE,EAAeJ,CAAS,CAAC,EAC9CE,EAAY,UAAUF,CAAS,CAEnC,CACF,EAEMK,GAAO,CAACC,EAAmB,CAAC,IAAkB,CAElD,IAAMC,EAASD,EAAO,QAAU,GAC1BE,EAAYF,EAAO,WAAa,GAChCG,EAASH,EAAO,QAAU,GAC1BT,EAASS,EAAO,QAAUI,GAC1BC,EAAMd,EAAO,OAEnB,OAAAI,EAAO,WAAWM,CAAM,EAAE,WAAWC,CAAS,EAC9CP,EAAO,SAASU,CAAG,EAEnBf,GAAYC,EAAQS,EAAO,WAAW,EAEtCL,EAAO,SAASU,CAAG,EACnBV,EAAO,IAAIC,EAAY,MAAM,EAAE,MAAqB,EAGpDD,EAAO,SAASQ,EAAS,EAAmB,CAAgB,EACrDR,EAAO,MAAM,EAAS,CAC/B,EAOMW,GAAe,IAAI,WAAW,CAClC,GACA,EACA,EACA,EACA,EACA,EACA,EACA,EACA,EACA,CACF,CAAC,EAEKC,GAAWP,GAAkC,CAEjD,GAAI,CAACA,GAAW,CAACA,EAAO,QAAU,CAACA,EAAO,KACxC,OAAOM,GAGT,IAAML,EAASD,EAAO,QAAU,GAC1BQ,EAAOR,EAAO,MAAQ,EAEtBS,EAAeX,EAAeG,CAAM,EACpCI,EAAM,EAAII,EAAe,EAAI,EAE7BC,EAAa,IAAI,SAAS,IAAI,YAAY,EAAIL,CAAG,CAAC,EACxD,OAAAK,EAAW,SAAS,EAAG,EAAY,EACnCA,EAAW,SAAS,EAAGL,EAAK,EAAK,EACjC,IAAI,YAAY,EAAE,WAAWJ,EAAQ,IAAI,WAAWS,EAAW,OAAQ,CAAC,CAAC,EACzEA,EAAW,SAASD,EAAe,EAAG,CAAC,EACvCC,EAAW,UAAUA,EAAW,WAAa,EAAGF,EAAM,EAAK,EACpD,IAAI,WAAWE,EAAW,MAAM,CACzC,EAEMC,GAAS,CAACC,EAAmBC,IAAkC,CACnE,IAAMH,EAAa,IAAI,SAAS,IAAI,YAAY,EAAE,CAAC,EACnD,OAAAA,EAAW,SAAS,EAAG,GAAI,EAAK,EAChCA,EAAW,SAAS,EAAG,KAAM,EAAK,EAClCA,EAAW,SAAS,EAAG,KAAM,EAAK,EAClCA,EAAW,SAAS,EAAGE,EAAW,EAAK,EACvCF,EAAW,SAAS,GAAIG,EAAW,EAAK,EACjC,IAAI,WAAWH,EAAW,MAAM,CACzC,EAOMI,EAAiB,CAACC,EAAYC,IAA+B,CACjE,IAAMrB,EAAS,IAAIsB,EACnB,OAAAtB,EAAO,WAAWqB,CAAM,EACjBrB,EAAO,MAAMoB,CAAI,CAC1B,EAEMG,GAAsBvB,EAAO,WAAW,GAAG,EAAE,MAAM,EAAa,EAChEwB,GAAyBxB,EAAO,WAAW,GAAG,EAAE,MAAM,EAAa,EAEnEyB,GAAYC,GACTA,EAAI,KACPP,EAAe,GAAe,GAAGO,EAAI,IAAI,GAAGA,EAAI,MAAQ,EAAE,EAAE,EAC5DA,EAAI,OAAS,IACXH,GACAC,GAGFG,GAASD,GAAgC,CAC7C,IAAME,EAAO,GAAGF,EAAI,IAAI,GAAGA,EAAI,MAAQ,EAAE,GACzC,OAAOP,EAAe,GAAYS,CAAI,CACxC,EAEMC,GAAYC,GACT9B,EAAO,IAAI8B,CAAK,EAAE,MAAM,GAAkB,EAG7CC,GAAYC,GACTb,EAAe,IAAea,CAAO,EAGxCC,EAAkBb,GACtB,IAAI,WAAW,CAACA,EAAM,EAAM,EAAM,EAAM,CAAI,CAAC,EAEzCc,GAAcD,EAAe,EAAU,EACvCE,GAAaF,EAAe,EAAS,EACrCG,GAAYH,EAAe,EAAQ,EACnCI,GAAiBJ,EAAe,EAAa,EAE7CK,EAAY,CAChB,QAAAC,GACA,SAAAC,GACA,WAAAC,GACA,+BAAAC,GACA,4BAAAC,GACA,MAAAC,GACA,MAAAC,GACA,KAAAzC,GACA,QAAAQ,GACA,SAAAa,GACA,MAAAE,GACA,MAAO,IAAMO,GACb,KAAM,IAAMC,GACZ,IAAK,IAAMC,GACX,SAAAP,GACA,SAAU,IAAMQ,GAChB,SAAAN,GACA,OAAAf,EACF,ECrSA,IAAM8B,GAAc,IAAI,YAAY,CAAC,ECoCrC,IAAMC,GAAc,EAGdC,GAAa,EAEbC,GAAgBF,GAAcC,GAO9BE,GAAc,IAAI,YAAY,CAAC,ECvCrC,IAAMC,GAAa,WAAW,KAAK,MAC7BC,GAAiB,WAAW,KAAK,UAE1BC,GAAO,GAClBC,GAAQ,GADH,IAGLC,GAAO,GACPC,GAAO,GACPC,GAAO,GALF,IAOLC,EAAO,GACPC,GAAM,GARD,IAYLC,GAAO,IAZF,IAmBLC,GAAS,IACTC,GAAS,IApBJ,IA8BLC,GAAS,KACTC,GAAU,KACVC,GAAO,KAhCF,IAkCLC,GAAY,KACZC,GAAc,KAnCT,IAyDLC,GAAQ,KAIH,IAAMC,GAAQ,CACnB,OAAQ,CACN,GAAIC,EACJ,KAAM,CAACA,EAAMC,GAASC,EAAM,EAC5B,UAAYC,GAAuB,CACjC,GAAI,OAAOA,GAAM,SACf,OAAOA,EACF,GAAI,OAAOA,GAAM,SACtB,OAAOA,EAAE,SAAS,EAElB,MAAM,IAAI,MAAM,+BAA+B,CAEnD,EACA,MAAQA,GAAcA,CACxB,EACA,OAAQ,CACN,GAAI,EACJ,KAAM,CAACC,GAAMC,GAAMC,GAAKC,GAAQC,EAAM,EACtC,UAAYL,GAAcA,EAAE,SAAS,EACrC,MAAQA,GAAc,CAACA,CACzB,EACA,OAAQ,CACN,GAAIM,GACJ,KAAM,CAACA,EAAI,EACX,UAAYN,GAAcA,EAAE,SAAS,EACrC,MAAQA,GAAc,CACpB,IAAMO,EAAI,OAAOP,CAAC,EAClB,OAAIO,EAAI,OA
AO,kBAAoBA,EAAI,OAAO,iBACrCA,EAEA,OAAOA,CAAC,CAEnB,CACF,EACA,KAAM,CACJ,GAAIC,GACJ,KAAM,CAACA,GAAMC,EAAK,EAClB,UAAYT,GACN,OAAOA,GAAM,SACRA,EAEAU,GAAeV,CAAC,EAG3B,MAAQA,GAAcW,GAAWX,CAAC,CACpC,EACA,QAAS,CACP,GAAIY,GACJ,KAAM,CAACA,EAAI,EACX,UAAYZ,GAAe,CACzB,GAAI,OAAOA,GAAM,UACf,MAAM,IAAI,MAAM,gCAAgC,EAElD,OAAOA,EAAI,IAAM,GACnB,EACA,MAAQA,GAAcA,IAAM,GAC9B,EACA,KAAM,CACJ,GAAIa,GACJ,KAAM,CAACC,GAAMC,GAAWF,EAAW,EACnC,UAAYb,GAA8B,CACxC,GAAI,OAAOA,GAAM,SACf,OAAOA,EACF,GAAI,OAAOA,GAAM,SACtB,OAAO,IAAI,KAAKA,CAAC,EAAE,YAAY,EAC1B,GAAIA,aAAa,KACtB,OAAOA,EAAE,YAAY,EAErB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EACA,MAAQA,GAAuB,IAAI,KAAKA,CAAC,CAC3C,EACA,MAAO,CACL,GAAIgB,GACJ,KAAM,CAACA,EAAK,EACZ,UAAYhB,GAAkB,CAC5B,GAAI,EAAEA,aAAa,YACjB,MAAM,IAAI,MAAM,8BAA8B,EAEhD,MACE,MACA,MAAM,KAAKA,CAAC,EACT,IAAKiB,GAASA,EAAK,SAAS,EAAE,EAAE,SAAS,EAAG,GAAG,CAAC,EAChD,KAAK,EAAE,CAEd,EACA,MAAQjB,GAA0B,CAChC,IAAMkB,EAAYlB,EAAE,MAAM,CAAC,EAC3B,OAAO,WAAW,KAAK,CAAE,OAAQkB,EAAU,OAAS,CAAE,EAAG,CAACC,EAAGC,IAC3D,SAASF,EAAU,UAAUE,EAAM,GAAIA,EAAM,GAAK,CAAC,EAAG,EAAE,CAC1D,CACF,CACF,CACF,EAgBMC,GAAkBC,GAAa1B,EAAK,EAE7B2B,GAAUF,GAAgB,QAC1BG,GAAcH,GAAgB,YAkB3C,SAASI,GAAaC,EAAqB,CACzC,OAAO,OAAO,KAAKA,CAAK,EAAE,OACxB,CAAC,CAAE,QAAAC,EAAS,YAAAC,CAAY,EAAGC,IAAM,CAC/B,GAAM,CAAE,GAAAC,EAAI,KAAAC,EAAM,UAAAC,EAAW,MAAAC,CAAM,EAAIP,EAAMG,CAAC,EAC9C,OAAAD,EAAYE,CAAE,EAAIE,EAClBJ,EAAYC,CAAC,EAAIG,EACjBL,EAAQE,CAAC,EAAII,EACT,MAAM,QAAQF,CAAI,EACpBA,EAAK,QAASG,GAAM,CAClBP,EAAQO,CAAC,EAAID,EACbL,EAAYM,CAAC,EAAIF,CACnB,CAAC,GAEDL,EAAQI,CAAI,EAAIE,EAChBL,EAAYG,CAAI,EAAIC,GAEf,CAAE,QAAAL,EAAS,YAAAC,CAAY,CAChC,EACA,CACE,QAAS,CAAC,EAGV,YAAa,CAAC,CAGhB,CACF,CACF,CC/HO,SAASO,GACdC,EACU,CACV,IAAMC,EAAUD,EAAS,KACtBE,GACCA,EAAI,OAAS,sBACjB,EAEA,OAAID,EACKA,EAAQ,YAGV,CAAC,CACV,CChHO,IAAME,GACX,OAAO,SAAY,UACnB,OAAO,QAAQ,UAAa,UAC5B,OAAO,QAAQ,SAAS,MAAS,SAkG5B,IAAMC,GAAO,IAAc,CAEhC,GAAI,WAAW,QAAQ,WACrB,OAAO,WAAW,OAAO,WAAW,EAGtC,IAAMC,EAAQ,IAAI,WAAW,EAAE,EAE/B,GAAI,WAAW,QAAQ,gBAErB,WAAW,OAAO,gBAAgBA,CAAK,MAGvC,SAASC,EAAI,EAAGA,EAAID,EAAM,OAAQC,IAChCD,EAAMC,CAAC,EAAI,KAAK,MAAM,KAAK,OAAO,EAAI,GAAG,EAI7CD,EAAM,CAAC,EAAKA,EAAM,CAAC,EAAI,GAAQ,GAC/BA,EAAM,CAAC,EAAKA,EAAM,CAAC,EAAI,GAAQ,IAE/B,IAAME,EAAsB,CAAC,EAC7B,OAAAF,EAAM,QAASG,GAAS,CACtBD,EAAU,KAAKC,EAAK,SAAS,EAAE,EAAE,SAAS,EAAG,GAAG,CAAC,CACnD,CAAC,EAGCD,EAAU,MAAM,EAAG,CAAC,EAAE,KAAK,EAAE,EAC7B,IACAA,EAAU,MAAM,EAAG,CAAC,EAAE,KAAK,EAAE,EAC7B,IACAA,EAAU,MAAM,EAAG,CAAC,EAAE,KAAK,EAAE,EAC7B,IACAA,EAAU,MAAM,EAAG,EAAE,EAAE,KAAK,EAAE,EAC9B,IACAA,EAAU,MAAM,EAAE,EAAE,KAAK,EAAE,CAE/B,EAaA,eAAsBE,GACpBC,EACAC,EACAC,EACAC,EACA,CACA,GAAI,CAACD,GAAUA,EAAO,SAAW,EAE/B,OAAOD,EAGTE,EAAKA,GAAMH,EAGX,IAAMI,EAAW,CAAC,EAClB,GAAI,CACF,MAAMJ,EAAG,aAAaK,EAAkB,MAAM,CAAE,KAAMJ,CAAM,CAAC,EAAG,CAC9D,SAAU,EACZ,CAAC,EAEDG,EAAS,KACP,IACE,MAAMJ,EAAG,aAAaK,EAAkB,SAAS,CAAE,KAAM,GAAI,CAAC,EAAG,CAC/D,SAAU,EACZ,CAAC,GACD,QACJ,CACF,QAAE,CACAD,EAAS,KACP,IAAI,MAAMJ,EAAG,aAAaK,EAAkB,KAAK,EAAG,CAAE,SAAU,EAAM,CAAC,GACpE,QACL,CACF,CAEA,IAAMC,EAAcC,GAA8BH,CAAQ,EAGpDI,EAAcP,EAAM,QAAQ,cAAe,CAACQ,EAAGC,IAC5C,IAAMA,EAAM,GACpB,EASD,OAPY,MAAMP,EAAG,MAGnB,qBAAqBD,EAAO,IAAI,CAACO,EAAGb,IAAM,IAAIA,EAAI,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,aACjE,CAACY,EAAa,GAAGN,CAAM,EACvB,CAAE,WAAY,CAACS,EAAM,GAAGL,CAAW,CAAE,CACvC,GACW,KAAK,CAAC,EAAE,KACrB,CAWO,SAASM,GACdC,EACmC,CACnC,IAAIC,EAQAC,EAAY,GACVC,EAAc,SAAY,CAC9B,GAAI,CAACF,EAAM,CACTC,EAAY,GACZ,MACF,CACAA,EAAY,GACZ,GAAM,CAAE,KAAAE,EAAM,QAAAC,EAAS,OAAAC,CAAO,EAAIL,EAClCA,EAAO,OACP,GAAI,CACF,IAAMM,EAAM,MAAMP,EAAG,GAAGI,CAAI,EAC5BC,EAAQE,CAAG,CACb,OAASC,EAAG,CACVF,EAAOE,CAAC,CACV,QAAE,CACAL,EAAY,CACd,CACF,EACA,MAAO,UAAUC,IAAY,CACvBH,GACFA,EAAK,QAAQ,M
AAS,EAExB,IAAMQ,EAAU,IAAI,QAAkB,CAACJ,EAASC,IAAW,CACzDL,EAAO,CAAE,KAAAG,EAAM,QAAAC,EAAS,OAAAC,CAAO,CACjC,CAAC,EACD,OAAKJ,GACHC,EAAY,EAEPM,CACT,CACF,CRvOA,IAAMC,GAAc,EAEdC,GAAQ,MAAOC,EAAqBC,IAAyB,CAGjE,IAAMC,EAA2B,IAAI,IAE/BC,EAA8B,CAClC,MAAM,MACJC,EACAC,EACAC,EACA,CACA,IAAIC,EACAC,EACAC,EAWJ,GAVI,OAAOL,GAAU,WACnBG,EAASH,EAAM,OACfC,EAASD,EAAM,OACfE,EAAWF,EAAM,SACjBI,EAASJ,EAAM,OACfK,EAAQL,EAAM,MACdA,EAAQA,EAAM,OAIXI,IAAW,SAAgBC,IAAU,QACxC,MAAM,IAAI,MAAM,4CAA4C,EAG9D,IAAMC,EAAaF,IAAW,QAAaC,IAAU,OACjDE,EAEJ,GACED,IACC,OAAOF,GAAW,UACjB,MAAMA,CAAM,GACZ,OAAOC,GAAU,UACjB,MAAMA,CAAK,GAEb,MAAM,IAAI,MAAM,kCAAkC,EAGpD,IAAIG,EAAkDN,EAClD,CAACA,CAAQ,EACT,CAAC,EACCO,EAAKC,GAAK,EAAE,QAAQ,KAAM,EAAE,EAC9BC,EAAO,GAEPC,EAEAC,EACEC,EAAO,SAAY,CACvB,MAAMlB,EAAG,YAAY,MAAOmB,GAAO,CAEjC,IAAMC,EACJf,GAAUA,EAAO,OAAS,EACtB,MAAMgB,GAAYrB,EAAII,EAAOC,EAAQc,CAAE,EACvCf,EACN,MAAMe,EAAG,KACP,0CAA0CN,CAAE,YAAYO,CAAc,EACxE,EAGA,IAAME,EAAS,MAAMC,GAAiBJ,EAAI,cAAcN,CAAE,OAAO,EACjE,MAAMW,GAA0BL,EAAIG,EAAQpB,CAAwB,EAEhEQ,GACF,MAAMS,EAAG,KAAK;AAAA,mCACSN,CAAE;AAAA,yCACIA,CAAE;AAAA;AAAA,aAE9B,EACD,MAAMM,EAAG,KAAK;AAAA,mCACSN,CAAE;AAAA,gDACWA,CAAE;AAAA,aACrC,EACDF,GACE,MAAMQ,EAAG,MACP,sBAAsBN,CAAE,mBAC1B,GACA,KAAK,CAAC,EAAE,MACVG,EAAU,CACR,GAAI,MAAMG,EAAG,MACX,sBAAsBN,CAAE,QAAQJ,CAAK,KAAKD,CAAM,IAClD,EACA,OAAAA,EACA,MAAAC,EACA,WAAAE,CACF,IAEA,MAAMQ,EAAG,KAAK;AAAA,mCACSN,CAAE;AAAA,yCACIA,CAAE;AAAA,aAC9B,EACDG,EAAU,MAAMG,EAAG,MAAS,sBAAsBN,CAAE,OAAO,GAG7DI,EAAY,MAAM,QAAQ,IACxBK,EAAQ,IAAKG,GACXN,EAAG,OACD,kBAAkBM,EAAM,UAAU,KAAKA,EAAM,SAAS,IACtD,SAAY,CACVC,EAAQ,CACV,CACF,CACF,CACF,CACF,CAAC,CACH,EACA,MAAMR,EAAK,EAGX,IAAMQ,EAAUC,GACd,MAAO,CACL,OAAQC,EACR,MAAOC,CACT,EAGI,CAAC,IAAM,CAET,GACE,CAACnB,IACAkB,IAAc,QAAaC,IAAa,QAEzC,MAAM,IAAI,MACR,8DACF,EAEF,GACGD,IACE,OAAOA,GAAc,UAAY,MAAMA,CAAS,IAClDC,IAAa,OAAOA,GAAa,UAAY,MAAMA,CAAQ,GAE5D,MAAM,IAAI,MAAM,kCAAkC,EAEpDrB,EAASoB,GAAapB,EACtBC,EAAQoB,GAAYpB,EAEpB,IAAMqB,EAAM,MAAOC,EAAQ,IAAM,CAC/B,GAAInB,EAAU,SAAW,EAGzB,IAAI,CACEF,EAKFM,EAAU,CACR,GAAI,MAAMhB,EAAG,MACX,sBAAsBa,CAAE,QAAQJ,CAAK,KAAKD,CAAM,IAClD,EACA,OAAAA,EACA,MAAAC,EACA,WAAAE,CACF,EAEAK,EAAU,MAAMhB,EAAG,MAAS,sBAAsBa,CAAE,OAAO,CAE/D,OAASmB,EAAG,CACV,IAAMC,EAAOD,EAAY,QACzB,GACEC,EAAI,WAAW,kCAAkCpB,CAAE,EAAE,GACrDoB,EAAI,SAAS,gBAAgB,EAC7B,CAGA,GAAIF,EAAQjC,GACV,MAAMkC,EAER,MAAMd,EAAK,EACXY,EAAIC,EAAQ,CAAC,CACf,KACE,OAAMC,CAEV,CAMA,GAJAE,EAAmBtB,EAAWI,CAAO,EAIjCN,EAAY,CACd,IAAMyB,GACJ,MAAMnC,EAAG,MACP,sBAAsBa,CAAE,mBAC1B,GACA,KAAK,CAAC,EAAE,MACNsB,IAAkBxB,IAEpBA,EAAawB,EACbT,EAAQ,EAEZ,EACF,EACA,MAAMI,EAAI,CACZ,CACF,EAGMM,EAAa9B,GAA4C,CAC7D,GAAIS,EACF,MAAM,IAAI,MACR,4DACF,EAEFH,EAAU,KAAKN,CAAQ,CACzB,EAKM+B,EAAc,MAAO/B,GAA6C,CAClEA,EACFM,EAAYA,EAAU,OAAQN,GAAaA,IAAaA,CAAQ,EAEhEM,EAAY,CAAC,EAEXA,EAAU,SAAW,GAAK,CAACG,IAC7BA,EAAO,GACP,MAAMf,EAAG,YAAY,MAAOmB,GAAO,CACjC,MAAM,QAAQ,IAAIF,EAAU,IAAKqB,GAAUA,EAAMnB,CAAE,CAAC,CAAC,EACrD,MAAMA,EAAG,KAAK;AAAA,+CACqBN,CAAE;AAAA,sCACXA,CAAE;AAAA,aAC3B,CACH,CAAC,EAEL,EAGA,OAAIN,GAAQ,QACV,MAAM8B,EAAY,EAGlB9B,GAAQ,iBACN,QACA,IAAM,CACJ8B,EAAY,CACd,EACA,CAAE,KAAM,EAAK,CACf,EAIFH,EAAmBtB,EAAWI,CAAQ,EAG/B,CACL,eAAgBA,EAChB,UAAAoB,EACA,YAAAC,EACA,QAAAX,CACF,CACF,EAEA,MAAM,QACJtB,EACAC,EACAkC,EACAjC,EACA,CACA,IAAIC,EAQJ,GAPI,OAAOH,GAAU,WACnBG,EAASH,EAAM,OACfC,EAASD,EAAM,OACfmC,EAAMnC,EAAM,IACZE,EAAWF,EAAM,SACjBA,EAAQA,EAAM,OAEZ,CAACmC,EACH,MAAM,IAAI,MAAM,qCAAqC,EAEvD,IAAI3B,EAAwDN,EACxD,CAACA,CAAQ,EACT,CAAC,EACCO,EAAKC,GAAK,EAAE,QAAQ,KAAM,EAAE,EAC9BC,EAAO,GAEPyB,EAAqB,EACrBC,EAEAxB,EAEEC,EAAO,SAAY,CACvB,MAAMlB,EAAG,YAAY,MAAOmB,GAAO,CAEjC,IAAMC,EAAiB,MAAMC,GAAYrB,EAAII,EAAOC,EAAQc,CAAE,EAC9D,MAAMA,EAAG,MACP,0CAA0CN,CA
AE,YAAYO,CAAc,EACxE,EAGA,IAAME,EAAS,MAAMC,GAAiBJ,EAAI,cAAcN,CAAE,OAAO,EACjE,MAAMW,GAA0BL,EAAIG,EAAQpB,CAAwB,EAGpE,IAAMwC,EAAU,CACd,IACE,MAAMvB,EAAG,MAAW;AAAA;AAAA;AAAA,iDAGeN,CAAE;AAAA,eACpC,GACD,KACF,CAAE,YAAa,YAAa,UAAW,SAAU,CACnD,EAGA,MAAMM,EAAG,KAAK;AAAA,2CACmBN,CAAE,4BAA4BA,CAAE;AAAA,2CAChCA,CAAE,4BAA4BA,CAAE;AAAA,WAChE,EAGD,QAAW8B,IAAQ,CAAC,EAAG,CAAC,EAAG,CACzB,IAAMC,EAAOD,IAAS,EAAI,EAAI,EAC9B,MAAMxB,EAAG,KAAK;AAAA,mCACSN,CAAE,QAAQ8B,CAAI;AAAA;AAAA,uCAEVJ,CAAG,8CAA8C1B,CAAE,SAAS+B,CAAI;AAAA,uCAChEL,CAAG,8CAA8C1B,CAAE,SAAS8B,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKjFD,EACC,IACC,CAAC,CAAE,YAAAG,CAAY,IACb,SAASA,CAAW,SAASA,CAAW,GAC5C,EACC,KAAK;AAAA,CAAK,CAAC;AAAA;AAAA;AAAA,2CAGSN,CAAG,WAAWA,CAAG;AAAA,+BAC7BA,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKZG,EACC,IAAI,CAAC,CAAE,YAAAG,EAAa,UAAAC,EAAW,SAAAC,CAAS,IACnCF,IAAgBN,EACX,SAASM,CAAW,SAASA,CAAW,IAExC,OAAOC,IAAc,eAAiB,KAAKC,CAAQ,GAAK,EAAE,QAAQF,CAAW,GAEvF,EACA,KAAK;AAAA,CAAK,CAAC;AAAA;AAAA;AAAA,2CAGSN,CAAG,WAAWA,CAAG;AAAA,+BAC7BA,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKZG,EACC,IAAI,CAAC,CAAE,YAAAG,EAAa,UAAAC,EAAW,SAAAC,CAAS,IACvCF,IAAgBN,EACZ,SAASM,CAAW,SAASA,CAAW,IACxC;AAAA,2CACeA,CAAW,4BAA4BA,CAAW;AAAA,2CAClDA,CAAW;AAAA,yCACbC,IAAc,eAAiB,KAAKC,CAAQ,GAAK,EAAE;AAAA,wCACpDF,CAAW,GAC7B,EACC,KAAK;AAAA,CAAK,CAAC;AAAA,8DAC4BH,EACrC,OAAO,CAAC,CAAE,YAAAG,CAAY,IAAMA,IAAgBN,CAAG,EAC/C,IACC,CAAC,CAAE,YAAAM,CAAY,IACb;AAAA,2CACeA,CAAW,4BAA4BA,CAAW;AAAA,sCACvDA,CAAW;AAAA;AAAA,kCAGzB,EACC,KACC,IACF,CAAC;AAAA;AAAA,4CAEmBN,CAAG,WAAWA,CAAG;AAAA;AAAA;AAAA;AAAA,aAIhD,CACH,CAGAtB,EAAY,MAAM,QAAQ,IACxBK,EAAQ,IAAKG,GACXN,EAAG,OACD,kBAAkBM,EAAM,UAAU,KAAKA,EAAM,SAAS,IACtD,SAAY,CACVC,EAAQ,CACV,CACF,CACF,CACF,CACF,CAAC,CACH,EAEA,MAAMR,EAAK,EAEX,IAAMQ,EAAUC,GAAc,SAAY,CACxC,GAAIf,EAAU,SAAW,GAAK6B,EAC5B,OAEF,IAAIO,EAAQ,GACZ,QAASC,EAAI,EAAGA,EAAI,EAAGA,IACrB,GAAI,CACF,MAAMjD,EAAG,YAAY,MAAOmB,GAAO,CAEjC,MAAMA,EAAG,KAAK;AAAA,yCACaN,CAAE,SAAS2B,CAAW;AAAA,6CAClB3B,CAAE;AAAA,eAChC,EAGD4B,EAAU,MAAMtB,EAAG,MACjB,sBAAsBN,CAAE,QAAQ2B,CAAW,GAC7C,EAGAA,EAAcA,IAAgB,EAAI,EAAI,EAGtC,MAAMrB,EAAG,KAAK;AAAA,sCACUN,CAAE,SAAS2B,CAAW;AAAA,eAC7C,CACH,CAAC,EACD,KACF,OAASR,EAAG,CAEV,GADaA,EAAY,UAGvB,wBAAwBnB,CAAE,SAAS2B,CAAW,mBAC9C,CAGAQ,EAAQ,GACR,MAAM9B,EAAK,EACX,QACF,KACE,OAAMc,CAEV,CAGFkB,GAAmBtC,EAAW,CAC5B,GAAIoC,EACA,CACE,CACE,OAAQ,OACV,CACF,EACA,CAAC,EACL,GAAGP,EAAS,IACd,CAAC,CACH,CAAC,EAGKL,EAAa9B,GAAkD,CACnE,GAAIS,EACF,MAAM,IAAI,MACR,4DACF,EAEFH,EAAU,KAAKN,CAAQ,CACzB,EAGM+B,EAAc,MAClB/B,GACG,CACCA,EACFM,EAAYA,EAAU,OAAQN,GAAaA,IAAaA,CAAQ,EAEhEM,EAAY,CAAC,EAEXA,EAAU,SAAW,GAAK,CAACG,IAC7BA,EAAO,GACP,MAAMf,EAAG,YAAY,MAAOmB,GAAO,CACjC,MAAM,QAAQ,IAAIF,EAAU,IAAKqB,GAAUA,EAAMnB,CAAE,CAAC,CAAC,EACrD,MAAMA,EAAG,KAAK;AAAA,+CACqBN,CAAE;AAAA,gDACDA,CAAE;AAAA,gDACFA,CAAE;AAAA,sCACZA,CAAE;AAAA,sCACFA,CAAE;AAAA,aAC3B,CACH,CAAC,EAEL,EAGA,OAAIN,GAAQ,QACV,MAAM8B,EAAY,EAGlB9B,GAAQ,iBACN,QACA,IAAM,CACJ8B,EAAY,CACd,EACA,CAAE,KAAM,EAAK,CACf,EAIF,MAAMX,EAAQ,EASP,CACL,OAPae,EAAS,OAAO,OAC5BU,GACC,CAAC,CAAC,YAAa,SAAU,qBAAqB,EAAE,SAASA,EAAM,IAAI,CACvE,EAKE,eAAgBV,EAAS,KACzB,UAAAL,EACA,YAAAC,EACA,QAAAX,CACF,CACF,EAEA,MAAM,iBACJtB,EACAC,EACAkC,EACAjC,EACA,CACA,IAAIC,EAQJ,GAPI,OAAOH,GAAU,WACnBG,EAASH,EAAM,OACfC,EAASD,EAAM,OACfmC,EAAMnC,EAAM,IACZE,EAAWF,EAAM,SACjBA,EAAQA,EAAM,OAEZ,CAACmC,EACH,MAAM,IAAI,MAAM,yCAAyC,EAE3D,IAAI3B,EAAkDN,EAClD,CAACA,CAAQ,EACT,CAAC,EACC8C,EAAyB,IAAI,IAC7BC,EAA0B,IAAI,IAChCC,EAAgB,CAAC,EACjBC,EAAW,GAET,CACJ,OAAAC,EACA,YAAaC,EACb,QAAA/B,CACF,EAAI,MAAMvB,EAAa,QAAWC,EAAOC,EAAQkC,EAAME,GAAY,CAEjE,QAAWiB,KAAUjB,EAAS,CAC5B,GAAM,CACJ,OAAQkB,EACR,oBAAqBC,EACrB,GAAGC,CACL,EAAIH,EACJ,OAAQC,EAAI,CACV,IAAK,QACHP,EAAQ,MAA
M,EACdC,EAAS,MAAM,EACf,MACF,IAAK,SACHD,EAAQ,IAAIS,EAAItB,CAAG,EAAGsB,CAAG,EACzBR,EAAS,IAAIQ,EAAI,UAAWA,EAAItB,CAAG,CAAC,EACpC,MACF,IAAK,SAAU,CACb,IAAMuB,EAASV,EAAQ,IAAIS,EAAItB,CAAG,CAAC,EACnCa,EAAQ,OAAOS,EAAItB,CAAG,CAAC,EAGnBuB,EAAO,YAAc,MACvBT,EAAS,OAAOS,EAAO,SAAS,EAElC,KACF,CACA,IAAK,SAAU,CACb,IAAMC,EAAS,CAAE,GAAIX,EAAQ,IAAIS,EAAItB,CAAG,CAAC,GAAK,CAAC,CAAG,EAClD,QAAWyB,KAAcJ,EACvBG,EAAOC,CAAU,EAAIH,EAAIG,CAAU,EAC/BA,IAAe,aACjBX,EAAS,IAAIQ,EAAI,UAAWA,EAAItB,CAAG,CAAC,EAGxCa,EAAQ,IAAIS,EAAItB,CAAG,EAAGwB,CAAM,EAC5B,KACF,CACF,CACF,CAGA,IAAME,EAAY,CAAC,EACfC,EAAe,KACnB,QAASjB,EAAI,EAAGA,EAAIG,EAAQ,KAAMH,IAAK,CACrC,IAAMkB,EAAUd,EAAS,IAAIa,CAAO,EAC9BL,EAAMT,EAAQ,IAAIe,CAAO,EAC/B,GAAI,CAACN,EACH,MAGF,IAAMO,EAAW,CAAE,GAAGP,CAAI,EAC1B,OAAOO,EAAS,UAChBH,EAAK,KAAKG,CAAQ,EAClBF,EAAUC,CACZ,CACAb,EAAWW,EAGNV,GACHrB,EAAmBtB,EAAW,CAC5B,KAAAqD,EACA,OAAAT,CACF,CAAC,CAEL,CAAC,EAEDD,EAAW,GACXrB,EAAmBtB,EAAW,CAC5B,KAAM0C,EACN,OAAAE,CACF,CAAC,EAED,IAAMpB,EAAa9B,GAA4C,CAC7DM,EAAU,KAAKN,CAAQ,CACzB,EAEM+B,EAAc,MAAO/B,GAA6C,CAClEA,EACFM,EAAYA,EAAU,OAAQN,GAAaA,IAAaA,CAAQ,EAEhEM,EAAY,CAAC,EAEXA,EAAU,SAAW,GACvB,MAAM6C,EAAmB,CAE7B,EAEA,OAAIlD,GAAQ,QACV,MAAM8B,EAAY,EAElB9B,GAAQ,iBACN,QACA,IAAM,CACJ8B,EAAY,CACd,EACA,CAAE,KAAM,EAAK,CACf,EAGK,CACL,eAAgB,CACd,KAAMiB,EACN,OAAAE,CACF,EACA,UAAApB,EACA,YAAAC,EACA,QAAAX,CACF,CACF,CACF,EAEA,MAAO,CACL,aAAAvB,CACF,CACF,EAEakE,GAAO,CAClB,KAAM,eACN,MAAAtE,EACF,EAYA,eAAewB,GACbJ,EACAmD,EAQA,CAuDA,OAtDe,MAAMnD,EAAG,MAMtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MA6CA,CAACmD,CAAQ,CACX,GAEc,KAAK,IAAKC,IAAS,CAC/B,WAAYA,EAAI,WAChB,YAAaA,EAAI,YACjB,UAAWA,EAAI,UACf,WAAYA,EAAI,UAClB,EAAE,CACJ,CAOA,eAAe/C,GACbL,EACAG,EAMApB,EACA,CACA,IAAMsE,EAAWlD,EACd,OACEG,GACC,CAACvB,EAAyB,IAAI,GAAGuB,EAAM,UAAU,IAAIA,EAAM,SAAS,EAAE,CAC1E,EACC,IAAKA,GACG;AAAA,4CAC+BA,EAAM,UAAU,IAAIA,EAAM,SAAS;AAAA;AAAA,2CAEpCA,EAAM,UAAU,KAAKA,EAAM,SAAS;AAAA;AAAA;AAAA;AAAA,mDAI5BA,EAAM,UAAU,IAAIA,EAAM,SAAS;AAAA,6CACzCA,EAAM,WAAW,MAAMA,EAAM,UAAU;AAAA,qDAC/BA,EAAM,UAAU,IAAIA,EAAM,SAAS;AAAA,OAEnF,EACA,KAAK;AAAA,CAAI,EACR+C,EAAS,KAAK,IAAM,IACtB,MAAMrD,EAAG,KAAKqD,CAAQ,EAExBlD,EAAO,IAAKG,GACVvB,EAAyB,IAAI,GAAGuB,EAAM,UAAU,IAAIA,EAAM,SAAS,EAAE,CACvE,CACF,CAEA,IAAMS,EAAqB,CACzBtB,EACAI,IACG,CACH,QAAWV,KAAYM,EACrBN,EAASU,CAAO,CAEpB,EAEMkC,GAAqB,CACzBtC,EACA6B,IACG,CACH,QAAWnC,KAAYM,EACrBN,EAASmC,CAAO,CAEpB","names":["live_exports","__export","live","__toCommonJS","byteLengthUtf8","str","byteLength","i","code","_bufferView","_offset","_littleEndian","_encoder","_headerPosition","_Writer_instances","allocateBuffer_fn","ensure_fn","join_fn","Writer","size","__privateAdd","__privateSet","__privateMethod","num","__privateGet","string","__privateWrapper","length","byteLengthUtf8","otherBuffer","code","result","oldBuffer","newSize","writer","Writer","startup","opts","key","bodyBuffer","length","requestSsl","bufferView","password","sendSASLInitialResponseMessage","mechanism","initialResponse","byteLengthUtf8","sendSCRAMClientFinalMessage","additionalData","query","text","emptyValueArray","parse","name","buffer","type","paramWriter","writeValues","values","valueMapper","i","mappedVal","writer","paramWriter","buffer","byteLengthUtf8","bind","config","portal","statement","binary","emptyValueArray","len","emptyExecute","execute","rows","portalLength","bufferView","cancel","processID","secretKey","cstringMessage","code","string","Writer","emptyDescribePortal","emptyD
escribeStatement","describe","msg","close","text","copyData","chunk","copyFail","message","codeOnlyBuffer","flushBuffer","syncBuffer","endBuffer","copyDoneBuffer","serialize","startup","password","requestSsl","sendSASLInitialResponseMessage","sendSCRAMClientFinalMessage","query","parse","emptyBuffer","CODE_LENGTH","LEN_LENGTH","HEADER_LENGTH","emptyBuffer","JSON_parse","JSON_stringify","BOOL","BYTEA","INT8","INT2","INT4","TEXT","OID","JSON","FLOAT4","FLOAT8","BPCHAR","VARCHAR","DATE","TIMESTAMP","TIMESTAMPTZ","JSONB","types","TEXT","VARCHAR","BPCHAR","x","INT2","INT4","OID","FLOAT4","FLOAT8","INT8","n","JSON","JSONB","JSON_stringify","JSON_parse","BOOL","TIMESTAMPTZ","DATE","TIMESTAMP","BYTEA","byte","hexString","_","idx","defaultHandlers","typeHandlers","parsers","serializers","typeHandlers","types","parsers","serializers","k","to","from","serialize","parse","f","parseDescribeStatementResults","messages","message","msg","IN_NODE","uuid","bytes","i","hexValues","byte","formatQuery","pg","query","params","tx","messages","serialize","dataTypeIDs","parseDescribeStatementResults","subbedQuery","_","num","TEXT","debounceMutex","fn","next","isRunning","processNext","args","resolve","reject","ret","e","promise","MAX_RETRIES","setup","pg","_emscriptenOpts","tableNotifyTriggersAdded","namespaceObj","query","params","callback","signal","offset","limit","isWindowed","totalCount","callbacks","id","uuid","dead","results","unsubList","init","tx","formattedQuery","formatQuery","tables","getTablesForView","addNotifyTriggersToTables","table","refresh","debounceMutex","newOffset","newLimit","run","count","e","msg","runResultCallbacks","newTotalCount","subscribe","unsubscribe","unsub","key","stateSwitch","changes","columns","curr","prev","column_name","data_type","udt_name","reset","i","runChangeCallbacks","field","rowsMap","afterMap","lastRows","firstRun","fields","unsubscribeChanges","change","op","changedColumns","obj","oldObj","newObj","columnName","rows","lastKey","nextKey","cleanObj","live","viewName","row","triggers"]}
@@ -0,0 +1,133 @@
+ import{u as I,v as O,w as C}from"../chunk-FY2WAP66.js";import{j as P}from"../chunk-VCBR6USK.js";P();var M=5,U=async(E,y)=>{let p=new Set,$={async query(e,g,a){let m,l,_;if(typeof e!="string"&&(m=e.signal,g=e.params,a=e.callback,l=e.offset,_=e.limit,e=e.query),l===void 0!=(_===void 0))throw new Error("offset and limit must be provided together");let t=l!==void 0&&_!==void 0,T;if(t&&(typeof l!="number"||isNaN(l)||typeof _!="number"||isNaN(_)))throw new Error("offset and limit must be numbers");let d=a?[a]:[],o=I().replace(/-/g,""),A=!1,v,w,S=async()=>{await E.transaction(async i=>{let n=g&&g.length>0?await O(E,e,g,i):e;await i.exec(`CREATE OR REPLACE TEMP VIEW live_query_${o}_view AS ${n}`);let u=await q(i,`live_query_${o}_view`);await F(i,u,p),t?(await i.exec(`
+ PREPARE live_query_${o}_get(int, int) AS
+ SELECT * FROM live_query_${o}_view
+ LIMIT $1 OFFSET $2;
+ `),await i.exec(`
+ PREPARE live_query_${o}_get_total_count AS
+ SELECT COUNT(*) FROM live_query_${o}_view;
+ `),T=(await i.query(`EXECUTE live_query_${o}_get_total_count;`)).rows[0].count,v={...await i.query(`EXECUTE live_query_${o}_get(${_}, ${l});`),offset:l,limit:_,totalCount:T}):(await i.exec(`
+ PREPARE live_query_${o}_get AS
+ SELECT * FROM live_query_${o}_view;
+ `),v=await i.query(`EXECUTE live_query_${o}_get;`)),w=await Promise.all(u.map(f=>i.listen(`"table_change__${f.schema_oid}__${f.table_oid}"`,async()=>{R()})))})};await S();let R=C(async({offset:i,limit:n}={})=>{if(!t&&(i!==void 0||n!==void 0))throw new Error("offset and limit cannot be provided for non-windowed queries");if(i&&(typeof i!="number"||isNaN(i))||n&&(typeof n!="number"||isNaN(n)))throw new Error("offset and limit must be numbers");l=i??l,_=n??_;let u=async(f=0)=>{if(d.length!==0){try{t?v={...await E.query(`EXECUTE live_query_${o}_get(${_}, ${l});`),offset:l,limit:_,totalCount:T}:v=await E.query(`EXECUTE live_query_${o}_get;`)}catch(c){let s=c.message;if(s.startsWith(`prepared statement "live_query_${o}`)&&s.endsWith("does not exist")){if(f>M)throw c;await S(),u(f+1)}else throw c}if(N(d,v),t){let c=(await E.query(`EXECUTE live_query_${o}_get_total_count;`)).rows[0].count;c!==T&&(T=c,R())}}};await u()}),h=i=>{if(A)throw new Error("Live query is no longer active and cannot be subscribed to");d.push(i)},r=async i=>{i?d=d.filter(n=>n!==n):d=[],d.length===0&&!A&&(A=!0,await E.transaction(async n=>{await Promise.all(w.map(u=>u(n))),await n.exec(`
+ DROP VIEW IF EXISTS live_query_${o}_view;
+ DEALLOCATE live_query_${o}_get;
+ `)}))};return m?.aborted?await r():m?.addEventListener("abort",()=>{r()},{once:!0}),N(d,v),{initialResults:v,subscribe:h,unsubscribe:r,refresh:R}},async changes(e,g,a,m){let l;if(typeof e!="string"&&(l=e.signal,g=e.params,a=e.key,m=e.callback,e=e.query),!a)throw new Error("key is required for changes queries");let _=m?[m]:[],t=I().replace(/-/g,""),T=!1,d=1,o,A,v=async()=>{await E.transaction(async r=>{let i=await O(E,e,g,r);await r.query(`CREATE OR REPLACE TEMP VIEW live_query_${t}_view AS ${i}`);let n=await q(r,`live_query_${t}_view`);await F(r,n,p);let u=[...(await r.query(`
+ SELECT column_name, data_type, udt_name
+ FROM information_schema.columns
+ WHERE table_name = 'live_query_${t}_view'
+ `)).rows,{column_name:"__after__",data_type:"integer"}];await r.exec(`
+ CREATE TEMP TABLE live_query_${t}_state1 (LIKE live_query_${t}_view INCLUDING ALL);
+ CREATE TEMP TABLE live_query_${t}_state2 (LIKE live_query_${t}_view INCLUDING ALL);
+ `);for(let f of[1,2]){let c=f===1?2:1;await r.exec(`
+ PREPARE live_query_${t}_diff${f} AS
+ WITH
+ prev AS (SELECT LAG("${a}") OVER () as __after__, * FROM live_query_${t}_state${c}),
+ curr AS (SELECT LAG("${a}") OVER () as __after__, * FROM live_query_${t}_state${f}),
+ data_diff AS (
+ -- INSERT operations: Include all columns
+ SELECT
+ 'INSERT' AS __op__,
+ ${u.map(({column_name:s})=>`curr."${s}" AS "${s}"`).join(`,
+ `)},
+ ARRAY[]::text[] AS __changed_columns__
+ FROM curr
+ LEFT JOIN prev ON curr.${a} = prev.${a}
+ WHERE prev.${a} IS NULL
+ UNION ALL
+ -- DELETE operations: Include only the primary key
+ SELECT
+ 'DELETE' AS __op__,
+ ${u.map(({column_name:s,data_type:L,udt_name:b})=>s===a?`prev."${s}" AS "${s}"`:`NULL${L==="USER-DEFINED"?`::${b}`:""} AS "${s}"`).join(`,
+ `)},
+ ARRAY[]::text[] AS __changed_columns__
+ FROM prev
+ LEFT JOIN curr ON prev.${a} = curr.${a}
+ WHERE curr.${a} IS NULL
+ UNION ALL
+ -- UPDATE operations: Include only changed columns
+ SELECT
+ 'UPDATE' AS __op__,
+ ${u.map(({column_name:s,data_type:L,udt_name:b})=>s===a?`curr."${s}" AS "${s}"`:`CASE
+ WHEN curr."${s}" IS DISTINCT FROM prev."${s}"
+ THEN curr."${s}"
+ ELSE NULL${L==="USER-DEFINED"?`::${b}`:""}
+ END AS "${s}"`).join(`,
+ `)},
+ ARRAY(SELECT unnest FROM unnest(ARRAY[${u.filter(({column_name:s})=>s!==a).map(({column_name:s})=>`CASE
+ WHEN curr."${s}" IS DISTINCT FROM prev."${s}"
+ THEN '${s}'
+ ELSE NULL
+ END`).join(", ")}]) WHERE unnest IS NOT NULL) AS __changed_columns__
+ FROM curr
+ INNER JOIN prev ON curr.${a} = prev.${a}
+ WHERE NOT (curr IS NOT DISTINCT FROM prev)
+ )
+ SELECT * FROM data_diff;
+ `)}A=await Promise.all(n.map(f=>r.listen(`"table_change__${f.schema_oid}__${f.table_oid}"`,async()=>{w()})))})};await v();let w=C(async()=>{if(_.length===0&&o)return;let r=!1;for(let i=0;i<5;i++)try{await E.transaction(async n=>{await n.exec(`
+ INSERT INTO live_query_${t}_state${d}
+ SELECT * FROM live_query_${t}_view;
+ `),o=await n.query(`EXECUTE live_query_${t}_diff${d};`),d=d===1?2:1,await n.exec(`
+ TRUNCATE live_query_${t}_state${d};
+ `)});break}catch(n){if(n.message===`relation "live_query_${t}_state${d}" does not exist`){r=!0,await v();continue}else throw n}D(_,[...r?[{__op__:"RESET"}]:[],...o.rows])}),S=r=>{if(T)throw new Error("Live query is no longer active and cannot be subscribed to");_.push(r)},R=async r=>{r?_=_.filter(i=>i!==i):_=[],_.length===0&&!T&&(T=!0,await E.transaction(async i=>{await Promise.all(A.map(n=>n(i))),await i.exec(`
+ DROP VIEW IF EXISTS live_query_${t}_view;
+ DROP TABLE IF EXISTS live_query_${t}_state1;
+ DROP TABLE IF EXISTS live_query_${t}_state2;
+ DEALLOCATE live_query_${t}_diff1;
+ DEALLOCATE live_query_${t}_diff2;
+ `)}))};return l?.aborted?await R():l?.addEventListener("abort",()=>{R()},{once:!0}),await w(),{fields:o.fields.filter(r=>!["__after__","__op__","__changed_columns__"].includes(r.name)),initialChanges:o.rows,subscribe:S,unsubscribe:R,refresh:w}},async incrementalQuery(e,g,a,m){let l;if(typeof e!="string"&&(l=e.signal,g=e.params,a=e.key,m=e.callback,e=e.query),!a)throw new Error("key is required for incremental queries");let _=m?[m]:[],t=new Map,T=new Map,d=[],o=!0,{fields:A,unsubscribe:v,refresh:w}=await $.changes(e,g,a,h=>{for(let n of h){let{__op__:u,__changed_columns__:f,...c}=n;switch(u){case"RESET":t.clear(),T.clear();break;case"INSERT":t.set(c[a],c),T.set(c.__after__,c[a]);break;case"DELETE":{let s=t.get(c[a]);t.delete(c[a]),s.__after__!==null&&T.delete(s.__after__);break}case"UPDATE":{let s={...t.get(c[a])??{}};for(let L of f)s[L]=c[L],L==="__after__"&&T.set(c.__after__,c[a]);t.set(c[a],s);break}}}let r=[],i=null;for(let n=0;n<t.size;n++){let u=T.get(i),f=t.get(u);if(!f)break;let c={...f};delete c.__after__,r.push(c),i=u}d=r,o||N(_,{rows:r,fields:A})});o=!1,N(_,{rows:d,fields:A});let S=h=>{_.push(h)},R=async h=>{h?_=_.filter(r=>r!==r):_=[],_.length===0&&await v()};return l?.aborted?await R():l?.addEventListener("abort",()=>{R()},{once:!0}),{initialResults:{rows:d,fields:A},subscribe:S,unsubscribe:R,refresh:w}}};return{namespaceObj:$}},j={name:"Live Queries",setup:U};async function q(E,y){return(await E.query(`
+ WITH RECURSIVE view_dependencies AS (
+ -- Base case: Get the initial view's dependencies
+ SELECT DISTINCT
+ cl.relname AS dependent_name,
+ n.nspname AS schema_name,
+ cl.oid AS dependent_oid,
+ n.oid AS schema_oid,
+ cl.relkind = 'v' AS is_view
+ FROM pg_rewrite r
+ JOIN pg_depend d ON r.oid = d.objid
+ JOIN pg_class cl ON d.refobjid = cl.oid
+ JOIN pg_namespace n ON cl.relnamespace = n.oid
+ WHERE
+ r.ev_class = (
+ SELECT oid FROM pg_class WHERE relname = $1 AND relkind = 'v'
+ )
+ AND d.deptype = 'n'
+
+ UNION ALL
+
+ -- Recursive case: Traverse dependencies for views
+ SELECT DISTINCT
+ cl.relname AS dependent_name,
+ n.nspname AS schema_name,
+ cl.oid AS dependent_oid,
+ n.oid AS schema_oid,
+ cl.relkind = 'v' AS is_view
+ FROM view_dependencies vd
+ JOIN pg_rewrite r ON vd.dependent_name = (
+ SELECT relname FROM pg_class WHERE oid = r.ev_class AND relkind = 'v'
+ )
+ JOIN pg_depend d ON r.oid = d.objid
+ JOIN pg_class cl ON d.refobjid = cl.oid
+ JOIN pg_namespace n ON cl.relnamespace = n.oid
+ WHERE d.deptype = 'n'
+ )
+ SELECT DISTINCT
+ dependent_name AS table_name,
+ schema_name,
+ dependent_oid AS table_oid,
+ schema_oid
+ FROM view_dependencies
+ WHERE NOT is_view; -- Exclude intermediate views
+ `,[y])).rows.map($=>({table_name:$.table_name,schema_name:$.schema_name,table_oid:$.table_oid,schema_oid:$.schema_oid}))}async function F(E,y,p){let $=y.filter(e=>!p.has(`${e.schema_oid}_${e.table_oid}`)).map(e=>`
+ CREATE OR REPLACE FUNCTION "_notify_${e.schema_oid}_${e.table_oid}"() RETURNS TRIGGER AS $$
+ BEGIN
+ PERFORM pg_notify('table_change__${e.schema_oid}__${e.table_oid}', '');
+ RETURN NULL;
+ END;
+ $$ LANGUAGE plpgsql;
+ CREATE OR REPLACE TRIGGER "_notify_trigger_${e.schema_oid}_${e.table_oid}"
+ AFTER INSERT OR UPDATE OR DELETE ON "${e.schema_name}"."${e.table_name}"
+ FOR EACH STATEMENT EXECUTE FUNCTION "_notify_${e.schema_oid}_${e.table_oid}"();
+ `).join(`
+ `);$.trim()!==""&&await E.exec($),y.map(e=>p.add(`${e.schema_oid}_${e.table_oid}`))}var N=(E,y)=>{for(let p of E)p(y)},D=(E,y)=>{for(let p of E)p(y)};export{j as live};
+ //# sourceMappingURL=index.js.map
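
The hunk above adds the minified build of the live-queries extension: for each watched query it creates a temporary view, installs statement-level `pg_notify` triggers on the tables behind that view, and exposes `query`, `changes`, and `incrementalQuery` on a `live` namespace (`export { j as live }`). The sketch below shows how that namespace is typically consumed. It assumes this package mirrors the upstream PGlite live API and exposes the module at `@dotdo/pglite/live`; the entry-point path, the `extensions` option shape, and the exact result types are assumptions inferred from the code above, not confirmed by this diff.

```ts
// Usage sketch only: '@dotdo/pglite' and '@dotdo/pglite/live' are assumed
// entry points; the call shapes follow the live extension bundled above.
import { PGlite } from '@dotdo/pglite'
import { live } from '@dotdo/pglite/live'

async function main() {
  // Register the extension at creation time so `db.live` becomes available.
  const db = await PGlite.create({ extensions: { live } })

  await db.exec(
    'CREATE TABLE IF NOT EXISTS todos (id SERIAL PRIMARY KEY, task TEXT);',
  )

  // live.query re-runs the statement and invokes the callback whenever a
  // table referenced by the query changes (via the NOTIFY triggers the
  // extension installs).
  const liveTodos = await db.live.query(
    'SELECT id, task FROM todos ORDER BY id',
    [],
    (results) => console.log('todos now:', results.rows),
  )

  // live.incrementalQuery (and live.changes) additionally require a `key`
  // column so successive result sets can be diffed row by row.
  const incremental = await db.live.incrementalQuery(
    'SELECT id, task FROM todos ORDER BY id',
    [],
    'id',
    (results) => console.log('incremental rows:', results.rows),
  )

  await db.exec("INSERT INTO todos (task) VALUES ('write docs');")

  // Unsubscribing drops the temporary views and prepared statements that
  // were created for these live queries.
  await liveTodos.unsubscribe()
  await incremental.unsubscribe()
  await db.close()
}

main()
```

Passing an `AbortSignal` (the `signal` option handled in the code above) is an alternative to calling `unsubscribe()` manually: when the signal aborts, the live query tears itself down.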