@uploadista/data-store-s3 0.0.20-beta.5 → 0.0.20-beta.7
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs.map +1 -1
- package/package.json +7 -7
- package/src/s3-store.ts +11 -3
package/dist/index.d.cts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.cts","names":[],"sources":["../src/types.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":[],"mappings":";;;;;;KASY,cAAA;;EAAA;AAgCZ;AAeA;AAyBA;;;;AC3EwE;;;;EAgB/D,WAAO,CAAA,EAAA,MAAA;EAG2B;;;EAIZ,iBAAA,CAAA,EAAA,MAAA;EAAtB,OAAO,CAAA,EAAA,OAAA;EAGa,wBAAA,CAAA,EAAA,MAAA;EAApB,8BAAO,CAAA,EAAA,MAAA;EAGO,cAAI,EDJT,cCIS,GAAA;IAA4B,MAAA,EAAA,MAAA;EAA9C,CAAA;CAIM;AAIN,KDFG,kBAAA,GCEI;EAED,QAAA,EAAA,MAAA;EACI,MAAI,EAAA,MAAA;EAAV,GAAA,EAAA,MAAA;EAC8B,UAAA,CAAA,EAAA,MAAA;EAAlC,QAAO,CAAA,EAAA,MAAA;EAED,WAAA,CAAA,EAAA,MAAA;EACc,YAAA,CAAA,EAAA,MAAA;CAApB;AAckB,KDRf,mBAAA,GCQe;EAAmC,QAAA,EAAA,MAAA;EAArD,MAAO,EAAA,MAAA;EAKO,GAAA,EAAA,MAAA;CAA4B;ACLb,KFiB1B,aAAA,GAAgB,cEjBU;;;cD1DkC;EDG5D,SAAA,MAAA,EAAc,MAAA;EAgCd,SAAA,SAAA,EAAA,CAAkB,GAAA,EAAA,MAAA,EAAA,GCnBrB,MAAA,CAAO,MDmBc,CCnBP,cDmBO,ECnBS,eDmBT,CAAA;EAelB,SAAA,UAAA,EAAA,CAAmB,GAAA,EAAA,MAAA,EAAA,GC/BtB,MAAA,CAAO,MD+Be,CAAA,MAAA,GAAA,SAAA,EC/BY,eD+BZ,CAAA;EAyBnB,SAAA,SAAa,EAAA,CAAA,GAAA,EAAA,MAAG,EAAA,IAAA,ECrDhB,UDqD8B,EAAA,GCpDjC,MAAA,CAAO,MDoD0B,CAAA,MAAA,ECpDX,eDoDW,CAAA;0CCjDjC,MAAA,CAAO,aAAa;8CAGpB,MAAA,CAAO,OAAO,GAAA,CAAI,4BAA4B;4CAIxC,uBACN,MAAA,CAAO,OAAO,qBAAqB;EAlC4B,SAAA,UAAA,EAAA,CAAA,OAAA,EAoCzD,kBApCyD,GAAA;;IAgBjD,IAAA,EAoByC,UApBzC;EAAgB,CAAA,EAAA,GAqB9B,MAAA,CAAO,MArBuB,CAAA,MAAA,EAqBR,eArBQ,CAAA;EAA9B,SAAO,uBAAA,EAAA,CAAA,OAAA,EAuBD,kBAvBC,EAAA,KAAA,EAwBH,KAxBG,CAwBG,GAAA,CAAI,IAxBP,CAAA,EAAA,GAyBP,MAAA,CAAO,MAzBA,CAAA,MAAA,GAAA,SAAA,EAyB2B,eAzB3B,CAAA;EAG2B,SAAA,oBAAA,EAAA,CAAA,OAAA,EAwB5B,kBAxB4B,EAAA,GAyBlC,MAAA,CAAO,MAzB2B,CAAA,IAAA,EAyBd,eAzBc,CAAA;EAAlC,SAAO,SAAA,EAAA,CAAA,OAAA,EA2BD,kBA3BC,GAAA;IAGJ,gBAAA,CAAA,EAAA,MAAA;EACmB,CAAA,EAAA,GAwBtB,MAAA,CAAO,MAxBe,CAAA;IAAf,KAAA,EA0BD,GAAA,CAAI,IA1BH,EAAA;IAGa,WAAA,EAAA,OAAA;IAAb,oBAAA,CAAA,EAAA,MAAA;EAGO,CAAI,EAwBrB,eAxBqB,CAAA;EAA4B,SAAA,oBAAA,EAAA,CAAA,SAAA,CAAA,EAAA,MAAA,EAAA,cAAA,CAAA,EAAA,MAAA,EAAA,GA6B9C,MAAA,CAAO,MA7BuC,CA6BhC,GAAA,CAAI,iCA7B4B,EA6BO,eA7BP,CAAA;EAA9C,SAAO,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAkCP,MAAA,CAAO,MAlCA,CAkCO,cAlCP,GAAA,SAAA,EAkCmC,eAlCnC,CAAA;EAID,SAAA,qBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAiCN,MAAA,CAAO,MAjCD,CAAA,MAAA,GAAA,SAAA,EAiC4B,eAjC5B,CAAA;EACQ,SAAA,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,IAAA,EAmCX,UAnCW,EAAA,GAoCd,MAAA,CAAO,MApCO,CAAA,MAAA,EAoCQ,eApCR,CAAA;EAAqB,SAAA,oBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAuCnC,MAAA,CAAO,MAvC4B,CAAA,IAAA,EAuCf,eAvCe,CAAA;CAAnC,CAAA;AAEM,cA5BF,eAAA,SAAwB,oBAAA,CA4BtB;;;iBCsBC,aAAA,SAAsB,gBAAa,MAAA,CAAA,OAAA,UAAA,oBAAA,kBAAA;AFvDvC,
+
{"version":3,"file":"index.d.cts","names":[],"sources":["../src/types.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":[],"mappings":";;;;;;KASY,cAAA;;EAAA;AAgCZ;AAeA;AAyBA;;;;AC3EwE;;;;EAgB/D,WAAO,CAAA,EAAA,MAAA;EAG2B;;;EAIZ,iBAAA,CAAA,EAAA,MAAA;EAAtB,OAAO,CAAA,EAAA,OAAA;EAGa,wBAAA,CAAA,EAAA,MAAA;EAApB,8BAAO,CAAA,EAAA,MAAA;EAGO,cAAI,EDJT,cCIS,GAAA;IAA4B,MAAA,EAAA,MAAA;EAA9C,CAAA;CAIM;AAIN,KDFG,kBAAA,GCEI;EAED,QAAA,EAAA,MAAA;EACI,MAAI,EAAA,MAAA;EAAV,GAAA,EAAA,MAAA;EAC8B,UAAA,CAAA,EAAA,MAAA;EAAlC,QAAO,CAAA,EAAA,MAAA;EAED,WAAA,CAAA,EAAA,MAAA;EACc,YAAA,CAAA,EAAA,MAAA;CAApB;AAckB,KDRf,mBAAA,GCQe;EAAmC,QAAA,EAAA,MAAA;EAArD,MAAO,EAAA,MAAA;EAKO,GAAA,EAAA,MAAA;CAA4B;ACLb,KFiB1B,aAAA,GAAgB,cEjBU;;;cD1DkC;EDG5D,SAAA,MAAA,EAAc,MAAA;EAgCd,SAAA,SAAA,EAAA,CAAkB,GAAA,EAAA,MAAA,EAAA,GCnBrB,MAAA,CAAO,MDmBc,CCnBP,cDmBO,ECnBS,eDmBT,CAAA;EAelB,SAAA,UAAA,EAAA,CAAmB,GAAA,EAAA,MAAA,EAAA,GC/BtB,MAAA,CAAO,MD+Be,CAAA,MAAA,GAAA,SAAA,EC/BY,eD+BZ,CAAA;EAyBnB,SAAA,SAAa,EAAA,CAAA,GAAA,EAAA,MAAG,EAAA,IAAA,ECrDhB,UDqD8B,EAAA,GCpDjC,MAAA,CAAO,MDoD0B,CAAA,MAAA,ECpDX,eDoDW,CAAA;0CCjDjC,MAAA,CAAO,aAAa;8CAGpB,MAAA,CAAO,OAAO,GAAA,CAAI,4BAA4B;4CAIxC,uBACN,MAAA,CAAO,OAAO,qBAAqB;EAlC4B,SAAA,UAAA,EAAA,CAAA,OAAA,EAoCzD,kBApCyD,GAAA;;IAgBjD,IAAA,EAoByC,UApBzC;EAAgB,CAAA,EAAA,GAqB9B,MAAA,CAAO,MArBuB,CAAA,MAAA,EAqBR,eArBQ,CAAA;EAA9B,SAAO,uBAAA,EAAA,CAAA,OAAA,EAuBD,kBAvBC,EAAA,KAAA,EAwBH,KAxBG,CAwBG,GAAA,CAAI,IAxBP,CAAA,EAAA,GAyBP,MAAA,CAAO,MAzBA,CAAA,MAAA,GAAA,SAAA,EAyB2B,eAzB3B,CAAA;EAG2B,SAAA,oBAAA,EAAA,CAAA,OAAA,EAwB5B,kBAxB4B,EAAA,GAyBlC,MAAA,CAAO,MAzB2B,CAAA,IAAA,EAyBd,eAzBc,CAAA;EAAlC,SAAO,SAAA,EAAA,CAAA,OAAA,EA2BD,kBA3BC,GAAA;IAGJ,gBAAA,CAAA,EAAA,MAAA;EACmB,CAAA,EAAA,GAwBtB,MAAA,CAAO,MAxBe,CAAA;IAAf,KAAA,EA0BD,GAAA,CAAI,IA1BH,EAAA;IAGa,WAAA,EAAA,OAAA;IAAb,oBAAA,CAAA,EAAA,MAAA;EAGO,CAAI,EAwBrB,eAxBqB,CAAA;EAA4B,SAAA,oBAAA,EAAA,CAAA,SAAA,CAAA,EAAA,MAAA,EAAA,cAAA,CAAA,EAAA,MAAA,EAAA,GA6B9C,MAAA,CAAO,MA7BuC,CA6BhC,GAAA,CAAI,iCA7B4B,EA6BO,eA7BP,CAAA;EAA9C,SAAO,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAkCP,MAAA,CAAO,MAlCA,CAkCO,cAlCP,GAAA,SAAA,EAkCmC,eAlCnC,CAAA;EAID,SAAA,qBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAiCN,MAAA,CAAO,MAjCD,CAAA,MAAA,GAAA,SAAA,EAiC4B,eAjC5B,CAAA;EACQ,SAAA,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,IAAA,EAmCX,UAnCW,EAAA,GAoCd,MAAA,CAAO,MApCO,CAAA,MAAA,EAoCQ,eApCR,CAAA;EAAqB,SAAA,oBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAuCnC,MAAA,CAAO,MAvC4B,CAAA,IAAA,EAuCf,eAvCe,CAAA;CAAnC,CAAA;AAEM,cA5BF,eAAA,SAAwB,oBAAA,CA4BtB;;;iBCsBC,aAAA,SAAsB,gBAAa,MAAA,CAAA,OAAA,UAAA,oBAAA,kBAAA;AFvDvC,cEm1CC,OFn1Ca,EAAA,CAAA,OAsBR,EE6zCe,aF7zCD,EAAA,GE6zCc,MAAA,CAAA,MF7zCd,CE6zCc,SF7zCd,CE6zCc,UF7zCd,CAAA,EAAA,KAAA,EE6zCc,iBF7zCd,CAAA"}
package/dist/index.d.mts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.mts","names":[],"sources":["../src/types.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":[],"mappings":";;;;;;KASY,cAAA;;EAAA;AAgCZ;AAeA;AAyBA;;;;AC3EwE;;;;EAgB/D,WAAO,CAAA,EAAA,MAAA;EAG2B;;;EAIZ,iBAAA,CAAA,EAAA,MAAA;EAAtB,OAAO,CAAA,EAAA,OAAA;EAGa,wBAAA,CAAA,EAAA,MAAA;EAApB,8BAAO,CAAA,EAAA,MAAA;EAGO,cAAI,EDJT,cCIS,GAAA;IAA4B,MAAA,EAAA,MAAA;EAA9C,CAAA;CAIM;AAIN,KDFG,kBAAA,GCEI;EAED,QAAA,EAAA,MAAA;EACI,MAAI,EAAA,MAAA;EAAV,GAAA,EAAA,MAAA;EAC8B,UAAA,CAAA,EAAA,MAAA;EAAlC,QAAO,CAAA,EAAA,MAAA;EAED,WAAA,CAAA,EAAA,MAAA;EACc,YAAA,CAAA,EAAA,MAAA;CAApB;AAckB,KDRf,mBAAA,GCQe;EAAmC,QAAA,EAAA,MAAA;EAArD,MAAO,EAAA,MAAA;EAKO,GAAA,EAAA,MAAA;CAA4B;ACLb,KFiB1B,aAAA,GAAgB,cEjBU;;;cD1DkC;EDG5D,SAAA,MAAA,EAAc,MAAA;EAgCd,SAAA,SAAA,EAAA,CAAkB,GAAA,EAAA,MAAA,EAAA,GCnBrB,MAAA,CAAO,MDmBc,CCnBP,cDmBO,ECnBS,eDmBT,CAAA;EAelB,SAAA,UAAA,EAAA,CAAmB,GAAA,EAAA,MAAA,EAAA,GC/BtB,MAAA,CAAO,MD+Be,CAAA,MAAA,GAAA,SAAA,EC/BY,eD+BZ,CAAA;EAyBnB,SAAA,SAAa,EAAA,CAAA,GAAA,EAAA,MAAG,EAAA,IAAA,ECrDhB,UDqD8B,EAAA,GCpDjC,MAAA,CAAO,MDoD0B,CAAA,MAAA,ECpDX,eDoDW,CAAA;0CCjDjC,MAAA,CAAO,aAAa;8CAGpB,MAAA,CAAO,OAAO,GAAA,CAAI,4BAA4B;4CAIxC,uBACN,MAAA,CAAO,OAAO,qBAAqB;EAlC4B,SAAA,UAAA,EAAA,CAAA,OAAA,EAoCzD,kBApCyD,GAAA;;IAgBjD,IAAA,EAoByC,UApBzC;EAAgB,CAAA,EAAA,GAqB9B,MAAA,CAAO,MArBuB,CAAA,MAAA,EAqBR,eArBQ,CAAA;EAA9B,SAAO,uBAAA,EAAA,CAAA,OAAA,EAuBD,kBAvBC,EAAA,KAAA,EAwBH,KAxBG,CAwBG,GAAA,CAAI,IAxBP,CAAA,EAAA,GAyBP,MAAA,CAAO,MAzBA,CAAA,MAAA,GAAA,SAAA,EAyB2B,eAzB3B,CAAA;EAG2B,SAAA,oBAAA,EAAA,CAAA,OAAA,EAwB5B,kBAxB4B,EAAA,GAyBlC,MAAA,CAAO,MAzB2B,CAAA,IAAA,EAyBd,eAzBc,CAAA;EAAlC,SAAO,SAAA,EAAA,CAAA,OAAA,EA2BD,kBA3BC,GAAA;IAGJ,gBAAA,CAAA,EAAA,MAAA;EACmB,CAAA,EAAA,GAwBtB,MAAA,CAAO,MAxBe,CAAA;IAAf,KAAA,EA0BD,GAAA,CAAI,IA1BH,EAAA;IAGa,WAAA,EAAA,OAAA;IAAb,oBAAA,CAAA,EAAA,MAAA;EAGO,CAAI,EAwBrB,eAxBqB,CAAA;EAA4B,SAAA,oBAAA,EAAA,CAAA,SAAA,CAAA,EAAA,MAAA,EAAA,cAAA,CAAA,EAAA,MAAA,EAAA,GA6B9C,MAAA,CAAO,MA7BuC,CA6BhC,GAAA,CAAI,iCA7B4B,EA6BO,eA7BP,CAAA;EAA9C,SAAO,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAkCP,MAAA,CAAO,MAlCA,CAkCO,cAlCP,GAAA,SAAA,EAkCmC,eAlCnC,CAAA;EAID,SAAA,qBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAiCN,MAAA,CAAO,MAjCD,CAAA,MAAA,GAAA,SAAA,EAiC4B,eAjC5B,CAAA;EACQ,SAAA,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,IAAA,EAmCX,UAnCW,EAAA,GAoCd,MAAA,CAAO,MApCO,CAAA,MAAA,EAoCQ,eApCR,CAAA;EAAqB,SAAA,oBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAuCnC,MAAA,CAAO,MAvC4B,CAAA,IAAA,EAuCf,eAvCe,CAAA;CAAnC,CAAA;AAEM,cA5BF,eAAA,SAAwB,oBAAA,CA4BtB;;;iBCsBC,aAAA,SAAsB,gBAAa,MAAA,CAAA,OAAA,UAAA,oBAAA,kBAAA;AFvDvC,
+
{"version":3,"file":"index.d.mts","names":[],"sources":["../src/types.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":[],"mappings":";;;;;;KASY,cAAA;;EAAA;AAgCZ;AAeA;AAyBA;;;;AC3EwE;;;;EAgB/D,WAAO,CAAA,EAAA,MAAA;EAG2B;;;EAIZ,iBAAA,CAAA,EAAA,MAAA;EAAtB,OAAO,CAAA,EAAA,OAAA;EAGa,wBAAA,CAAA,EAAA,MAAA;EAApB,8BAAO,CAAA,EAAA,MAAA;EAGO,cAAI,EDJT,cCIS,GAAA;IAA4B,MAAA,EAAA,MAAA;EAA9C,CAAA;CAIM;AAIN,KDFG,kBAAA,GCEI;EAED,QAAA,EAAA,MAAA;EACI,MAAI,EAAA,MAAA;EAAV,GAAA,EAAA,MAAA;EAC8B,UAAA,CAAA,EAAA,MAAA;EAAlC,QAAO,CAAA,EAAA,MAAA;EAED,WAAA,CAAA,EAAA,MAAA;EACc,YAAA,CAAA,EAAA,MAAA;CAApB;AAckB,KDRf,mBAAA,GCQe;EAAmC,QAAA,EAAA,MAAA;EAArD,MAAO,EAAA,MAAA;EAKO,GAAA,EAAA,MAAA;CAA4B;ACLb,KFiB1B,aAAA,GAAgB,cEjBU;;;cD1DkC;EDG5D,SAAA,MAAA,EAAc,MAAA;EAgCd,SAAA,SAAA,EAAA,CAAkB,GAAA,EAAA,MAAA,EAAA,GCnBrB,MAAA,CAAO,MDmBc,CCnBP,cDmBO,ECnBS,eDmBT,CAAA;EAelB,SAAA,UAAA,EAAA,CAAmB,GAAA,EAAA,MAAA,EAAA,GC/BtB,MAAA,CAAO,MD+Be,CAAA,MAAA,GAAA,SAAA,EC/BY,eD+BZ,CAAA;EAyBnB,SAAA,SAAa,EAAA,CAAA,GAAA,EAAA,MAAG,EAAA,IAAA,ECrDhB,UDqD8B,EAAA,GCpDjC,MAAA,CAAO,MDoD0B,CAAA,MAAA,ECpDX,eDoDW,CAAA;0CCjDjC,MAAA,CAAO,aAAa;8CAGpB,MAAA,CAAO,OAAO,GAAA,CAAI,4BAA4B;4CAIxC,uBACN,MAAA,CAAO,OAAO,qBAAqB;EAlC4B,SAAA,UAAA,EAAA,CAAA,OAAA,EAoCzD,kBApCyD,GAAA;;IAgBjD,IAAA,EAoByC,UApBzC;EAAgB,CAAA,EAAA,GAqB9B,MAAA,CAAO,MArBuB,CAAA,MAAA,EAqBR,eArBQ,CAAA;EAA9B,SAAO,uBAAA,EAAA,CAAA,OAAA,EAuBD,kBAvBC,EAAA,KAAA,EAwBH,KAxBG,CAwBG,GAAA,CAAI,IAxBP,CAAA,EAAA,GAyBP,MAAA,CAAO,MAzBA,CAAA,MAAA,GAAA,SAAA,EAyB2B,eAzB3B,CAAA;EAG2B,SAAA,oBAAA,EAAA,CAAA,OAAA,EAwB5B,kBAxB4B,EAAA,GAyBlC,MAAA,CAAO,MAzB2B,CAAA,IAAA,EAyBd,eAzBc,CAAA;EAAlC,SAAO,SAAA,EAAA,CAAA,OAAA,EA2BD,kBA3BC,GAAA;IAGJ,gBAAA,CAAA,EAAA,MAAA;EACmB,CAAA,EAAA,GAwBtB,MAAA,CAAO,MAxBe,CAAA;IAAf,KAAA,EA0BD,GAAA,CAAI,IA1BH,EAAA;IAGa,WAAA,EAAA,OAAA;IAAb,oBAAA,CAAA,EAAA,MAAA;EAGO,CAAI,EAwBrB,eAxBqB,CAAA;EAA4B,SAAA,oBAAA,EAAA,CAAA,SAAA,CAAA,EAAA,MAAA,EAAA,cAAA,CAAA,EAAA,MAAA,EAAA,GA6B9C,MAAA,CAAO,MA7BuC,CA6BhC,GAAA,CAAI,iCA7B4B,EA6BO,eA7BP,CAAA;EAA9C,SAAO,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAkCP,MAAA,CAAO,MAlCA,CAkCO,cAlCP,GAAA,SAAA,EAkCmC,eAlCnC,CAAA;EAID,SAAA,qBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAiCN,MAAA,CAAO,MAjCD,CAAA,MAAA,GAAA,SAAA,EAiC4B,eAjC5B,CAAA;EACQ,SAAA,iBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,IAAA,EAmCX,UAnCW,EAAA,GAoCd,MAAA,CAAO,MApCO,CAAA,MAAA,EAoCQ,eApCR,CAAA;EAAqB,SAAA,oBAAA,EAAA,CAAA,EAAA,EAAA,MAAA,EAAA,GAuCnC,MAAA,CAAO,MAvC4B,CAAA,IAAA,EAuCf,eAvCe,CAAA;CAAnC,CAAA;AAEM,cA5BF,eAAA,SAAwB,oBAAA,CA4BtB;;;iBCsBC,aAAA,SAAsB,gBAAa,MAAA,CAAA,OAAA,UAAA,oBAAA,kBAAA;AFvDvC,cEm1CC,OFn1Ca,EAAA,CAAA,OAsBR,EE6zCe,aF7zCD,EAAA,GE6zCc,MAAA,CAAA,MF7zCd,CE6zCc,SF7zCd,CE6zCc,UF7zCd,CAAA,EAAA,KAAA,EE6zCc,iBF7zCd,CAAA"}
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","names":["optimalPartSize: number","logS3Error","request: AWS.CreateMultipartUploadCommandInput","params: AWS.ListPartsCommandInput","withTimingMetrics","partUploadDurationHistogram","uploadPartsTotal","chunks: Uint8Array[]","uploadSuccessTotal","useTags","uploadRequestsTotal","fileSizeHistogram","withUploadMetrics","uploadDurationHistogram","activeUploadsGauge","uploadErrorsTotal","keyMarker: string | undefined","uploadIdMarker: string | undefined","minPartSize","readStream","partSizeHistogram","maxConcurrentPartUploads","config","parts"],"sources":["../src/utils/calculations.ts","../src/utils/error-handling.ts","../src/utils/stream-adapter.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":["import type AWS from \"@aws-sdk/client-s3\";\n\nexport const calcOffsetFromParts = (parts?: Array<AWS.Part>): number => {\n return parts && parts.length > 0\n ? parts.reduce((a, b) => a + (b?.Size ?? 0), 0)\n : 0;\n};\n\nexport const calcOptimalPartSize = (\n initSize: number | undefined,\n preferredPartSize: number,\n minPartSize: number,\n maxMultipartParts: number,\n maxUploadSize = 5_497_558_138_880, // 5TiB\n): number => {\n const size = initSize ?? maxUploadSize;\n let optimalPartSize: number;\n\n if (size <= preferredPartSize) {\n // For files smaller than preferred part size, use the file size\n // but ensure it meets S3's minimum requirements for multipart uploads\n optimalPartSize = size;\n } else if (size <= preferredPartSize * maxMultipartParts) {\n // File fits within max parts limit using preferred part size\n optimalPartSize = preferredPartSize;\n } else {\n // File is too large for preferred part size, calculate minimum needed\n optimalPartSize = Math.ceil(size / maxMultipartParts);\n }\n\n // Ensure we respect minimum part size for multipart uploads\n // Exception: if the file is smaller than minPartSize, use the file size directly\n const finalPartSize =\n initSize && initSize < minPartSize\n ? 
optimalPartSize // Single part upload for small files\n : Math.max(optimalPartSize, minPartSize); // Enforce minimum for multipart\n\n // Round up to ensure consistent part sizes and align to reasonable boundaries\n // This helps ensure all parts except the last one will have exactly the same size\n const alignment = 1024; // 1KB alignment for better consistency\n return Math.ceil(finalPartSize / alignment) * alignment;\n};\n\nexport const partKey = (id: string): string => {\n return `${id}.part`;\n};\n\nexport const shouldUseExpirationTags = (\n expirationPeriodInMilliseconds: number,\n useTags: boolean,\n): boolean => {\n return expirationPeriodInMilliseconds !== 0 && useTags;\n};\n\nexport const getExpirationDate = (\n createdAt: string,\n expirationPeriodInMilliseconds: number,\n): Date => {\n const date = new Date(createdAt);\n return new Date(date.getTime() + expirationPeriodInMilliseconds);\n};\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport { trackS3Error as logS3Error } from \"@uploadista/observability\";\nimport { Effect } from \"effect\";\n\nexport const handleS3Error = (\n operation: string,\n error: unknown,\n context: Record<string, unknown> = {},\n): UploadistaError => {\n // Log the error with context\n Effect.runSync(logS3Error(operation, error, context));\n\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", error as Error);\n};\n\nexport const handleS3NotFoundError = (\n operation: string,\n error: unknown,\n context: Record<string, unknown> = {},\n): UploadistaError => {\n if (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n typeof error.code === \"string\" &&\n [\"NotFound\", \"NoSuchKey\", \"NoSuchUpload\"].includes(error.code)\n ) {\n Effect.runSync(\n Effect.logWarning(`File not found during ${operation} operation`).pipe(\n Effect.annotateLogs({\n error_code: error.code,\n ...context,\n }),\n ),\n );\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n\n return handleS3Error(operation, error, context);\n};\n\nexport const isUploadNotFoundError = (\n error: unknown,\n): error is { code: \"NoSuchUpload\" | \"NoSuchKey\" } => {\n // Check direct error code\n if (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n typeof error.code === \"string\" &&\n (error.code === \"NoSuchUpload\" || error.code === \"NoSuchKey\")\n ) {\n return true;\n }\n\n // Check if it's an UploadistaError wrapping an AWS error with code\n if (\n error instanceof UploadistaError &&\n error.cause &&\n typeof error.cause === \"object\" &&\n \"code\" in error.cause &&\n typeof error.cause.code === \"string\" &&\n (error.cause.code === \"NoSuchUpload\" || error.cause.code === \"NoSuchKey\")\n ) {\n return true;\n }\n\n return false;\n};\n","/**\n * Stream adapter utility to handle AWS SDK Body responses across different environments.\n *\n * In Node.js environments, AWS SDK returns Node.js Readable streams.\n * In Cloudflare Workers, it returns Web Streams API ReadableStreams.\n * This utility normalizes both to Web Streams API ReadableStreams.\n */\n\n/**\n * Converts various stream types to a Web Streams API ReadableStream\n * @param body The body from AWS SDK response (could be Node.js Readable or Web ReadableStream)\n * @returns A Web Streams API ReadableStream\n */\nexport function toReadableStream(body: unknown): ReadableStream {\n // If it's already a Web ReadableStream, return as-is\n if (body instanceof ReadableStream) {\n return body;\n }\n\n // If it has a getReader method, it's likely already a 
ReadableStream\n if (body && typeof body === \"object\" && \"getReader\" in body) {\n return body as ReadableStream;\n }\n\n // Check if it's a Node.js Readable stream\n if (body && typeof body === \"object\" && \"pipe\" in body && \"on\" in body) {\n const nodeStream = body as NodeJS.ReadableStream;\n\n return new ReadableStream({\n start(controller) {\n nodeStream.on(\"data\", (chunk) => {\n controller.enqueue(new Uint8Array(chunk));\n });\n\n nodeStream.on(\"end\", () => {\n controller.close();\n });\n\n nodeStream.on(\"error\", (error) => {\n controller.error(error);\n });\n },\n });\n }\n\n // If it's some other type, try to handle it gracefully\n throw new Error(\n `Unsupported body type: ${typeof body}. Expected ReadableStream or Node.js Readable.`,\n );\n}\n","import type AWS from \"@aws-sdk/client-s3\";\nimport type { S3ClientConfig } from \"@aws-sdk/client-s3\";\nimport { NoSuchKey, NotFound, S3 } from \"@aws-sdk/client-s3\";\nimport type { UploadistaError } from \"@uploadista/core/errors\";\nimport { withS3ApiMetrics } from \"@uploadista/observability\";\nimport { Context, Effect, Layer } from \"effect\";\nimport type { MultipartUploadInfo, S3OperationContext } from \"../types\";\nimport {\n handleS3Error,\n handleS3NotFoundError,\n partKey,\n toReadableStream,\n} from \"../utils\";\n\nexport class S3ClientService extends Context.Tag(\"S3ClientService\")<\n S3ClientService,\n {\n readonly bucket: string;\n\n // Basic S3 operations\n readonly getObject: (\n key: string,\n ) => Effect.Effect<ReadableStream, UploadistaError>;\n readonly headObject: (\n key: string,\n ) => Effect.Effect<number | undefined, UploadistaError>;\n readonly putObject: (\n key: string,\n body: Uint8Array,\n ) => Effect.Effect<string, UploadistaError>;\n readonly deleteObject: (\n key: string,\n ) => Effect.Effect<void, UploadistaError>;\n readonly deleteObjects: (\n keys: string[],\n ) => Effect.Effect<AWS.DeleteObjectsCommandOutput, UploadistaError>;\n\n // Multipart upload operations\n readonly createMultipartUpload: (\n context: S3OperationContext,\n ) => Effect.Effect<MultipartUploadInfo, UploadistaError>;\n readonly uploadPart: (\n context: S3OperationContext & { partNumber: number; data: Uint8Array },\n ) => Effect.Effect<string, UploadistaError>;\n readonly completeMultipartUpload: (\n context: S3OperationContext,\n parts: Array<AWS.Part>,\n ) => Effect.Effect<string | undefined, UploadistaError>;\n readonly abortMultipartUpload: (\n context: S3OperationContext,\n ) => Effect.Effect<void, UploadistaError>;\n readonly listParts: (\n context: S3OperationContext & { partNumberMarker?: string },\n ) => Effect.Effect<\n {\n parts: AWS.Part[];\n isTruncated: boolean;\n nextPartNumberMarker?: string;\n },\n UploadistaError\n >;\n readonly listMultipartUploads: (\n keyMarker?: string,\n uploadIdMarker?: string,\n ) => Effect.Effect<AWS.ListMultipartUploadsCommandOutput, UploadistaError>;\n\n // Incomplete part operations\n readonly getIncompletePart: (\n id: string,\n ) => Effect.Effect<ReadableStream | undefined, UploadistaError>;\n readonly getIncompletePartSize: (\n id: string,\n ) => Effect.Effect<number | undefined, UploadistaError>;\n readonly putIncompletePart: (\n id: string,\n data: Uint8Array,\n ) => Effect.Effect<string, UploadistaError>;\n readonly deleteIncompletePart: (\n id: string,\n ) => Effect.Effect<void, UploadistaError>;\n }\n>() {}\n\nexport const makeS3ClientService = (\n s3ClientConfig: S3ClientConfig,\n bucket: string,\n) => {\n const s3Client = new S3(s3ClientConfig);\n const 
getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const data = await s3Client.getObject({\n Bucket: bucket,\n Key: key,\n });\n return toReadableStream(data.Body);\n },\n catch: (error) => handleS3Error(\"getObject\", error, { key, bucket }),\n });\n\n const headObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n const data = await s3Client.headObject({\n Bucket: bucket,\n Key: key,\n });\n return data.ContentLength;\n } catch (error) {\n if (error instanceof NotFound) {\n return undefined;\n }\n throw error;\n }\n },\n catch: (error) => handleS3Error(\"headObject\", error, { key, bucket }),\n });\n\n const putObject = (key: string, body: Uint8Array) =>\n Effect.tryPromise({\n try: async () => {\n const response = await s3Client.putObject({\n Bucket: bucket,\n Key: key,\n Body: body,\n });\n return response.ETag || \"\";\n },\n catch: (error) =>\n handleS3Error(\"putObject\", error, { key, bucket, size: body.length }),\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n await s3Client.deleteObject({\n Bucket: bucket,\n Key: key,\n });\n },\n catch: (error) => handleS3Error(\"deleteObject\", error, { key, bucket }),\n });\n\n const deleteObjects = (keys: string[]) =>\n Effect.tryPromise({\n try: () =>\n s3Client.deleteObjects({\n Bucket: bucket,\n Delete: {\n Objects: keys.map((key) => ({ Key: key })),\n },\n }),\n catch: (error) =>\n handleS3Error(\"deleteObjects\", error, { keys: keys.length, bucket }),\n });\n\n const createMultipartUpload = (context: S3OperationContext) =>\n withS3ApiMetrics(\n \"createMultipartUpload\",\n Effect.tryPromise({\n try: async () => {\n const request: AWS.CreateMultipartUploadCommandInput = {\n Bucket: context.bucket,\n Key: context.key,\n };\n\n if (context.contentType) {\n request.ContentType = context.contentType;\n }\n\n if (context.cacheControl) {\n request.CacheControl = context.cacheControl;\n }\n\n const res = await s3Client.createMultipartUpload(request);\n\n if (!res.UploadId) {\n throw new Error(\"Upload ID is undefined\");\n }\n if (!res.Key) {\n throw new Error(\"Key is undefined\");\n }\n\n return {\n uploadId: res.UploadId,\n bucket: context.bucket,\n key: res.Key,\n };\n },\n catch: (error) =>\n handleS3Error(\"createMultipartUpload\", error, context),\n }),\n );\n\n const uploadPart = (\n context: S3OperationContext & { partNumber: number; data: Uint8Array },\n ) =>\n withS3ApiMetrics(\n \"uploadPart\",\n Effect.tryPromise({\n try: () =>\n s3Client.uploadPart({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n PartNumber: context.partNumber,\n Body: context.data,\n }),\n catch: (error) =>\n handleS3Error(\"uploadPart\", error, {\n upload_id: context.key,\n part_number: context.partNumber,\n part_size: context.data.length,\n s3_bucket: context.bucket,\n }),\n }).pipe(Effect.map((response) => response.ETag as string)),\n );\n\n const completeMultipartUpload = (\n context: S3OperationContext,\n parts: Array<AWS.Part>,\n ) =>\n withS3ApiMetrics(\n \"completeMultipartUpload\",\n Effect.tryPromise({\n try: () =>\n s3Client\n .completeMultipartUpload({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n MultipartUpload: {\n Parts: parts.map((part) => ({\n ETag: part.ETag,\n PartNumber: part.PartNumber,\n })),\n },\n })\n .then((response) => response.Location),\n catch: (error) =>\n handleS3Error(\"completeMultipartUpload\", error, {\n upload_id: context.key,\n parts_count: parts.length,\n s3_bucket: 
context.bucket,\n }),\n }),\n );\n\n const abortMultipartUpload = (context: S3OperationContext) =>\n Effect.tryPromise({\n try: async () => {\n await s3Client.abortMultipartUpload({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n });\n },\n catch: (error) =>\n handleS3NotFoundError(\"abortMultipartUpload\", error, {\n upload_id: context.key,\n s3_bucket: context.bucket,\n }),\n });\n\n const listParts = (\n context: S3OperationContext & { partNumberMarker?: string },\n ) =>\n Effect.tryPromise({\n try: async () => {\n const params: AWS.ListPartsCommandInput = {\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n PartNumberMarker: context.partNumberMarker,\n };\n\n const data = await s3Client.listParts(params);\n\n return {\n parts: data.Parts ?? [],\n isTruncated: data.IsTruncated ?? false,\n nextPartNumberMarker: data.NextPartNumberMarker,\n };\n },\n catch: (error) =>\n handleS3Error(\"listParts\", error, {\n upload_id: context.key,\n s3_bucket: context.bucket,\n }),\n });\n\n const listMultipartUploads = (keyMarker?: string, uploadIdMarker?: string) =>\n Effect.tryPromise({\n try: () =>\n s3Client.listMultipartUploads({\n Bucket: bucket,\n KeyMarker: keyMarker,\n UploadIdMarker: uploadIdMarker,\n }),\n catch: (error) =>\n handleS3Error(\"listMultipartUploads\", error, { bucket }),\n });\n\n const getIncompletePart = (id: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n const data = await s3Client.getObject({\n Bucket: bucket,\n Key: partKey(id),\n });\n return toReadableStream(data.Body);\n } catch (error) {\n if (error instanceof NoSuchKey) {\n return undefined;\n }\n throw error;\n }\n },\n catch: (error) =>\n handleS3Error(\"getIncompletePart\", error, { upload_id: id, bucket }),\n });\n\n const getIncompletePartSize = (id: string) => headObject(partKey(id));\n\n const putIncompletePart = (id: string, data: Uint8Array) =>\n putObject(partKey(id), data).pipe(\n Effect.tap(() =>\n Effect.logInfo(\"Incomplete part uploaded\").pipe(\n Effect.annotateLogs({ upload_id: id }),\n ),\n ),\n );\n\n const deleteIncompletePart = (id: string) => deleteObject(partKey(id));\n\n return {\n bucket,\n getObject,\n headObject,\n putObject,\n deleteObject,\n deleteObjects,\n createMultipartUpload,\n uploadPart,\n completeMultipartUpload,\n abortMultipartUpload,\n listParts,\n listMultipartUploads,\n getIncompletePart,\n getIncompletePartSize,\n putIncompletePart,\n deleteIncompletePart,\n };\n};\n\nexport const S3ClientLayer = (s3ClientConfig: S3ClientConfig, bucket: string) =>\n Layer.succeed(S3ClientService, makeS3ClientService(s3ClientConfig, bucket));\n","import type AWS from \"@aws-sdk/client-s3\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n DataStoreWriteOptions,\n StreamingConfig,\n StreamWriteOptions,\n StreamWriteResult,\n UploadFile,\n UploadStrategy,\n} from \"@uploadista/core/types\";\nimport {\n DEFAULT_STREAMING_CONFIG,\n UploadFileKVStore,\n} from \"@uploadista/core/types\";\nimport {\n s3ActiveUploadsGauge as activeUploadsGauge,\n s3FileSizeHistogram as fileSizeHistogram,\n logS3UploadCompletion,\n s3PartSizeHistogram as partSizeHistogram,\n s3PartUploadDurationHistogram as partUploadDurationHistogram,\n s3UploadDurationHistogram as uploadDurationHistogram,\n s3UploadErrorsTotal as uploadErrorsTotal,\n s3UploadPartsTotal as uploadPartsTotal,\n s3UploadRequestsTotal as uploadRequestsTotal,\n s3UploadSuccessTotal as uploadSuccessTotal,\n 
withS3TimingMetrics as withTimingMetrics,\n withS3UploadMetrics as withUploadMetrics,\n} from \"@uploadista/observability\";\nimport { Effect, Ref, Schedule, Stream } from \"effect\";\nimport { S3ClientLayer, S3ClientService } from \"./services/s3-client.service\";\nimport type { ChunkInfo, S3StoreConfig } from \"./types\";\nimport {\n calcOffsetFromParts,\n calcOptimalPartSize,\n getExpirationDate,\n isUploadNotFoundError,\n} from \"./utils\";\n\n/**\n * Generates an S3 key from an upload file, preserving the file extension if available.\n * Looks for filename in metadata under common keys: 'filename', 'fileName', or 'name'.\n * Falls back to just the upload ID if no filename is found.\n */\nconst getS3Key = (uploadFile: UploadFile): string => {\n const { id, metadata } = uploadFile;\n\n if (!metadata) {\n return id;\n }\n\n // Try common metadata keys for filename\n const filename = metadata.filename || metadata.fileName || metadata.name;\n\n if (typeof filename === \"string\" && filename.includes(\".\")) {\n const extension = filename.substring(filename.lastIndexOf(\".\"));\n return `${id}${extension}`;\n }\n\n return id;\n};\n\n// Clean implementation using composed services\nexport function createS3Store(config: S3StoreConfig) {\n const {\n deliveryUrl,\n partSize,\n minPartSize = 5_242_880,\n useTags = true,\n maxMultipartParts = 10_000,\n maxConcurrentPartUploads = 60,\n expirationPeriodInMilliseconds = 1000 * 60 * 60 * 24 * 7, // 1 week\n s3ClientConfig: { bucket },\n } = config;\n\n return Effect.gen(function* () {\n const s3Client = yield* S3ClientService;\n const kvStore = yield* UploadFileKVStore;\n const preferredPartSize = partSize || 8 * 1024 * 1024;\n\n const getUploadId = (\n uploadFile: UploadFile,\n ): Effect.Effect<string, UploadistaError> => {\n const uploadId = uploadFile.storage.uploadId;\n if (!uploadId) {\n return Effect.fail(\n UploadistaError.fromCode(\n \"FILE_WRITE_ERROR\",\n new Error(\"Upload ID is undefined\"),\n ),\n );\n }\n return Effect.succeed(uploadId);\n };\n\n const uploadPart = (\n uploadFile: UploadFile,\n data: Uint8Array,\n partNumber: number,\n ) => {\n const s3Key = getS3Key(uploadFile);\n\n return withTimingMetrics(\n partUploadDurationHistogram,\n Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n const etag = yield* s3Client\n .uploadPart({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumber,\n data,\n })\n .pipe(\n Effect.retry({\n schedule: Schedule.exponential(\"1 second\", 2.0).pipe(\n Schedule.intersect(Schedule.recurs(3)),\n ),\n // Don't retry on upload not found errors - they're permanent\n while: (error) => !isUploadNotFoundError(error),\n }),\n Effect.tapError((error) =>\n Effect.logWarning(\"Retrying part upload\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: partNumber,\n error_message: error.message,\n retry_attempt: \"unknown\", // Will be overridden by the retry schedule\n part_size: data.length,\n s3_bucket: s3Client.bucket,\n }),\n ),\n ),\n );\n\n yield* uploadPartsTotal(Effect.succeed(1));\n yield* Effect.logInfo(\"Part uploaded successfully\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: partNumber,\n part_size: data.length,\n etag: etag,\n }),\n );\n\n return etag;\n }),\n ).pipe(\n Effect.withSpan(`s3-upload-part-${partNumber}`, {\n attributes: {\n \"upload.id\": uploadFile.id,\n \"upload.part_number\": partNumber,\n \"upload.part_size\": data.length,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n 
);\n };\n\n const uploadIncompletePart = (id: string, data: Uint8Array) =>\n s3Client.putIncompletePart(id, data);\n\n const downloadIncompletePart = (id: string) =>\n Effect.gen(function* () {\n const incompletePart = yield* s3Client.getIncompletePart(id);\n\n if (!incompletePart) {\n return undefined;\n }\n\n // Read the stream and collect all chunks to calculate size\n const reader = incompletePart.getReader();\n const chunks: Uint8Array[] = [];\n let incompletePartSize = 0;\n\n try {\n while (true) {\n const { done, value } = yield* Effect.promise(() => reader.read());\n if (done) break;\n chunks.push(value);\n incompletePartSize += value.length;\n }\n } finally {\n reader.releaseLock();\n }\n\n const stream = Stream.fromIterable(chunks);\n\n return {\n size: incompletePartSize,\n stream,\n };\n });\n\n const deleteIncompletePart = (id: string) =>\n s3Client.deleteIncompletePart(id);\n\n const getIncompletePartSize = (id: string) =>\n s3Client.getIncompletePartSize(id);\n\n const complete = (uploadFile: UploadFile, parts: Array<AWS.Part>) => {\n const s3Key = getS3Key(uploadFile);\n\n return Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n return yield* s3Client.completeMultipartUpload(\n {\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n },\n parts,\n );\n }).pipe(\n Effect.tap(() => uploadSuccessTotal(Effect.succeed(1))),\n Effect.withSpan(\"s3-complete-multipart-upload\", {\n attributes: {\n \"upload.id\": uploadFile.id,\n \"upload.parts_count\": parts.length,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n );\n };\n\n const abort = (uploadFile: UploadFile) => {\n const s3Key = getS3Key(uploadFile);\n\n return Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n yield* s3Client.abortMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n });\n\n yield* s3Client.deleteObjects([s3Key]);\n });\n };\n\n const retrievePartsRecursive = (\n s3Key: string,\n uploadId: string,\n uploadFileId: string,\n partNumberMarker?: string,\n ): Effect.Effect<\n { uploadFound: boolean; parts: AWS.Part[] },\n UploadistaError\n > =>\n Effect.gen(function* () {\n const result = yield* s3Client.listParts({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumberMarker,\n });\n\n let parts = result.parts;\n\n if (result.isTruncated) {\n const rest = yield* retrievePartsRecursive(\n s3Key,\n uploadId,\n uploadFileId,\n result.nextPartNumberMarker,\n );\n parts = [...parts, ...rest.parts];\n }\n\n if (!partNumberMarker) {\n parts.sort((a, b) => (a.PartNumber ?? 0) - (b.PartNumber ?? 
0));\n }\n\n return { uploadFound: true, parts };\n }).pipe(\n Effect.catchAll((error) => {\n if (isUploadNotFoundError(error)) {\n return Effect.logWarning(\n \"S3 upload not found during listParts\",\n ).pipe(\n Effect.annotateLogs({\n upload_id: uploadFileId,\n error_code: error.code,\n }),\n Effect.as({ uploadFound: false, parts: [] }),\n );\n }\n return Effect.fail(error);\n }),\n );\n\n const retrieveParts = (id: string, partNumberMarker?: string) =>\n Effect.gen(function* () {\n const metadata = yield* kvStore.get(id);\n const uploadId = yield* getUploadId(metadata);\n const s3Key = getS3Key(metadata);\n\n return yield* retrievePartsRecursive(\n s3Key,\n uploadId,\n id,\n partNumberMarker,\n );\n });\n\n const completeMetadata = (upload: UploadFile, useTags: boolean) =>\n Effect.gen(function* () {\n if (!useTags) {\n return 0;\n }\n\n const uploadFile = yield* kvStore.get(upload.id);\n const uploadId = uploadFile.storage.uploadId;\n if (!uploadId) {\n return 0;\n }\n\n yield* kvStore.set(upload.id, {\n ...uploadFile,\n storage: { ...uploadFile.storage, uploadId },\n });\n\n return 0;\n });\n\n const clearCache = (id: string) =>\n Effect.gen(function* () {\n yield* Effect.logInfo(\"Clearing cache\").pipe(\n Effect.annotateLogs({ upload_id: id }),\n );\n yield* kvStore.delete(id);\n });\n\n const createMultipartUpload = (upload: UploadFile) => {\n const s3Key = getS3Key(upload);\n\n return Effect.gen(function* () {\n yield* Effect.logInfo(\"Initializing multipart upload\").pipe(\n Effect.annotateLogs({ upload_id: upload.id }),\n );\n\n const multipartInfo = yield* s3Client.createMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId: \"\", // Not needed for create\n contentType: upload.metadata?.contentType?.toString(),\n cacheControl: upload.metadata?.cacheControl?.toString(),\n });\n\n const uploadCreated = {\n ...upload,\n storage: {\n ...upload.storage,\n path: multipartInfo.key,\n uploadId: multipartInfo.uploadId,\n bucket: multipartInfo.bucket,\n },\n url: `${deliveryUrl}/${s3Key}`,\n };\n\n yield* kvStore.set(upload.id, uploadCreated);\n\n yield* Effect.logInfo(\"Multipart upload created\").pipe(\n Effect.annotateLogs({\n upload_id: upload.id,\n s3_upload_id: uploadCreated.storage.uploadId,\n s3_key: s3Key,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* fileSizeHistogram(Effect.succeed(upload.size || 0));\n\n return uploadCreated;\n }).pipe(\n Effect.withSpan(\"s3-create-upload\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.size\": upload.size || 0,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n );\n };\n\n /**\n * Creates a multipart upload on S3 attaching any metadata to it.\n * Also, a `${file_id}.info` file is created which holds some information\n * about the upload itself like: `upload-id`, `upload-length`, etc.\n */\n const create = (upload: UploadFile) => {\n return Effect.gen(function* () {\n yield* Effect.logInfo(\"Initializing multipart upload\").pipe(\n Effect.annotateLogs({ upload_id: upload.id }),\n );\n const uploadCreated = yield* createMultipartUpload(upload);\n yield* kvStore.set(upload.id, uploadCreated);\n yield* Effect.logInfo(\"Multipart upload created\").pipe(\n Effect.annotateLogs({\n upload_id: upload.id,\n s3_upload_id: uploadCreated.storage.uploadId,\n }),\n );\n yield* uploadRequestsTotal(Effect.succeed(1));\n\n return uploadCreated;\n }).pipe(\n Effect.withSpan(\"s3-create-upload\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.size\": upload.size || 0,\n 
\"s3.bucket\": bucket,\n },\n }),\n );\n };\n\n const remove = (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n yield* abort(uploadFile);\n yield* clearCache(id);\n });\n\n const write = (\n options: DataStoreWriteOptions,\n dependencies: { onProgress?: (currentOffset: number) => void },\n ) =>\n withUploadMetrics(\n options.file_id,\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const {\n stream: initialData,\n file_id,\n offset: initialOffset,\n } = options;\n const { onProgress } = dependencies;\n\n // Capture start time for upload completion metrics\n const startTime = Date.now();\n\n // Track active upload\n yield* activeUploadsGauge(Effect.succeed(1));\n\n const prepareResult = yield* prepareUpload(\n file_id,\n initialOffset,\n initialData,\n );\n\n const {\n uploadFile,\n nextPartNumber,\n offset,\n data,\n existingPartSize,\n } = prepareResult;\n\n // Use existing part size if parts already exist, otherwise calculate optimal size\n const uploadPartSize =\n existingPartSize ||\n calcOptimalPartSize(\n uploadFile.size,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n );\n\n // Log part size decision for debugging\n yield* Effect.logInfo(\"Part size decision\").pipe(\n Effect.annotateLogs({\n upload_id: file_id,\n existing_part_size: existingPartSize,\n calculated_part_size: calcOptimalPartSize(\n uploadFile.size,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n ),\n final_part_size: uploadPartSize,\n next_part_number: nextPartNumber,\n }),\n );\n\n const bytesUploaded = yield* uploadParts(\n uploadFile,\n data,\n nextPartNumber,\n offset,\n uploadPartSize,\n minPartSize,\n maxConcurrentPartUploads,\n onProgress,\n );\n\n const newOffset = offset + bytesUploaded;\n\n if (uploadFile.size === newOffset) {\n yield* finishUpload(file_id, uploadFile, startTime);\n }\n\n return newOffset;\n }).pipe(Effect.ensuring(activeUploadsGauge(Effect.succeed(0)))),\n ),\n );\n\n const getUpload = (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n\n const { parts, uploadFound } = yield* retrieveParts(id);\n if (!uploadFound) {\n return {\n ...uploadFile,\n offset: uploadFile.size as number,\n size: uploadFile.size,\n };\n }\n\n const offset = calcOffsetFromParts(parts);\n const incompletePartSize = yield* getIncompletePartSize(id);\n\n return {\n ...uploadFile,\n offset: offset + (incompletePartSize ?? 0),\n size: uploadFile.size,\n storage: uploadFile.storage,\n };\n });\n\n // const read = (id: string) =>\n // Effect.gen(function* () {\n // return yield* s3Client.getObject(id);\n // });\n\n // Helper functions\n const prepareUpload = (\n fileId: string,\n initialOffset: number,\n initialData: Stream.Stream<Uint8Array, UploadistaError>,\n ) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(fileId);\n const { parts } = yield* retrieveParts(fileId);\n\n const partNumber: number =\n parts.length > 0 && parts[parts.length - 1].PartNumber\n ? (parts[parts.length - 1].PartNumber ?? 0)\n : 0;\n const nextPartNumber = partNumber + 1;\n\n // Detect existing part size to maintain consistency\n // We check the first part's size to ensure all subsequent parts match\n const existingPartSize =\n parts.length > 0 && parts[0].Size ? 
parts[0].Size : null;\n\n // Validate that all existing parts (except potentially the last one) have the same size\n if (existingPartSize && parts.length > 1) {\n const inconsistentPart = parts\n .slice(0, -1)\n .find((part) => part.Size !== existingPartSize);\n if (inconsistentPart) {\n yield* Effect.logWarning(\n \"Inconsistent part sizes detected in existing upload\",\n ).pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n expected_size: existingPartSize,\n inconsistent_part: inconsistentPart.PartNumber,\n inconsistent_size: inconsistentPart.Size,\n }),\n );\n }\n }\n\n const incompletePart = yield* downloadIncompletePart(fileId);\n\n if (incompletePart) {\n yield* deleteIncompletePart(fileId);\n const offset = initialOffset - incompletePart.size;\n const data = incompletePart.stream.pipe(Stream.concat(initialData));\n return {\n uploadFile,\n nextPartNumber,\n offset,\n incompletePartSize: incompletePart.size,\n data,\n existingPartSize,\n };\n } else {\n return {\n uploadFile,\n nextPartNumber,\n offset: initialOffset,\n incompletePartSize: 0,\n data: initialData,\n existingPartSize,\n };\n }\n });\n\n const finishUpload = (\n fileId: string,\n uploadFile: UploadFile,\n startTime: number,\n ) =>\n Effect.gen(function* () {\n const { parts } = yield* retrieveParts(fileId);\n\n // Log all parts for debugging S3 multipart upload requirements\n yield* Effect.logInfo(\"Attempting to complete multipart upload\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n parts_count: parts.length,\n parts_info: parts.map((part, index) => ({\n part_number: part.PartNumber,\n size: part.Size,\n etag: part.ETag,\n is_final_part: index === parts.length - 1,\n })),\n }),\n );\n\n yield* complete(uploadFile, parts);\n yield* completeMetadata(uploadFile, useTags);\n // yield* clearCache(fileId);\n\n // Log upload completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const fileSize = uploadFile.size || 0;\n const throughputBps =\n totalDurationMs > 0 ? (fileSize * 1000) / totalDurationMs : 0;\n\n // Calculate average part size if we have parts\n const averagePartSize =\n parts.length > 0\n ? 
parts.reduce((sum, part) => sum + (part.Size || 0), 0) /\n parts.length\n : undefined;\n\n yield* logS3UploadCompletion(fileId, {\n fileSize,\n totalDurationMs,\n partsCount: parts.length,\n averagePartSize,\n throughputBps,\n });\n }).pipe(\n Effect.tapError((error) =>\n Effect.gen(function* () {\n yield* uploadErrorsTotal(Effect.succeed(1));\n yield* Effect.logError(\"Failed to finish upload\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n error: String(error),\n }),\n );\n }),\n ),\n );\n\n const deleteExpired = (): Effect.Effect<number, UploadistaError> =>\n Effect.gen(function* () {\n if (expirationPeriodInMilliseconds === 0) {\n return 0;\n }\n\n let keyMarker: string | undefined;\n let uploadIdMarker: string | undefined;\n let isTruncated = true;\n let deleted = 0;\n\n while (isTruncated) {\n const listResponse = yield* s3Client.listMultipartUploads(\n keyMarker,\n uploadIdMarker,\n );\n\n const expiredUploads =\n listResponse.Uploads?.filter((multiPartUpload) => {\n const initiatedDate = multiPartUpload.Initiated;\n return (\n initiatedDate &&\n Date.now() >\n getExpirationDate(\n initiatedDate.toISOString(),\n expirationPeriodInMilliseconds,\n ).getTime()\n );\n }) || [];\n\n const objectsToDelete = expiredUploads\n .filter((upload): upload is { Key: string } => {\n return !!upload.Key;\n })\n .map((upload) => upload.Key);\n\n if (objectsToDelete.length > 0) {\n yield* s3Client.deleteObjects(objectsToDelete);\n\n // Abort multipart uploads\n yield* Effect.forEach(expiredUploads, (upload) => {\n return Effect.gen(function* () {\n if (!upload.Key || !upload.UploadId) {\n return;\n }\n yield* s3Client.abortMultipartUpload({\n bucket,\n key: upload.Key,\n uploadId: upload.UploadId,\n });\n return;\n });\n });\n\n deleted += objectsToDelete.length;\n }\n\n isTruncated = listResponse.IsTruncated ?? 
false;\n\n if (isTruncated) {\n keyMarker = listResponse.NextKeyMarker;\n uploadIdMarker = listResponse.NextUploadIdMarker;\n }\n }\n\n return deleted;\n });\n\n // Proper single-pass chunking using Effect's async stream constructor\n // Ensures all parts except the final part are exactly the same size (S3 requirement)\n const createChunkedStream =\n (chunkSize: number) =>\n <E>(\n stream: Stream.Stream<Uint8Array, E>,\n ): Stream.Stream<ChunkInfo, E> => {\n return Stream.async<ChunkInfo, E>((emit) => {\n let buffer = new Uint8Array(0);\n let partNumber = 1;\n let totalBytesProcessed = 0;\n\n const emitChunk = (data: Uint8Array, isFinalChunk = false) => {\n // Log chunk information for debugging - use INFO level to see in logs\n Effect.runSync(\n Effect.logInfo(\"Creating chunk\").pipe(\n Effect.annotateLogs({\n part_number: partNumber,\n chunk_size: data.length,\n expected_size: chunkSize,\n is_final_chunk: isFinalChunk,\n total_bytes_processed: totalBytesProcessed + data.length,\n }),\n ),\n );\n emit.single({\n partNumber: partNumber++,\n data,\n size: data.length,\n });\n };\n\n const processChunk = (newData: Uint8Array) => {\n // Combine buffer with new data\n const combined = new Uint8Array(buffer.length + newData.length);\n combined.set(buffer);\n combined.set(newData, buffer.length);\n buffer = combined;\n totalBytesProcessed += newData.length;\n\n // Emit full chunks of exactly chunkSize bytes\n // This ensures S3 multipart upload rule: all parts except last must be same size\n while (buffer.length >= chunkSize) {\n const chunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emitChunk(chunk, false);\n }\n };\n\n // Process the stream\n Effect.runFork(\n stream.pipe(\n Stream.runForEach((chunk) =>\n Effect.sync(() => processChunk(chunk)),\n ),\n Effect.andThen(() =>\n Effect.sync(() => {\n // Emit final chunk if there's remaining data\n // The final chunk can be any size < chunkSize (S3 allows this)\n if (buffer.length > 0) {\n emitChunk(buffer, true);\n }\n emit.end();\n }),\n ),\n Effect.catchAll((error) => Effect.sync(() => emit.fail(error))),\n ),\n );\n });\n };\n\n // Byte-level progress tracking during streaming\n // This provides smooth, immediate progress feedback by tracking bytes as they\n // flow through the stream, before they reach S3. 
This solves the issue where\n // small files (< 5MB) would jump from 0% to 100% instantly.\n const withByteProgressTracking =\n (onProgress?: (totalBytes: number) => void, initialOffset = 0) =>\n <E, R>(stream: Stream.Stream<Uint8Array, E, R>) => {\n if (!onProgress) return stream;\n\n return Effect.gen(function* () {\n const totalBytesProcessedRef = yield* Ref.make(initialOffset);\n\n return stream.pipe(\n Stream.tap((chunk) =>\n Effect.gen(function* () {\n const newTotal = yield* Ref.updateAndGet(\n totalBytesProcessedRef,\n (total) => total + chunk.length,\n );\n onProgress(newTotal);\n }),\n ),\n );\n }).pipe(Stream.unwrap);\n };\n\n const uploadParts = (\n uploadFile: UploadFile,\n readStream: Stream.Stream<Uint8Array, UploadistaError>,\n initCurrentPartNumber: number,\n initOffset: number,\n uploadPartSize: number,\n minPartSize: number,\n maxConcurrentPartUploads: number,\n onProgress?: (newOffset: number) => void,\n ) =>\n Effect.gen(function* () {\n yield* Effect.logInfo(\"Starting part uploads\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n init_offset: initOffset,\n file_size: uploadFile.size,\n part_size: uploadPartSize,\n min_part_size: minPartSize,\n }),\n );\n\n // Enhanced Progress Tracking Strategy:\n // 1. Byte-level progress during streaming - provides immediate, smooth feedback\n // as data flows through the pipeline (even for small files)\n // 2. This tracks progress BEFORE S3 upload, giving users immediate feedback\n // 3. For large files with multiple parts, this provides granular updates\n // 4. For small files (single part), this prevents 0%->100% jumps\n const chunkStream = readStream.pipe(\n // Add byte-level progress tracking during streaming (immediate feedback)\n withByteProgressTracking(onProgress, initOffset),\n // Create chunks for S3 multipart upload with uniform part sizes\n createChunkedStream(uploadPartSize),\n );\n\n // Track cumulative offset and total bytes with Effect Refs\n const cumulativeOffsetRef = yield* Ref.make(initOffset);\n const totalBytesUploadedRef = yield* Ref.make(0);\n\n // Create a chunk upload function for the sink\n const uploadChunk = (chunkInfo: ChunkInfo) =>\n Effect.gen(function* () {\n // Calculate cumulative bytes to determine if this is the final part\n const cumulativeOffset = yield* Ref.updateAndGet(\n cumulativeOffsetRef,\n (offset) => offset + chunkInfo.size,\n );\n const isFinalPart = cumulativeOffset >= (uploadFile.size || 0);\n\n yield* Effect.logDebug(\"Processing chunk\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n cumulative_offset: cumulativeOffset,\n file_size: uploadFile.size,\n chunk_size: chunkInfo.size,\n is_final_part: isFinalPart,\n }),\n );\n\n const actualPartNumber =\n initCurrentPartNumber + chunkInfo.partNumber - 1;\n\n if (chunkInfo.size > uploadPartSize) {\n yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_WRITE_ERROR\",\n new Error(\n `Part size ${chunkInfo.size} exceeds upload part size ${uploadPartSize}`,\n ),\n ),\n );\n }\n\n // For parts that meet the minimum part size (5MB) or are the final part,\n // upload them as regular multipart parts\n if (chunkInfo.size >= minPartSize || isFinalPart) {\n yield* Effect.logDebug(\"Uploading multipart chunk\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: actualPartNumber,\n chunk_size: chunkInfo.size,\n min_part_size: minPartSize,\n is_final_part: isFinalPart,\n }),\n );\n yield* uploadPart(uploadFile, chunkInfo.data, actualPartNumber);\n yield* 
partSizeHistogram(Effect.succeed(chunkInfo.size));\n } else {\n // Only upload as incomplete part if it's smaller than minimum and not final\n yield* uploadIncompletePart(uploadFile.id, chunkInfo.data);\n }\n\n yield* Ref.update(\n totalBytesUploadedRef,\n (total) => total + chunkInfo.size,\n );\n\n // Note: Byte-level progress is now tracked during streaming phase\n // This ensures smooth progress updates regardless of part size\n // S3 upload completion is tracked via totalBytesUploadedRef for accuracy\n });\n\n // Process chunks concurrently with controlled concurrency\n yield* chunkStream.pipe(\n Stream.runForEach((chunkInfo) => uploadChunk(chunkInfo)),\n Effect.withConcurrency(maxConcurrentPartUploads),\n );\n\n return yield* Ref.get(totalBytesUploadedRef);\n });\n\n const getCapabilities = (): DataStoreCapabilities => ({\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: true,\n supportsStreamingRead: true, // Supports streaming reads via S3 GetObject\n supportsStreamingWrite: true, // Supports streaming writes via S3 multipart upload\n maxConcurrentUploads: maxConcurrentPartUploads,\n minChunkSize: minPartSize,\n maxChunkSize: 5_368_709_120, // 5GiB S3 limit\n maxParts: maxMultipartParts,\n optimalChunkSize: preferredPartSize,\n requiresOrderedChunks: false,\n requiresMimeTypeValidation: true,\n maxValidationSize: undefined, // no size limit\n });\n\n const getChunkerConstraints = () => ({\n minChunkSize: minPartSize,\n maxChunkSize: 5_368_709_120, // 5GiB S3 limit\n optimalChunkSize: preferredPartSize,\n requiresOrderedChunks: false,\n });\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n return Effect.succeed(result);\n };\n\n const concatArrayBuffers = (chunks: Uint8Array[]): Uint8Array => {\n const result = new Uint8Array(chunks.reduce((a, c) => a + c.length, 0));\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n };\n\n const streamToArray = async (\n stream: ReadableStream<Uint8Array>,\n ): Promise<Uint8Array> => {\n const reader = stream.getReader();\n const chunks: Uint8Array[] = [];\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n }\n return concatArrayBuffers(chunks);\n };\n\n const read = (id: string) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(id);\n console.log(upload);\n if (!upload.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_READ_ERROR\",\n new Error(\"Upload Key is undefined\"),\n ),\n );\n }\n const s3Key = getS3Key(upload);\n const stream = yield* s3Client.getObject(s3Key);\n return yield* Effect.promise(() => streamToArray(stream));\n });\n\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Uses S3 GetObject and converts the response body to an Effect Stream.\n *\n * @param id - The unique identifier of the file to read\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n */\n const readStream = (id: string, config?: StreamingConfig) =>\n Effect.gen(function* () {\n const 
upload = yield* kvStore.get(id);\n if (!upload.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_READ_ERROR\",\n new Error(\"Upload Key is undefined\"),\n ),\n );\n }\n\n // Merge config with defaults\n const effectiveConfig = {\n ...DEFAULT_STREAMING_CONFIG,\n ...config,\n };\n\n const s3Key = getS3Key(upload);\n const webStream = yield* s3Client.getObject(s3Key);\n\n // Convert Web ReadableStream to Effect Stream with configured chunk size\n return Stream.async<Uint8Array, UploadistaError>((emit) => {\n const reader = webStream.getReader();\n const chunkSize = effectiveConfig.chunkSize;\n let buffer = new Uint8Array(0);\n\n const processChunk = async () => {\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n // Emit any remaining data in buffer\n if (buffer.length > 0) {\n emit.single(buffer);\n }\n emit.end();\n return;\n }\n\n if (value) {\n // Combine buffer with new value\n const combined = new Uint8Array(buffer.length + value.length);\n combined.set(buffer);\n combined.set(value, buffer.length);\n buffer = combined;\n\n // Emit chunks of the configured size\n while (buffer.length >= chunkSize) {\n const chunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emit.single(chunk);\n }\n }\n }\n } catch (error) {\n emit.fail(\n new UploadistaError({\n code: \"FILE_READ_ERROR\",\n status: 500,\n body: \"Failed to read S3 object stream\",\n details: `S3 stream read failed: ${String(error)}`,\n }),\n );\n }\n };\n\n // Start processing\n processChunk();\n\n // Cleanup function\n return Effect.sync(() => {\n reader.releaseLock();\n });\n });\n });\n\n /**\n * Writes file content from a stream without knowing the final size upfront.\n * Uses S3 multipart upload to stream content as parts are buffered.\n *\n * @param fileId - The unique identifier for the file\n * @param options - Stream write options including the Effect Stream\n * @returns StreamWriteResult with final size after stream completes\n */\n const writeStream = (\n fileId: string,\n options: StreamWriteOptions,\n ): Effect.Effect<StreamWriteResult, UploadistaError> =>\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n const s3Key = fileId;\n\n yield* Effect.logInfo(\"Starting streaming write to S3\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n s3_key: s3Key,\n size_hint: options.sizeHint,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n\n // Calculate optimal part size based on size hint or use default\n const uploadPartSize = options.sizeHint\n ? 
calcOptimalPartSize(\n options.sizeHint,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n )\n : preferredPartSize;\n\n // Create multipart upload\n const multipartInfo = yield* s3Client.createMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId: \"\", // Not needed for create\n contentType: options.contentType,\n });\n\n const uploadId = multipartInfo.uploadId;\n\n yield* Effect.logInfo(\"Multipart upload created for streaming write\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n s3_upload_id: uploadId,\n s3_key: s3Key,\n part_size: uploadPartSize,\n }),\n );\n\n // Track parts and total bytes\n const partsRef = yield* Ref.make<AWS.Part[]>([]);\n const totalBytesRef = yield* Ref.make(0);\n const partNumberRef = yield* Ref.make(1);\n const bufferRef = yield* Ref.make(new Uint8Array(0));\n\n // Helper to upload a part\n const uploadBufferedPart = (data: Uint8Array, isFinalPart: boolean) =>\n Effect.gen(function* () {\n if (data.length === 0) {\n return;\n }\n\n // Only upload if we have enough data or it's the final part\n if (data.length < minPartSize && !isFinalPart) {\n return;\n }\n\n const partNumber = yield* Ref.getAndUpdate(\n partNumberRef,\n (n) => n + 1,\n );\n\n yield* Effect.logDebug(\"Uploading part from stream\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n part_number: partNumber,\n part_size: data.length,\n is_final_part: isFinalPart,\n }),\n );\n\n const etag = yield* s3Client\n .uploadPart({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumber,\n data,\n })\n .pipe(\n Effect.retry({\n schedule: Schedule.exponential(\"1 second\", 2.0).pipe(\n Schedule.intersect(Schedule.recurs(3)),\n ),\n while: (error) => !isUploadNotFoundError(error),\n }),\n );\n\n yield* Ref.update(partsRef, (parts) => [\n ...parts,\n { PartNumber: partNumber, ETag: etag },\n ]);\n yield* uploadPartsTotal(Effect.succeed(1));\n yield* partSizeHistogram(Effect.succeed(data.length));\n });\n\n // Process stream chunks\n yield* options.stream.pipe(\n Stream.runForEach((chunk) =>\n Effect.gen(function* () {\n // Update total bytes\n yield* Ref.update(totalBytesRef, (total) => total + chunk.length);\n\n // Get current buffer and append new chunk\n const currentBuffer = yield* Ref.get(bufferRef);\n const combined = new Uint8Array(\n currentBuffer.length + chunk.length,\n );\n combined.set(currentBuffer);\n combined.set(chunk, currentBuffer.length);\n\n // Extract full parts and keep remainder in buffer\n let offset = 0;\n while (combined.length - offset >= uploadPartSize) {\n const partData = combined.slice(offset, offset + uploadPartSize);\n yield* uploadBufferedPart(partData, false);\n offset += uploadPartSize;\n }\n\n // Store remaining data in buffer\n yield* Ref.set(bufferRef, combined.slice(offset));\n }),\n ),\n );\n\n // Upload any remaining data as final part\n const remainingBuffer = yield* Ref.get(bufferRef);\n if (remainingBuffer.length > 0) {\n yield* uploadBufferedPart(remainingBuffer, true);\n }\n\n // Get all parts and complete the upload\n const parts = yield* Ref.get(partsRef);\n const totalBytes = yield* Ref.get(totalBytesRef);\n\n if (parts.length === 0) {\n // No parts uploaded (empty stream) - abort and fail\n yield* s3Client.abortMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n });\n yield* activeUploadsGauge(Effect.succeed(-1));\n return yield* Effect.fail(\n new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 400,\n body: \"Cannot complete upload with no data\",\n details: \"The stream provided no 
data to upload\",\n }),\n );\n }\n\n // Sort parts by part number for completion\n parts.sort((a, b) => (a.PartNumber ?? 0) - (b.PartNumber ?? 0));\n\n yield* s3Client.completeMultipartUpload(\n {\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n },\n parts,\n );\n\n // Log completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const throughputBps =\n totalDurationMs > 0 ? (totalBytes * 1000) / totalDurationMs : 0;\n const averagePartSize =\n parts.length > 0 ? totalBytes / parts.length : undefined;\n\n yield* logS3UploadCompletion(fileId, {\n fileSize: totalBytes,\n totalDurationMs,\n partsCount: parts.length,\n averagePartSize,\n throughputBps,\n });\n\n yield* uploadSuccessTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n yield* fileSizeHistogram(Effect.succeed(totalBytes));\n\n yield* Effect.logInfo(\"Streaming write to S3 completed\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n total_bytes: totalBytes,\n parts_count: parts.length,\n duration_ms: totalDurationMs,\n }),\n );\n\n return {\n id: s3Key,\n size: totalBytes,\n path: s3Key,\n bucket: s3Client.bucket,\n url: `${deliveryUrl}/${s3Key}`,\n } satisfies StreamWriteResult;\n }).pipe(\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n yield* uploadErrorsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n return yield* Effect.fail(error);\n }),\n ),\n ),\n );\n\n return {\n bucket,\n create,\n remove,\n write,\n getUpload,\n read,\n readStream,\n writeStream,\n deleteExpired,\n getCapabilities,\n getChunkerConstraints,\n validateUploadStrategy,\n } as DataStore<UploadFile>;\n });\n}\n\n// Effect-based factory that uses services\nexport const s3Store = (options: S3StoreConfig) => {\n const {\n s3ClientConfig: { bucket, ...restS3ClientConfig },\n } = options;\n return createS3Store(options).pipe(\n Effect.provide(S3ClientLayer(restS3ClientConfig, bucket)),\n 
);\n};\n"],"mappings":"msBAEA,MAAa,EAAuB,GAC3B,GAAS,EAAM,OAAS,EAC3B,EAAM,QAAQ,EAAG,IAAM,GAAK,GAAG,MAAQ,GAAI,EAAE,CAC7C,EAGO,GACX,EACA,EACA,EACA,EACA,EAAgB,gBACL,CACX,IAAM,EAAO,GAAY,EACrBA,EAEJ,AASE,EATE,GAAQ,EAGQ,EACT,GAAQ,EAAoB,EAEnB,EAGA,KAAK,KAAK,EAAO,EAAkB,CAKvD,IAAM,EACJ,GAAY,EAAW,EACnB,EACA,KAAK,IAAI,EAAiB,EAAY,CAItC,EAAY,KAClB,OAAO,KAAK,KAAK,EAAgB,EAAU,CAAG,GAGnC,EAAW,GACf,GAAG,EAAG,OAUF,GACX,EACA,IACS,CACT,IAAM,EAAO,IAAI,KAAK,EAAU,CAChC,OAAO,IAAI,KAAK,EAAK,SAAS,CAAG,EAA+B,ECvDrD,GACX,EACA,EACA,EAAmC,EAAE,IAGrC,EAAO,QAAQC,EAAW,EAAW,EAAO,EAAQ,CAAC,CAE9C,EAAgB,SAAS,mBAAoB,EAAe,EAGxD,GACX,EACA,EACA,EAAmC,EAAE,GAGnC,OAAO,GAAU,UACjB,GACA,SAAU,GACV,OAAO,EAAM,MAAS,UACtB,CAAC,WAAY,YAAa,eAAe,CAAC,SAAS,EAAM,KAAK,EAE9D,EAAO,QACL,EAAO,WAAW,yBAAyB,EAAU,YAAY,CAAC,KAChE,EAAO,aAAa,CAClB,WAAY,EAAM,KAClB,GAAG,EACJ,CAAC,CACH,CACF,CACM,EAAgB,SAAS,iBAAiB,EAG5C,EAAc,EAAW,EAAO,EAAQ,CAGpC,EACX,GAcA,GAVE,OAAO,GAAU,UACjB,GACA,SAAU,GACV,OAAO,EAAM,MAAS,WACrB,EAAM,OAAS,gBAAkB,EAAM,OAAS,cAOjD,aAAiB,GACjB,EAAM,OACN,OAAO,EAAM,OAAU,UACvB,SAAU,EAAM,OAChB,OAAO,EAAM,MAAM,MAAS,WAC3B,EAAM,MAAM,OAAS,gBAAkB,EAAM,MAAM,OAAS,cCjDjE,SAAgB,EAAiB,EAA+B,CAO9D,GALI,aAAgB,gBAKhB,GAAQ,OAAO,GAAS,UAAY,cAAe,EACrD,OAAO,EAIT,GAAI,GAAQ,OAAO,GAAS,UAAY,SAAU,GAAQ,OAAQ,EAAM,CACtE,IAAM,EAAa,EAEnB,OAAO,IAAI,eAAe,CACxB,MAAM,EAAY,CAChB,EAAW,GAAG,OAAS,GAAU,CAC/B,EAAW,QAAQ,IAAI,WAAW,EAAM,CAAC,EACzC,CAEF,EAAW,GAAG,UAAa,CACzB,EAAW,OAAO,EAClB,CAEF,EAAW,GAAG,QAAU,GAAU,CAChC,EAAW,MAAM,EAAM,EACvB,EAEL,CAAC,CAIJ,MAAU,MACR,0BAA0B,OAAO,EAAK,gDACvC,CClCH,IAAa,EAAb,cAAqC,EAAQ,IAAI,kBAAkB,EAmEhE,AAAC,GAEJ,MAAa,GACX,EACA,IACG,CACH,IAAM,EAAW,IAAI,EAAG,EAAe,CACjC,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAKI,GAJM,MAAM,EAAS,UAAU,CACpC,OAAQ,EACR,IAAK,EACN,CAAC,EAC2B,KAAK,CAEpC,MAAQ,GAAU,EAAc,YAAa,EAAO,CAAE,MAAK,SAAQ,CAAC,CACrE,CAAC,CAEE,EAAc,GAClB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CAKF,OAJa,MAAM,EAAS,WAAW,CACrC,OAAQ,EACR,IAAK,EACN,CAAC,EACU,oBACL,EAAO,CACd,GAAI,aAAiB,EACnB,OAEF,MAAM,IAGV,MAAQ,GAAU,EAAc,aAAc,EAAO,CAAE,MAAK,SAAQ,CAAC,CACtE,CAAC,CAEE,GAAa,EAAa,IAC9B,EAAO,WAAW,CAChB,IAAK,UACc,MAAM,EAAS,UAAU,CACxC,OAAQ,EACR,IAAK,EACL,KAAM,EACP,CAAC,EACc,MAAQ,GAE1B,MAAQ,GACN,EAAc,YAAa,EAAO,CAAE,MAAK,SAAQ,KAAM,EAAK,OAAQ,CAAC,CACxE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,MAAM,EAAS,aAAa,CAC1B,OAAQ,EACR,IAAK,EACN,CAAC,EAEJ,MAAQ,GAAU,EAAc,eAAgB,EAAO,CAAE,MAAK,SAAQ,CAAC,CACxE,CAAC,CAmMJ,MAAO,CACL,SACA,YACA,aACA,YACA,eACA,cAvMqB,GACrB,EAAO,WAAW,CAChB,QACE,EAAS,cAAc,CACrB,OAAQ,EACR,OAAQ,CACN,QAAS,EAAK,IAAK,IAAS,CAAE,IAAK,EAAK,EAAE,CAC3C,CACF,CAAC,CACJ,MAAQ,GACN,EAAc,gBAAiB,EAAO,CAAE,KAAM,EAAK,OAAQ,SAAQ,CAAC,CACvE,CAAC,CA6LF,sBA3L6B,GAC7B,EACE,wBACA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAMC,EAAiD,CACrD,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACd,CAEG,EAAQ,cACV,EAAQ,YAAc,EAAQ,aAG5B,EAAQ,eACV,EAAQ,aAAe,EAAQ,cAGjC,IAAM,EAAM,MAAM,EAAS,sBAAsB,EAAQ,CAEzD,GAAI,CAAC,EAAI,SACP,MAAU,MAAM,yBAAyB,CAE3C,GAAI,CAAC,EAAI,IACP,MAAU,MAAM,mBAAmB,CAGrC,MAAO,CACL,SAAU,EAAI,SACd,OAAQ,EAAQ,OAChB,IAAK,EAAI,IACV,EAEH,MAAQ,GACN,EAAc,wBAAyB,EAAO,EAAQ,CACzD,CAAC,CACH,CAwJD,WArJA,GAEA,EACE,aACA,EAAO,WAAW,CAChB,QACE,EAAS,WAAW,CAClB,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,WAAY,EAAQ,WACpB,KAAM,EAAQ,KACf,CAAC,CACJ,MAAQ,GACN,EAAc,aAAc,EAAO,CACjC,UAAW,EAAQ,IACnB,YAAa,EAAQ,WACrB,UAAW,EAAQ,KAAK,OACxB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CAAC,KAAK,EAAO,IAAK,GAAa,EAAS,KAAe,CAAC,CAC3D,CAiID,yBA9HA,EACA,IAEA,EACE,0BACA,EAAO,WAAW,CAChB,QACE,EACG,wBAAwB,CACvB,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,gBAAiB,CACf,MAAO,EAAM,IAAK,IAAU,CAC1B,KAAM,EAAK,KACX,WAAY,EAAK,WAClB,EAAE,CACJ,CACF,CAAC,CACD,KAAM,GAAa,EAAS,SAAS,CAC1C,MAAQ,GACN
,EAAc,0BAA2B,EAAO,CAC9C,UAAW,EAAQ,IACnB,YAAa,EAAM,OACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CACH,CAoGD,qBAlG4B,GAC5B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,MAAM,EAAS,qBAAqB,CAClC,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SACnB,CAAC,EAEJ,MAAQ,GACN,EAAsB,uBAAwB,EAAO,CACnD,UAAW,EAAQ,IACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CAqFF,UAlFA,GAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAMC,EAAoC,CACxC,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,iBAAkB,EAAQ,iBAC3B,CAEK,EAAO,MAAM,EAAS,UAAU,EAAO,CAE7C,MAAO,CACL,MAAO,EAAK,OAAS,EAAE,CACvB,YAAa,EAAK,aAAe,GACjC,qBAAsB,EAAK,qBAC5B,EAEH,MAAQ,GACN,EAAc,YAAa,EAAO,CAChC,UAAW,EAAQ,IACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CA2DF,sBAzD4B,EAAoB,IAChD,EAAO,WAAW,CAChB,QACE,EAAS,qBAAqB,CAC5B,OAAQ,EACR,UAAW,EACX,eAAgB,EACjB,CAAC,CACJ,MAAQ,GACN,EAAc,uBAAwB,EAAO,CAAE,SAAQ,CAAC,CAC3D,CAAC,CAgDF,kBA9CyB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CAKF,OAAO,GAJM,MAAM,EAAS,UAAU,CACpC,OAAQ,EACR,IAAK,EAAQ,EAAG,CACjB,CAAC,EAC2B,KAAK,OAC3B,EAAO,CACd,GAAI,aAAiB,EACnB,OAEF,MAAM,IAGV,MAAQ,GACN,EAAc,oBAAqB,EAAO,CAAE,UAAW,EAAI,SAAQ,CAAC,CACvE,CAAC,CA6BF,sBA3B6B,GAAe,EAAW,EAAQ,EAAG,CAAC,CA4BnE,mBA1ByB,EAAY,IACrC,EAAU,EAAQ,EAAG,CAAE,EAAK,CAAC,KAC3B,EAAO,QACL,EAAO,QAAQ,2BAA2B,CAAC,KACzC,EAAO,aAAa,CAAE,UAAW,EAAI,CAAC,CACvC,CACF,CACF,CAoBD,qBAlB4B,GAAe,EAAa,EAAQ,EAAG,CAAC,CAmBrE,EAGU,GAAiB,EAAgC,IAC5D,EAAM,QAAQ,EAAiB,EAAoB,EAAgB,EAAO,CAAC,CCzTvE,EAAY,GAAmC,CACnD,GAAM,CAAE,KAAI,YAAa,EAEzB,GAAI,CAAC,EACH,OAAO,EAIT,IAAM,EAAW,EAAS,UAAY,EAAS,UAAY,EAAS,KAOpE,OALI,OAAO,GAAa,UAAY,EAAS,SAAS,IAAI,CAEjD,GAAG,IADQ,EAAS,UAAU,EAAS,YAAY,IAAI,CAAC,GAI1D,GAIT,SAAgB,EAAc,EAAuB,CACnD,GAAM,CACJ,cACA,WACA,cAAc,QACd,UAAU,GACV,oBAAoB,IACpB,2BAA2B,GAC3B,iCAAiC,IAAO,GAAK,GAAK,GAAK,EACvD,eAAgB,CAAE,WAChB,EAEJ,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAClB,EAAU,MAAO,EACjB,EAAoB,GAAY,EAAI,KAAO,KAE3C,EACJ,GAC2C,CAC3C,IAAM,EAAW,EAAW,QAAQ,SASpC,OARK,EAQE,EAAO,QAAQ,EAAS,CAPtB,EAAO,KACZ,EAAgB,SACd,mBACI,MAAM,yBAAyB,CACpC,CACF,EAKC,GACJ,EACA,EACA,IACG,CACH,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAOC,EACLC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAW,MAAO,EAAY,EAAW,CAEzC,EAAO,MAAO,EACjB,WAAW,CACV,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,aACA,OACD,CAAC,CACD,KACC,EAAO,MAAM,CACX,SAAU,EAAS,YAAY,WAAY,EAAI,CAAC,KAC9C,EAAS,UAAU,EAAS,OAAO,EAAE,CAAC,CACvC,CAED,MAAQ,GAAU,CAAC,EAAsB,EAAM,CAChD,CAAC,CACF,EAAO,SAAU,GACf,EAAO,WAAW,uBAAuB,CAAC,KACxC,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,cAAe,EAAM,QACrB,cAAe,UACf,UAAW,EAAK,OAChB,UAAW,EAAS,OACrB,CAAC,CACH,CACF,CACF,CAYH,OAVA,MAAOC,EAAiB,EAAO,QAAQ,EAAE,CAAC,CAC1C,MAAO,EAAO,QAAQ,6BAA6B,CAAC,KAClD,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,UAAW,EAAK,OACV,OACP,CAAC,CACH,CAEM,GACP,CACH,CAAC,KACA,EAAO,SAAS,kBAAkB,IAAc,CAC9C,WAAY,CACV,YAAa,EAAW,GACxB,qBAAsB,EACtB,mBAAoB,EAAK,OACzB,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAGG,GAAwB,EAAY,IACxC,EAAS,kBAAkB,EAAI,EAAK,CAEhC,EAA0B,GAC9B,EAAO,IAAI,WAAa,CACtB,IAAM,EAAiB,MAAO,EAAS,kBAAkB,EAAG,CAE5D,GAAI,CAAC,EACH,OAIF,IAAM,EAAS,EAAe,WAAW,CACnCC,EAAuB,EAAE,CAC3B,EAAqB,EAEzB,GAAI,CACF,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAO,EAAO,YAAc,EAAO,MAAM,CAAC,CAClE,GAAI,EAAM,MACV,EAAO,KAAK,EAAM,CAClB,GAAsB,EAAM,eAEtB,CACR,EAAO,aAAa,CAGtB,IAAM,EAAS,EAAO,aAAa,EAAO,CAE1C,MAAO,CACL,KAAM,EACN,SACD,EACD,CAEE,EAAwB,GAC5B,EAAS,qBAAqB,EAAG,CAE7B,EAAyB,GAC7B,EAAS,sBAAsB,EAAG,CAE9B,GAAY,EAAwB,IAA2B,CACnE,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAAY,EAAW,CAE/C,OAAO,MAAO,EAAS,wBACrB,CACE,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CACD,EACD,EACD,CAAC,KACD,EAAO,QAAUC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACvD,EAAO,SAAS,+BAAgC,CAC9C,WAAY,CACV,YAAa,EAAW,GACxB,qBAAsB,EAAM,OAC5B,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAGG,EAAS,GAA2B,CAC
xC,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAAY,EAAW,CAE/C,MAAO,EAAS,qBAAqB,CACnC,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CAAC,CAEF,MAAO,EAAS,cAAc,CAAC,EAAM,CAAC,EACtC,EAGE,GACJ,EACA,EACA,EACA,IAKA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAS,UAAU,CACvC,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,mBACD,CAAC,CAEE,EAAQ,EAAO,MAEnB,GAAI,EAAO,YAAa,CACtB,IAAM,EAAO,MAAO,EAClB,EACA,EACA,EACA,EAAO,qBACR,CACD,EAAQ,CAAC,GAAG,EAAO,GAAG,EAAK,MAAM,CAOnC,OAJK,GACH,EAAM,MAAM,EAAG,KAAO,EAAE,YAAc,IAAM,EAAE,YAAc,GAAG,CAG1D,CAAE,YAAa,GAAM,QAAO,EACnC,CAAC,KACD,EAAO,SAAU,GACX,EAAsB,EAAM,CACvB,EAAO,WACZ,uCACD,CAAC,KACA,EAAO,aAAa,CAClB,UAAW,EACX,WAAY,EAAM,KACnB,CAAC,CACF,EAAO,GAAG,CAAE,YAAa,GAAO,MAAO,EAAE,CAAE,CAAC,CAC7C,CAEI,EAAO,KAAK,EAAM,CACzB,CACH,CAEG,GAAiB,EAAY,IACjC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAW,MAAO,EAAQ,IAAI,EAAG,CACjC,EAAW,MAAO,EAAY,EAAS,CAG7C,OAAO,MAAO,EAFA,EAAS,EAAS,CAI9B,EACA,EACA,EACD,EACD,CAEE,GAAoB,EAAoB,IAC5C,EAAO,IAAI,WAAa,CACtB,GAAI,CAACC,EACH,MAAO,GAGT,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAO,GAAG,CAC1C,EAAW,EAAW,QAAQ,SAUpC,OATK,IAIL,MAAO,EAAQ,IAAI,EAAO,GAAI,CAC5B,GAAG,EACH,QAAS,CAAE,GAAG,EAAW,QAAS,WAAU,CAC7C,CAAC,EANO,GAST,CAEE,EAAc,GAClB,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,QAAQ,iBAAiB,CAAC,KACtC,EAAO,aAAa,CAAE,UAAW,EAAI,CAAC,CACvC,CACD,MAAO,EAAQ,OAAO,EAAG,EACzB,CAEE,EAAyB,GAAuB,CACpD,IAAM,EAAQ,EAAS,EAAO,CAE9B,OAAO,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAO,QAAQ,gCAAgC,CAAC,KACrD,EAAO,aAAa,CAAE,UAAW,EAAO,GAAI,CAAC,CAC9C,CAED,IAAM,EAAgB,MAAO,EAAS,sBAAsB,CAC1D,OAAQ,EAAS,OACjB,IAAK,EACL,SAAU,GACV,YAAa,EAAO,UAAU,aAAa,UAAU,CACrD,aAAc,EAAO,UAAU,cAAc,UAAU,CACxD,CAAC,CAEI,EAAgB,CACpB,GAAG,EACH,QAAS,CACP,GAAG,EAAO,QACV,KAAM,EAAc,IACpB,SAAU,EAAc,SACxB,OAAQ,EAAc,OACvB,CACD,IAAK,GAAG,EAAY,GAAG,IACxB,CAeD,OAbA,MAAO,EAAQ,IAAI,EAAO,GAAI,EAAc,CAE5C,MAAO,EAAO,QAAQ,2BAA2B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EAAO,GAClB,aAAc,EAAc,QAAQ,SACpC,OAAQ,EACT,CAAC,CACH,CAED,MAAOC,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAkB,EAAO,QAAQ,EAAO,MAAQ,EAAE,CAAC,CAEnD,GACP,CAAC,KACD,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,YAAa,EAAO,GACpB,cAAe,EAAO,MAAQ,EAC9B,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAQG,EAAU,GACP,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAO,QAAQ,gCAAgC,CAAC,KACrD,EAAO,aAAa,CAAE,UAAW,EAAO,GAAI,CAAC,CAC9C,CACD,IAAM,EAAgB,MAAO,EAAsB,EAAO,CAU1D,OATA,MAAO,EAAQ,IAAI,EAAO,GAAI,EAAc,CAC5C,MAAO,EAAO,QAAQ,2BAA2B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EAAO,GAClB,aAAc,EAAc,QAAQ,SACrC,CAAC,CACH,CACD,MAAOD,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAEtC,GACP,CAAC,KACD,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,YAAa,EAAO,GACpB,cAAe,EAAO,MAAQ,EAC9B,YAAa,EACd,CACF,CAAC,CACH,CAGG,EAAU,GACd,EAAO,IAAI,WAAa,CAEtB,MAAO,EADY,MAAO,EAAQ,IAAI,EAAG,CACjB,CACxB,MAAO,EAAW,EAAG,EACrB,CAEE,GACJ,EACA,IAEAE,EACE,EAAQ,QACRR,EACES,EACA,EAAO,IAAI,WAAa,CACtB,GAAM,CACJ,OAAQ,EACR,UACA,OAAQ,GACN,EACE,CAAE,cAAe,EAGjB,EAAY,KAAK,KAAK,CAG5B,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAQ5C,GAAM,CACJ,aACA,iBACA,SACA,OACA,oBAXoB,MAAO,GAC3B,EACA,EACA,EACD,CAWK,EACJ,GACA,EACE,EAAW,KACX,EACA,EACA,EACD,CAGH,MAAO,EAAO,QAAQ,qBAAqB,CAAC,KAC1C,EAAO,aAAa,CAClB,UAAW,EACX,mBAAoB,EACpB,qBAAsB,EACpB,EAAW,KACX,EACA,EACA,EACD,CACD,gBAAiB,EACjB,iBAAkB,EACnB,CAAC,CACH,CAaD,IAAM,EAAY,GAXI,MAAO,GAC3B,EACA,EACA,EACA,EACA,EACA,EACA,EACA,EACD,EAQD,OAJI,EAAW,OAAS,IACtB,MAAO,GAAa,EAAS,EAAY,EAAU,EAG9C,GACP,CAAC,KAAK,EAAO,SAASA,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CAAC,CAChE,CACF,CAEG,EAAa,GACjB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAG,CAEnC,CAAE,QAAO,eAAgB,MAAO,EAAc,EAAG,CACvD,GAAI,CAAC,EACH,MAAO,CACL,GAAG,EACH,OAAQ,EAAW,KACnB,KAAM,EAAW,KAClB,CAGH,IAAM,EAAS,EAAoB,EAAM,CACnC,EAAqB,MAAO,EAAsB,EAAG,CAE3D,MAAO,CACL,GAAG,EACH,OAAQ,GAAU,GAAsB,GACxC,KAAM,EAAW,KACjB,QAAS,EAAW,QACrB,
EACD,CAQE,IACJ,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAO,CACvC,CAAE,SAAU,MAAO,EAAc,EAAO,CAMxC,GAHJ,EAAM,OAAS,GAAK,EAAM,EAAM,OAAS,GAAG,WACvC,EAAM,EAAM,OAAS,GAAG,YAAc,EACvC,GAC8B,EAI9B,EACJ,EAAM,OAAS,GAAK,EAAM,GAAG,KAAO,EAAM,GAAG,KAAO,KAGtD,GAAI,GAAoB,EAAM,OAAS,EAAG,CACxC,IAAM,EAAmB,EACtB,MAAM,EAAG,GAAG,CACZ,KAAM,GAAS,EAAK,OAAS,EAAiB,CAC7C,IACF,MAAO,EAAO,WACZ,sDACD,CAAC,KACA,EAAO,aAAa,CAClB,UAAW,EACX,cAAe,EACf,kBAAmB,EAAiB,WACpC,kBAAmB,EAAiB,KACrC,CAAC,CACH,EAIL,IAAM,EAAiB,MAAO,EAAuB,EAAO,CAE5D,GAAI,EAAgB,CAClB,MAAO,EAAqB,EAAO,CACnC,IAAM,EAAS,EAAgB,EAAe,KACxC,EAAO,EAAe,OAAO,KAAK,EAAO,OAAO,EAAY,CAAC,CACnE,MAAO,CACL,aACA,iBACA,SACA,mBAAoB,EAAe,KACnC,OACA,mBACD,MAED,MAAO,CACL,aACA,iBACA,OAAQ,EACR,mBAAoB,EACpB,KAAM,EACN,mBACD,EAEH,CAEE,IACJ,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,GAAM,CAAE,SAAU,MAAO,EAAc,EAAO,CAG9C,MAAO,EAAO,QAAQ,0CAA0C,CAAC,KAC/D,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EAAM,OACnB,WAAY,EAAM,KAAK,EAAM,KAAW,CACtC,YAAa,EAAK,WAClB,KAAM,EAAK,KACX,KAAM,EAAK,KACX,cAAe,IAAU,EAAM,OAAS,EACzC,EAAE,CACJ,CAAC,CACH,CAED,MAAO,EAAS,EAAY,EAAM,CAClC,MAAO,EAAiB,EAAY,EAAQ,CAK5C,IAAM,EADU,KAAK,KAAK,CACQ,EAC5B,EAAW,EAAW,MAAQ,EAC9B,EACJ,EAAkB,EAAK,EAAW,IAAQ,EAAkB,EAGxD,EACJ,EAAM,OAAS,EACX,EAAM,QAAQ,EAAK,IAAS,GAAO,EAAK,MAAQ,GAAI,EAAE,CACtD,EAAM,OACN,IAAA,GAEN,MAAO,EAAsB,EAAQ,CACnC,WACA,kBACA,WAAY,EAAM,OAClB,kBACA,gBACD,CAAC,EACF,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CACtB,MAAOC,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAC3C,MAAO,EAAO,SAAS,0BAA0B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EACX,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,EACD,CACH,CACF,CAEG,OACJ,EAAO,IAAI,WAAa,CACtB,GAAI,IAAmC,EACrC,MAAO,GAGT,IAAIC,EACAC,EACA,EAAc,GACd,EAAU,EAEd,KAAO,GAAa,CAClB,IAAM,EAAe,MAAO,EAAS,qBACnC,EACA,EACD,CAEK,EACJ,EAAa,SAAS,OAAQ,GAAoB,CAChD,IAAM,EAAgB,EAAgB,UACtC,OACE,GACA,KAAK,KAAK,CACR,EACE,EAAc,aAAa,CAC3B,EACD,CAAC,SAAS,EAEf,EAAI,EAAE,CAEJ,EAAkB,EACrB,OAAQ,GACA,CAAC,CAAC,EAAO,IAChB,CACD,IAAK,GAAW,EAAO,IAAI,CAE1B,EAAgB,OAAS,IAC3B,MAAO,EAAS,cAAc,EAAgB,CAG9C,MAAO,EAAO,QAAQ,EAAiB,GAC9B,EAAO,IAAI,WAAa,CACzB,CAAC,EAAO,KAAO,CAAC,EAAO,WAG3B,MAAO,EAAS,qBAAqB,CACnC,SACA,IAAK,EAAO,IACZ,SAAU,EAAO,SAClB,CAAC,GAEF,CACF,CAEF,GAAW,EAAgB,QAG7B,EAAc,EAAa,aAAe,GAEtC,IACF,EAAY,EAAa,cACzB,EAAiB,EAAa,oBAIlC,OAAO,GACP,CAIE,GACH,GAEC,GAEO,EAAO,MAAqB,GAAS,CAC1C,IAAI,EAAS,IAAI,WACb,EAAa,EACb,EAAsB,EAEpB,GAAa,EAAkB,EAAe,KAAU,CAE5D,EAAO,QACL,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EACb,WAAY,EAAK,OACjB,cAAe,EACf,eAAgB,EAChB,sBAAuB,EAAsB,EAAK,OACnD,CAAC,CACH,CACF,CACD,EAAK,OAAO,CACV,WAAY,IACZ,OACA,KAAM,EAAK,OACZ,CAAC,EAGE,EAAgB,GAAwB,CAE5C,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAQ,OAAO,CAQ/D,IAPA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,EAAS,EAAO,OAAO,CACpC,EAAS,EACT,GAAuB,EAAQ,OAIxB,EAAO,QAAU,GAAW,CACjC,IAAM,EAAQ,EAAO,MAAM,EAAG,EAAU,CACxC,EAAS,EAAO,MAAM,EAAU,CAChC,EAAU,EAAO,GAAM,GAK3B,EAAO,QACL,EAAO,KACL,EAAO,WAAY,GACjB,EAAO,SAAW,EAAa,EAAM,CAAC,CACvC,CACD,EAAO,YACL,EAAO,SAAW,CAGZ,EAAO,OAAS,GAClB,EAAU,EAAQ,GAAK,CAEzB,EAAK,KAAK,EACV,CACH,CACD,EAAO,SAAU,GAAU,EAAO,SAAW,EAAK,KAAK,EAAM,CAAC,CAAC,CAChE,CACF,EACD,CAOA,IACH,EAA2C,EAAgB,IACrD,GACA,EAEE,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAyB,MAAO,EAAI,KAAK,EAAc,CAE7D,OAAO,EAAO,KACZ,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAKtB,EAJiB,MAAO,EAAI,aAC1B,EACC,GAAU,EAAQ,EAAM,OAC1B,CACmB,EACpB,CACH,CACF,EACD,CAAC,KAAK,EAAO,OAAO,CAhBE,EAmBtB,IACJ,EACA,EACA,EACA,EACA,EACA,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,QAAQ,wBAAwB,CAAC,KAC7C,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,UAAW,EAAW,KACtB,UAAW,EACX,cAAeC,EAChB,CAAC,CACH,CAQD,IAAM,EAAcC,EAAW,KAE7B,GAAyB,EAAY,EAAW,CAEhD,GAAoB,EAAe,CACpC,CAGK,EAAsB,MAAO,EAAI,KAAK,EAAW,CACjD,EAAwB,MAAO,EAAI,KAAK,EAAE,CAG
1C,EAAe,GACnB,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAmB,MAAO,EAAI,aAClC,EACC,GAAW,EAAS,EAAU,KAChC,CACK,EAAc,IAAqB,EAAW,MAAQ,GAE5D,MAAO,EAAO,SAAS,mBAAmB,CAAC,KACzC,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,kBAAmB,EACnB,UAAW,EAAW,KACtB,WAAY,EAAU,KACtB,cAAe,EAChB,CAAC,CACH,CAED,IAAM,EACJ,EAAwB,EAAU,WAAa,EAE7C,EAAU,KAAO,IACnB,MAAO,EAAO,KACZ,EAAgB,SACd,mBACI,MACF,aAAa,EAAU,KAAK,4BAA4B,IACzD,CACF,CACF,EAKC,EAAU,MAAQD,GAAe,GACnC,MAAO,EAAO,SAAS,4BAA4B,CAAC,KAClD,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,WAAY,EAAU,KACtB,cAAeA,EACf,cAAe,EAChB,CAAC,CACH,CACD,MAAO,EAAW,EAAY,EAAU,KAAM,EAAiB,CAC/D,MAAOE,EAAkB,EAAO,QAAQ,EAAU,KAAK,CAAC,EAGxD,MAAO,EAAqB,EAAW,GAAI,EAAU,KAAK,CAG5D,MAAO,EAAI,OACT,EACC,GAAU,EAAQ,EAAU,KAC9B,EAKD,CAQJ,OALA,MAAO,EAAY,KACjB,EAAO,WAAY,GAAc,EAAY,EAAU,CAAC,CACxD,EAAO,gBAAgBC,EAAyB,CACjD,CAEM,MAAO,EAAI,IAAI,EAAsB,EAC5C,CAEE,OAAgD,CACpD,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,sBAAuB,GACvB,uBAAwB,GACxB,qBAAsB,EACtB,aAAc,EACd,aAAc,WACd,SAAU,EACV,iBAAkB,EAClB,sBAAuB,GACvB,2BAA4B,GAC5B,kBAAmB,IAAA,GACpB,EAEK,QAA+B,CACnC,aAAc,EACd,aAAc,WACd,iBAAkB,EAClB,sBAAuB,GACxB,EAEK,GACJ,GACkC,CAClC,IAAM,EAAe,GAAiB,CAChC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CACJ,OAAO,EAAO,QAAQ,EAAO,EAGzB,GAAsB,GAAqC,CAC/D,IAAM,EAAS,IAAI,WAAW,EAAO,QAAQ,EAAG,IAAM,EAAI,EAAE,OAAQ,EAAE,CAAC,CACnE,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAElB,OAAO,GAGH,GAAgB,KACpB,IACwB,CACxB,IAAM,EAAS,EAAO,WAAW,CAC3Bd,EAAuB,EAAE,CAC/B,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAM,EAAO,MAAM,CAC3C,GAAI,EAAM,MACV,EAAO,KAAK,EAAM,CAEpB,OAAO,GAAmB,EAAO,EAiVnC,MAAO,CACL,SACA,SACA,SACA,QACA,YACA,KApVY,GACZ,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAG,CAErC,GADA,QAAQ,IAAI,EAAO,CACf,CAAC,EAAO,GACV,OAAO,MAAO,EAAO,KACnB,EAAgB,SACd,kBACI,MAAM,0BAA0B,CACrC,CACF,CAEH,IAAM,EAAQ,EAAS,EAAO,CACxB,EAAS,MAAO,EAAS,UAAU,EAAM,CAC/C,OAAO,MAAO,EAAO,YAAc,GAAc,EAAO,CAAC,EACzD,CAsUF,YA5TkB,EAAY,IAC9B,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAG,CACrC,GAAI,CAAC,EAAO,GACV,OAAO,MAAO,EAAO,KACnB,EAAgB,SACd,kBACI,MAAM,0BAA0B,CACrC,CACF,CAIH,IAAM,EAAkB,CACtB,GAAG,EACH,GAAGe,EACJ,CAEK,EAAQ,EAAS,EAAO,CACxB,EAAY,MAAO,EAAS,UAAU,EAAM,CAGlD,OAAO,EAAO,MAAoC,GAAS,CACzD,IAAM,EAAS,EAAU,WAAW,CAC9B,EAAY,EAAgB,UAC9B,EAAS,IAAI,WA+CjB,OA7CqB,SAAY,CAC/B,GAAI,CACF,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAM,EAAO,MAAM,CAE3C,GAAI,EAAM,CAEJ,EAAO,OAAS,GAClB,EAAK,OAAO,EAAO,CAErB,EAAK,KAAK,CACV,OAGF,GAAI,EAAO,CAET,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAM,OAAO,CAM7D,IALA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,EAAO,EAAO,OAAO,CAClC,EAAS,EAGF,EAAO,QAAU,GAAW,CACjC,IAAM,EAAQ,EAAO,MAAM,EAAG,EAAU,CACxC,EAAS,EAAO,MAAM,EAAU,CAChC,EAAK,OAAO,EAAM,UAIjB,EAAO,CACd,EAAK,KACH,IAAI,EAAgB,CAClB,KAAM,kBACN,OAAQ,IACR,KAAM,kCACN,QAAS,0BAA0B,OAAO,EAAM,GACjD,CAAC,CACH,KAKS,CAGP,EAAO,SAAW,CACvB,EAAO,aAAa,EACpB,EACF,EACF,CAiPF,aAtOA,EACA,IAEAlB,EACES,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CACtB,EAAQ,EAEd,MAAO,EAAO,QAAQ,iCAAiC,CAAC,KACtD,EAAO,aAAa,CAClB,UAAW,EACX,OAAQ,EACR,UAAW,EAAQ,SACpB,CAAC,CACH,CAED,MAAOH,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOI,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAG5C,IAAM,EAAiB,EAAQ,SAC3B,EACE,EAAQ,SACR,EACA,EACA,EACD,CACD,EAUE,GAPgB,MAAO,EAAS,sBAAsB,CAC1D,OAAQ,EAAS,OACjB,IAAK,EACL,SAAU,GACV,YAAa,EAAQ,YACtB,CAAC,EAE6B,SAE/B,MAAO,EAAO,QAAQ,+CAA+C,CAAC,KACpE,EAAO,aAAa,CAClB,UAAW,EACX,aAAc,EACd,OAAQ,EACR,UAAW,EACZ,CAAC,CACH,CAGD,IAAM,EAAW,MAAO,EAAI,KAAiB,EAAE,CAAC,CAC1C,EAAgB,MAAO,EAAI,KAAK,EAAE,CAClC,EAAgB,MAAO,EAAI,KAAK,EAAE,CAClC,EAAY,MAAO,EAAI,KAAK,IAAI,WAAc,CAG9C,GAAsB,EAAkB,IAC5C,EAAO,IAAI,WAAa,CAMtB,GALI,EAAK,SAAW,GAKhB,EAAK,OAAS,GAAe,CAAC,EA
ChC,OAGF,IAAM,EAAa,MAAO,EAAI,aAC5B,EACC,GAAM,EAAI,EACZ,CAED,MAAO,EAAO,SAAS,6BAA6B,CAAC,KACnD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,UAAW,EAAK,OAChB,cAAe,EAChB,CAAC,CACH,CAED,IAAM,EAAO,MAAO,EACjB,WAAW,CACV,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,aACA,OACD,CAAC,CACD,KACC,EAAO,MAAM,CACX,SAAU,EAAS,YAAY,WAAY,EAAI,CAAC,KAC9C,EAAS,UAAU,EAAS,OAAO,EAAE,CAAC,CACvC,CACD,MAAQ,GAAU,CAAC,EAAsB,EAAM,CAChD,CAAC,CACH,CAEH,MAAO,EAAI,OAAO,EAAW,GAAU,CACrC,GAAGS,EACH,CAAE,WAAY,EAAY,KAAM,EAAM,CACvC,CAAC,CACF,MAAOjB,EAAiB,EAAO,QAAQ,EAAE,CAAC,CAC1C,MAAOc,EAAkB,EAAO,QAAQ,EAAK,OAAO,CAAC,EACrD,CAGJ,MAAO,EAAQ,OAAO,KACpB,EAAO,WAAY,GACjB,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAI,OAAO,EAAgB,GAAU,EAAQ,EAAM,OAAO,CAGjE,IAAM,EAAgB,MAAO,EAAI,IAAI,EAAU,CACzC,EAAW,IAAI,WACnB,EAAc,OAAS,EAAM,OAC9B,CACD,EAAS,IAAI,EAAc,CAC3B,EAAS,IAAI,EAAO,EAAc,OAAO,CAGzC,IAAI,EAAS,EACb,KAAO,EAAS,OAAS,GAAU,GAEjC,MAAO,EADU,EAAS,MAAM,EAAQ,EAAS,EAAe,CAC5B,GAAM,CAC1C,GAAU,EAIZ,MAAO,EAAI,IAAI,EAAW,EAAS,MAAM,EAAO,CAAC,EACjD,CACH,CACF,CAGD,IAAM,EAAkB,MAAO,EAAI,IAAI,EAAU,CAC7C,EAAgB,OAAS,IAC3B,MAAO,EAAmB,EAAiB,GAAK,EAIlD,IAAM,EAAQ,MAAO,EAAI,IAAI,EAAS,CAChC,EAAa,MAAO,EAAI,IAAI,EAAc,CAEhD,GAAI,EAAM,SAAW,EAQnB,OANA,MAAO,EAAS,qBAAqB,CACnC,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CAAC,CACF,MAAON,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACtC,MAAO,EAAO,KACnB,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,sCACN,QAAS,wCACV,CAAC,CACH,CAIH,EAAM,MAAM,EAAG,KAAO,EAAE,YAAc,IAAM,EAAE,YAAc,GAAG,CAE/D,MAAO,EAAS,wBACd,CACE,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CACD,EACD,CAID,IAAM,EADU,KAAK,KAAK,CACQ,EAC5B,EACJ,EAAkB,EAAK,EAAa,IAAQ,EAAkB,EAC1D,EACJ,EAAM,OAAS,EAAI,EAAa,EAAM,OAAS,IAAA,GAuBjD,OArBA,MAAO,EAAsB,EAAQ,CACnC,SAAU,EACV,kBACA,WAAY,EAAM,OAClB,kBACA,gBACD,CAAC,CAEF,MAAON,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOM,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAC7C,MAAOH,EAAkB,EAAO,QAAQ,EAAW,CAAC,CAEpD,MAAO,EAAO,QAAQ,kCAAkC,CAAC,KACvD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,YAAa,EAAM,OACnB,YAAa,EACd,CAAC,CACH,CAEM,CACL,GAAI,EACJ,KAAM,EACN,KAAM,EACN,OAAQ,EAAS,OACjB,IAAK,GAAG,EAAY,GAAG,IACxB,EACD,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAGtB,OAFA,MAAOI,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAC3C,MAAOD,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACtC,MAAO,EAAO,KAAK,EAAM,EAChC,CACH,CACF,CACF,CAWD,iBACA,kBACA,yBACA,0BACD,EACD,CAIJ,MAAa,EAAW,GAA2B,CACjD,GAAM,CACJ,eAAgB,CAAE,SAAQ,GAAG,IAC3B,EACJ,OAAO,EAAc,EAAQ,CAAC,KAC5B,EAAO,QAAQ,EAAc,EAAoB,EAAO,CAAC,CAC1D"}
+
{"version":3,"file":"index.mjs","names":["optimalPartSize: number","logS3Error","request: AWS.CreateMultipartUploadCommandInput","params: AWS.ListPartsCommandInput","withTimingMetrics","partUploadDurationHistogram","uploadPartsTotal","chunks: Uint8Array[]","uploadSuccessTotal","useTags","uploadRequestsTotal","fileSizeHistogram","withUploadMetrics","uploadDurationHistogram","activeUploadsGauge","uploadErrorsTotal","keyMarker: string | undefined","uploadIdMarker: string | undefined","minPartSize","readStream","partSizeHistogram","maxConcurrentPartUploads","config","parts"],"sources":["../src/utils/calculations.ts","../src/utils/error-handling.ts","../src/utils/stream-adapter.ts","../src/services/s3-client.service.ts","../src/s3-store.ts"],"sourcesContent":["import type AWS from \"@aws-sdk/client-s3\";\n\nexport const calcOffsetFromParts = (parts?: Array<AWS.Part>): number => {\n return parts && parts.length > 0\n ? parts.reduce((a, b) => a + (b?.Size ?? 0), 0)\n : 0;\n};\n\nexport const calcOptimalPartSize = (\n initSize: number | undefined,\n preferredPartSize: number,\n minPartSize: number,\n maxMultipartParts: number,\n maxUploadSize = 5_497_558_138_880, // 5TiB\n): number => {\n const size = initSize ?? maxUploadSize;\n let optimalPartSize: number;\n\n if (size <= preferredPartSize) {\n // For files smaller than preferred part size, use the file size\n // but ensure it meets S3's minimum requirements for multipart uploads\n optimalPartSize = size;\n } else if (size <= preferredPartSize * maxMultipartParts) {\n // File fits within max parts limit using preferred part size\n optimalPartSize = preferredPartSize;\n } else {\n // File is too large for preferred part size, calculate minimum needed\n optimalPartSize = Math.ceil(size / maxMultipartParts);\n }\n\n // Ensure we respect minimum part size for multipart uploads\n // Exception: if the file is smaller than minPartSize, use the file size directly\n const finalPartSize =\n initSize && initSize < minPartSize\n ? 
optimalPartSize // Single part upload for small files\n : Math.max(optimalPartSize, minPartSize); // Enforce minimum for multipart\n\n // Round up to ensure consistent part sizes and align to reasonable boundaries\n // This helps ensure all parts except the last one will have exactly the same size\n const alignment = 1024; // 1KB alignment for better consistency\n return Math.ceil(finalPartSize / alignment) * alignment;\n};\n\nexport const partKey = (id: string): string => {\n return `${id}.part`;\n};\n\nexport const shouldUseExpirationTags = (\n expirationPeriodInMilliseconds: number,\n useTags: boolean,\n): boolean => {\n return expirationPeriodInMilliseconds !== 0 && useTags;\n};\n\nexport const getExpirationDate = (\n createdAt: string,\n expirationPeriodInMilliseconds: number,\n): Date => {\n const date = new Date(createdAt);\n return new Date(date.getTime() + expirationPeriodInMilliseconds);\n};\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport { trackS3Error as logS3Error } from \"@uploadista/observability\";\nimport { Effect } from \"effect\";\n\nexport const handleS3Error = (\n operation: string,\n error: unknown,\n context: Record<string, unknown> = {},\n): UploadistaError => {\n // Log the error with context\n Effect.runSync(logS3Error(operation, error, context));\n\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", error as Error);\n};\n\nexport const handleS3NotFoundError = (\n operation: string,\n error: unknown,\n context: Record<string, unknown> = {},\n): UploadistaError => {\n if (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n typeof error.code === \"string\" &&\n [\"NotFound\", \"NoSuchKey\", \"NoSuchUpload\"].includes(error.code)\n ) {\n Effect.runSync(\n Effect.logWarning(`File not found during ${operation} operation`).pipe(\n Effect.annotateLogs({\n error_code: error.code,\n ...context,\n }),\n ),\n );\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n\n return handleS3Error(operation, error, context);\n};\n\nexport const isUploadNotFoundError = (\n error: unknown,\n): error is { code: \"NoSuchUpload\" | \"NoSuchKey\" } => {\n // Check direct error code\n if (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n typeof error.code === \"string\" &&\n (error.code === \"NoSuchUpload\" || error.code === \"NoSuchKey\")\n ) {\n return true;\n }\n\n // Check if it's an UploadistaError wrapping an AWS error with code\n if (\n error instanceof UploadistaError &&\n error.cause &&\n typeof error.cause === \"object\" &&\n \"code\" in error.cause &&\n typeof error.cause.code === \"string\" &&\n (error.cause.code === \"NoSuchUpload\" || error.cause.code === \"NoSuchKey\")\n ) {\n return true;\n }\n\n return false;\n};\n","/**\n * Stream adapter utility to handle AWS SDK Body responses across different environments.\n *\n * In Node.js environments, AWS SDK returns Node.js Readable streams.\n * In Cloudflare Workers, it returns Web Streams API ReadableStreams.\n * This utility normalizes both to Web Streams API ReadableStreams.\n */\n\n/**\n * Converts various stream types to a Web Streams API ReadableStream\n * @param body The body from AWS SDK response (could be Node.js Readable or Web ReadableStream)\n * @returns A Web Streams API ReadableStream\n */\nexport function toReadableStream(body: unknown): ReadableStream {\n // If it's already a Web ReadableStream, return as-is\n if (body instanceof ReadableStream) {\n return body;\n }\n\n // If it has a getReader method, it's likely already a 
ReadableStream\n if (body && typeof body === \"object\" && \"getReader\" in body) {\n return body as ReadableStream;\n }\n\n // Check if it's a Node.js Readable stream\n if (body && typeof body === \"object\" && \"pipe\" in body && \"on\" in body) {\n const nodeStream = body as NodeJS.ReadableStream;\n\n return new ReadableStream({\n start(controller) {\n nodeStream.on(\"data\", (chunk) => {\n controller.enqueue(new Uint8Array(chunk));\n });\n\n nodeStream.on(\"end\", () => {\n controller.close();\n });\n\n nodeStream.on(\"error\", (error) => {\n controller.error(error);\n });\n },\n });\n }\n\n // If it's some other type, try to handle it gracefully\n throw new Error(\n `Unsupported body type: ${typeof body}. Expected ReadableStream or Node.js Readable.`,\n );\n}\n","import type AWS from \"@aws-sdk/client-s3\";\nimport type { S3ClientConfig } from \"@aws-sdk/client-s3\";\nimport { NoSuchKey, NotFound, S3 } from \"@aws-sdk/client-s3\";\nimport type { UploadistaError } from \"@uploadista/core/errors\";\nimport { withS3ApiMetrics } from \"@uploadista/observability\";\nimport { Context, Effect, Layer } from \"effect\";\nimport type { MultipartUploadInfo, S3OperationContext } from \"../types\";\nimport {\n handleS3Error,\n handleS3NotFoundError,\n partKey,\n toReadableStream,\n} from \"../utils\";\n\nexport class S3ClientService extends Context.Tag(\"S3ClientService\")<\n S3ClientService,\n {\n readonly bucket: string;\n\n // Basic S3 operations\n readonly getObject: (\n key: string,\n ) => Effect.Effect<ReadableStream, UploadistaError>;\n readonly headObject: (\n key: string,\n ) => Effect.Effect<number | undefined, UploadistaError>;\n readonly putObject: (\n key: string,\n body: Uint8Array,\n ) => Effect.Effect<string, UploadistaError>;\n readonly deleteObject: (\n key: string,\n ) => Effect.Effect<void, UploadistaError>;\n readonly deleteObjects: (\n keys: string[],\n ) => Effect.Effect<AWS.DeleteObjectsCommandOutput, UploadistaError>;\n\n // Multipart upload operations\n readonly createMultipartUpload: (\n context: S3OperationContext,\n ) => Effect.Effect<MultipartUploadInfo, UploadistaError>;\n readonly uploadPart: (\n context: S3OperationContext & { partNumber: number; data: Uint8Array },\n ) => Effect.Effect<string, UploadistaError>;\n readonly completeMultipartUpload: (\n context: S3OperationContext,\n parts: Array<AWS.Part>,\n ) => Effect.Effect<string | undefined, UploadistaError>;\n readonly abortMultipartUpload: (\n context: S3OperationContext,\n ) => Effect.Effect<void, UploadistaError>;\n readonly listParts: (\n context: S3OperationContext & { partNumberMarker?: string },\n ) => Effect.Effect<\n {\n parts: AWS.Part[];\n isTruncated: boolean;\n nextPartNumberMarker?: string;\n },\n UploadistaError\n >;\n readonly listMultipartUploads: (\n keyMarker?: string,\n uploadIdMarker?: string,\n ) => Effect.Effect<AWS.ListMultipartUploadsCommandOutput, UploadistaError>;\n\n // Incomplete part operations\n readonly getIncompletePart: (\n id: string,\n ) => Effect.Effect<ReadableStream | undefined, UploadistaError>;\n readonly getIncompletePartSize: (\n id: string,\n ) => Effect.Effect<number | undefined, UploadistaError>;\n readonly putIncompletePart: (\n id: string,\n data: Uint8Array,\n ) => Effect.Effect<string, UploadistaError>;\n readonly deleteIncompletePart: (\n id: string,\n ) => Effect.Effect<void, UploadistaError>;\n }\n>() {}\n\nexport const makeS3ClientService = (\n s3ClientConfig: S3ClientConfig,\n bucket: string,\n) => {\n const s3Client = new S3(s3ClientConfig);\n const 
getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const data = await s3Client.getObject({\n Bucket: bucket,\n Key: key,\n });\n return toReadableStream(data.Body);\n },\n catch: (error) => handleS3Error(\"getObject\", error, { key, bucket }),\n });\n\n const headObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n const data = await s3Client.headObject({\n Bucket: bucket,\n Key: key,\n });\n return data.ContentLength;\n } catch (error) {\n if (error instanceof NotFound) {\n return undefined;\n }\n throw error;\n }\n },\n catch: (error) => handleS3Error(\"headObject\", error, { key, bucket }),\n });\n\n const putObject = (key: string, body: Uint8Array) =>\n Effect.tryPromise({\n try: async () => {\n const response = await s3Client.putObject({\n Bucket: bucket,\n Key: key,\n Body: body,\n });\n return response.ETag || \"\";\n },\n catch: (error) =>\n handleS3Error(\"putObject\", error, { key, bucket, size: body.length }),\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n await s3Client.deleteObject({\n Bucket: bucket,\n Key: key,\n });\n },\n catch: (error) => handleS3Error(\"deleteObject\", error, { key, bucket }),\n });\n\n const deleteObjects = (keys: string[]) =>\n Effect.tryPromise({\n try: () =>\n s3Client.deleteObjects({\n Bucket: bucket,\n Delete: {\n Objects: keys.map((key) => ({ Key: key })),\n },\n }),\n catch: (error) =>\n handleS3Error(\"deleteObjects\", error, { keys: keys.length, bucket }),\n });\n\n const createMultipartUpload = (context: S3OperationContext) =>\n withS3ApiMetrics(\n \"createMultipartUpload\",\n Effect.tryPromise({\n try: async () => {\n const request: AWS.CreateMultipartUploadCommandInput = {\n Bucket: context.bucket,\n Key: context.key,\n };\n\n if (context.contentType) {\n request.ContentType = context.contentType;\n }\n\n if (context.cacheControl) {\n request.CacheControl = context.cacheControl;\n }\n\n const res = await s3Client.createMultipartUpload(request);\n\n if (!res.UploadId) {\n throw new Error(\"Upload ID is undefined\");\n }\n if (!res.Key) {\n throw new Error(\"Key is undefined\");\n }\n\n return {\n uploadId: res.UploadId,\n bucket: context.bucket,\n key: res.Key,\n };\n },\n catch: (error) =>\n handleS3Error(\"createMultipartUpload\", error, context),\n }),\n );\n\n const uploadPart = (\n context: S3OperationContext & { partNumber: number; data: Uint8Array },\n ) =>\n withS3ApiMetrics(\n \"uploadPart\",\n Effect.tryPromise({\n try: () =>\n s3Client.uploadPart({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n PartNumber: context.partNumber,\n Body: context.data,\n }),\n catch: (error) =>\n handleS3Error(\"uploadPart\", error, {\n upload_id: context.key,\n part_number: context.partNumber,\n part_size: context.data.length,\n s3_bucket: context.bucket,\n }),\n }).pipe(Effect.map((response) => response.ETag as string)),\n );\n\n const completeMultipartUpload = (\n context: S3OperationContext,\n parts: Array<AWS.Part>,\n ) =>\n withS3ApiMetrics(\n \"completeMultipartUpload\",\n Effect.tryPromise({\n try: () =>\n s3Client\n .completeMultipartUpload({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n MultipartUpload: {\n Parts: parts.map((part) => ({\n ETag: part.ETag,\n PartNumber: part.PartNumber,\n })),\n },\n })\n .then((response) => response.Location),\n catch: (error) =>\n handleS3Error(\"completeMultipartUpload\", error, {\n upload_id: context.key,\n parts_count: parts.length,\n s3_bucket: 
context.bucket,\n }),\n }),\n );\n\n const abortMultipartUpload = (context: S3OperationContext) =>\n Effect.tryPromise({\n try: async () => {\n await s3Client.abortMultipartUpload({\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n });\n },\n catch: (error) =>\n handleS3NotFoundError(\"abortMultipartUpload\", error, {\n upload_id: context.key,\n s3_bucket: context.bucket,\n }),\n });\n\n const listParts = (\n context: S3OperationContext & { partNumberMarker?: string },\n ) =>\n Effect.tryPromise({\n try: async () => {\n const params: AWS.ListPartsCommandInput = {\n Bucket: context.bucket,\n Key: context.key,\n UploadId: context.uploadId,\n PartNumberMarker: context.partNumberMarker,\n };\n\n const data = await s3Client.listParts(params);\n\n return {\n parts: data.Parts ?? [],\n isTruncated: data.IsTruncated ?? false,\n nextPartNumberMarker: data.NextPartNumberMarker,\n };\n },\n catch: (error) =>\n handleS3Error(\"listParts\", error, {\n upload_id: context.key,\n s3_bucket: context.bucket,\n }),\n });\n\n const listMultipartUploads = (keyMarker?: string, uploadIdMarker?: string) =>\n Effect.tryPromise({\n try: () =>\n s3Client.listMultipartUploads({\n Bucket: bucket,\n KeyMarker: keyMarker,\n UploadIdMarker: uploadIdMarker,\n }),\n catch: (error) =>\n handleS3Error(\"listMultipartUploads\", error, { bucket }),\n });\n\n const getIncompletePart = (id: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n const data = await s3Client.getObject({\n Bucket: bucket,\n Key: partKey(id),\n });\n return toReadableStream(data.Body);\n } catch (error) {\n if (error instanceof NoSuchKey) {\n return undefined;\n }\n throw error;\n }\n },\n catch: (error) =>\n handleS3Error(\"getIncompletePart\", error, { upload_id: id, bucket }),\n });\n\n const getIncompletePartSize = (id: string) => headObject(partKey(id));\n\n const putIncompletePart = (id: string, data: Uint8Array) =>\n putObject(partKey(id), data).pipe(\n Effect.tap(() =>\n Effect.logInfo(\"Incomplete part uploaded\").pipe(\n Effect.annotateLogs({ upload_id: id }),\n ),\n ),\n );\n\n const deleteIncompletePart = (id: string) => deleteObject(partKey(id));\n\n return {\n bucket,\n getObject,\n headObject,\n putObject,\n deleteObject,\n deleteObjects,\n createMultipartUpload,\n uploadPart,\n completeMultipartUpload,\n abortMultipartUpload,\n listParts,\n listMultipartUploads,\n getIncompletePart,\n getIncompletePartSize,\n putIncompletePart,\n deleteIncompletePart,\n };\n};\n\nexport const S3ClientLayer = (s3ClientConfig: S3ClientConfig, bucket: string) =>\n Layer.succeed(S3ClientService, makeS3ClientService(s3ClientConfig, bucket));\n","import type AWS from \"@aws-sdk/client-s3\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n DataStoreWriteOptions,\n StreamingConfig,\n StreamWriteOptions,\n StreamWriteResult,\n UploadFile,\n UploadStrategy,\n} from \"@uploadista/core/types\";\nimport {\n DEFAULT_STREAMING_CONFIG,\n UploadFileKVStore,\n} from \"@uploadista/core/types\";\nimport {\n s3ActiveUploadsGauge as activeUploadsGauge,\n s3FileSizeHistogram as fileSizeHistogram,\n logS3UploadCompletion,\n s3PartSizeHistogram as partSizeHistogram,\n s3PartUploadDurationHistogram as partUploadDurationHistogram,\n s3UploadDurationHistogram as uploadDurationHistogram,\n s3UploadErrorsTotal as uploadErrorsTotal,\n s3UploadPartsTotal as uploadPartsTotal,\n s3UploadRequestsTotal as uploadRequestsTotal,\n s3UploadSuccessTotal as uploadSuccessTotal,\n 
withS3TimingMetrics as withTimingMetrics,\n withS3UploadMetrics as withUploadMetrics,\n} from \"@uploadista/observability\";\nimport { Effect, Ref, Schedule, Stream } from \"effect\";\nimport { S3ClientLayer, S3ClientService } from \"./services/s3-client.service\";\nimport type { ChunkInfo, S3StoreConfig } from \"./types\";\nimport {\n calcOffsetFromParts,\n calcOptimalPartSize,\n getExpirationDate,\n isUploadNotFoundError,\n} from \"./utils\";\n\n/**\n * Generates an S3 key from an upload file, preserving the file extension if available.\n * Looks for filename in metadata under common keys: 'filename', 'fileName', or 'name'.\n * Falls back to just the upload ID if no filename is found.\n */\nconst getS3Key = (uploadFile: UploadFile): string => {\n const { id, metadata } = uploadFile;\n\n if (!metadata) {\n return id;\n }\n\n // Try common metadata keys for filename\n const filename = metadata.filename || metadata.fileName || metadata.name;\n\n if (typeof filename === \"string\" && filename.includes(\".\")) {\n const extension = filename.substring(filename.lastIndexOf(\".\"));\n return `${id}${extension}`;\n }\n\n return id;\n};\n\n// Clean implementation using composed services\nexport function createS3Store(config: S3StoreConfig) {\n const {\n deliveryUrl,\n partSize,\n minPartSize = 5_242_880,\n useTags = true,\n maxMultipartParts = 10_000,\n maxConcurrentPartUploads = 60,\n expirationPeriodInMilliseconds = 1000 * 60 * 60 * 24 * 7, // 1 week\n s3ClientConfig: { bucket },\n } = config;\n\n return Effect.gen(function* () {\n const s3Client = yield* S3ClientService;\n const kvStore = yield* UploadFileKVStore;\n const preferredPartSize = partSize || 8 * 1024 * 1024;\n\n const getUploadId = (\n uploadFile: UploadFile,\n ): Effect.Effect<string, UploadistaError> => {\n const uploadId = uploadFile.storage.uploadId;\n if (!uploadId) {\n return Effect.fail(\n UploadistaError.fromCode(\n \"FILE_WRITE_ERROR\",\n new Error(\"Upload ID is undefined\"),\n ),\n );\n }\n return Effect.succeed(uploadId);\n };\n\n const uploadPart = (\n uploadFile: UploadFile,\n data: Uint8Array,\n partNumber: number,\n ) => {\n const s3Key = getS3Key(uploadFile);\n\n return withTimingMetrics(\n partUploadDurationHistogram,\n Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n const etag = yield* s3Client\n .uploadPart({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumber,\n data,\n })\n .pipe(\n Effect.retry({\n schedule: Schedule.exponential(\"1 second\", 2.0).pipe(\n Schedule.intersect(Schedule.recurs(3)),\n ),\n // Don't retry on upload not found errors - they're permanent\n while: (error) => !isUploadNotFoundError(error),\n }),\n Effect.tapError((error) =>\n Effect.logWarning(\"Retrying part upload\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: partNumber,\n error_message: error.message,\n retry_attempt: \"unknown\", // Will be overridden by the retry schedule\n part_size: data.length,\n s3_bucket: s3Client.bucket,\n }),\n ),\n ),\n );\n\n yield* uploadPartsTotal(Effect.succeed(1));\n yield* Effect.logInfo(\"Part uploaded successfully\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: partNumber,\n part_size: data.length,\n etag: etag,\n }),\n );\n\n return etag;\n }),\n ).pipe(\n Effect.withSpan(`s3-upload-part-${partNumber}`, {\n attributes: {\n \"upload.id\": uploadFile.id,\n \"upload.part_number\": partNumber,\n \"upload.part_size\": data.length,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n 
);\n };\n\n const uploadIncompletePart = (id: string, data: Uint8Array) =>\n s3Client.putIncompletePart(id, data);\n\n const downloadIncompletePart = (id: string) =>\n Effect.gen(function* () {\n const incompletePart = yield* s3Client.getIncompletePart(id);\n\n if (!incompletePart) {\n return undefined;\n }\n\n // Read the stream and collect all chunks to calculate size\n const reader = incompletePart.getReader();\n const chunks: Uint8Array[] = [];\n let incompletePartSize = 0;\n\n try {\n while (true) {\n const { done, value } = yield* Effect.promise(() => reader.read());\n if (done) break;\n chunks.push(value);\n incompletePartSize += value.length;\n }\n } finally {\n reader.releaseLock();\n }\n\n const stream = Stream.fromIterable(chunks);\n\n return {\n size: incompletePartSize,\n stream,\n };\n });\n\n const deleteIncompletePart = (id: string) =>\n s3Client.deleteIncompletePart(id);\n\n const getIncompletePartSize = (id: string) =>\n s3Client.getIncompletePartSize(id);\n\n const complete = (uploadFile: UploadFile, parts: Array<AWS.Part>) => {\n const s3Key = getS3Key(uploadFile);\n\n return Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n return yield* s3Client.completeMultipartUpload(\n {\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n },\n parts,\n );\n }).pipe(\n Effect.tap(() => uploadSuccessTotal(Effect.succeed(1))),\n Effect.withSpan(\"s3-complete-multipart-upload\", {\n attributes: {\n \"upload.id\": uploadFile.id,\n \"upload.parts_count\": parts.length,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n );\n };\n\n const abort = (uploadFile: UploadFile) => {\n const s3Key = getS3Key(uploadFile);\n\n return Effect.gen(function* () {\n const uploadId = yield* getUploadId(uploadFile);\n\n yield* s3Client.abortMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n });\n\n yield* s3Client.deleteObjects([s3Key]);\n });\n };\n\n const retrievePartsRecursive = (\n s3Key: string,\n uploadId: string,\n uploadFileId: string,\n partNumberMarker?: string,\n ): Effect.Effect<\n { uploadFound: boolean; parts: AWS.Part[] },\n UploadistaError\n > =>\n Effect.gen(function* () {\n const result = yield* s3Client.listParts({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumberMarker,\n });\n\n let parts = result.parts;\n\n if (result.isTruncated) {\n const rest = yield* retrievePartsRecursive(\n s3Key,\n uploadId,\n uploadFileId,\n result.nextPartNumberMarker,\n );\n parts = [...parts, ...rest.parts];\n }\n\n if (!partNumberMarker) {\n parts.sort((a, b) => (a.PartNumber ?? 0) - (b.PartNumber ?? 
0));\n }\n\n return { uploadFound: true, parts };\n }).pipe(\n Effect.catchAll((error) => {\n if (isUploadNotFoundError(error)) {\n return Effect.logWarning(\n \"S3 upload not found during listParts\",\n ).pipe(\n Effect.annotateLogs({\n upload_id: uploadFileId,\n error_code: error.code,\n }),\n Effect.as({ uploadFound: false, parts: [] }),\n );\n }\n return Effect.fail(error);\n }),\n );\n\n const retrieveParts = (id: string, partNumberMarker?: string) =>\n Effect.gen(function* () {\n const metadata = yield* kvStore.get(id);\n const uploadId = yield* getUploadId(metadata);\n const s3Key = getS3Key(metadata);\n\n return yield* retrievePartsRecursive(\n s3Key,\n uploadId,\n id,\n partNumberMarker,\n );\n });\n\n const completeMetadata = (upload: UploadFile, useTags: boolean) =>\n Effect.gen(function* () {\n if (!useTags) {\n return 0;\n }\n\n const uploadFile = yield* kvStore.get(upload.id);\n const uploadId = uploadFile.storage.uploadId;\n if (!uploadId) {\n return 0;\n }\n\n yield* kvStore.set(upload.id, {\n ...uploadFile,\n storage: { ...uploadFile.storage, uploadId },\n });\n\n return 0;\n });\n\n const clearCache = (id: string) =>\n Effect.gen(function* () {\n yield* Effect.logInfo(\"Clearing cache\").pipe(\n Effect.annotateLogs({ upload_id: id }),\n );\n yield* kvStore.delete(id);\n });\n\n const createMultipartUpload = (upload: UploadFile) => {\n const s3Key = getS3Key(upload);\n\n return Effect.gen(function* () {\n yield* Effect.logInfo(\"Initializing multipart upload\").pipe(\n Effect.annotateLogs({ upload_id: upload.id }),\n );\n\n const multipartInfo = yield* s3Client.createMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId: \"\", // Not needed for create\n contentType: upload.metadata?.contentType?.toString(),\n cacheControl: upload.metadata?.cacheControl?.toString(),\n });\n\n const uploadCreated = {\n ...upload,\n storage: {\n ...upload.storage,\n path: multipartInfo.key,\n uploadId: multipartInfo.uploadId,\n bucket: multipartInfo.bucket,\n },\n url: `${deliveryUrl}/${s3Key}`,\n };\n\n yield* kvStore.set(upload.id, uploadCreated);\n\n yield* Effect.logInfo(\"Multipart upload created\").pipe(\n Effect.annotateLogs({\n upload_id: upload.id,\n s3_upload_id: uploadCreated.storage.uploadId,\n s3_key: s3Key,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* fileSizeHistogram(Effect.succeed(upload.size || 0));\n\n return uploadCreated;\n }).pipe(\n Effect.withSpan(\"s3-create-upload\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.size\": upload.size || 0,\n \"s3.bucket\": s3Client.bucket,\n \"s3.key\": s3Key,\n },\n }),\n );\n };\n\n /**\n * Creates a multipart upload on S3 attaching any metadata to it.\n * Also, a `${file_id}.info` file is created which holds some information\n * about the upload itself like: `upload-id`, `upload-length`, etc.\n */\n const create = (upload: UploadFile) => {\n return Effect.gen(function* () {\n yield* Effect.logInfo(\"Initializing multipart upload\").pipe(\n Effect.annotateLogs({ upload_id: upload.id }),\n );\n const uploadCreated = yield* createMultipartUpload(upload);\n yield* kvStore.set(upload.id, uploadCreated);\n yield* Effect.logInfo(\"Multipart upload created\").pipe(\n Effect.annotateLogs({\n upload_id: upload.id,\n s3_upload_id: uploadCreated.storage.uploadId,\n }),\n );\n yield* uploadRequestsTotal(Effect.succeed(1));\n\n return uploadCreated;\n }).pipe(\n Effect.withSpan(\"s3-create-upload\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.size\": upload.size || 0,\n 
\"s3.bucket\": bucket,\n },\n }),\n );\n };\n\n const remove = (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n yield* abort(uploadFile);\n yield* clearCache(id);\n });\n\n const write = (\n options: DataStoreWriteOptions,\n dependencies: { onProgress?: (currentOffset: number) => void },\n ) =>\n withUploadMetrics(\n options.file_id,\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const {\n stream: initialData,\n file_id,\n offset: initialOffset,\n } = options;\n const { onProgress } = dependencies;\n\n // Capture start time for upload completion metrics\n const startTime = Date.now();\n\n // Track active upload\n yield* activeUploadsGauge(Effect.succeed(1));\n\n const prepareResult = yield* prepareUpload(\n file_id,\n initialOffset,\n initialData,\n );\n\n const {\n uploadFile,\n nextPartNumber,\n offset,\n data,\n existingPartSize,\n } = prepareResult;\n\n // Use existing part size if parts already exist, otherwise calculate optimal size\n const uploadPartSize =\n existingPartSize ||\n calcOptimalPartSize(\n uploadFile.size,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n );\n\n // Log part size decision for debugging\n yield* Effect.logInfo(\"Part size decision\").pipe(\n Effect.annotateLogs({\n upload_id: file_id,\n existing_part_size: existingPartSize,\n calculated_part_size: calcOptimalPartSize(\n uploadFile.size,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n ),\n final_part_size: uploadPartSize,\n next_part_number: nextPartNumber,\n }),\n );\n\n const bytesUploaded = yield* uploadParts(\n uploadFile,\n data,\n nextPartNumber,\n offset,\n uploadPartSize,\n minPartSize,\n maxConcurrentPartUploads,\n onProgress,\n );\n\n const newOffset = offset + bytesUploaded;\n\n if (uploadFile.size === newOffset) {\n yield* finishUpload(file_id, uploadFile, startTime);\n }\n\n return newOffset;\n }).pipe(Effect.ensuring(activeUploadsGauge(Effect.succeed(0)))),\n ),\n );\n\n const getUpload = (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n\n const { parts, uploadFound } = yield* retrieveParts(id);\n if (!uploadFound) {\n return {\n ...uploadFile,\n offset: uploadFile.size as number,\n size: uploadFile.size,\n };\n }\n\n const offset = calcOffsetFromParts(parts);\n const incompletePartSize = yield* getIncompletePartSize(id);\n\n return {\n ...uploadFile,\n offset: offset + (incompletePartSize ?? 0),\n size: uploadFile.size,\n storage: uploadFile.storage,\n };\n });\n\n // const read = (id: string) =>\n // Effect.gen(function* () {\n // return yield* s3Client.getObject(id);\n // });\n\n // Helper functions\n const prepareUpload = (\n fileId: string,\n initialOffset: number,\n initialData: Stream.Stream<Uint8Array, UploadistaError>,\n ) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(fileId);\n const { parts } = yield* retrieveParts(fileId);\n\n const partNumber: number =\n parts.length > 0 && parts[parts.length - 1].PartNumber\n ? (parts[parts.length - 1].PartNumber ?? 0)\n : 0;\n const nextPartNumber = partNumber + 1;\n\n // Detect existing part size to maintain consistency\n // We check the first part's size to ensure all subsequent parts match\n const existingPartSize =\n parts.length > 0 && parts[0].Size ? 
parts[0].Size : null;\n\n // Validate that all existing parts (except potentially the last one) have the same size\n if (existingPartSize && parts.length > 1) {\n const inconsistentPart = parts\n .slice(0, -1)\n .find((part) => part.Size !== existingPartSize);\n if (inconsistentPart) {\n yield* Effect.logWarning(\n \"Inconsistent part sizes detected in existing upload\",\n ).pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n expected_size: existingPartSize,\n inconsistent_part: inconsistentPart.PartNumber,\n inconsistent_size: inconsistentPart.Size,\n }),\n );\n }\n }\n\n const incompletePart = yield* downloadIncompletePart(fileId);\n\n if (incompletePart) {\n yield* deleteIncompletePart(fileId);\n const offset = initialOffset - incompletePart.size;\n const data = incompletePart.stream.pipe(Stream.concat(initialData));\n return {\n uploadFile,\n nextPartNumber,\n offset,\n incompletePartSize: incompletePart.size,\n data,\n existingPartSize,\n };\n } else {\n return {\n uploadFile,\n nextPartNumber,\n offset: initialOffset,\n incompletePartSize: 0,\n data: initialData,\n existingPartSize,\n };\n }\n });\n\n const finishUpload = (\n fileId: string,\n uploadFile: UploadFile,\n startTime: number,\n ) =>\n Effect.gen(function* () {\n const { parts } = yield* retrieveParts(fileId);\n\n // Log all parts for debugging S3 multipart upload requirements\n yield* Effect.logInfo(\"Attempting to complete multipart upload\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n parts_count: parts.length,\n parts_info: parts.map((part, index) => ({\n part_number: part.PartNumber,\n size: part.Size,\n etag: part.ETag,\n is_final_part: index === parts.length - 1,\n })),\n }),\n );\n\n yield* complete(uploadFile, parts);\n yield* completeMetadata(uploadFile, useTags);\n // yield* clearCache(fileId);\n\n // Log upload completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const fileSize = uploadFile.size || 0;\n const throughputBps =\n totalDurationMs > 0 ? (fileSize * 1000) / totalDurationMs : 0;\n\n // Calculate average part size if we have parts\n const averagePartSize =\n parts.length > 0\n ? 
parts.reduce((sum, part) => sum + (part.Size || 0), 0) /\n parts.length\n : undefined;\n\n yield* logS3UploadCompletion(fileId, {\n fileSize,\n totalDurationMs,\n partsCount: parts.length,\n averagePartSize,\n throughputBps,\n });\n }).pipe(\n Effect.tapError((error) =>\n Effect.gen(function* () {\n yield* uploadErrorsTotal(Effect.succeed(1));\n yield* Effect.logError(\"Failed to finish upload\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n error: String(error),\n }),\n );\n }),\n ),\n );\n\n const deleteExpired = (): Effect.Effect<number, UploadistaError> =>\n Effect.gen(function* () {\n if (expirationPeriodInMilliseconds === 0) {\n return 0;\n }\n\n let keyMarker: string | undefined;\n let uploadIdMarker: string | undefined;\n let isTruncated = true;\n let deleted = 0;\n\n while (isTruncated) {\n const listResponse = yield* s3Client.listMultipartUploads(\n keyMarker,\n uploadIdMarker,\n );\n\n const expiredUploads =\n listResponse.Uploads?.filter((multiPartUpload) => {\n const initiatedDate = multiPartUpload.Initiated;\n return (\n initiatedDate &&\n Date.now() >\n getExpirationDate(\n initiatedDate.toISOString(),\n expirationPeriodInMilliseconds,\n ).getTime()\n );\n }) || [];\n\n const objectsToDelete = expiredUploads\n .filter((upload): upload is { Key: string } => {\n return !!upload.Key;\n })\n .map((upload) => upload.Key);\n\n if (objectsToDelete.length > 0) {\n yield* s3Client.deleteObjects(objectsToDelete);\n\n // Abort multipart uploads\n yield* Effect.forEach(expiredUploads, (upload) => {\n return Effect.gen(function* () {\n if (!upload.Key || !upload.UploadId) {\n return;\n }\n yield* s3Client.abortMultipartUpload({\n bucket,\n key: upload.Key,\n uploadId: upload.UploadId,\n });\n return;\n });\n });\n\n deleted += objectsToDelete.length;\n }\n\n isTruncated = listResponse.IsTruncated ?? 
false;\n\n if (isTruncated) {\n keyMarker = listResponse.NextKeyMarker;\n uploadIdMarker = listResponse.NextUploadIdMarker;\n }\n }\n\n return deleted;\n });\n\n // Proper single-pass chunking using Effect's async stream constructor\n // Ensures all parts except the final part are exactly the same size (S3 requirement)\n const createChunkedStream =\n (chunkSize: number) =>\n <E>(\n stream: Stream.Stream<Uint8Array, E>,\n ): Stream.Stream<ChunkInfo, E> => {\n return Stream.async<ChunkInfo, E>((emit) => {\n let buffer = new Uint8Array(0);\n let partNumber = 1;\n let totalBytesProcessed = 0;\n\n const emitChunk = (data: Uint8Array, isFinalChunk = false) => {\n // Log chunk information for debugging - use INFO level to see in logs\n Effect.runSync(\n Effect.logInfo(\"Creating chunk\").pipe(\n Effect.annotateLogs({\n part_number: partNumber,\n chunk_size: data.length,\n expected_size: chunkSize,\n is_final_chunk: isFinalChunk,\n total_bytes_processed: totalBytesProcessed + data.length,\n }),\n ),\n );\n emit.single({\n partNumber: partNumber++,\n data,\n size: data.length,\n });\n };\n\n const processChunk = (newData: Uint8Array) => {\n // Combine buffer with new data\n const combined = new Uint8Array(buffer.length + newData.length);\n combined.set(buffer);\n combined.set(newData, buffer.length);\n buffer = combined;\n totalBytesProcessed += newData.length;\n\n // Emit full chunks of exactly chunkSize bytes\n // This ensures S3 multipart upload rule: all parts except last must be same size\n while (buffer.length >= chunkSize) {\n const chunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emitChunk(chunk, false);\n }\n };\n\n // Process the stream\n Effect.runFork(\n stream.pipe(\n Stream.runForEach((chunk) =>\n Effect.sync(() => processChunk(chunk)),\n ),\n Effect.andThen(() =>\n Effect.sync(() => {\n // Emit final chunk if there's remaining data\n // The final chunk can be any size < chunkSize (S3 allows this)\n if (buffer.length > 0) {\n emitChunk(buffer, true);\n }\n emit.end();\n }),\n ),\n Effect.catchAll((error) => Effect.sync(() => emit.fail(error))),\n ),\n );\n });\n };\n\n // Byte-level progress tracking during streaming\n // This provides smooth, immediate progress feedback by tracking bytes as they\n // flow through the stream, before they reach S3. 
This solves the issue where\n // small files (< 5MB) would jump from 0% to 100% instantly.\n const withByteProgressTracking =\n (onProgress?: (totalBytes: number) => void, initialOffset = 0) =>\n <E, R>(stream: Stream.Stream<Uint8Array, E, R>) => {\n if (!onProgress) return stream;\n\n return Effect.gen(function* () {\n const totalBytesProcessedRef = yield* Ref.make(initialOffset);\n\n return stream.pipe(\n Stream.tap((chunk) =>\n Effect.gen(function* () {\n const newTotal = yield* Ref.updateAndGet(\n totalBytesProcessedRef,\n (total) => total + chunk.length,\n );\n onProgress(newTotal);\n }),\n ),\n );\n }).pipe(Stream.unwrap);\n };\n\n const uploadParts = (\n uploadFile: UploadFile,\n readStream: Stream.Stream<Uint8Array, UploadistaError>,\n initCurrentPartNumber: number,\n initOffset: number,\n uploadPartSize: number,\n minPartSize: number,\n maxConcurrentPartUploads: number,\n onProgress?: (newOffset: number) => void,\n ) =>\n Effect.gen(function* () {\n yield* Effect.logInfo(\"Starting part uploads\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n init_offset: initOffset,\n file_size: uploadFile.size,\n part_size: uploadPartSize,\n min_part_size: minPartSize,\n }),\n );\n\n // Enhanced Progress Tracking Strategy:\n // 1. Byte-level progress during streaming - provides immediate, smooth feedback\n // as data flows through the pipeline (even for small files)\n // 2. This tracks progress BEFORE S3 upload, giving users immediate feedback\n // 3. For large files with multiple parts, this provides granular updates\n // 4. For small files (single part), this prevents 0%->100% jumps\n const chunkStream = readStream.pipe(\n // Add byte-level progress tracking during streaming (immediate feedback)\n withByteProgressTracking(onProgress, initOffset),\n // Create chunks for S3 multipart upload with uniform part sizes\n createChunkedStream(uploadPartSize),\n );\n\n // Track cumulative offset and total bytes with Effect Refs\n const cumulativeOffsetRef = yield* Ref.make(initOffset);\n const totalBytesUploadedRef = yield* Ref.make(0);\n\n // Create a chunk upload function for the sink\n const uploadChunk = (chunkInfo: ChunkInfo) =>\n Effect.gen(function* () {\n // Calculate cumulative bytes to determine if this is the final part\n const cumulativeOffset = yield* Ref.updateAndGet(\n cumulativeOffsetRef,\n (offset) => offset + chunkInfo.size,\n );\n const isFinalPart = cumulativeOffset >= (uploadFile.size || 0);\n\n yield* Effect.logDebug(\"Processing chunk\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n cumulative_offset: cumulativeOffset,\n file_size: uploadFile.size,\n chunk_size: chunkInfo.size,\n is_final_part: isFinalPart,\n }),\n );\n\n const actualPartNumber =\n initCurrentPartNumber + chunkInfo.partNumber - 1;\n\n if (chunkInfo.size > uploadPartSize) {\n yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_WRITE_ERROR\",\n new Error(\n `Part size ${chunkInfo.size} exceeds upload part size ${uploadPartSize}`,\n ),\n ),\n );\n }\n\n // For parts that meet the minimum part size (5MB) or are the final part,\n // upload them as regular multipart parts\n if (chunkInfo.size >= minPartSize || isFinalPart) {\n yield* Effect.logDebug(\"Uploading multipart chunk\").pipe(\n Effect.annotateLogs({\n upload_id: uploadFile.id,\n part_number: actualPartNumber,\n chunk_size: chunkInfo.size,\n min_part_size: minPartSize,\n is_final_part: isFinalPart,\n }),\n );\n yield* uploadPart(uploadFile, chunkInfo.data, actualPartNumber);\n yield* 
partSizeHistogram(Effect.succeed(chunkInfo.size));\n } else {\n // Only upload as incomplete part if it's smaller than minimum and not final\n yield* uploadIncompletePart(uploadFile.id, chunkInfo.data);\n }\n\n yield* Ref.update(\n totalBytesUploadedRef,\n (total) => total + chunkInfo.size,\n );\n\n // Note: Byte-level progress is now tracked during streaming phase\n // This ensures smooth progress updates regardless of part size\n // S3 upload completion is tracked via totalBytesUploadedRef for accuracy\n });\n\n // Process chunks concurrently with controlled concurrency\n yield* chunkStream.pipe(\n Stream.runForEach((chunkInfo) => uploadChunk(chunkInfo)),\n Effect.withConcurrency(maxConcurrentPartUploads),\n );\n\n return yield* Ref.get(totalBytesUploadedRef);\n });\n\n const getCapabilities = (): DataStoreCapabilities => ({\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: true,\n supportsStreamingRead: true, // Supports streaming reads via S3 GetObject\n supportsStreamingWrite: true, // Supports streaming writes via S3 multipart upload\n maxConcurrentUploads: maxConcurrentPartUploads,\n minChunkSize: minPartSize,\n maxChunkSize: 5_368_709_120, // 5GiB S3 limit\n maxParts: maxMultipartParts,\n optimalChunkSize: preferredPartSize,\n requiresOrderedChunks: false,\n requiresMimeTypeValidation: true,\n maxValidationSize: undefined, // no size limit\n });\n\n const getChunkerConstraints = () => ({\n minChunkSize: minPartSize,\n maxChunkSize: 5_368_709_120, // 5GiB S3 limit\n optimalChunkSize: preferredPartSize,\n requiresOrderedChunks: false,\n });\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n return Effect.succeed(result);\n };\n\n const concatArrayBuffers = (chunks: Uint8Array[]): Uint8Array => {\n const result = new Uint8Array(chunks.reduce((a, c) => a + c.length, 0));\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n };\n\n const streamToArray = async (\n stream: ReadableStream<Uint8Array>,\n ): Promise<Uint8Array> => {\n const reader = stream.getReader();\n const chunks: Uint8Array[] = [];\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n }\n return concatArrayBuffers(chunks);\n };\n\n const read = (id: string) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(id);\n console.log(upload);\n if (!upload.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_READ_ERROR\",\n new Error(\"Upload Key is undefined\"),\n ),\n );\n }\n const s3Key = getS3Key(upload);\n const stream = yield* s3Client.getObject(s3Key);\n return yield* Effect.promise(() => streamToArray(stream));\n });\n\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Uses S3 GetObject and converts the response body to an Effect Stream.\n *\n * @param id - The unique identifier of the file to read\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n */\n const readStream = (id: string, config?: StreamingConfig) =>\n Effect.gen(function* () {\n const 
upload = yield* kvStore.get(id);\n if (!upload.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\n \"FILE_READ_ERROR\",\n new Error(\"Upload Key is undefined\"),\n ),\n );\n }\n\n // Merge config with defaults\n const effectiveConfig = {\n ...DEFAULT_STREAMING_CONFIG,\n ...config,\n };\n\n const s3Key = getS3Key(upload);\n const webStream = yield* s3Client.getObject(s3Key);\n\n // Convert Web ReadableStream to Effect Stream with configured chunk size\n return Stream.async<Uint8Array, UploadistaError>((emit) => {\n const reader = webStream.getReader();\n const chunkSize = effectiveConfig.chunkSize;\n let buffer = new Uint8Array(0);\n\n const processChunk = async () => {\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n // Emit any remaining data in buffer\n if (buffer.length > 0) {\n emit.single(buffer);\n }\n emit.end();\n return;\n }\n\n if (value) {\n // Combine buffer with new value\n const combined = new Uint8Array(buffer.length + value.length);\n combined.set(buffer);\n combined.set(value, buffer.length);\n buffer = combined;\n\n // Emit chunks of the configured size\n while (buffer.length >= chunkSize) {\n const chunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emit.single(chunk);\n }\n }\n }\n } catch (error) {\n emit.fail(\n new UploadistaError({\n code: \"FILE_READ_ERROR\",\n status: 500,\n body: \"Failed to read S3 object stream\",\n details: `S3 stream read failed: ${String(error)}`,\n }),\n );\n }\n };\n\n // Start processing\n processChunk();\n\n // Cleanup function\n return Effect.sync(() => {\n reader.releaseLock();\n });\n });\n });\n\n /**\n * Writes file content from a stream without knowing the final size upfront.\n * Uses S3 multipart upload to stream content as parts are buffered.\n *\n * @param fileId - The unique identifier for the file\n * @param options - Stream write options including the Effect Stream\n * @returns StreamWriteResult with final size after stream completes\n */\n const writeStream = (\n fileId: string,\n options: StreamWriteOptions,\n ): Effect.Effect<StreamWriteResult, UploadistaError> =>\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n const s3Key = fileId;\n\n yield* Effect.logInfo(\"Starting streaming write to S3\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n s3_key: s3Key,\n size_hint: options.sizeHint,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n\n // Calculate optimal part size based on size hint or use default\n const uploadPartSize = options.sizeHint\n ? 
calcOptimalPartSize(\n options.sizeHint,\n preferredPartSize,\n minPartSize,\n maxMultipartParts,\n )\n : preferredPartSize;\n\n // Create multipart upload\n const multipartInfo = yield* s3Client.createMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId: \"\", // Not needed for create\n contentType: options.contentType,\n });\n\n const uploadId = multipartInfo.uploadId;\n\n yield* Effect.logInfo(\n \"Multipart upload created for streaming write\",\n ).pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n s3_upload_id: uploadId,\n s3_key: s3Key,\n part_size: uploadPartSize,\n }),\n );\n\n // Track parts and total bytes\n const partsRef = yield* Ref.make<AWS.Part[]>([]);\n const totalBytesRef = yield* Ref.make(0);\n const partNumberRef = yield* Ref.make(1);\n const bufferRef = yield* Ref.make(new Uint8Array(0));\n\n // Helper to upload a part\n const uploadBufferedPart = (data: Uint8Array, isFinalPart: boolean) =>\n Effect.gen(function* () {\n if (data.length === 0) {\n return;\n }\n\n // Only upload if we have enough data or it's the final part\n if (data.length < minPartSize && !isFinalPart) {\n return;\n }\n\n const partNumber = yield* Ref.getAndUpdate(\n partNumberRef,\n (n) => n + 1,\n );\n\n yield* Effect.logDebug(\"Uploading part from stream\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n part_number: partNumber,\n part_size: data.length,\n is_final_part: isFinalPart,\n }),\n );\n\n const etag = yield* s3Client\n .uploadPart({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n partNumber,\n data,\n })\n .pipe(\n Effect.retry({\n schedule: Schedule.exponential(\"1 second\", 2.0).pipe(\n Schedule.intersect(Schedule.recurs(3)),\n ),\n while: (error) => !isUploadNotFoundError(error),\n }),\n );\n\n yield* Ref.update(partsRef, (parts) => [\n ...parts,\n { PartNumber: partNumber, ETag: etag },\n ]);\n yield* uploadPartsTotal(Effect.succeed(1));\n yield* partSizeHistogram(Effect.succeed(data.length));\n });\n\n // Process stream chunks\n yield* options.stream.pipe(\n Stream.runForEach((chunk) =>\n Effect.gen(function* () {\n // Update total bytes\n yield* Ref.update(\n totalBytesRef,\n (total) => total + chunk.length,\n );\n\n // Get current buffer and append new chunk\n const currentBuffer = yield* Ref.get(bufferRef);\n const combined = new Uint8Array(\n currentBuffer.length + chunk.length,\n );\n combined.set(currentBuffer);\n combined.set(chunk, currentBuffer.length);\n\n // Extract full parts and keep remainder in buffer\n let offset = 0;\n while (combined.length - offset >= uploadPartSize) {\n const partData = combined.slice(\n offset,\n offset + uploadPartSize,\n );\n yield* uploadBufferedPart(partData, false);\n offset += uploadPartSize;\n }\n\n // Store remaining data in buffer\n yield* Ref.set(bufferRef, combined.slice(offset));\n }),\n ),\n );\n\n // Upload any remaining data as final part\n const remainingBuffer = yield* Ref.get(bufferRef);\n if (remainingBuffer.length > 0) {\n yield* uploadBufferedPart(remainingBuffer, true);\n }\n\n // Get all parts and complete the upload\n const parts = yield* Ref.get(partsRef);\n const totalBytes = yield* Ref.get(totalBytesRef);\n\n if (parts.length === 0) {\n // No parts uploaded (empty stream) - abort and fail\n yield* s3Client.abortMultipartUpload({\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n });\n yield* activeUploadsGauge(Effect.succeed(-1));\n return yield* Effect.fail(\n new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 400,\n body: \"Cannot complete upload with no data\",\n details: 
\"The stream provided no data to upload\",\n }),\n );\n }\n\n // Sort parts by part number for completion\n parts.sort((a, b) => (a.PartNumber ?? 0) - (b.PartNumber ?? 0));\n\n yield* s3Client.completeMultipartUpload(\n {\n bucket: s3Client.bucket,\n key: s3Key,\n uploadId,\n },\n parts,\n );\n\n // Log completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const throughputBps =\n totalDurationMs > 0 ? (totalBytes * 1000) / totalDurationMs : 0;\n const averagePartSize =\n parts.length > 0 ? totalBytes / parts.length : undefined;\n\n yield* logS3UploadCompletion(fileId, {\n fileSize: totalBytes,\n totalDurationMs,\n partsCount: parts.length,\n averagePartSize,\n throughputBps,\n });\n\n yield* uploadSuccessTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n yield* fileSizeHistogram(Effect.succeed(totalBytes));\n\n yield* Effect.logInfo(\"Streaming write to S3 completed\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n total_bytes: totalBytes,\n parts_count: parts.length,\n duration_ms: totalDurationMs,\n }),\n );\n\n return {\n id: s3Key,\n size: totalBytes,\n path: s3Key,\n bucket: s3Client.bucket,\n url: `${deliveryUrl}/${s3Key}`,\n } satisfies StreamWriteResult;\n }).pipe(\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n yield* uploadErrorsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n return yield* Effect.fail(error);\n }),\n ),\n ),\n );\n\n return {\n bucket,\n create,\n remove,\n write,\n getUpload,\n read,\n readStream,\n writeStream,\n deleteExpired,\n getCapabilities,\n getChunkerConstraints,\n validateUploadStrategy,\n } as DataStore<UploadFile>;\n });\n}\n\n// Effect-based factory that uses services\nexport const s3Store = (options: S3StoreConfig) => {\n const {\n s3ClientConfig: { bucket, ...restS3ClientConfig },\n } = options;\n return createS3Store(options).pipe(\n Effect.provide(S3ClientLayer(restS3ClientConfig, bucket)),\n 
);\n};\n"],"mappings":"msBAEA,MAAa,EAAuB,GAC3B,GAAS,EAAM,OAAS,EAC3B,EAAM,QAAQ,EAAG,IAAM,GAAK,GAAG,MAAQ,GAAI,EAAE,CAC7C,EAGO,GACX,EACA,EACA,EACA,EACA,EAAgB,gBACL,CACX,IAAM,EAAO,GAAY,EACrBA,EAEJ,AASE,EATE,GAAQ,EAGQ,EACT,GAAQ,EAAoB,EAEnB,EAGA,KAAK,KAAK,EAAO,EAAkB,CAKvD,IAAM,EACJ,GAAY,EAAW,EACnB,EACA,KAAK,IAAI,EAAiB,EAAY,CAItC,EAAY,KAClB,OAAO,KAAK,KAAK,EAAgB,EAAU,CAAG,GAGnC,EAAW,GACf,GAAG,EAAG,OAUF,GACX,EACA,IACS,CACT,IAAM,EAAO,IAAI,KAAK,EAAU,CAChC,OAAO,IAAI,KAAK,EAAK,SAAS,CAAG,EAA+B,ECvDrD,GACX,EACA,EACA,EAAmC,EAAE,IAGrC,EAAO,QAAQC,EAAW,EAAW,EAAO,EAAQ,CAAC,CAE9C,EAAgB,SAAS,mBAAoB,EAAe,EAGxD,GACX,EACA,EACA,EAAmC,EAAE,GAGnC,OAAO,GAAU,UACjB,GACA,SAAU,GACV,OAAO,EAAM,MAAS,UACtB,CAAC,WAAY,YAAa,eAAe,CAAC,SAAS,EAAM,KAAK,EAE9D,EAAO,QACL,EAAO,WAAW,yBAAyB,EAAU,YAAY,CAAC,KAChE,EAAO,aAAa,CAClB,WAAY,EAAM,KAClB,GAAG,EACJ,CAAC,CACH,CACF,CACM,EAAgB,SAAS,iBAAiB,EAG5C,EAAc,EAAW,EAAO,EAAQ,CAGpC,EACX,GAcA,GAVE,OAAO,GAAU,UACjB,GACA,SAAU,GACV,OAAO,EAAM,MAAS,WACrB,EAAM,OAAS,gBAAkB,EAAM,OAAS,cAOjD,aAAiB,GACjB,EAAM,OACN,OAAO,EAAM,OAAU,UACvB,SAAU,EAAM,OAChB,OAAO,EAAM,MAAM,MAAS,WAC3B,EAAM,MAAM,OAAS,gBAAkB,EAAM,MAAM,OAAS,cCjDjE,SAAgB,EAAiB,EAA+B,CAO9D,GALI,aAAgB,gBAKhB,GAAQ,OAAO,GAAS,UAAY,cAAe,EACrD,OAAO,EAIT,GAAI,GAAQ,OAAO,GAAS,UAAY,SAAU,GAAQ,OAAQ,EAAM,CACtE,IAAM,EAAa,EAEnB,OAAO,IAAI,eAAe,CACxB,MAAM,EAAY,CAChB,EAAW,GAAG,OAAS,GAAU,CAC/B,EAAW,QAAQ,IAAI,WAAW,EAAM,CAAC,EACzC,CAEF,EAAW,GAAG,UAAa,CACzB,EAAW,OAAO,EAClB,CAEF,EAAW,GAAG,QAAU,GAAU,CAChC,EAAW,MAAM,EAAM,EACvB,EAEL,CAAC,CAIJ,MAAU,MACR,0BAA0B,OAAO,EAAK,gDACvC,CClCH,IAAa,EAAb,cAAqC,EAAQ,IAAI,kBAAkB,EAmEhE,AAAC,GAEJ,MAAa,GACX,EACA,IACG,CACH,IAAM,EAAW,IAAI,EAAG,EAAe,CACjC,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAKI,GAJM,MAAM,EAAS,UAAU,CACpC,OAAQ,EACR,IAAK,EACN,CAAC,EAC2B,KAAK,CAEpC,MAAQ,GAAU,EAAc,YAAa,EAAO,CAAE,MAAK,SAAQ,CAAC,CACrE,CAAC,CAEE,EAAc,GAClB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CAKF,OAJa,MAAM,EAAS,WAAW,CACrC,OAAQ,EACR,IAAK,EACN,CAAC,EACU,oBACL,EAAO,CACd,GAAI,aAAiB,EACnB,OAEF,MAAM,IAGV,MAAQ,GAAU,EAAc,aAAc,EAAO,CAAE,MAAK,SAAQ,CAAC,CACtE,CAAC,CAEE,GAAa,EAAa,IAC9B,EAAO,WAAW,CAChB,IAAK,UACc,MAAM,EAAS,UAAU,CACxC,OAAQ,EACR,IAAK,EACL,KAAM,EACP,CAAC,EACc,MAAQ,GAE1B,MAAQ,GACN,EAAc,YAAa,EAAO,CAAE,MAAK,SAAQ,KAAM,EAAK,OAAQ,CAAC,CACxE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,MAAM,EAAS,aAAa,CAC1B,OAAQ,EACR,IAAK,EACN,CAAC,EAEJ,MAAQ,GAAU,EAAc,eAAgB,EAAO,CAAE,MAAK,SAAQ,CAAC,CACxE,CAAC,CAmMJ,MAAO,CACL,SACA,YACA,aACA,YACA,eACA,cAvMqB,GACrB,EAAO,WAAW,CAChB,QACE,EAAS,cAAc,CACrB,OAAQ,EACR,OAAQ,CACN,QAAS,EAAK,IAAK,IAAS,CAAE,IAAK,EAAK,EAAE,CAC3C,CACF,CAAC,CACJ,MAAQ,GACN,EAAc,gBAAiB,EAAO,CAAE,KAAM,EAAK,OAAQ,SAAQ,CAAC,CACvE,CAAC,CA6LF,sBA3L6B,GAC7B,EACE,wBACA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAMC,EAAiD,CACrD,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACd,CAEG,EAAQ,cACV,EAAQ,YAAc,EAAQ,aAG5B,EAAQ,eACV,EAAQ,aAAe,EAAQ,cAGjC,IAAM,EAAM,MAAM,EAAS,sBAAsB,EAAQ,CAEzD,GAAI,CAAC,EAAI,SACP,MAAU,MAAM,yBAAyB,CAE3C,GAAI,CAAC,EAAI,IACP,MAAU,MAAM,mBAAmB,CAGrC,MAAO,CACL,SAAU,EAAI,SACd,OAAQ,EAAQ,OAChB,IAAK,EAAI,IACV,EAEH,MAAQ,GACN,EAAc,wBAAyB,EAAO,EAAQ,CACzD,CAAC,CACH,CAwJD,WArJA,GAEA,EACE,aACA,EAAO,WAAW,CAChB,QACE,EAAS,WAAW,CAClB,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,WAAY,EAAQ,WACpB,KAAM,EAAQ,KACf,CAAC,CACJ,MAAQ,GACN,EAAc,aAAc,EAAO,CACjC,UAAW,EAAQ,IACnB,YAAa,EAAQ,WACrB,UAAW,EAAQ,KAAK,OACxB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CAAC,KAAK,EAAO,IAAK,GAAa,EAAS,KAAe,CAAC,CAC3D,CAiID,yBA9HA,EACA,IAEA,EACE,0BACA,EAAO,WAAW,CAChB,QACE,EACG,wBAAwB,CACvB,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,gBAAiB,CACf,MAAO,EAAM,IAAK,IAAU,CAC1B,KAAM,EAAK,KACX,WAAY,EAAK,WAClB,EAAE,CACJ,CACF,CAAC,CACD,KAAM,GAAa,EAAS,SAAS,CAC1C,MAAQ,GACN
,EAAc,0BAA2B,EAAO,CAC9C,UAAW,EAAQ,IACnB,YAAa,EAAM,OACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CACH,CAoGD,qBAlG4B,GAC5B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,MAAM,EAAS,qBAAqB,CAClC,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SACnB,CAAC,EAEJ,MAAQ,GACN,EAAsB,uBAAwB,EAAO,CACnD,UAAW,EAAQ,IACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CAqFF,UAlFA,GAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAMC,EAAoC,CACxC,OAAQ,EAAQ,OAChB,IAAK,EAAQ,IACb,SAAU,EAAQ,SAClB,iBAAkB,EAAQ,iBAC3B,CAEK,EAAO,MAAM,EAAS,UAAU,EAAO,CAE7C,MAAO,CACL,MAAO,EAAK,OAAS,EAAE,CACvB,YAAa,EAAK,aAAe,GACjC,qBAAsB,EAAK,qBAC5B,EAEH,MAAQ,GACN,EAAc,YAAa,EAAO,CAChC,UAAW,EAAQ,IACnB,UAAW,EAAQ,OACpB,CAAC,CACL,CAAC,CA2DF,sBAzD4B,EAAoB,IAChD,EAAO,WAAW,CAChB,QACE,EAAS,qBAAqB,CAC5B,OAAQ,EACR,UAAW,EACX,eAAgB,EACjB,CAAC,CACJ,MAAQ,GACN,EAAc,uBAAwB,EAAO,CAAE,SAAQ,CAAC,CAC3D,CAAC,CAgDF,kBA9CyB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CAKF,OAAO,GAJM,MAAM,EAAS,UAAU,CACpC,OAAQ,EACR,IAAK,EAAQ,EAAG,CACjB,CAAC,EAC2B,KAAK,OAC3B,EAAO,CACd,GAAI,aAAiB,EACnB,OAEF,MAAM,IAGV,MAAQ,GACN,EAAc,oBAAqB,EAAO,CAAE,UAAW,EAAI,SAAQ,CAAC,CACvE,CAAC,CA6BF,sBA3B6B,GAAe,EAAW,EAAQ,EAAG,CAAC,CA4BnE,mBA1ByB,EAAY,IACrC,EAAU,EAAQ,EAAG,CAAE,EAAK,CAAC,KAC3B,EAAO,QACL,EAAO,QAAQ,2BAA2B,CAAC,KACzC,EAAO,aAAa,CAAE,UAAW,EAAI,CAAC,CACvC,CACF,CACF,CAoBD,qBAlB4B,GAAe,EAAa,EAAQ,EAAG,CAAC,CAmBrE,EAGU,GAAiB,EAAgC,IAC5D,EAAM,QAAQ,EAAiB,EAAoB,EAAgB,EAAO,CAAC,CCzTvE,EAAY,GAAmC,CACnD,GAAM,CAAE,KAAI,YAAa,EAEzB,GAAI,CAAC,EACH,OAAO,EAIT,IAAM,EAAW,EAAS,UAAY,EAAS,UAAY,EAAS,KAOpE,OALI,OAAO,GAAa,UAAY,EAAS,SAAS,IAAI,CAEjD,GAAG,IADQ,EAAS,UAAU,EAAS,YAAY,IAAI,CAAC,GAI1D,GAIT,SAAgB,EAAc,EAAuB,CACnD,GAAM,CACJ,cACA,WACA,cAAc,QACd,UAAU,GACV,oBAAoB,IACpB,2BAA2B,GAC3B,iCAAiC,IAAO,GAAK,GAAK,GAAK,EACvD,eAAgB,CAAE,WAChB,EAEJ,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAClB,EAAU,MAAO,EACjB,EAAoB,GAAY,EAAI,KAAO,KAE3C,EACJ,GAC2C,CAC3C,IAAM,EAAW,EAAW,QAAQ,SASpC,OARK,EAQE,EAAO,QAAQ,EAAS,CAPtB,EAAO,KACZ,EAAgB,SACd,mBACI,MAAM,yBAAyB,CACpC,CACF,EAKC,GACJ,EACA,EACA,IACG,CACH,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAOC,EACLC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAW,MAAO,EAAY,EAAW,CAEzC,EAAO,MAAO,EACjB,WAAW,CACV,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,aACA,OACD,CAAC,CACD,KACC,EAAO,MAAM,CACX,SAAU,EAAS,YAAY,WAAY,EAAI,CAAC,KAC9C,EAAS,UAAU,EAAS,OAAO,EAAE,CAAC,CACvC,CAED,MAAQ,GAAU,CAAC,EAAsB,EAAM,CAChD,CAAC,CACF,EAAO,SAAU,GACf,EAAO,WAAW,uBAAuB,CAAC,KACxC,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,cAAe,EAAM,QACrB,cAAe,UACf,UAAW,EAAK,OAChB,UAAW,EAAS,OACrB,CAAC,CACH,CACF,CACF,CAYH,OAVA,MAAOC,EAAiB,EAAO,QAAQ,EAAE,CAAC,CAC1C,MAAO,EAAO,QAAQ,6BAA6B,CAAC,KAClD,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,UAAW,EAAK,OACV,OACP,CAAC,CACH,CAEM,GACP,CACH,CAAC,KACA,EAAO,SAAS,kBAAkB,IAAc,CAC9C,WAAY,CACV,YAAa,EAAW,GACxB,qBAAsB,EACtB,mBAAoB,EAAK,OACzB,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAGG,GAAwB,EAAY,IACxC,EAAS,kBAAkB,EAAI,EAAK,CAEhC,EAA0B,GAC9B,EAAO,IAAI,WAAa,CACtB,IAAM,EAAiB,MAAO,EAAS,kBAAkB,EAAG,CAE5D,GAAI,CAAC,EACH,OAIF,IAAM,EAAS,EAAe,WAAW,CACnCC,EAAuB,EAAE,CAC3B,EAAqB,EAEzB,GAAI,CACF,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAO,EAAO,YAAc,EAAO,MAAM,CAAC,CAClE,GAAI,EAAM,MACV,EAAO,KAAK,EAAM,CAClB,GAAsB,EAAM,eAEtB,CACR,EAAO,aAAa,CAGtB,IAAM,EAAS,EAAO,aAAa,EAAO,CAE1C,MAAO,CACL,KAAM,EACN,SACD,EACD,CAEE,EAAwB,GAC5B,EAAS,qBAAqB,EAAG,CAE7B,EAAyB,GAC7B,EAAS,sBAAsB,EAAG,CAE9B,GAAY,EAAwB,IAA2B,CACnE,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAAY,EAAW,CAE/C,OAAO,MAAO,EAAS,wBACrB,CACE,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CACD,EACD,EACD,CAAC,KACD,EAAO,QAAUC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACvD,EAAO,SAAS,+BAAgC,CAC9C,WAAY,CACV,YAAa,EAAW,GACxB,qBAAsB,EAAM,OAC5B,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAGG,EAAS,GAA2B,CAC
xC,IAAM,EAAQ,EAAS,EAAW,CAElC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAAY,EAAW,CAE/C,MAAO,EAAS,qBAAqB,CACnC,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CAAC,CAEF,MAAO,EAAS,cAAc,CAAC,EAAM,CAAC,EACtC,EAGE,GACJ,EACA,EACA,EACA,IAKA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAS,UAAU,CACvC,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,mBACD,CAAC,CAEE,EAAQ,EAAO,MAEnB,GAAI,EAAO,YAAa,CACtB,IAAM,EAAO,MAAO,EAClB,EACA,EACA,EACA,EAAO,qBACR,CACD,EAAQ,CAAC,GAAG,EAAO,GAAG,EAAK,MAAM,CAOnC,OAJK,GACH,EAAM,MAAM,EAAG,KAAO,EAAE,YAAc,IAAM,EAAE,YAAc,GAAG,CAG1D,CAAE,YAAa,GAAM,QAAO,EACnC,CAAC,KACD,EAAO,SAAU,GACX,EAAsB,EAAM,CACvB,EAAO,WACZ,uCACD,CAAC,KACA,EAAO,aAAa,CAClB,UAAW,EACX,WAAY,EAAM,KACnB,CAAC,CACF,EAAO,GAAG,CAAE,YAAa,GAAO,MAAO,EAAE,CAAE,CAAC,CAC7C,CAEI,EAAO,KAAK,EAAM,CACzB,CACH,CAEG,GAAiB,EAAY,IACjC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAW,MAAO,EAAQ,IAAI,EAAG,CACjC,EAAW,MAAO,EAAY,EAAS,CAG7C,OAAO,MAAO,EAFA,EAAS,EAAS,CAI9B,EACA,EACA,EACD,EACD,CAEE,GAAoB,EAAoB,IAC5C,EAAO,IAAI,WAAa,CACtB,GAAI,CAACC,EACH,MAAO,GAGT,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAO,GAAG,CAC1C,EAAW,EAAW,QAAQ,SAUpC,OATK,IAIL,MAAO,EAAQ,IAAI,EAAO,GAAI,CAC5B,GAAG,EACH,QAAS,CAAE,GAAG,EAAW,QAAS,WAAU,CAC7C,CAAC,EANO,GAST,CAEE,EAAc,GAClB,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,QAAQ,iBAAiB,CAAC,KACtC,EAAO,aAAa,CAAE,UAAW,EAAI,CAAC,CACvC,CACD,MAAO,EAAQ,OAAO,EAAG,EACzB,CAEE,EAAyB,GAAuB,CACpD,IAAM,EAAQ,EAAS,EAAO,CAE9B,OAAO,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAO,QAAQ,gCAAgC,CAAC,KACrD,EAAO,aAAa,CAAE,UAAW,EAAO,GAAI,CAAC,CAC9C,CAED,IAAM,EAAgB,MAAO,EAAS,sBAAsB,CAC1D,OAAQ,EAAS,OACjB,IAAK,EACL,SAAU,GACV,YAAa,EAAO,UAAU,aAAa,UAAU,CACrD,aAAc,EAAO,UAAU,cAAc,UAAU,CACxD,CAAC,CAEI,EAAgB,CACpB,GAAG,EACH,QAAS,CACP,GAAG,EAAO,QACV,KAAM,EAAc,IACpB,SAAU,EAAc,SACxB,OAAQ,EAAc,OACvB,CACD,IAAK,GAAG,EAAY,GAAG,IACxB,CAeD,OAbA,MAAO,EAAQ,IAAI,EAAO,GAAI,EAAc,CAE5C,MAAO,EAAO,QAAQ,2BAA2B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EAAO,GAClB,aAAc,EAAc,QAAQ,SACpC,OAAQ,EACT,CAAC,CACH,CAED,MAAOC,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAkB,EAAO,QAAQ,EAAO,MAAQ,EAAE,CAAC,CAEnD,GACP,CAAC,KACD,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,YAAa,EAAO,GACpB,cAAe,EAAO,MAAQ,EAC9B,YAAa,EAAS,OACtB,SAAU,EACX,CACF,CAAC,CACH,EAQG,EAAU,GACP,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAO,QAAQ,gCAAgC,CAAC,KACrD,EAAO,aAAa,CAAE,UAAW,EAAO,GAAI,CAAC,CAC9C,CACD,IAAM,EAAgB,MAAO,EAAsB,EAAO,CAU1D,OATA,MAAO,EAAQ,IAAI,EAAO,GAAI,EAAc,CAC5C,MAAO,EAAO,QAAQ,2BAA2B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EAAO,GAClB,aAAc,EAAc,QAAQ,SACrC,CAAC,CACH,CACD,MAAOD,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAEtC,GACP,CAAC,KACD,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,YAAa,EAAO,GACpB,cAAe,EAAO,MAAQ,EAC9B,YAAa,EACd,CACF,CAAC,CACH,CAGG,EAAU,GACd,EAAO,IAAI,WAAa,CAEtB,MAAO,EADY,MAAO,EAAQ,IAAI,EAAG,CACjB,CACxB,MAAO,EAAW,EAAG,EACrB,CAEE,GACJ,EACA,IAEAE,EACE,EAAQ,QACRR,EACES,EACA,EAAO,IAAI,WAAa,CACtB,GAAM,CACJ,OAAQ,EACR,UACA,OAAQ,GACN,EACE,CAAE,cAAe,EAGjB,EAAY,KAAK,KAAK,CAG5B,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAQ5C,GAAM,CACJ,aACA,iBACA,SACA,OACA,oBAXoB,MAAO,GAC3B,EACA,EACA,EACD,CAWK,EACJ,GACA,EACE,EAAW,KACX,EACA,EACA,EACD,CAGH,MAAO,EAAO,QAAQ,qBAAqB,CAAC,KAC1C,EAAO,aAAa,CAClB,UAAW,EACX,mBAAoB,EACpB,qBAAsB,EACpB,EAAW,KACX,EACA,EACA,EACD,CACD,gBAAiB,EACjB,iBAAkB,EACnB,CAAC,CACH,CAaD,IAAM,EAAY,GAXI,MAAO,GAC3B,EACA,EACA,EACA,EACA,EACA,EACA,EACA,EACD,EAQD,OAJI,EAAW,OAAS,IACtB,MAAO,GAAa,EAAS,EAAY,EAAU,EAG9C,GACP,CAAC,KAAK,EAAO,SAASA,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CAAC,CAChE,CACF,CAEG,EAAa,GACjB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAG,CAEnC,CAAE,QAAO,eAAgB,MAAO,EAAc,EAAG,CACvD,GAAI,CAAC,EACH,MAAO,CACL,GAAG,EACH,OAAQ,EAAW,KACnB,KAAM,EAAW,KAClB,CAGH,IAAM,EAAS,EAAoB,EAAM,CACnC,EAAqB,MAAO,EAAsB,EAAG,CAE3D,MAAO,CACL,GAAG,EACH,OAAQ,GAAU,GAAsB,GACxC,KAAM,EAAW,KACjB,QAAS,EAAW,QACrB,
EACD,CAQE,IACJ,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAO,CACvC,CAAE,SAAU,MAAO,EAAc,EAAO,CAMxC,GAHJ,EAAM,OAAS,GAAK,EAAM,EAAM,OAAS,GAAG,WACvC,EAAM,EAAM,OAAS,GAAG,YAAc,EACvC,GAC8B,EAI9B,EACJ,EAAM,OAAS,GAAK,EAAM,GAAG,KAAO,EAAM,GAAG,KAAO,KAGtD,GAAI,GAAoB,EAAM,OAAS,EAAG,CACxC,IAAM,EAAmB,EACtB,MAAM,EAAG,GAAG,CACZ,KAAM,GAAS,EAAK,OAAS,EAAiB,CAC7C,IACF,MAAO,EAAO,WACZ,sDACD,CAAC,KACA,EAAO,aAAa,CAClB,UAAW,EACX,cAAe,EACf,kBAAmB,EAAiB,WACpC,kBAAmB,EAAiB,KACrC,CAAC,CACH,EAIL,IAAM,EAAiB,MAAO,EAAuB,EAAO,CAE5D,GAAI,EAAgB,CAClB,MAAO,EAAqB,EAAO,CACnC,IAAM,EAAS,EAAgB,EAAe,KACxC,EAAO,EAAe,OAAO,KAAK,EAAO,OAAO,EAAY,CAAC,CACnE,MAAO,CACL,aACA,iBACA,SACA,mBAAoB,EAAe,KACnC,OACA,mBACD,MAED,MAAO,CACL,aACA,iBACA,OAAQ,EACR,mBAAoB,EACpB,KAAM,EACN,mBACD,EAEH,CAEE,IACJ,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,GAAM,CAAE,SAAU,MAAO,EAAc,EAAO,CAG9C,MAAO,EAAO,QAAQ,0CAA0C,CAAC,KAC/D,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EAAM,OACnB,WAAY,EAAM,KAAK,EAAM,KAAW,CACtC,YAAa,EAAK,WAClB,KAAM,EAAK,KACX,KAAM,EAAK,KACX,cAAe,IAAU,EAAM,OAAS,EACzC,EAAE,CACJ,CAAC,CACH,CAED,MAAO,EAAS,EAAY,EAAM,CAClC,MAAO,EAAiB,EAAY,EAAQ,CAK5C,IAAM,EADU,KAAK,KAAK,CACQ,EAC5B,EAAW,EAAW,MAAQ,EAC9B,EACJ,EAAkB,EAAK,EAAW,IAAQ,EAAkB,EAGxD,EACJ,EAAM,OAAS,EACX,EAAM,QAAQ,EAAK,IAAS,GAAO,EAAK,MAAQ,GAAI,EAAE,CACtD,EAAM,OACN,IAAA,GAEN,MAAO,EAAsB,EAAQ,CACnC,WACA,kBACA,WAAY,EAAM,OAClB,kBACA,gBACD,CAAC,EACF,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CACtB,MAAOC,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAC3C,MAAO,EAAO,SAAS,0BAA0B,CAAC,KAChD,EAAO,aAAa,CAClB,UAAW,EACX,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,EACD,CACH,CACF,CAEG,OACJ,EAAO,IAAI,WAAa,CACtB,GAAI,IAAmC,EACrC,MAAO,GAGT,IAAIC,EACAC,EACA,EAAc,GACd,EAAU,EAEd,KAAO,GAAa,CAClB,IAAM,EAAe,MAAO,EAAS,qBACnC,EACA,EACD,CAEK,EACJ,EAAa,SAAS,OAAQ,GAAoB,CAChD,IAAM,EAAgB,EAAgB,UACtC,OACE,GACA,KAAK,KAAK,CACR,EACE,EAAc,aAAa,CAC3B,EACD,CAAC,SAAS,EAEf,EAAI,EAAE,CAEJ,EAAkB,EACrB,OAAQ,GACA,CAAC,CAAC,EAAO,IAChB,CACD,IAAK,GAAW,EAAO,IAAI,CAE1B,EAAgB,OAAS,IAC3B,MAAO,EAAS,cAAc,EAAgB,CAG9C,MAAO,EAAO,QAAQ,EAAiB,GAC9B,EAAO,IAAI,WAAa,CACzB,CAAC,EAAO,KAAO,CAAC,EAAO,WAG3B,MAAO,EAAS,qBAAqB,CACnC,SACA,IAAK,EAAO,IACZ,SAAU,EAAO,SAClB,CAAC,GAEF,CACF,CAEF,GAAW,EAAgB,QAG7B,EAAc,EAAa,aAAe,GAEtC,IACF,EAAY,EAAa,cACzB,EAAiB,EAAa,oBAIlC,OAAO,GACP,CAIE,GACH,GAEC,GAEO,EAAO,MAAqB,GAAS,CAC1C,IAAI,EAAS,IAAI,WACb,EAAa,EACb,EAAsB,EAEpB,GAAa,EAAkB,EAAe,KAAU,CAE5D,EAAO,QACL,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EACb,WAAY,EAAK,OACjB,cAAe,EACf,eAAgB,EAChB,sBAAuB,EAAsB,EAAK,OACnD,CAAC,CACH,CACF,CACD,EAAK,OAAO,CACV,WAAY,IACZ,OACA,KAAM,EAAK,OACZ,CAAC,EAGE,EAAgB,GAAwB,CAE5C,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAQ,OAAO,CAQ/D,IAPA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,EAAS,EAAO,OAAO,CACpC,EAAS,EACT,GAAuB,EAAQ,OAIxB,EAAO,QAAU,GAAW,CACjC,IAAM,EAAQ,EAAO,MAAM,EAAG,EAAU,CACxC,EAAS,EAAO,MAAM,EAAU,CAChC,EAAU,EAAO,GAAM,GAK3B,EAAO,QACL,EAAO,KACL,EAAO,WAAY,GACjB,EAAO,SAAW,EAAa,EAAM,CAAC,CACvC,CACD,EAAO,YACL,EAAO,SAAW,CAGZ,EAAO,OAAS,GAClB,EAAU,EAAQ,GAAK,CAEzB,EAAK,KAAK,EACV,CACH,CACD,EAAO,SAAU,GAAU,EAAO,SAAW,EAAK,KAAK,EAAM,CAAC,CAAC,CAChE,CACF,EACD,CAOA,IACH,EAA2C,EAAgB,IACrD,GACA,EAEE,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAyB,MAAO,EAAI,KAAK,EAAc,CAE7D,OAAO,EAAO,KACZ,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAKtB,EAJiB,MAAO,EAAI,aAC1B,EACC,GAAU,EAAQ,EAAM,OAC1B,CACmB,EACpB,CACH,CACF,EACD,CAAC,KAAK,EAAO,OAAO,CAhBE,EAmBtB,IACJ,EACA,EACA,EACA,EACA,EACA,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,QAAQ,wBAAwB,CAAC,KAC7C,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,UAAW,EAAW,KACtB,UAAW,EACX,cAAeC,EAChB,CAAC,CACH,CAQD,IAAM,EAAcC,EAAW,KAE7B,GAAyB,EAAY,EAAW,CAEhD,GAAoB,EAAe,CACpC,CAGK,EAAsB,MAAO,EAAI,KAAK,EAAW,CACjD,EAAwB,MAAO,EAAI,KAAK,EAAE,CAG
1C,EAAe,GACnB,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAmB,MAAO,EAAI,aAClC,EACC,GAAW,EAAS,EAAU,KAChC,CACK,EAAc,IAAqB,EAAW,MAAQ,GAE5D,MAAO,EAAO,SAAS,mBAAmB,CAAC,KACzC,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,kBAAmB,EACnB,UAAW,EAAW,KACtB,WAAY,EAAU,KACtB,cAAe,EAChB,CAAC,CACH,CAED,IAAM,EACJ,EAAwB,EAAU,WAAa,EAE7C,EAAU,KAAO,IACnB,MAAO,EAAO,KACZ,EAAgB,SACd,mBACI,MACF,aAAa,EAAU,KAAK,4BAA4B,IACzD,CACF,CACF,EAKC,EAAU,MAAQD,GAAe,GACnC,MAAO,EAAO,SAAS,4BAA4B,CAAC,KAClD,EAAO,aAAa,CAClB,UAAW,EAAW,GACtB,YAAa,EACb,WAAY,EAAU,KACtB,cAAeA,EACf,cAAe,EAChB,CAAC,CACH,CACD,MAAO,EAAW,EAAY,EAAU,KAAM,EAAiB,CAC/D,MAAOE,EAAkB,EAAO,QAAQ,EAAU,KAAK,CAAC,EAGxD,MAAO,EAAqB,EAAW,GAAI,EAAU,KAAK,CAG5D,MAAO,EAAI,OACT,EACC,GAAU,EAAQ,EAAU,KAC9B,EAKD,CAQJ,OALA,MAAO,EAAY,KACjB,EAAO,WAAY,GAAc,EAAY,EAAU,CAAC,CACxD,EAAO,gBAAgBC,EAAyB,CACjD,CAEM,MAAO,EAAI,IAAI,EAAsB,EAC5C,CAEE,OAAgD,CACpD,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,sBAAuB,GACvB,uBAAwB,GACxB,qBAAsB,EACtB,aAAc,EACd,aAAc,WACd,SAAU,EACV,iBAAkB,EAClB,sBAAuB,GACvB,2BAA4B,GAC5B,kBAAmB,IAAA,GACpB,EAEK,QAA+B,CACnC,aAAc,EACd,aAAc,WACd,iBAAkB,EAClB,sBAAuB,GACxB,EAEK,GACJ,GACkC,CAClC,IAAM,EAAe,GAAiB,CAChC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CACJ,OAAO,EAAO,QAAQ,EAAO,EAGzB,GAAsB,GAAqC,CAC/D,IAAM,EAAS,IAAI,WAAW,EAAO,QAAQ,EAAG,IAAM,EAAI,EAAE,OAAQ,EAAE,CAAC,CACnE,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAElB,OAAO,GAGH,GAAgB,KACpB,IACwB,CACxB,IAAM,EAAS,EAAO,WAAW,CAC3Bd,EAAuB,EAAE,CAC/B,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAM,EAAO,MAAM,CAC3C,GAAI,EAAM,MACV,EAAO,KAAK,EAAM,CAEpB,OAAO,GAAmB,EAAO,EAyVnC,MAAO,CACL,SACA,SACA,SACA,QACA,YACA,KA5VY,GACZ,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAG,CAErC,GADA,QAAQ,IAAI,EAAO,CACf,CAAC,EAAO,GACV,OAAO,MAAO,EAAO,KACnB,EAAgB,SACd,kBACI,MAAM,0BAA0B,CACrC,CACF,CAEH,IAAM,EAAQ,EAAS,EAAO,CACxB,EAAS,MAAO,EAAS,UAAU,EAAM,CAC/C,OAAO,MAAO,EAAO,YAAc,GAAc,EAAO,CAAC,EACzD,CA8UF,YApUkB,EAAY,IAC9B,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAG,CACrC,GAAI,CAAC,EAAO,GACV,OAAO,MAAO,EAAO,KACnB,EAAgB,SACd,kBACI,MAAM,0BAA0B,CACrC,CACF,CAIH,IAAM,EAAkB,CACtB,GAAG,EACH,GAAGe,EACJ,CAEK,EAAQ,EAAS,EAAO,CACxB,EAAY,MAAO,EAAS,UAAU,EAAM,CAGlD,OAAO,EAAO,MAAoC,GAAS,CACzD,IAAM,EAAS,EAAU,WAAW,CAC9B,EAAY,EAAgB,UAC9B,EAAS,IAAI,WA+CjB,OA7CqB,SAAY,CAC/B,GAAI,CACF,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAM,EAAO,MAAM,CAE3C,GAAI,EAAM,CAEJ,EAAO,OAAS,GAClB,EAAK,OAAO,EAAO,CAErB,EAAK,KAAK,CACV,OAGF,GAAI,EAAO,CAET,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAM,OAAO,CAM7D,IALA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,EAAO,EAAO,OAAO,CAClC,EAAS,EAGF,EAAO,QAAU,GAAW,CACjC,IAAM,EAAQ,EAAO,MAAM,EAAG,EAAU,CACxC,EAAS,EAAO,MAAM,EAAU,CAChC,EAAK,OAAO,EAAM,UAIjB,EAAO,CACd,EAAK,KACH,IAAI,EAAgB,CAClB,KAAM,kBACN,OAAQ,IACR,KAAM,kCACN,QAAS,0BAA0B,OAAO,EAAM,GACjD,CAAC,CACH,KAKS,CAGP,EAAO,SAAW,CACvB,EAAO,aAAa,EACpB,EACF,EACF,CAyPF,aA9OA,EACA,IAEAlB,EACES,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CACtB,EAAQ,EAEd,MAAO,EAAO,QAAQ,iCAAiC,CAAC,KACtD,EAAO,aAAa,CAClB,UAAW,EACX,OAAQ,EACR,UAAW,EAAQ,SACpB,CAAC,CACH,CAED,MAAOH,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOI,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAG5C,IAAM,EAAiB,EAAQ,SAC3B,EACE,EAAQ,SACR,EACA,EACA,EACD,CACD,EAUE,GAPgB,MAAO,EAAS,sBAAsB,CAC1D,OAAQ,EAAS,OACjB,IAAK,EACL,SAAU,GACV,YAAa,EAAQ,YACtB,CAAC,EAE6B,SAE/B,MAAO,EAAO,QACZ,+CACD,CAAC,KACA,EAAO,aAAa,CAClB,UAAW,EACX,aAAc,EACd,OAAQ,EACR,UAAW,EACZ,CAAC,CACH,CAGD,IAAM,EAAW,MAAO,EAAI,KAAiB,EAAE,CAAC,CAC1C,EAAgB,MAAO,EAAI,KAAK,EAAE,CAClC,EAAgB,MAAO,EAAI,KAAK,EAAE,CAClC,EAAY,MAAO,EAAI,KAAK,IAAI,WAAc,CAG9C,GAAsB,EAAkB,IAC5C,EAAO,IAAI,WAAa,CAMtB,GALI,EAAK,SAAW,GAKhB,EAAK,OAAS,GAAe,CAAC,EACh
C,OAGF,IAAM,EAAa,MAAO,EAAI,aAC5B,EACC,GAAM,EAAI,EACZ,CAED,MAAO,EAAO,SAAS,6BAA6B,CAAC,KACnD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,UAAW,EAAK,OAChB,cAAe,EAChB,CAAC,CACH,CAED,IAAM,EAAO,MAAO,EACjB,WAAW,CACV,OAAQ,EAAS,OACjB,IAAK,EACL,WACA,aACA,OACD,CAAC,CACD,KACC,EAAO,MAAM,CACX,SAAU,EAAS,YAAY,WAAY,EAAI,CAAC,KAC9C,EAAS,UAAU,EAAS,OAAO,EAAE,CAAC,CACvC,CACD,MAAQ,GAAU,CAAC,EAAsB,EAAM,CAChD,CAAC,CACH,CAEH,MAAO,EAAI,OAAO,EAAW,GAAU,CACrC,GAAGS,EACH,CAAE,WAAY,EAAY,KAAM,EAAM,CACvC,CAAC,CACF,MAAOjB,EAAiB,EAAO,QAAQ,EAAE,CAAC,CAC1C,MAAOc,EAAkB,EAAO,QAAQ,EAAK,OAAO,CAAC,EACrD,CAGJ,MAAO,EAAQ,OAAO,KACpB,EAAO,WAAY,GACjB,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAI,OACT,EACC,GAAU,EAAQ,EAAM,OAC1B,CAGD,IAAM,EAAgB,MAAO,EAAI,IAAI,EAAU,CACzC,EAAW,IAAI,WACnB,EAAc,OAAS,EAAM,OAC9B,CACD,EAAS,IAAI,EAAc,CAC3B,EAAS,IAAI,EAAO,EAAc,OAAO,CAGzC,IAAI,EAAS,EACb,KAAO,EAAS,OAAS,GAAU,GAKjC,MAAO,EAJU,EAAS,MACxB,EACA,EAAS,EACV,CACmC,GAAM,CAC1C,GAAU,EAIZ,MAAO,EAAI,IAAI,EAAW,EAAS,MAAM,EAAO,CAAC,EACjD,CACH,CACF,CAGD,IAAM,EAAkB,MAAO,EAAI,IAAI,EAAU,CAC7C,EAAgB,OAAS,IAC3B,MAAO,EAAmB,EAAiB,GAAK,EAIlD,IAAM,EAAQ,MAAO,EAAI,IAAI,EAAS,CAChC,EAAa,MAAO,EAAI,IAAI,EAAc,CAEhD,GAAI,EAAM,SAAW,EAQnB,OANA,MAAO,EAAS,qBAAqB,CACnC,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CAAC,CACF,MAAON,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACtC,MAAO,EAAO,KACnB,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,sCACN,QAAS,wCACV,CAAC,CACH,CAIH,EAAM,MAAM,EAAG,KAAO,EAAE,YAAc,IAAM,EAAE,YAAc,GAAG,CAE/D,MAAO,EAAS,wBACd,CACE,OAAQ,EAAS,OACjB,IAAK,EACL,WACD,CACD,EACD,CAID,IAAM,EADU,KAAK,KAAK,CACQ,EAC5B,EACJ,EAAkB,EAAK,EAAa,IAAQ,EAAkB,EAC1D,EACJ,EAAM,OAAS,EAAI,EAAa,EAAM,OAAS,IAAA,GAuBjD,OArBA,MAAO,EAAsB,EAAQ,CACnC,SAAU,EACV,kBACA,WAAY,EAAM,OAClB,kBACA,gBACD,CAAC,CAEF,MAAON,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOM,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAC7C,MAAOH,EAAkB,EAAO,QAAQ,EAAW,CAAC,CAEpD,MAAO,EAAO,QAAQ,kCAAkC,CAAC,KACvD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,YAAa,EAAM,OACnB,YAAa,EACd,CAAC,CACH,CAEM,CACL,GAAI,EACJ,KAAM,EACN,KAAM,EACN,OAAQ,EAAS,OACjB,IAAK,GAAG,EAAY,GAAG,IACxB,EACD,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAGtB,OAFA,MAAOI,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAC3C,MAAOD,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACtC,MAAO,EAAO,KAAK,EAAM,EAChC,CACH,CACF,CACF,CAWD,iBACA,kBACA,yBACA,0BACD,EACD,CAIJ,MAAa,EAAW,GAA2B,CACjD,GAAM,CACJ,eAAgB,CAAE,SAAQ,GAAG,IAC3B,EACJ,OAAO,EAAc,EAAQ,CAAC,KAC5B,EAAO,QAAQ,EAAc,EAAoB,EAAO,CAAC,CAC1D"}
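The sourcesContent embedded in this map carries the store's full TypeScript source, and two techniques it documents in comments are worth pulling out. First, createChunkedStream enforces S3's multipart rule that every part except the last must be the same size; a minimal sketch of that fixed-size rechunking, stripped of the Stream.async plumbing (the generator form is illustrative, not the package's API):

function* rechunk(
  input: Iterable<Uint8Array>,
  chunkSize: number,
): Generator<Uint8Array> {
  // Accumulate incoming bytes and emit exact chunkSize slices; whatever
  // is left at the end becomes the final part, which S3 allows to be short.
  let buffer = new Uint8Array(0);
  for (const data of input) {
    const combined = new Uint8Array(buffer.length + data.length);
    combined.set(buffer);
    combined.set(data, buffer.length);
    buffer = combined;
    while (buffer.length >= chunkSize) {
      yield buffer.slice(0, chunkSize);
      buffer = buffer.slice(chunkSize);
    }
  }
  if (buffer.length > 0) yield buffer;
}

Second, the embedded comments describe byte-level progress tracking: bytes are counted as they flow through the stream, before they reach S3, so a single-part upload under 5MB no longer jumps from 0% to 100%. A sketch of that pattern, assuming effect ^3.0.0 (withProgress is a hypothetical name):

import { Effect, Ref, Stream } from "effect";

// Tap each chunk, accumulate a running byte total in a Ref, and report
// every increment to the caller's onProgress callback.
const withProgress = (
  stream: Stream.Stream<Uint8Array>,
  onProgress: (totalBytes: number) => void,
) =>
  Stream.unwrap(
    Effect.gen(function* () {
      const totalRef = yield* Ref.make(0);
      return stream.pipe(
        Stream.tap((chunk) =>
          Ref.updateAndGet(totalRef, (total) => total + chunk.length).pipe(
            Effect.map(onProgress),
          ),
        ),
      );
    }),
  );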
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@uploadista/data-store-s3",
   "type": "module",
-  "version": "0.0.20-beta.5",
+  "version": "0.0.20-beta.7",
   "description": "AWS S3 data store for Uploadista",
   "license": "MIT",
   "author": "Uploadista",
@@ -14,20 +14,20 @@
     }
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "3.
-    "@uploadista/
-    "@uploadista/
+    "@aws-sdk/client-s3": "3.948.0",
+    "@uploadista/observability": "0.0.20-beta.7",
+    "@uploadista/core": "0.0.20-beta.7"
   },
   "peerDependencies": {
     "effect": "^3.0.0"
   },
   "devDependencies": {
     "@effect/vitest": "0.27.0",
-    "effect": "3.19.
+    "effect": "3.19.11",
     "tsdown": "0.17.2",
     "vitest": "4.0.15",
-    "@uploadista/typescript-config": "0.0.20-beta.
-    "@uploadista/kv-store-memory": "0.0.20-beta.
+    "@uploadista/typescript-config": "0.0.20-beta.7",
+    "@uploadista/kv-store-memory": "0.0.20-beta.7"
   },
   "scripts": {
     "build": "tsdown",
package/src/s3-store.ts
CHANGED
@@ -1162,7 +1162,9 @@ export function createS3Store(config: S3StoreConfig) {
 
       const uploadId = multipartInfo.uploadId;
 
-      yield* Effect.logInfo(
+      yield* Effect.logInfo(
+        "Multipart upload created for streaming write",
+      ).pipe(
         Effect.annotateLogs({
           upload_id: fileId,
           s3_upload_id: uploadId,
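The hunk above is a formatting-only re-wrap of the structured-logging pattern used throughout this file: a log effect piped through Effect.annotateLogs so key/value fields travel with the message. A standalone sketch of the pattern, assuming effect ^3.0.0 (logUploadCreated is an illustrative name, not part of the package):

import { Effect } from "effect";

// annotateLogs attaches structured metadata to every log entry emitted
// by the wrapped effect.
const logUploadCreated = (fileId: string, uploadId: string) =>
  Effect.logInfo("Multipart upload created for streaming write").pipe(
    Effect.annotateLogs({
      upload_id: fileId,
      s3_upload_id: uploadId,
    }),
  );

Effect.runSync(logUploadCreated("file-1", "upload-abc"));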
@@ -1233,7 +1235,10 @@ export function createS3Store(config: S3StoreConfig) {
           Stream.runForEach((chunk) =>
             Effect.gen(function* () {
               // Update total bytes
-              yield* Ref.update(
+              yield* Ref.update(
+                totalBytesRef,
+                (total) => total + chunk.length,
+              );
 
               // Get current buffer and append new chunk
               const currentBuffer = yield* Ref.get(bufferRef);
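Again a re-wrap only; the logic is an atomic byte counter bumped once per chunk. A minimal sketch, assuming effect ^3.0.0 (countBytes is an illustrative name):

import { Effect, Ref } from "effect";

// A Ref holds the running total; Ref.update applies each increment
// atomically.
const countBytes = (chunks: Uint8Array[]) =>
  Effect.gen(function* () {
    const totalBytesRef = yield* Ref.make(0);
    for (const chunk of chunks) {
      yield* Ref.update(totalBytesRef, (total) => total + chunk.length);
    }
    return yield* Ref.get(totalBytesRef);
  });

// Effect.runSync(countBytes([new Uint8Array(5), new Uint8Array(3)])) === 8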
@@ -1246,7 +1251,10 @@ export function createS3Store(config: S3StoreConfig) {
               // Extract full parts and keep remainder in buffer
               let offset = 0;
               while (combined.length - offset >= uploadPartSize) {
-                const partData = combined.slice(
+                const partData = combined.slice(
+                  offset,
+                  offset + uploadPartSize,
+                );
                 yield* uploadBufferedPart(partData, false);
                 offset += uploadPartSize;
               }
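The final hunk is also formatting-only: the loop carves fixed uploadPartSize slices out of the accumulated buffer and keeps the remainder for the next incoming chunk. The same logic as a pure function (a sketch; extractParts is a hypothetical name, not part of the package):

function extractParts(
  combined: Uint8Array,
  uploadPartSize: number,
): { parts: Uint8Array[]; remainder: Uint8Array } {
  // Carve full uploadPartSize slices; the remainder is carried over until
  // the stream ends, when it is flushed as the final (possibly short) part.
  const parts: Uint8Array[] = [];
  let offset = 0;
  while (combined.length - offset >= uploadPartSize) {
    parts.push(combined.slice(offset, offset + uploadPartSize));
    offset += uploadPartSize;
  }
  return { parts, remainder: combined.slice(offset) };
}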