@fluentcommerce/fc-connect-sdk 0.1.54 → 0.1.55

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (475) hide show
  1. package/CHANGELOG.md +12 -0
  2. package/dist/cjs/clients/fluent-client.js +13 -6
  3. package/dist/cjs/utils/pagination-helpers.js +38 -2
  4. package/dist/cjs/versori/fluent-versori-client.js +11 -5
  5. package/dist/esm/clients/fluent-client.js +13 -6
  6. package/dist/esm/utils/pagination-helpers.js +38 -2
  7. package/dist/esm/versori/fluent-versori-client.js +11 -5
  8. package/dist/tsconfig.esm.tsbuildinfo +1 -1
  9. package/dist/tsconfig.tsbuildinfo +1 -1
  10. package/dist/tsconfig.types.tsbuildinfo +1 -1
  11. package/docs/00-START-HERE/EXPORT-VALIDATION.md +158 -158
  12. package/docs/00-START-HERE/cli-analyze-source-structure-guide.md +655 -655
  13. package/docs/00-START-HERE/cli-documentation-index.md +202 -202
  14. package/docs/00-START-HERE/cli-quick-reference.md +252 -252
  15. package/docs/00-START-HERE/decision-tree.md +552 -552
  16. package/docs/00-START-HERE/getting-started.md +1070 -1070
  17. package/docs/00-START-HERE/mapper-quick-decision-guide.md +235 -235
  18. package/docs/00-START-HERE/readme.md +237 -237
  19. package/docs/00-START-HERE/retailerid-configuration.md +404 -404
  20. package/docs/00-START-HERE/sdk-philosophy.md +794 -794
  21. package/docs/00-START-HERE/troubleshooting-quick-reference.md +1086 -1086
  22. package/docs/01-TEMPLATES/faq.md +686 -686
  23. package/docs/01-TEMPLATES/patterns/pattern-templates-guide.md +68 -68
  24. package/docs/01-TEMPLATES/patterns/patterns-csv-schema-validation-and-rejection-report.md +233 -233
  25. package/docs/01-TEMPLATES/patterns/patterns-custom-resolvers.md +407 -407
  26. package/docs/01-TEMPLATES/patterns/patterns-error-handling-retry.md +511 -511
  27. package/docs/01-TEMPLATES/patterns/patterns-field-mapping-universal.md +701 -701
  28. package/docs/01-TEMPLATES/patterns/patterns-large-file-splitting.md +1430 -1430
  29. package/docs/01-TEMPLATES/patterns/patterns-master-data-etl.md +2399 -2399
  30. package/docs/01-TEMPLATES/patterns/patterns-pagination-streaming.md +447 -447
  31. package/docs/01-TEMPLATES/patterns/patterns-state-duplicate-prevention.md +385 -385
  32. package/docs/01-TEMPLATES/readme.md +957 -957
  33. package/docs/01-TEMPLATES/standalone/standalone-asn-inbound-processing.md +1209 -1209
  34. package/docs/01-TEMPLATES/standalone/standalone-graphql-query-export.md +1140 -1140
  35. package/docs/01-TEMPLATES/standalone/standalone-graphql-to-parquet-partitioned-s3.md +432 -432
  36. package/docs/01-TEMPLATES/standalone/standalone-multi-channel-inventory-sync.md +1185 -1185
  37. package/docs/01-TEMPLATES/standalone/standalone-multi-source-aggregation.md +1462 -1462
  38. package/docs/01-TEMPLATES/standalone/standalone-s3-csv-batch-api.md +1390 -1390
  39. package/docs/01-TEMPLATES/standalone/standalone-s3-csv-inventory-to-batch.md +330 -330
  40. package/docs/01-TEMPLATES/standalone/standalone-scripts-guide.md +87 -87
  41. package/docs/01-TEMPLATES/standalone/standalone-sftp-xml-graphql.md +1444 -1444
  42. package/docs/01-TEMPLATES/standalone/standalone-webhook-payload-processing.md +688 -688
  43. package/docs/01-TEMPLATES/versori/business-examples/business-examples-dropship-order-routing.md +193 -193
  44. package/docs/01-TEMPLATES/versori/business-examples/business-examples-graphql-parquet-extraction.md +518 -518
  45. package/docs/01-TEMPLATES/versori/business-examples/business-examples-inter-location-transfers.md +2162 -2162
  46. package/docs/01-TEMPLATES/versori/business-examples/business-examples-pre-order-allocation.md +2226 -2226
  47. package/docs/01-TEMPLATES/versori/business-examples/business-scenarios-guide.md +87 -87
  48. package/docs/01-TEMPLATES/versori/patterns/versori-patterns-connection-validation-pattern.md +656 -656
  49. package/docs/01-TEMPLATES/versori/patterns/versori-patterns-dual-workflow-connector.md +835 -835
  50. package/docs/01-TEMPLATES/versori/patterns/versori-patterns-guide.md +108 -108
  51. package/docs/01-TEMPLATES/versori/patterns/versori-patterns-kv-state-management.md +1533 -1533
  52. package/docs/01-TEMPLATES/versori/patterns/versori-patterns-xml-response-patterns.md +1160 -1160
  53. package/docs/01-TEMPLATES/versori/versori-platform-guide.md +201 -201
  54. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-asn-purchase-order.md +1906 -1906
  55. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-dropship-routing.md +1074 -1074
  56. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-flash-sale-reserve.md +1395 -1395
  57. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-generic-xml-order.md +888 -888
  58. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-payment-gateway-integration.md +2478 -2478
  59. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-rma-returns-comprehensive.md +2240 -2240
  60. package/docs/01-TEMPLATES/versori/webhooks/template-webhook-xml-order-ingestion.md +2029 -2029
  61. package/docs/01-TEMPLATES/versori/webhooks/webhook-templates-guide.md +140 -140
  62. package/docs/01-TEMPLATES/versori/workflows/_examples/sample-data/inventory-mapping.json +20 -20
  63. package/docs/01-TEMPLATES/versori/workflows/_examples/sample-data/products_2025-01-22.csv +11 -11
  64. package/docs/01-TEMPLATES/versori/workflows/_examples/sample-data/sample-data-guide.md +34 -34
  65. package/docs/01-TEMPLATES/versori/workflows/_examples/workflow-examples-guide.md +36 -36
  66. package/docs/01-TEMPLATES/versori/workflows/extraction/extraction-modes-guide.md +1038 -1038
  67. package/docs/01-TEMPLATES/versori/workflows/extraction/extraction-workflows-guide.md +138 -138
  68. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/graphql-extraction-guide.md +63 -63
  69. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-fulfillments-to-sftp-csv.md +2062 -2062
  70. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-fulfillments-to-sftp-xml.md +2294 -2294
  71. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-inventory-positions-to-s3-csv.md +2461 -2461
  72. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-inventory-positions-to-sftp-xml.md +2529 -2529
  73. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-inventory-quantities-to-s3-csv.md +2464 -2464
  74. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-inventory-quantities-to-s3-json.md +1959 -1959
  75. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-orders-to-s3-csv.md +1953 -1953
  76. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-orders-to-sftp-xml.md +2541 -2541
  77. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-products-to-s3-json.md +2384 -2384
  78. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-products-to-sftp-xml.md +2445 -2445
  79. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-virtual-positions-to-s3-csv.md +2355 -2355
  80. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-virtual-positions-to-s3-json.md +2042 -2042
  81. package/docs/01-TEMPLATES/versori/workflows/extraction/graphql-queries/template-extraction-virtual-positions-to-sftp-xml.md +2726 -2726
  82. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/batch-api-guide.md +206 -206
  83. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-cycle-count-reconciliation.md +2030 -2030
  84. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-multi-channel-inventory-sync.md +1882 -1882
  85. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-csv-inventory-batch.md +2827 -2827
  86. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-json-inventory-batch.md +1952 -1952
  87. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-xml-inventory-batch.md +3289 -3289
  88. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-sftp-csv-inventory-batch.md +3064 -3064
  89. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-sftp-json-inventory-batch.md +3238 -3238
  90. package/docs/01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-sftp-xml-inventory-batch.md +2977 -2977
  91. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/event-api-guide.md +321 -321
  92. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-payload-json-order-cancel-event.md +959 -959
  93. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-payload-xml-order-cancel-event.md +1170 -1170
  94. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-s3-csv-product-event.md +2312 -2312
  95. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-s3-json-product-event.md +2999 -2999
  96. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-s3-parquet-product-event.md +2836 -2836
  97. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-s3-xml-product-event.md +2395 -2395
  98. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-sftp-csv-product-event.md +2295 -2295
  99. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-sftp-json-product-event.md +2602 -2602
  100. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-sftp-parquet-product-event.md +2589 -2589
  101. package/docs/01-TEMPLATES/versori/workflows/ingestion/event-api/template-ingestion-sftp-xml-product-event.md +3578 -3578
  102. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/graphql-mutations-guide.md +93 -93
  103. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-payload-json-order-update-graphql.md +1260 -1260
  104. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-payload-xml-order-update-graphql.md +1472 -1472
  105. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-s3-csv-control-graphql.md +2417 -2417
  106. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-s3-csv-location-graphql.md +2811 -2811
  107. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-s3-csv-price-graphql.md +2619 -2619
  108. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-s3-json-location-graphql.md +2807 -2807
  109. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-s3-xml-location-graphql.md +2373 -2373
  110. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-sftp-csv-control-graphql.md +2740 -2740
  111. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-sftp-csv-location-graphql.md +2760 -2760
  112. package/docs/01-TEMPLATES/versori/workflows/ingestion/graphql-mutations/template-ingestion-sftp-json-location-graphql.md +1710 -1710
  113. package/docs/01-TEMPLATES/versori/workflows/ingestion/ingestion-workflows-guide.md +136 -136
  114. package/docs/01-TEMPLATES/versori/workflows/rubix-webhooks/rubix-webhooks-guide.md +520 -520
  115. package/docs/01-TEMPLATES/versori/workflows/rubix-webhooks/template-webhook-rubix-fulfilment-to-sftp-xml-inline.md +1418 -1418
  116. package/docs/01-TEMPLATES/versori/workflows/rubix-webhooks/template-webhook-rubix-fulfilment-to-sftp-xml-universal-mapper.md +1785 -1785
  117. package/docs/01-TEMPLATES/versori/workflows/rubix-webhooks/template-webhook-rubix-order-attribute-update.md +824 -824
  118. package/docs/01-TEMPLATES/versori/workflows/workflows-overview-guide.md +646 -646
  119. package/docs/02-CORE-GUIDES/advanced-services/advanced-services-batch-archival.md +724 -724
  120. package/docs/02-CORE-GUIDES/advanced-services/advanced-services-job-tracker.md +627 -627
  121. package/docs/02-CORE-GUIDES/advanced-services/advanced-services-partial-batch-recovery.md +561 -561
  122. package/docs/02-CORE-GUIDES/advanced-services/advanced-services-quick-reference.md +367 -367
  123. package/docs/02-CORE-GUIDES/advanced-services/advanced-services-readme.md +407 -407
  124. package/docs/02-CORE-GUIDES/advanced-services/readme.md +49 -49
  125. package/docs/02-CORE-GUIDES/api-reference/api-reference-quick-reference.md +548 -548
  126. package/docs/02-CORE-GUIDES/api-reference/event-api-input-output-reference.md +702 -1171
  127. package/docs/02-CORE-GUIDES/api-reference/examples/client-initialization.ts +286 -286
  128. package/docs/02-CORE-GUIDES/api-reference/graphql-error-classification.md +337 -337
  129. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-01-client-api.md +399 -520
  130. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-03-authentication.md +199 -199
  131. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-04-graphql-mapping.md +925 -925
  132. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-05-services.md +1198 -1198
  133. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-06-data-sources.md +1083 -1083
  134. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-07-parsers.md +1097 -1097
  135. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-08-pagination.md +513 -513
  136. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-08-types.md +545 -597
  137. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-09-error-handling.md +527 -527
  138. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-09-webhook-validation.md +514 -514
  139. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-10-extraction.md +557 -557
  140. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-10-utilities.md +412 -412
  141. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-11-cli-tools.md +423 -423
  142. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-11-error-handling.md +716 -716
  143. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-12-analyze-source-structure.md +518 -518
  144. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-12-partial-responses.md +212 -212
  145. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-12-testing.md +300 -300
  146. package/docs/02-CORE-GUIDES/api-reference/modules/api-reference-13-resolver-builder.md +322 -322
  147. package/docs/02-CORE-GUIDES/api-reference/readme.md +279 -279
  148. package/docs/02-CORE-GUIDES/auto-pagination/auto-pagination-quick-reference.md +351 -351
  149. package/docs/02-CORE-GUIDES/auto-pagination/auto-pagination-readme.md +277 -277
  150. package/docs/02-CORE-GUIDES/auto-pagination/examples/auto-pagination-readme.md +178 -178
  151. package/docs/02-CORE-GUIDES/auto-pagination/examples/common-patterns.ts +351 -351
  152. package/docs/02-CORE-GUIDES/auto-pagination/examples/paginate-products.ts +384 -384
  153. package/docs/02-CORE-GUIDES/auto-pagination/examples/paginate-virtual-positions.ts +308 -308
  154. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-01-foundations.md +470 -470
  155. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-02-quick-start.md +713 -713
  156. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-03-configuration.md +754 -754
  157. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-04-advanced-patterns.md +732 -732
  158. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-05-sdk-integration.md +847 -847
  159. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-06-troubleshooting.md +359 -359
  160. package/docs/02-CORE-GUIDES/auto-pagination/modules/auto-pagination-07-api-reference.md +462 -462
  161. package/docs/02-CORE-GUIDES/auto-pagination/readme.md +54 -54
  162. package/docs/02-CORE-GUIDES/data-sources/data-sources-file-operations-error-handling.md +1487 -1487
  163. package/docs/02-CORE-GUIDES/data-sources/data-sources-quick-reference.md +836 -836
  164. package/docs/02-CORE-GUIDES/data-sources/data-sources-readme.md +276 -276
  165. package/docs/02-CORE-GUIDES/data-sources/data-sources-sftp-credential-access-security.md +553 -553
  166. package/docs/02-CORE-GUIDES/data-sources/examples/common-patterns.ts +409 -409
  167. package/docs/02-CORE-GUIDES/data-sources/examples/data-sources-readme.md +178 -178
  168. package/docs/02-CORE-GUIDES/data-sources/examples/s3-operations.ts +308 -308
  169. package/docs/02-CORE-GUIDES/data-sources/examples/sftp-operations.ts +371 -371
  170. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-01-foundations.md +735 -735
  171. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-02-s3-operations.md +1302 -1302
  172. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-03-sftp-operations.md +1379 -1379
  173. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-04-file-patterns.md +941 -941
  174. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-05-advanced-topics.md +813 -813
  175. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-06-integration-patterns.md +486 -486
  176. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-07-troubleshooting.md +387 -387
  177. package/docs/02-CORE-GUIDES/data-sources/modules/data-sources-08-api-reference.md +417 -417
  178. package/docs/02-CORE-GUIDES/data-sources/readme.md +77 -77
  179. package/docs/02-CORE-GUIDES/error-handling-guide.md +936 -936
  180. package/docs/02-CORE-GUIDES/extraction/examples/02-core-guides-extraction-readme.md +116 -116
  181. package/docs/02-CORE-GUIDES/extraction/examples/common-patterns.ts +428 -428
  182. package/docs/02-CORE-GUIDES/extraction/examples/extract-inventory-basic.ts +187 -187
  183. package/docs/02-CORE-GUIDES/extraction/extraction-quick-reference.md +596 -596
  184. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-01-foundations.md +514 -514
  185. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-02-basic-extraction.md +823 -823
  186. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-03-parquet-processing.md +507 -507
  187. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-04-data-enrichment.md +546 -546
  188. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-05-transformation.md +494 -494
  189. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-06-export-formats.md +458 -458
  190. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-06-performance.md +138 -138
  191. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-07-api-reference.md +148 -148
  192. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-07-optimization.md +692 -692
  193. package/docs/02-CORE-GUIDES/extraction/modules/02-core-guides-extraction-08-extraction-orchestrator.md +1008 -1008
  194. package/docs/02-CORE-GUIDES/extraction/readme.md +151 -151
  195. package/docs/02-CORE-GUIDES/ingestion/examples/_simple-kv-store.ts +40 -40
  196. package/docs/02-CORE-GUIDES/ingestion/examples/error-recovery.ts +728 -728
  197. package/docs/02-CORE-GUIDES/ingestion/examples/event-driven.ts +501 -501
  198. package/docs/02-CORE-GUIDES/ingestion/examples/local-file-ingestion.ts +88 -88
  199. package/docs/02-CORE-GUIDES/ingestion/examples/parquet-ingestion.ts +117 -117
  200. package/docs/02-CORE-GUIDES/ingestion/examples/performance-optimized.ts +647 -647
  201. package/docs/02-CORE-GUIDES/ingestion/examples/s3-csv-ingestion.ts +169 -169
  202. package/docs/02-CORE-GUIDES/ingestion/examples/sftp-csv-ingestion.ts +134 -134
  203. package/docs/02-CORE-GUIDES/ingestion/ingestion-quick-reference.md +546 -546
  204. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-01-introduction.md +626 -626
  205. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-02-quick-start.md +658 -658
  206. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-03-data-sources.md +1052 -1052
  207. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-04-field-mapping.md +763 -763
  208. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-05-advanced-parsers.md +676 -676
  209. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-06-batch-api.md +1295 -1295
  210. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-07-api-reference.md +138 -138
  211. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-07-state-management.md +1037 -1037
  212. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-08-performance-optimization.md +1349 -1349
  213. package/docs/02-CORE-GUIDES/ingestion/modules/02-core-guides-ingestion-09-best-practices.md +1893 -1893
  214. package/docs/02-CORE-GUIDES/ingestion/readme.md +160 -160
  215. package/docs/02-CORE-GUIDES/logging-guide.md +585 -585
  216. package/docs/02-CORE-GUIDES/mapping/error-handling-patterns.md +401 -401
  217. package/docs/02-CORE-GUIDES/mapping/examples/02-core-guides-mapping-readme.md +128 -128
  218. package/docs/02-CORE-GUIDES/mapping/examples/common-patterns.ts +273 -273
  219. package/docs/02-CORE-GUIDES/mapping/examples/csv-location-ingestion.json +36 -36
  220. package/docs/02-CORE-GUIDES/mapping/examples/csv-mapping.ts +242 -242
  221. package/docs/02-CORE-GUIDES/mapping/examples/graphql-to-parquet-extraction.json +36 -36
  222. package/docs/02-CORE-GUIDES/mapping/examples/json-mapping.ts +213 -213
  223. package/docs/02-CORE-GUIDES/mapping/examples/json-product-to-mutation.json +48 -48
  224. package/docs/02-CORE-GUIDES/mapping/examples/xml-mapping.ts +291 -291
  225. package/docs/02-CORE-GUIDES/mapping/examples/xml-order-to-mutation.json +45 -45
  226. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/graphql-mutation-mapping-quick-reference.md +463 -463
  227. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/graphql-mutation-mapping-readme.md +227 -227
  228. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-01-introduction.md +222 -222
  229. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-02-quick-start.md +351 -351
  230. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-03-schema-validation.md +569 -569
  231. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-04-mapping-patterns.md +471 -471
  232. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-05-configuration-reference.md +611 -611
  233. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-06-advanced-xpath.md +148 -148
  234. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-06-path-syntax.md +464 -464
  235. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-07-api-reference.md +94 -94
  236. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-07-array-handling.md +307 -307
  237. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-08-custom-resolvers.md +544 -544
  238. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-09-advanced-patterns.md +427 -427
  239. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-10-hooks-and-variables.md +336 -336
  240. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-11-error-handling.md +488 -488
  241. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-12-arguments-vs-nodes.md +383 -383
  242. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/modules/graphql-mutation-mapping-13-best-practices.md +477 -477
  243. package/docs/02-CORE-GUIDES/mapping/graphql-mutation-mapping/readme.md +62 -62
  244. package/docs/02-CORE-GUIDES/mapping/mapping-format-decision-tree.md +480 -480
  245. package/docs/02-CORE-GUIDES/mapping/mapping-graphql-alias-batching-guide.md +820 -820
  246. package/docs/02-CORE-GUIDES/mapping/mapping-javascript-objects.md +2369 -2369
  247. package/docs/02-CORE-GUIDES/mapping/mapping-mapper-comparison-guide.md +682 -682
  248. package/docs/02-CORE-GUIDES/mapping/modules/02-core-guides-mapping-07-api-reference.md +1327 -1327
  249. package/docs/02-CORE-GUIDES/mapping/modules/02-core-guides-mapping-08-error-handling.md +1142 -1142
  250. package/docs/02-CORE-GUIDES/mapping/modules/mapping-04-use-cases.md +891 -891
  251. package/docs/02-CORE-GUIDES/mapping/modules/mapping-06-helpers-resolvers.md +1126 -1126
  252. package/docs/02-CORE-GUIDES/mapping/modules/mapping-06-sdk-resolvers.md +199 -199
  253. package/docs/02-CORE-GUIDES/mapping/modules/mapping-07-api-reference.md +1319 -1319
  254. package/docs/02-CORE-GUIDES/mapping/readme.md +178 -178
  255. package/docs/02-CORE-GUIDES/mapping/resolver-registration.md +410 -410
  256. package/docs/02-CORE-GUIDES/mapping/resolvers/examples/common-patterns.ts +226 -226
  257. package/docs/02-CORE-GUIDES/mapping/resolvers/examples/custom-resolvers.ts +227 -227
  258. package/docs/02-CORE-GUIDES/mapping/resolvers/examples/sdk-resolvers-usage.ts +203 -203
  259. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-readme.md +274 -274
  260. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-api-reference.md +679 -679
  261. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-cookbook.md +826 -826
  262. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-guide.md +1330 -1330
  263. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-helpers-reference.md +1437 -1437
  264. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-parameters-reference.md +553 -553
  265. package/docs/02-CORE-GUIDES/mapping/resolvers/mapping-resolvers-resolver-troubleshooting.md +854 -854
  266. package/docs/02-CORE-GUIDES/mapping/resolvers/readme.md +75 -75
  267. package/docs/02-CORE-GUIDES/parsers/examples/02-core-guides-parsers-readme.md +161 -161
  268. package/docs/02-CORE-GUIDES/parsers/examples/csv-parser-examples.ts +110 -110
  269. package/docs/02-CORE-GUIDES/parsers/examples/json-parser-examples.ts +33 -33
  270. package/docs/02-CORE-GUIDES/parsers/examples/parquet-parser-examples.ts +47 -47
  271. package/docs/02-CORE-GUIDES/parsers/examples/xml-parser-examples.ts +38 -38
  272. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-01-foundations.md +355 -355
  273. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-02-csv-parser.md +772 -772
  274. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-03-json-parser.md +789 -789
  275. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-04-xml-parser.md +857 -857
  276. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-05-parquet-parser.md +603 -603
  277. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-06-integration-patterns.md +702 -702
  278. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-06-streaming.md +121 -121
  279. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-07-api-reference.md +89 -89
  280. package/docs/02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-07-troubleshooting.md +727 -727
  281. package/docs/02-CORE-GUIDES/parsers/parsers-quick-reference.md +482 -482
  282. package/docs/02-CORE-GUIDES/parsers/parsers-readme.md +258 -258
  283. package/docs/02-CORE-GUIDES/parsers/readme.md +65 -65
  284. package/docs/02-CORE-GUIDES/readme.md +194 -194
  285. package/docs/02-CORE-GUIDES/webhook-validation/examples/basic-validation.ts +108 -108
  286. package/docs/02-CORE-GUIDES/webhook-validation/examples/common-patterns.ts +316 -316
  287. package/docs/02-CORE-GUIDES/webhook-validation/examples/webhook-validation-readme.md +61 -61
  288. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-01-foundations.md +440 -440
  289. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-02-quick-start.md +525 -525
  290. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-03-versori-integration.md +741 -741
  291. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-04-platform-integration.md +629 -629
  292. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-05-configuration.md +535 -535
  293. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-06-error-handling.md +611 -611
  294. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-06-troubleshooting.md +124 -124
  295. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-07-api-reference.md +511 -511
  296. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-08-rubix-webhooks.md +590 -590
  297. package/docs/02-CORE-GUIDES/webhook-validation/modules/webhook-validation-09-rubix-event-vs-http-call.md +432 -432
  298. package/docs/02-CORE-GUIDES/webhook-validation/readme.md +239 -239
  299. package/docs/02-CORE-GUIDES/webhook-validation/webhook-validation-quick-reference.md +392 -392
  300. package/docs/03-PATTERN-GUIDES/connector-scenarios/connector-scenarios-quick-reference.md +498 -498
  301. package/docs/03-PATTERN-GUIDES/connector-scenarios/connector-scenarios-readme.md +313 -313
  302. package/docs/03-PATTERN-GUIDES/connector-scenarios/examples/common-patterns.ts +612 -612
  303. package/docs/03-PATTERN-GUIDES/connector-scenarios/examples/connector-scenarios-readme.md +253 -253
  304. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-01-foundations.md +452 -452
  305. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-02-simple-scenarios.md +681 -681
  306. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-03-intermediate-scenarios.md +637 -637
  307. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-04-advanced-scenarios.md +650 -650
  308. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-05-bidirectional-sync.md +233 -233
  309. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-06-production-patterns.md +442 -442
  310. package/docs/03-PATTERN-GUIDES/connector-scenarios/modules/connector-scenarios-07-reference.md +445 -445
  311. package/docs/03-PATTERN-GUIDES/connector-scenarios/readme.md +31 -31
  312. package/docs/03-PATTERN-GUIDES/enterprise-integration-patterns.md +1528 -1528
  313. package/docs/03-PATTERN-GUIDES/error-handling/comprehensive-error-handling-guide.md +1437 -1437
  314. package/docs/03-PATTERN-GUIDES/error-handling/error-handling-quick-reference.md +390 -390
  315. package/docs/03-PATTERN-GUIDES/error-handling/examples/common-patterns.ts +438 -438
  316. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-01-foundations.md +362 -362
  317. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-02-error-types.md +850 -850
  318. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-03-utf8-handling.md +456 -456
  319. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-04-error-scenarios.md +658 -658
  320. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-05-calling-patterns.md +671 -671
  321. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-06-retry-strategies.md +1034 -1034
  322. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-07-monitoring.md +653 -653
  323. package/docs/03-PATTERN-GUIDES/error-handling/modules/error-handling-08-api-reference.md +847 -847
  324. package/docs/03-PATTERN-GUIDES/error-handling/readme.md +36 -36
  325. package/docs/03-PATTERN-GUIDES/examples/__tests__/readme.md +40 -40
  326. package/docs/03-PATTERN-GUIDES/examples/__tests__/resolver-examples.test.js +282 -282
  327. package/docs/03-PATTERN-GUIDES/examples/test-data/03-pattern-guides-readme.md +110 -110
  328. package/docs/03-PATTERN-GUIDES/examples/test-data/canonical-inventory.json +123 -123
  329. package/docs/03-PATTERN-GUIDES/examples/test-data/canonical-order.json +171 -171
  330. package/docs/03-PATTERN-GUIDES/examples/test-data/readme.md +28 -28
  331. package/docs/03-PATTERN-GUIDES/extraction/extraction-readme.md +15 -15
  332. package/docs/03-PATTERN-GUIDES/extraction/readme.md +25 -25
  333. package/docs/03-PATTERN-GUIDES/file-operations/examples/common-patterns.ts +407 -407
  334. package/docs/03-PATTERN-GUIDES/file-operations/examples/file-operations-readme.md +142 -142
  335. package/docs/03-PATTERN-GUIDES/file-operations/file-operations-quick-reference.md +462 -462
  336. package/docs/03-PATTERN-GUIDES/file-operations/file-operations-readme.md +379 -379
  337. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-01-foundations.md +430 -430
  338. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-02-quick-start.md +484 -484
  339. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-03-s3-operations.md +507 -507
  340. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-04-sftp-operations.md +963 -963
  341. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-05-streaming-performance.md +503 -503
  342. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-06-archive-patterns.md +386 -386
  343. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-06-error-handling.md +117 -117
  344. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-07-api-reference.md +78 -78
  345. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-07-testing-troubleshooting.md +567 -567
  346. package/docs/03-PATTERN-GUIDES/file-operations/modules/file-operations-08-api-reference.md +1055 -1055
  347. package/docs/03-PATTERN-GUIDES/file-operations/readme.md +32 -32
  348. package/docs/03-PATTERN-GUIDES/ingestion/ingestion-readme.md +15 -15
  349. package/docs/03-PATTERN-GUIDES/ingestion/readme.md +25 -25
  350. package/docs/03-PATTERN-GUIDES/integration-patterns/examples/batch-processing.ts +130 -130
  351. package/docs/03-PATTERN-GUIDES/integration-patterns/examples/common-patterns.ts +360 -360
  352. package/docs/03-PATTERN-GUIDES/integration-patterns/examples/delta-sync.ts +130 -130
  353. package/docs/03-PATTERN-GUIDES/integration-patterns/examples/integration-patterns-readme.md +100 -100
  354. package/docs/03-PATTERN-GUIDES/integration-patterns/examples/real-time-webhook.ts +398 -398
  355. package/docs/03-PATTERN-GUIDES/integration-patterns/integration-patterns-quick-reference.md +962 -962
  356. package/docs/03-PATTERN-GUIDES/integration-patterns/integration-patterns-readme.md +134 -134
  357. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-01-real-time-processing.md +991 -991
  358. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-02-batch-processing.md +1547 -1547
  359. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-03-delta-sync.md +1108 -1108
  360. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-04-webhook-patterns.md +1181 -1181
  361. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-05-error-handling.md +1061 -1061
  362. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-06-advanced-integration-services.md +1547 -1547
  363. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-06-performance.md +109 -109
  364. package/docs/03-PATTERN-GUIDES/integration-patterns/modules/integration-patterns-07-api-reference.md +34 -34
  365. package/docs/03-PATTERN-GUIDES/integration-patterns/readme.md +30 -30
  366. package/docs/03-PATTERN-GUIDES/logging-minimal-mode.md +128 -128
  367. package/docs/03-PATTERN-GUIDES/multiple-connections/examples/common-patterns.ts +380 -380
  368. package/docs/03-PATTERN-GUIDES/multiple-connections/examples/multiple-connections-readme.md +139 -139
  369. package/docs/03-PATTERN-GUIDES/multiple-connections/examples/parallel-root-connections.ts +149 -149
  370. package/docs/03-PATTERN-GUIDES/multiple-connections/examples/real-world-scenarios.ts +405 -405
  371. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-01-foundations.md +378 -378
  372. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-02-quick-start.md +566 -566
  373. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-03-targeting-connections.md +659 -659
  374. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-04-parallel-queries.md +656 -656
  375. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-05-best-practices.md +624 -624
  376. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-06-api-reference.md +824 -824
  377. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-06-versori.md +119 -119
  378. package/docs/03-PATTERN-GUIDES/multiple-connections/modules/multiple-connections-07-api-reference.md +87 -87
  379. package/docs/03-PATTERN-GUIDES/multiple-connections/multiple-connections-quick-reference.md +353 -353
  380. package/docs/03-PATTERN-GUIDES/multiple-connections/multiple-connections-readme.md +270 -270
  381. package/docs/03-PATTERN-GUIDES/multiple-connections/readme.md +30 -30
  382. package/docs/03-PATTERN-GUIDES/pagination/pagination-readme.md +14 -14
  383. package/docs/03-PATTERN-GUIDES/pagination/readme.md +24 -24
  384. package/docs/03-PATTERN-GUIDES/parquet/examples/common-patterns.ts +180 -180
  385. package/docs/03-PATTERN-GUIDES/parquet/examples/read-parquet.ts +48 -48
  386. package/docs/03-PATTERN-GUIDES/parquet/examples/write-parquet.ts +65 -65
  387. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-01-introduction.md +393 -393
  388. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-02-quick-start.md +572 -572
  389. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-03-reading-parquet.md +525 -525
  390. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-04-writing-parquet.md +554 -554
  391. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-05-graphql-extraction.md +405 -405
  392. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-06-performance.md +104 -104
  393. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-06-s3-integration.md +511 -511
  394. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-07-api-reference.md +90 -90
  395. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-07-performance-optimization.md +525 -525
  396. package/docs/03-PATTERN-GUIDES/parquet/modules/03-pattern-guides-parquet-08-best-practices.md +712 -712
  397. package/docs/03-PATTERN-GUIDES/parquet/parquet-quick-reference.md +683 -683
  398. package/docs/03-PATTERN-GUIDES/parquet/parquet-readme.md +248 -248
  399. package/docs/03-PATTERN-GUIDES/parquet/readme.md +32 -32
  400. package/docs/03-PATTERN-GUIDES/parsers/parsers-readme.md +12 -12
  401. package/docs/03-PATTERN-GUIDES/parsers/readme.md +24 -24
  402. package/docs/03-PATTERN-GUIDES/readme.md +159 -159
  403. package/docs/03-PATTERN-GUIDES/webhooks/readme.md +24 -24
  404. package/docs/03-PATTERN-GUIDES/webhooks/webhooks-readme.md +8 -8
  405. package/docs/04-REFERENCE/architecture/architecture-01-overview.md +427 -427
  406. package/docs/04-REFERENCE/architecture/architecture-02-client-architecture.md +424 -424
  407. package/docs/04-REFERENCE/architecture/architecture-03-data-flow.md +690 -690
  408. package/docs/04-REFERENCE/architecture/architecture-04-service-layer.md +834 -834
  409. package/docs/04-REFERENCE/architecture/architecture-05-integration-architecture.md +655 -655
  410. package/docs/04-REFERENCE/architecture/architecture-06-state-management.md +653 -653
  411. package/docs/04-REFERENCE/architecture/architecture-adding-new-data-sources.md +686 -686
  412. package/docs/04-REFERENCE/architecture/readme.md +279 -279
  413. package/docs/04-REFERENCE/platforms/deno/readme.md +117 -117
  414. package/docs/04-REFERENCE/platforms/nodejs/readme.md +146 -146
  415. package/docs/04-REFERENCE/platforms/readme.md +135 -135
  416. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-01-introduction.md +398 -398
  417. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-02-quick-start.md +560 -560
  418. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-03-authentication.md +757 -757
  419. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-04-workflows.md +2476 -2476
  420. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-05-connections.md +1167 -1167
  421. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-06-kv-storage.md +990 -990
  422. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-06-state-management.md +121 -121
  423. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-07-api-reference.md +68 -68
  424. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-07-deployment.md +731 -731
  425. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-08-best-practices.md +1111 -1111
  426. package/docs/04-REFERENCE/platforms/versori/modules/platforms-versori-09-signature-reference.md +766 -766
  427. package/docs/04-REFERENCE/platforms/versori/platforms-versori-readme.md +299 -299
  428. package/docs/04-REFERENCE/platforms/versori/platforms-versori-s3-sftp-configuration-guide.md +1425 -1425
  429. package/docs/04-REFERENCE/platforms/versori/platforms-versori-webhook-api-key-security.md +816 -816
  430. package/docs/04-REFERENCE/platforms/versori/platforms-versori-webhook-connection-security.md +681 -681
  431. package/docs/04-REFERENCE/platforms/versori/platforms-versori-workflow-task-types.md +708 -708
  432. package/docs/04-REFERENCE/platforms/versori/readme.md +108 -108
  433. package/docs/04-REFERENCE/readme.md +148 -148
  434. package/docs/04-REFERENCE/resolver-signature/examples/advanced-resolvers.ts +482 -482
  435. package/docs/04-REFERENCE/resolver-signature/examples/async-resolvers.ts +496 -496
  436. package/docs/04-REFERENCE/resolver-signature/examples/basic-resolvers.ts +343 -343
  437. package/docs/04-REFERENCE/resolver-signature/examples/resolver-signature-readme.md +188 -188
  438. package/docs/04-REFERENCE/resolver-signature/examples/testing-resolvers.ts +463 -463
  439. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-01-foundations.md +286 -286
  440. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-02-parameter-reference.md +643 -643
  441. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-03-basic-examples.md +521 -521
  442. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-04-advanced-patterns.md +739 -739
  443. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-05-sdk-resolvers.md +531 -531
  444. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-06-migration-guide.md +650 -650
  445. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-06-testing.md +125 -125
  446. package/docs/04-REFERENCE/resolver-signature/modules/resolver-signature-07-api-reference.md +794 -794
  447. package/docs/04-REFERENCE/resolver-signature/readme.md +64 -64
  448. package/docs/04-REFERENCE/resolver-signature/resolver-signature-quick-reference.md +270 -270
  449. package/docs/04-REFERENCE/resolver-signature/resolver-signature-readme.md +351 -351
  450. package/docs/04-REFERENCE/schema/fluent-commerce-schema.json +764 -764
  451. package/docs/04-REFERENCE/schema/readme.md +141 -141
  452. package/docs/04-REFERENCE/testing/examples/04-reference-testing-readme.md +158 -158
  453. package/docs/04-REFERENCE/testing/examples/fluent-testing.ts +62 -62
  454. package/docs/04-REFERENCE/testing/examples/health-check.ts +155 -155
  455. package/docs/04-REFERENCE/testing/examples/integration-test.ts +119 -119
  456. package/docs/04-REFERENCE/testing/examples/performance-test.ts +183 -183
  457. package/docs/04-REFERENCE/testing/examples/s3-testing.ts +127 -127
  458. package/docs/04-REFERENCE/testing/modules/04-reference-testing-01-foundations.md +267 -267
  459. package/docs/04-REFERENCE/testing/modules/04-reference-testing-02-s3-testing.md +599 -599
  460. package/docs/04-REFERENCE/testing/modules/04-reference-testing-03-fluent-testing.md +589 -589
  461. package/docs/04-REFERENCE/testing/modules/04-reference-testing-04-integration-testing.md +699 -699
  462. package/docs/04-REFERENCE/testing/modules/04-reference-testing-05-debugging.md +478 -478
  463. package/docs/04-REFERENCE/testing/modules/04-reference-testing-06-cicd-integration.md +463 -463
  464. package/docs/04-REFERENCE/testing/modules/04-reference-testing-06-preflight-validation.md +131 -131
  465. package/docs/04-REFERENCE/testing/modules/04-reference-testing-07-best-practices.md +499 -499
  466. package/docs/04-REFERENCE/testing/modules/04-reference-testing-07-coverage-ci.md +165 -165
  467. package/docs/04-REFERENCE/testing/modules/04-reference-testing-08-api-reference.md +634 -634
  468. package/docs/04-REFERENCE/testing/readme.md +86 -86
  469. package/docs/04-REFERENCE/testing/testing-quick-reference.md +667 -667
  470. package/docs/04-REFERENCE/testing/testing-readme.md +286 -286
  471. package/docs/04-REFERENCE/troubleshooting/readme.md +144 -144
  472. package/docs/04-REFERENCE/troubleshooting/troubleshooting-deno-sftp-compatibility.md +392 -392
  473. package/docs/template-loading-matrix.md +242 -242
  474. package/package.json +5 -3
  475. package/docs/02-CORE-GUIDES/api-reference/cli-profile-integration.md +0 -377
@@ -1,1547 +1,1547 @@
1
- # Module 2: Batch Processing
2
-
3
- > **Learning Objective:** Master batch processing patterns for high-volume data synchronization using the Fluent Batch API and SDK orchestration services.
4
- >
5
- > **Level:** Intermediate
6
-
7
- ## Table of Contents
8
-
9
- 1. [What is Batch Processing?](#what-is-batch-processing)
10
- 2. [When to Use Batch Processing](#when-to-use-batch-processing)
11
- 3. [Fluent Batch API Overview](#fluent-batch-api-overview)
12
- 4. [SDK Batch Components](#sdk-batch-components)
13
- 5. [Basic Batch Workflow](#basic-batch-workflow)
14
- 6. [Job Creation Strategies](#job-creation-strategies)
15
- 7. [Batch Size Optimization](#batch-size-optimization)
16
- 8. [Status Polling and Completion](#status-polling-and-completion)
17
- 9. [Error Handling in Batches](#error-handling-in-batches)
18
- 10. [Complete Implementation Example](#complete-implementation-example)
19
- 11. [Next Steps](#next-steps)
20
-
21
- ---
22
-
23
- ## What is Batch Processing?
24
-
25
- **Batch processing** means grouping multiple records together and processing them as a single unit, optimized for throughput over latency.
26
-
27
- ### Key Characteristics
28
-
29
- | Characteristic | Description | Example |
30
- |----------------|-------------|---------|
31
- | **High Volume** | Process thousands of records | 50,000 inventory positions |
32
- | **Scheduled** | Runs at specific times | Daily at 2 AM |
33
- | **Asynchronous** | Submit job, poll for completion | Job completes in 5-10 minutes |
34
- | **Bulk Operations** | Optimize for throughput | 100 records per API call |
35
-
36
- ### How It Works
37
-
38
- ```
39
- CSV File (50K records) → Parse → Transform → Batch API → Job → Poll Status → Complete
40
- ↓ ↓ ↓ ↓ ↓ ↓ ↓
41
- S3 bucket Parse chunks Map fields Create job Submit Check every Archive
42
- batches 30 seconds file
43
- ```
44
-
45
- ### Visual Flow
46
-
47
- ```
48
- ┌─────────────────┐
49
- │ S3 CSV File │
50
- │ (50,000 records)│
51
- └────────┬────────┘
52
-
53
- │ ①Download & Parse
54
-
55
- ┌─────────────────┐
56
- │ CSVParserService │
57
- │ Stream chunks │
58
- └────────┬────────┘
59
-
60
- │ ②Transform fields
61
-
62
- ┌─────────────────┐
63
- │ UniversalMapper │
64
- │ Field mapping │
65
- └────────┬────────┘
66
-
67
- │ ③Create Batch job
68
-
69
- ┌─────────────────┐
70
- │ FluentClient │
71
- │ createJob() │
72
- └────────┬────────┘
73
-
74
- │ ④Send batches (100 records each)
75
-
76
- ┌─────────────────┐
77
- │ sendBatch() │ ────┐
78
- │ 500 batches │ │ Parallel processing
79
- │ of 100 records │ ────┤ on Fluent servers
80
- └────────┬────────┘ │
81
- │ │
82
- │ ⑤Poll status every 30s
83
-
84
- ┌─────────────────┐
85
- │ getJobStatus() │
86
- │ PROCESSING... │
87
- │ COMPLETED ✓ │
88
- └────────┬────────┘
89
-
90
- │ ⑥Archive file
91
-
92
- ┌─────────────────┐
93
- │ S3 Archive │
94
- │ Success log │
95
- └─────────────────┘
96
- ```
97
-
98
- **Total time**: 5-10 minutes for 50,000 records (vs 2-3 hours for sequential GraphQL mutations).
99
-
100
- ---
101
-
102
- ## When to Use Batch Processing
103
-
104
- ### ✅ Use Batch Processing When:
105
-
106
- | Scenario | Why Batch? | SDK Pattern |
107
- |----------|------------|-------------|
108
- | **Large Files** | 1K+ records | Batch API with streaming |
109
- | **Daily Sync** | Full inventory sync | Scheduled batch job |
110
- | **High Volume** | > 1,000 events/hour | Batch API (not webhooks) |
111
- | **Bulk Updates** | Mass price changes | Single job, multiple batches |
112
- | **CSV/Parquet Files** | Structured file formats | S3DataSource + FluentClient |
113
-
114
- ### ❌ Avoid Batch Processing When:
115
-
116
- | Scenario | Why Not Batch? | Use Instead |
117
- |----------|----------------|-------------|
118
- | **Immediate Updates** | Need < 5 second latency | Real-time (Module 1) |
119
- | **Single Records** | 1-10 records | Direct GraphQL mutation |
120
- | **Event-Driven** | External webhook triggers | Real-time webhook |
121
- | **Critical Orders** | Customer waiting for confirmation | Real-time processing |
122
-
123
- ---
124
-
125
- ## Fluent Batch API Overview
126
-
127
- ### What is Batch API?
128
-
129
- The Fluent Batch API is a specialized GraphQL endpoint for bulk data operations:
130
-
131
- - **Asynchronous**: Submit job, get ID, poll for completion
132
- - **High-throughput**: 10,000+ records in minutes
133
- - **Fault-tolerant**: Partial failures don't block entire job
134
- - **Entity-specific**: Currently supports `InventoryQuantity` only
135
-
136
- ### Supported Operations
137
-
138
- | Entity | Operations | Max per Batch |
139
- |--------|-----------|---------------|
140
- | `InventoryQuantity` | Create, Update | 100-250 records |
141
-
142
- **IMPORTANT**: Batch API currently **only supports InventoryQuantity**. For other entities (Order, Product, Customer), use standard GraphQL mutations.
143
-
144
- ### Batch API Workflow
145
-
146
- ```graphql
147
- # Step 1: Create job
148
- mutation CreateJob {
149
- createJob(input: {
150
- name: "Daily Inventory Sync"
151
- retailerId: "2"
152
- }) {
153
- id
154
- status
155
- }
156
- }
157
-
158
- # Step 2: Send batches
159
- mutation SendBatch($jobId: ID!, $entities: [InventoryQuantityInput!]!) {
160
- sendBatch(jobId: $jobId, entities: $entities) {
161
- id
162
- status
163
- recordCount
164
- }
165
- }
166
-
167
- # Step 3: Poll status
168
- query GetJobStatus($jobId: ID!) {
169
- job(id: $jobId) {
170
- id
171
- status # PENDING, PROCESSING, COMPLETED, FAILED
172
- totalBatches
173
- completedBatches
174
- errorSummary {
175
- totalErrors
176
- errorTypes
177
- }
178
- }
179
- }
180
- ```
181
-
182
- ### Job Lifecycle
183
-
184
- ```
185
- CREATE_JOB → PENDING → SEND_BATCHES → PROCESSING → COMPLETED
186
- ↓ ↓
187
- (can add more batches) (can check errors)
188
- ```
189
-
190
- ---
191
-
192
- ## SDK Batch Components
193
-
194
- ### Component 1: FluentClient Batch Methods
195
-
196
- The SDK provides batch methods directly on `FluentClient` (there is no separate `FluentBatchManager` class):
197
-
198
- ```typescript
199
- import { createClient } from '@fluentcommerce/fc-connect-sdk';
200
-
201
- const client = await createClient({
202
- config: {
203
- baseUrl: 'https://api.fluentcommerce.com',
204
- clientId: process.env.FLUENT_CLIENT_ID,
205
- clientSecret: process.env.FLUENT_CLIENT_SECRET,
206
- retailerId: process.env.FLUENT_RETAILER_ID
207
- }
208
- });
209
-
210
- // Create job
211
- const job = await client.createJob({
212
- name: 'Daily Inventory Sync',
213
- retailerId: '2'
214
- });
215
-
216
- // Send batch
217
- const batch = await client.sendBatch(job.id, {
218
- entities: inventoryRecords
219
- });
220
-
221
- // Get status
222
- const status = await client.getJobStatus(job.id);
223
-
224
- // Get detailed status with batches
225
- const jobDetail = await client.getBatchStatus(job.id, batch.id);
226
- ```
227
-
228
- **Available methods**:
229
- - `createJob(input)` - Create new Batch job
230
- - `sendBatch(jobId, data)` - Submit batch of records
231
- - `getJobStatus(jobId)` - Get job status and summary
232
- - `getBatchStatus(jobId, batchId)` - Get individual batch details
233
-
234
- ### Component 2: `S3DataSource`
235
-
236
- **Purpose**: Read and write files from S3 with streaming support
237
-
238
- ```typescript
239
- import { S3DataSource } from '@fluentcommerce/fc-connect-sdk';
240
-
241
- const s3 = new S3DataSource({
242
- type: 'S3_CSV',
243
- connectionId: 'my-s3',
244
- name: 'My S3 Source',
245
- s3Config: {
246
- accessKeyId: process.env.AWS_ACCESS_KEY_ID,
247
- secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
248
- region: 'us-east-1',
249
- bucket: 'inventory-bucket'
250
- }
251
- }, logger);
252
-
253
- // List files
254
- const files = await s3.listFiles('inventory/updates/');
255
-
256
- // Download file
257
- const fileContent = await s3.downloadFile('inventory/updates/inventory.csv');
258
-
259
- // Download large file as Buffer (for files >100MB)
260
- const buffer = await s3.downloadFile('inventory/updates/large-file.csv', { encoding: 'binary' });
261
-
262
- // Upload file
263
- await s3.uploadFile('inventory/archive/processed.csv', content);
264
-
265
- // Move file
266
- await s3.moveFile('inventory/updates/file.csv', 'inventory/archive/file.csv');
267
- ```
268
-
269
- **Key features**:
270
- - Streaming support for large files (memory-efficient)
271
- - Automatic retry on network errors
272
- - Progress callbacks for uploads/downloads
273
-
274
- ### Component 3: `CSVParserService`
275
-
276
- **Purpose**: Parse CSV files with validation and streaming
277
-
278
- ```typescript
279
- import { CSVParserService } from '@fluentcommerce/fc-connect-sdk';
280
-
281
- const parser = new CSVParserService({
282
- headers: true, // First row is headers
283
- skipEmptyLines: true, // Ignore blank lines
284
- delimiter: ',', // CSV delimiter
285
- quote: '"', // Quote character
286
- escape: '\\' // Escape character
287
- });
288
-
289
- // Parse entire file (for small files < 10MB)
290
- const records = await parser.parse(csvContent);
291
-
292
- // Stream parse (for large files)
293
- const recordStream = parser.streamParse(csvStream);
294
- for await (const record of recordStream) {
295
- // Process record
296
- }
297
- ```
298
-
299
- **Validation options**:
300
- - Required columns
301
- - Type validation
302
- - Custom validators
303
-
304
- ### Component 4: `UniversalMapper`
305
-
306
- **Purpose**: Transform CSV/JSON fields to Fluent schema
307
-
308
- ```typescript
309
- import { UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
310
-
311
- const mappingConfig = {
312
- fields: {
313
- ref: {
314
- source: 'sku',
315
- resolver: 'custom.buildRef' // Combine sku + location
316
- },
317
- type: {
318
- value: 'INVENTORY' // Static value
319
- },
320
- status: {
321
- source: 'status',
322
- resolver: 'sdk.uppercase'
323
- },
324
- productRef: {
325
- source: 'sku',
326
- required: true
327
- },
328
- locationRef: {
329
- source: 'warehouse_code',
330
- required: true
331
- },
332
- onHand: {
333
- source: 'quantity',
334
- resolver: 'sdk.parseInt'
335
- }
336
- }
337
- };
338
-
339
- const mapper = new UniversalMapper(mappingConfig, {
340
- customResolvers: {
341
- 'custom.buildRef': (value, data) => {
342
- return `${data.sku}-${data.warehouse_code}`;
343
- }
344
- }
345
- });
346
-
347
- const result = await mapper.map(csvRecord);
348
- // result.data = { ref: 'SKU001-WH01', type: 'INVENTORY', ... }
349
- ```
350
-
351
- ### Workflow Composition Pattern
352
-
353
- **Purpose**: Compose SDK services for complete ingestion workflows
354
-
355
- Instead of using a single orchestrator, compose the above components into your custom workflow:
356
-
357
- ```typescript
358
- import {
359
- createClient,
360
- S3DataSource,
361
- CSVParserService,
362
- UniversalMapper,
363
- StateService,
364
- createConsoleLogger,
365
- toStructuredLogger
366
- } from '@fluentcommerce/fc-connect-sdk';
367
-
368
- async function processInventoryFiles() {
369
- const logger = toStructuredLogger(createConsoleLogger(), { logLevel: 'info' });
370
-
371
- // Initialize components
372
- const client = await createClient({ config });
373
- const s3 = new S3DataSource(s3Config, logger);
374
- const parser = new CSVParserService();
375
- const mapper = new UniversalMapper(mappingConfig);
376
- const stateService = new StateService(logger);
377
-
378
- // List and process files
379
- const files = await s3.listFiles({ prefix: 'inventory/updates/' });
380
-
381
- for (const file of files) {
382
- if (await stateService.isFileProcessed(file.name)) continue;
383
-
384
- try {
385
- // 1. Download and parse
386
- const content = await s3.downloadFile(file.name);
387
- const records = await parser.parse(content);
388
-
389
- // 2. Map fields
390
- const inventory = [];
391
- for (const record of records) {
392
- const result = await mapper.map(record);
393
- if (result.success) inventory.push(result.data);
394
- }
395
-
396
- // 3. Create job and send batches
397
- const job = await client.createJob({
398
- name: `Inventory - ${file.name}`,
399
- retailerId: '1'
400
- });
401
-
402
- const batches = chunkArray(inventory, 100);
403
- for (const batch of batches) {
404
- await client.sendBatch(job.id, {
405
- action: 'UPSERT',
406
- entityType: 'INVENTORY',
407
- entities: batch
408
- });
409
- }
410
-
411
- // 4. Archive and mark processed
412
- await s3.moveFile(file.name, `inventory/archive/${file.name}`);
413
- await stateService.markFileProcessed(file.name);
414
-
415
- } catch (error) {
416
- logger.error(`Failed to process ${file.name}`, error);
417
- await s3.moveFile(file.name, `inventory/errors/${file.name}`);
418
- }
419
- }
420
- }
421
- ```
422
-
423
- **Benefits of building block composition**:
424
- 1. ✅ Full control over workflow logic
425
- 2. ✅ Custom error handling
426
- 3. ✅ Easy to test individual components
427
- 4. ✅ Flexible batch sizing and job strategies
428
- 5. ✅ Works with any entity type (not just INVENTORY)
429
- 6. ✅ Easy to add custom business rules
430
-
431
- ---
432
-
433
- ## Basic Batch Workflow
434
-
435
- ### Step-by-Step Pattern
436
-
437
- ```typescript
438
- /**
439
- * Basic Batch Processing Workflow
440
- *
441
- * Steps:
442
- * 1. Create Fluent client
443
- * 2. Download and parse CSV file
444
- * 3. Transform records to Fluent schema
445
- * 4. Create Batch job
446
- * 5. Send records in batches
447
- * 6. Poll status until complete
448
- * 7. Handle results
449
- */
450
-
451
- import { createClient, S3DataSource, CSVParserService, UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
452
-
453
- async function batchInventorySync() {
454
- // Step 1: Create client
455
- const client = await createClient({
456
- baseUrl: process.env.FLUENT_BASE_URL,
457
- clientId: process.env.FLUENT_CLIENT_ID,
458
- clientSecret: process.env.FLUENT_CLIENT_SECRET,
459
- retailerId: process.env.FLUENT_RETAILER_ID
460
- });
461
-
462
- // Step 2: Download CSV
463
- const s3 = new S3DataSource({
464
- type: 'S3_CSV',
465
- connectionId: 'my-s3',
466
- name: 'My S3 Source',
467
- s3Config: {
468
- accessKeyId: process.env.AWS_ACCESS_KEY_ID,
469
- secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
470
- region: process.env.AWS_REGION,
471
- bucket: process.env.AWS_BUCKET
472
- }
473
- }, console);
474
-
475
- const csvContent = await s3.downloadFile('inventory/daily-update.csv');
476
-
477
- // Step 3: Parse CSV
478
- const parser = new CSVParserService({ headers: true });
479
- const records = await parser.parse(csvContent);
480
-
481
- console.log(`Parsed ${records.length} records`);
482
-
483
- // Step 4: Transform records
484
- const mapper = new UniversalMapper({
485
- fields: {
486
- ref: { source: 'sku', resolver: 'custom.buildRef' },
487
- type: { value: 'INVENTORY' },
488
- productRef: { source: 'sku', required: true },
489
- locationRef: { source: 'location', required: true },
490
- onHand: { source: 'qty', resolver: 'sdk.parseInt' }
491
- }
492
- }, {
493
- customResolvers: {
494
- 'custom.buildRef': (value, data) => `${data.sku}-${data.location}`
495
- }
496
- });
497
-
498
- const mappedRecords = [];
499
- for (const record of records) {
500
- const result = await mapper.map(record);
501
- if (result.success) {
502
- mappedRecords.push(result.data);
503
- } else {
504
- console.error('Mapping error:', result.errors);
505
- }
506
- }
507
-
508
- console.log(`Mapped ${mappedRecords.length} records`);
509
-
510
- // Step 5: Create Batch job
511
- const job = await client.createJob({
512
- name: `Inventory Sync ${new Date().toISOString()}`,
513
- retailerId: '2'
514
- });
515
-
516
- console.log(`Created job ${job.id}`);
517
-
518
- // Step 6: Send batches
519
- const BATCH_SIZE = 100;
520
- for (let i = 0; i < mappedRecords.length; i += BATCH_SIZE) {
521
- const chunk = mappedRecords.slice(i, i + BATCH_SIZE);
522
-
523
- const batch = await client.sendBatch(job.id, {
524
- entities: chunk
525
- });
526
-
527
- console.log(`Sent batch ${batch.id} (${chunk.length} records)`);
528
- }
529
-
530
- // Step 7: Poll status
531
- let status = await client.getJobStatus(job.id);
532
- while (status.status === 'PENDING' || status.status === 'PROCESSING') {
533
- console.log(`Job status: ${status.status} (${status.completedBatches}/${status.totalBatches} batches)`);
534
-
535
- await new Promise(resolve => setTimeout(resolve, 30000)); // Wait 30 seconds
536
-
537
- status = await client.getJobStatus(job.id);
538
- }
539
-
540
- // Step 8: Handle results
541
- if (status.status === 'COMPLETED') {
542
- console.log('✓ Job completed successfully');
543
- console.log(`Total records: ${status.totalRecords}`);
544
- console.log(`Errors: ${status.errorSummary?.totalErrors || 0}`);
545
-
546
- // Archive file
547
- await s3.moveFile('inventory/daily-update.csv', 'inventory/archive/daily-update.csv');
548
- } else {
549
- console.error('✗ Job failed:', status.status);
550
-
551
- // Move to error folder
552
- await s3.moveFile('inventory/daily-update.csv', 'inventory/errors/daily-update.csv');
553
- }
554
- }
555
-
556
- batchInventorySync().catch(console.error);
557
- ```
558
-
559
- ---
560
-
561
- ## NEW: Job Lifecycle Tracking (v0.1.10+)
562
-
563
- **Track job state, metadata, and lifecycle** across your integration workflows.
564
-
565
- The SDK provides `JobTracker` service for managing job lifecycle, tracking status, and storing job metadata.
566
-
567
- ### JobTracker Overview
568
-
569
- ```typescript
570
- import { JobTracker, VersoriKVAdapter } from '@fluentcommerce/fc-connect-sdk';
571
- // ✅ CORRECT: Access openKv from Versori context
572
- // import { openKv } from '@versori/run'; // ❌ WRONG - Not a direct export
573
-
574
- // In Versori workflow handler:
575
- const { openKv } = ctx;
576
- const kvAdapter = new VersoriKVAdapter(openKv(':project:'));
577
- const tracker = new JobTracker(kvAdapter, logger);
578
-
579
- // Create job
580
- const jobId = `scheduled_${Date.now()}`;
581
-
582
- await tracker.createJob(jobId, {
583
- triggeredBy: 'schedule',
584
- stage: 'initialization',
585
- details: {
586
- catalogueRef: 'DEFAULT:1',
587
- fileName: 'inventory.csv'
588
- }
589
- });
590
-
591
- // Update progress
592
- await tracker.updateJob(jobId, {
593
- status: 'processing',
594
- stage: 'extraction',
595
- message: 'Extracting records from S3'
596
- });
597
-
598
- await tracker.updateJob(jobId, {
599
- stage: 'transformation',
600
- message: 'Mapping 1000 records',
601
- details: { recordCount: 1000 }
602
- });
603
-
604
- // Mark as completed
605
- await tracker.markCompleted(jobId, {
606
- recordCount: 1000,
607
- successCount: 998,
608
- failedCount: 2
609
- });
610
-
611
- // Or mark as failed
612
- try {
613
- // ... job logic ...
614
- } catch (error) {
615
- await tracker.markFailed(jobId, error);
616
- }
617
- ```
618
-
619
- ### Complete Example with Versori
620
-
621
- ```typescript
622
- import {
623
- createClient,
624
- JobTracker,
625
- VersoriKVAdapter,
626
- } from '@fluentcommerce/fc-connect-sdk';
627
- import { schedule } from '@versori/run';
628
-
629
- /**
630
- * Versori workflow with complete job tracking
631
- */
632
-
633
- export const dailyInventorySync = schedule('daily-inventory', '0 2 * * *')
634
- .execute(async ({ log, connections, vars, kv }) => {
635
- const jobId = `inventory_${Date.now()}`;
636
- const tracker = new JobTracker(new VersoriKVAdapter(kv), log);
637
-
638
- try {
639
- // Create job
640
- await tracker.createJob(jobId, {
641
- triggeredBy: 'schedule',
642
- stage: 'start',
643
- details: { schedule: 'daily 2am' }
644
- });
645
-
646
- // Stage 1: Extraction
647
- await tracker.updateJob(jobId, {
648
- status: 'processing',
649
- stage: 'extraction',
650
- message: 'Querying virtual positions'
651
- });
652
-
653
- const data = await extractFromFluent();
654
-
655
- // Stage 2: Transformation
656
- await tracker.updateJob(jobId, {
657
- stage: 'transformation',
658
- message: `Processing ${data.length} records`
659
- });
660
-
661
- const transformed = await transformData(data);
662
-
663
- // Stage 3: Upload
664
- await tracker.updateJob(jobId, {
665
- stage: 'upload',
666
- message: 'Uploading to SFTP'
667
- });
668
-
669
- await uploadToSFTP(transformed);
670
-
671
- // Completed
672
- await tracker.markCompleted(jobId, {
673
- recordCount: data.length,
674
- fileName: `inventory_${jobId}.xml`
675
- });
676
-
677
- log.info('Job completed successfully', { jobId });
678
-
679
- } catch (error) {
680
- await tracker.markFailed(jobId, error);
681
- log.error('Job failed', error);
682
- throw error;
683
- }
684
- });
685
- ```
686
-
687
- ### Querying Job Status
688
-
689
- ```typescript
690
- // Get job status
691
- const status = await tracker.getJob(jobId);
692
-
693
- if (status) {
694
- console.log(`Job ${jobId}:`, {
695
- status: status.status,
696
- stage: status.stage,
697
- message: status.message,
698
- createdAt: status.createdAt,
699
- completedAt: status.completedAt
700
- });
701
- }
702
-
703
- // Check if job is still running
704
- if (status.status === 'processing') {
705
- console.log(`Job in progress: ${status.stage}`);
706
- }
707
-
708
- // Check for errors
709
- if (status.status === 'failed') {
710
- console.error('Job failed:', {
711
- error: status.error,
712
- stack: status.errorStack
713
- });
714
- }
715
- ```
716
-
717
- ### Custom TTL Configuration
718
-
719
- ```typescript
720
- // Default TTL: 7 days
721
- const tracker = new JobTracker(kvAdapter, logger);
722
-
723
- // Custom TTL: 24 hours
724
- const shortTracker = new JobTracker(
725
- kvAdapter,
726
- logger,
727
- 86400 // 24 hours in seconds
728
- );
729
-
730
- // Custom TTL: 30 days
731
- const longTracker = new JobTracker(
732
- kvAdapter,
733
- logger,
734
- 2592000 // 30 days in seconds
735
- );
736
- ```
737
-
738
- ### JobTracker API Reference
739
-
740
- | Method | Description | Parameters | Example |
741
- |--------|-------------|------------|---------|
742
- | `createJob(jobId, metadata)` | Create new job with 'queued' status | jobId, metadata | `await tracker.createJob('job_123', { triggeredBy: 'schedule' })` |
743
- | `updateJob(jobId, updates)` | Update job metadata/status/stage | jobId, updates | `await tracker.updateJob('job_123', { status: 'processing' })` |
744
- | `getJob(jobId)` | Get job by ID | jobId | `const job = await tracker.getJob('job_123')` |
745
- | `markCompleted(jobId, details)` | Mark job complete | jobId, details | `await tracker.markCompleted('job_123', { recordCount: 1000 })` |
746
- | `markFailed(jobId, error)` | Mark job failed | jobId, error | `await tracker.markFailed('job_123', error)` |
747
-
748
- **Constructor:**
749
- ```typescript
750
- new JobTracker(kvAdapter: KVAdapter, logger: StructuredLogger, ttl?: number)
751
- ```
752
-
753
- ### When to Use JobTracker
754
-
755
- ✅ **Use when**:
756
- - Need job history and audit trail
757
- - Monitoring multiple concurrent jobs
758
- - Debugging job failures
759
- - Tracking job duration and performance
760
- - Building dashboards or reports
761
-
762
- ❌ **Skip when**:
763
- - Simple one-off scripts
764
- - No state persistence available
765
- - Memory-constrained environments
766
-
767
- ---
768
-
769
- ## Job Creation Strategies
770
-
771
- ### Strategy 1: Single Job Per File (Recommended)
772
-
773
- **When to use**: Most common pattern for daily/scheduled files
774
-
775
- ```typescript
776
- // Process one file
777
- const job = await client.createJob({
778
- name: `Daily Inventory - ${fileName}`,
779
- retailerId: '2'
780
- });
781
-
782
- // Send all batches for this file
783
- for (const batch of batches) {
784
- await client.sendBatch(job.id, { entities: batch });
785
- }
786
-
787
- // Poll until complete
788
- await pollJobCompletion(job.id);
789
- ```
790
-
791
- **Pros**:
792
- - Simple error tracking (one job = one file)
793
- - Easy archival (job complete = archive file)
794
- - Clear audit trail
795
-
796
- **Cons**:
797
- - More jobs in system
798
- - Can't combine multiple files
799
-
800
- ### Strategy 2: Single Job Per Day
801
-
802
- **When to use**: Multiple small files processed together
803
-
804
- ```typescript
805
- // Create one job for entire day
806
- const job = await client.createJob({
807
- name: `Daily Inventory - ${new Date().toISOString().split('T')[0]}`,
808
- retailerId: '2'
809
- });
810
-
811
- // Process multiple files
812
- const files = await s3.listFiles('inventory/updates/');
813
- for (const file of files) {
814
- const records = await processFile(file);
815
-
816
- // Send batches for this file
817
- for (const batch of chunkArray(records, 100)) {
818
- await client.sendBatch(job.id, { entities: batch });
819
- }
820
- }
821
-
822
- // Poll once for entire day's work
823
- await pollJobCompletion(job.id);
824
- ```
825
-
826
- **Pros**:
827
- - Fewer jobs
828
- - Combine multiple sources
829
- - Single status check
830
-
831
- **Cons**:
832
- - Harder to track individual file errors
833
- - One file failure can halt processing of the remaining files (no per-file isolation)
834
- - More complex archival logic
835
-
836
- ### Strategy 3: Long-Running Job (Advanced)
837
-
838
- **When to use**: Continuous processing (hourly micro-batches)
839
-
840
- ```typescript
841
- // Create job once
842
- const job = await client.createJob({
843
- name: `Inventory Stream - ${new Date().toISOString().split('T')[0]}`,
844
- retailerId: '2'
845
- });
846
-
847
- // Send batches throughout the day
848
- setInterval(async () => {
849
- const newRecords = await fetchLatestUpdates();
850
-
851
- if (newRecords.length > 0) {
852
- await client.sendBatch(job.id, { entities: newRecords });
853
- }
854
- }, 3600000); // Every hour
855
-
856
- // Check status at end of day
857
- ```
858
-
859
- **Pros**:
860
- - Minimize job creation overhead
861
- - Continuous processing
862
-
863
- **Cons**:
864
- - Job can stay open for hours
865
- - Harder to determine "completion"
866
- - Risk of very large jobs
867
-
868
- ---
869
-
870
- ## Batch Size Optimization
871
-
872
- ### Recommended Batch Sizes
873
-
874
- | Total Records | Batch Size | Batches | Processing Time |
875
- |---------------|------------|---------|-----------------|
876
- | < 1,000 | 100 | 1-10 | 1-2 minutes |
877
- | 1,000-10,000 | 100-200 | 10-100 | 3-10 minutes |
878
- | 10,000-50,000 | 200 | 50-250 | 5-15 minutes |
879
- | > 50,000 | 250 | 200+ | 10-30 minutes |
880
-
881
- **Max batch size**: 250 records (Fluent API limit)
882
-
883
- ### Batch Size Calculator
884
-
885
- ```typescript
886
- /**
887
- * Calculate optimal batch size based on total records
888
- */
889
- function calculateBatchSize(totalRecords: number): number {
890
- if (totalRecords < 1000) {
891
- return 100;
892
- } else if (totalRecords < 10000) {
893
- return 150;
894
- } else {
895
- return 250; // Max allowed
896
- }
897
- }
898
-
899
- // Usage
900
- const batchSize = calculateBatchSize(records.length);
901
- const batches = chunkArray(records, batchSize);
902
- ```
903
-
904
- ### Chunking Utility
905
-
906
- ```typescript
907
- /**
908
- * Split array into chunks
909
- */
910
- function chunkArray<T>(array: T[], size: number): T[][] {
911
- const chunks: T[][] = [];
912
- for (let i = 0; i < array.length; i += size) {
913
- chunks.push(array.slice(i, i + size));
914
- }
915
- return chunks;
916
- }
917
-
918
- // Usage
919
- const batches = chunkArray(mappedRecords, 100);
920
- // [[record1, record2, ...], [record101, record102, ...], ...]
921
- ```
922
-
923
- ### Parallel Batch Sending (Advanced)
924
-
925
- ```typescript
926
- /**
927
- * Send multiple batches in parallel
928
- * WARNING: Use with caution - can hit rate limits
929
- */
930
- async function sendBatchesParallel(
931
- client: FluentClient,
932
- jobId: string,
933
- batches: any[][],
934
- concurrency = 5
935
- ) {
936
- const results = [];
937
-
938
- for (let i = 0; i < batches.length; i += concurrency) {
939
- const chunk = batches.slice(i, i + concurrency);
940
-
941
- // Send up to 'concurrency' batches at once
942
- const promises = chunk.map(batch =>
943
- client.sendBatch(jobId, { entities: batch })
944
- );
945
-
946
- const chunkResults = await Promise.all(promises);
947
- results.push(...chunkResults);
948
-
949
- console.log(`Sent batches ${i + 1} to ${i + chunk.length}`);
950
- }
951
-
952
- return results;
953
- }
954
-
955
- // Usage (send 5 batches at a time)
956
- await sendBatchesParallel(client, job.id, batches, 5);
957
- ```
958
-
959
- **Caution**: Parallel sending can trigger rate limits. Start with concurrency=5, monitor, and adjust.
960
-
961
- ---
962
-
963
- ## Status Polling and Completion
964
-
965
- ### Basic Polling Pattern
966
-
967
- ```typescript
968
- /**
969
- * Poll job status until complete
970
- */
971
- async function pollJobCompletion(
972
- client: FluentClient,
973
- jobId: string,
974
- options = {
975
- interval: 30000, // 30 seconds
976
- timeout: 3600000, // 1 hour
977
- onProgress: undefined // Callback
978
- }
979
- ) {
980
- const startTime = Date.now();
981
-
982
- while (true) {
983
- const status = await client.getJobStatus(jobId);
984
-
985
- // Check terminal states
986
- if (status.status === 'COMPLETED') {
987
- console.log('✓ Job completed successfully');
988
- return status;
989
- }
990
-
991
- if (status.status === 'FAILED') {
992
- throw new Error(`Job failed: ${status.errorSummary?.message || 'Unknown error'}`);
993
- }
994
-
995
- // Check timeout
996
- if (Date.now() - startTime > options.timeout) {
997
- throw new Error(`Job timeout after ${options.timeout}ms`);
998
- }
999
-
1000
- // Progress callback
1001
- if (options.onProgress) {
1002
- options.onProgress(status);
1003
- }
1004
-
1005
- // Log progress
1006
- console.log(
1007
- `Job ${status.status}: ${status.completedBatches}/${status.totalBatches} batches ` +
1008
- `(${Math.round((status.completedBatches / status.totalBatches) * 100)}%)`
1009
- );
1010
-
1011
- // Wait before next poll
1012
- await new Promise(resolve => setTimeout(resolve, options.interval));
1013
- }
1014
- }
1015
-
1016
- // Usage
1017
- const finalStatus = await pollJobCompletion(client, job.id, {
1018
- interval: 30000,
1019
- timeout: 3600000,
1020
- onProgress: (status) => {
1021
- console.log(`Progress: ${status.completedBatches}/${status.totalBatches}`);
1022
- }
1023
- });
1024
- ```
1025
-
1026
- ### Advanced: Adaptive Polling
1027
-
1028
- ```typescript
1029
- /**
1030
- * Adjust polling interval based on job size
1031
- */
1032
- function getPollingInterval(totalBatches: number): number {
1033
- if (totalBatches < 10) {
1034
- return 10000; // 10 seconds for small jobs
1035
- } else if (totalBatches < 100) {
1036
- return 30000; // 30 seconds for medium jobs
1037
- } else {
1038
- return 60000; // 1 minute for large jobs
1039
- }
1040
- }
1041
-
1042
- // Usage
1043
- const interval = getPollingInterval(batches.length);
1044
- await pollJobCompletion(client, job.id, { interval });
1045
- ```
1046
-
1047
- ---
1048
-
1049
- ## Error Handling in Batches
1050
-
1051
- ### Error Types
1052
-
1053
- | Error Level | Scope | Example | Recovery |
1054
- |-------------|-------|---------|----------|
1055
- | **File Error** | Entire file | Invalid CSV format | Move to error folder |
1056
- | **Record Error** | Single record | Missing required field | Log, continue with others |
1057
- | **Batch Error** | 100-250 records | API validation error | Logged by Fluent, continue |
1058
- | **Job Error** | Entire job | Authentication failure | Retry entire job |
1059
- | **Partial Batch Failure** | Some batches succeed, some fail | Network interruption | **NEW: PartialBatchRecovery** |
1060
-
1061
- ### NEW: Partial Batch Recovery (v0.1.10+)
1062
-
1063
- **Problem**: What happens when sending 500 batches and batch #250 fails due to network error? You don't want to resend the first 249 successful batches.
1064
-
1065
- **Solution**: The SDK provides `PartialBatchRecovery` service to track batch progress and resume from failure point.
1066
-
1067
- #### How It Works
1068
-
1069
- ```typescript
1070
- import { PartialBatchRecovery, createClient } from '@fluentcommerce/fc-connect-sdk';
1071
-
1072
- /**
1073
- * Partial Batch Recovery Pattern
1074
- *
1075
- * Tracks per-record success/failure in batch operations and enables:
1076
- * - Retrying only failed records instead of entire batch
1077
- * - Checkpoint/resume functionality
1078
- * - Detailed error reporting per record
1079
- */
1080
-
1081
- async function resilientBatchSync() {
1082
- const client = await createClient({ config });
1083
- const recovery = new PartialBatchRecovery(logger);
1084
-
1085
- // Step 1: Create job
1086
- const job = await client.createJob({
1087
- name: 'Daily Inventory Sync',
1088
- retailerId: '2',
1089
- });
1090
-
1091
- // Step 2: Process batch with automatic recovery
1092
- const result = await recovery.processBatchWithRecovery(
1093
- records,
1094
- async (batch) => {
1095
- // Your batch processing logic
1096
- return await client.sendBatch(job.id, {
1097
- action: 'UPSERT',
1098
- entityType: 'INVENTORY',
1099
- entities: batch
1100
- });
1101
- },
1102
- {
1103
- maxRetries: 3,
1104
- retryOnlyFailed: true, // Only retry failed records
1105
- retryDelayMs: 1000, // Start with 1 second
1106
- retryBatchSize: 100, // Process 100 at a time
1107
- checkpointKey: 'inventory-sync-2025-01-24'
1108
- }
1109
- );
1110
-
1111
- console.log(`✓ Success: ${result.successCount}/${result.totalRecords}`);
1112
- console.log(`✗ Failed: ${result.failedCount} records`);
1113
-
1114
- if (result.failedCount > 0) {
1115
- console.error('Failed records:', result.failedRecords);
1116
- console.log(`Checkpoint saved: ${result.checkpointId}`);
1117
- }
1118
- }
1119
- ```
1120
-
1121
- #### Integration with Batch API
1122
-
1123
- ```typescript
1124
- import {
1125
- createClient,
1126
- PartialBatchRecovery
1127
- } from '@fluentcommerce/fc-connect-sdk';
1128
-
1129
- async function batchIngestionWithRecovery(records: any[]) {
1130
- const client = await createClient({ config });
1131
- const recovery = new PartialBatchRecovery(logger);
1132
-
1133
- // Create job
1134
- const job = await client.createJob({
1135
- name: 'Inventory Ingestion with Recovery',
1136
- retailerId: 'my-retailer'
1137
- });
1138
-
1139
- // Process with recovery
1140
- const result = await recovery.processBatchWithRecovery(
1141
- records,
1142
- async (batch) => {
1143
- const response = await client.sendBatch(job.id, {
1144
- action: 'UPSERT',
1145
- entityType: 'INVENTORY',
1146
- entities: batch
1147
- });
1148
-
1149
- logger.info('Batch sent', {
1150
- batchId: response.id,
1151
- recordCount: batch.length
1152
- });
1153
-
1154
- return response;
1155
- },
1156
- {
1157
- maxRetries: 3,
1158
- retryOnlyFailed: true,
1159
- checkpointKey: `job-${job.id}`
1160
- }
1161
- );
1162
-
1163
- // Check job status
1164
- const status = await client.getJobStatus(job.id);
1165
-
1166
- return {
1167
- jobId: job.id,
1168
- jobStatus: status.status,
1169
- ...result
1170
- };
1171
- }
1172
- ```
1173
-
1174
- #### Checkpoint and Resume
1175
-
1176
- ```typescript
1177
- // Process batch and save checkpoint
1178
- const result = await recovery.processBatchWithRecovery(
1179
- records,
1180
- processBatch,
1181
- {
1182
- maxRetries: 3,
1183
- checkpointKey: 'daily-inventory-sync'
1184
- }
1185
- );
1186
-
1187
- if (result.failedCount > 0) {
1188
- console.log(`Checkpoint created: ${result.checkpointId}`);
1189
- console.log(`Failed records saved for later retry`);
1190
- }
1191
-
1192
- // Later: Resume from checkpoint
1193
- const checkpointId = result.checkpointId;
1194
- const resumeResult = await recovery.resumeFromCheckpoint(
1195
- checkpointId,
1196
- processBatch,
1197
- {
1198
- maxRetries: 5 // More retries on resume
1199
- }
1200
- );
1201
-
1202
- console.log(`Resume: ${resumeResult.successCount} recovered`);
1203
- ```
1204
-
1205
- #### Custom Retry Logic
1206
-
1207
- ```typescript
1208
- const result = await recovery.processBatchWithRecovery(
1209
- records,
1210
- processBatch,
1211
- {
1212
- maxRetries: 5,
1213
- retryDelayMs: 2000,
1214
- // Custom retry decision
1215
- shouldRetry: (error, attemptCount) => {
1216
- // Don't retry validation errors
1217
- if (error.message.includes('validation')) {
1218
- return false;
1219
- }
1220
-
1221
- // Don't retry after 3 attempts for rate limits
1222
- if (error.message.includes('rate limit') && attemptCount > 3) {
1223
- return false;
1224
- }
1225
-
1226
- // Retry all other errors
1227
- return true;
1228
- }
1229
- }
1230
- );
1231
- ```
1232
-
1233
- #### Record Failure Details
1234
-
1235
- ```typescript
1236
- // Access detailed failure information
1237
- if (result.failedCount > 0) {
1238
- result.failedRecords.forEach(failure => {
1239
- console.error(`Record ${failure.index} failed:`, {
1240
- record: failure.record,
1241
- error: failure.error.message,
1242
- attempts: failure.attemptCount,
1243
- timestamp: failure.timestamp
1244
- });
1245
- });
1246
-
1247
- // Export failures for manual review
1248
- await fs.writeFile(
1249
- 'failed-records.json',
1250
- JSON.stringify(result.failedRecords, null, 2)
1251
- );
1252
- }
1253
- ```
1254
-
1255
- #### Key Features
1256
-
1257
- | Feature | Description | Benefit |
1258
- |---------|-------------|---------|
1259
- | **Per-record tracking** | Tracks each record individually | Know exactly which records failed |
1260
- | **Selective retry** | Retry only failures, not successes | Efficient retry logic |
1261
- | **Checkpoint support** | Resume from failure point | Handle interruptions |
1262
- | **Exponential backoff** | Configurable retry delays | Avoid overwhelming API |
1263
- | **Custom retry logic** | Override retry decisions | Fine-grained control |
1264
-
1265
- #### API Reference
1266
-
1267
- **Constructor:**
1268
- ```typescript
1269
- new PartialBatchRecovery(logger?: StructuredLogger)
1270
- ```
1271
-
1272
- **Methods:**
1273
-
1274
- | Method | Description | Parameters |
1275
- |--------|-------------|------------|
1276
- | `processBatchWithRecovery()` | Process batch with recovery | records, processor, options |
1277
- | `resumeFromCheckpoint()` | Resume from saved checkpoint | checkpointId, processor, options |
1278
-
1279
- #### When to Use Partial Batch Recovery
1280
-
1281
- ✅ **Use when**:
1282
- - Sending 50+ batches (high failure risk)
1283
- - Network is unstable
1284
- - Long-running jobs (> 10 minutes)
1285
- - Scheduled workflows that may be interrupted
1286
- - Critical data that must complete
1287
-
1288
- ❌ **Skip when**:
1289
- - < 10 batches (low failure risk)
1290
- - Fast operations (< 2 minutes)
1291
- - Non-critical data (can rerun from scratch)
1292
- - Memory-constrained environments
1293
-
1294
- ### File-Level Error Handling
1295
-
1296
- ```typescript
1297
- try {
1298
- // Download file
1299
- const content = await s3.downloadFile(fileKey);
1300
-
1301
- // Parse CSV
1302
- const records = await parser.parse(content);
1303
-
1304
- // Process batches
1305
- await processBatches(records);
1306
-
1307
- // Archive on success
1308
- await s3.moveFile(fileKey, archiveKey);
1309
-
1310
- } catch (error: any) {
1311
- console.error(`File processing failed: ${error.message}`);
1312
-
1313
- // Move to error folder
1314
- await s3.moveFile(fileKey, errorKey);
1315
-
1316
- // Log error details
1317
- await s3.uploadFile(errorKey + '.error.log', JSON.stringify({
1318
- file: fileKey,
1319
- error: error.message,
1320
- stack: error.stack,
1321
- timestamp: new Date().toISOString()
1322
- }, null, 2));
1323
-
1324
- // Continue with next file (don't throw)
1325
- }
1326
- ```
1327
-
1328
- ### Record-Level Error Handling
1329
-
1330
- ```typescript
1331
- const mappedRecords = [];
1332
- const mappingErrors = [];
1333
-
1334
- for (const record of records) {
1335
- const result = await mapper.map(record);
1336
-
1337
- if (result.success) {
1338
- mappedRecords.push(result.data);
1339
- } else {
1340
- mappingErrors.push({
1341
- record,
1342
- errors: result.errors
1343
- });
1344
- }
1345
- }
1346
-
1347
- console.log(`Mapped ${mappedRecords.length} records, ${mappingErrors.length} errors`);
1348
-
1349
- // Write error report
1350
- if (mappingErrors.length > 0) {
1351
- await s3.uploadFile(
1352
- 'inventory/errors/mapping-errors.json',
1353
- JSON.stringify(mappingErrors, null, 2)
1354
- );
1355
- }
1356
-
1357
- // Continue with successful records
1358
- ```
1359
-
1360
- ### Batch API Error Reporting
1361
-
1362
- ```typescript
1363
- // After job completes, check for errors
1364
- const status = await client.getJobStatus(job.id);
1365
-
1366
- if (status.errorSummary && status.errorSummary.totalErrors > 0) {
1367
- console.warn(`Job completed with ${status.errorSummary.totalErrors} errors`);
1368
-
1369
- // Get error details
1370
- const errorDetails = await client.graphql({
1371
- query: `
1372
- query GetJobErrors($jobId: ID!) {
1373
- job(id: $jobId) {
1374
- batches(first: 100) {
1375
- edges {
1376
- node {
1377
- id
1378
- status
1379
- errors {
1380
- recordRef
1381
- errorType
1382
- errorMessage
1383
- }
1384
- }
1385
- }
1386
- }
1387
- }
1388
- }
1389
- `,
1390
- variables: { jobId: job.id }
1391
- });
1392
-
1393
- // Write error report
1394
- await s3.uploadFile(
1395
- 'inventory/errors/batch-errors.json',
1396
- JSON.stringify(errorDetails, null, 2)
1397
- );
1398
- }
1399
- ```
1400
-
1401
- ---
1402
-
1403
- ## Complete Implementation Example
1404
-
1405
- See **[S3 CSV Batch API Guide](../../../01-TEMPLATES/standalone/s3-csv-batch-api.md)** for production-ready implementation with:
1406
-
1407
- - Complete TypeScript code
1408
- - Environment configuration
1409
- - Error handling strategies
1410
- - File archival patterns
1411
- - Scheduling with cron
1412
- - Monitoring and logging
1413
-
1414
- ### Quick Example: Scheduled Versori Batch
1415
-
1416
- ```typescript
1417
- /**
1418
- * Versori Scheduled Workflow: Daily Inventory Sync
1419
- *
1420
- * Trigger: Cron (daily at 2 AM)
1421
- * Process: Download CSV from S3, send to Batch API
1422
- */
1423
-
1424
- import { createClient, S3DataSource, CSVParserService, UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
1425
-
1426
- export default async function dailyInventorySync(activation: any, log: any, connections: any) {
1427
- try {
1428
- log.info('Starting daily inventory sync');
1429
-
1430
- // Create client
1431
- const client = await createClient({
1432
- connection: connections.fluent_commerce,
1433
- logger: log
1434
- });
1435
-
1436
- // Create S3 source
1437
- const s3 = new S3DataSource({
1438
- connection: connections.aws_s3
1439
- }, log);
1440
-
1441
- // List files
1442
- const files = await s3.listFiles('inventory/daily/');
1443
- log.info(`Found ${files.length} files to process`);
1444
-
1445
- // Process each file
1446
- for (const file of files) {
1447
- await processFile(client, s3, file.key, log);
1448
- }
1449
-
1450
- return { status: 200, body: { success: true, filesProcessed: files.length } };
1451
-
1452
- } catch (error: any) {
1453
- log.error('Daily sync failed', error);
1454
- return { status: 500, body: { success: false, error: error.message } };
1455
- }
1456
- }
1457
-
1458
- async function processFile(client: any, s3: any, fileKey: string, log: any) {
1459
- try {
1460
- // Download
1461
- const content = await s3.downloadFile(fileKey);
1462
-
1463
- // Parse
1464
- const parser = new CSVParserService({ headers: true });
1465
- const records = await parser.parse(content);
1466
-
1467
- // Map
1468
- const mapper = new UniversalMapper({
1469
- fields: {
1470
- ref: { source: 'sku', resolver: 'custom.buildRef' },
1471
- type: { value: 'INVENTORY' },
1472
- productRef: { source: 'sku', required: true },
1473
- locationRef: { source: 'location', required: true },
1474
- onHand: { source: 'qty', resolver: 'sdk.parseInt' }
1475
- }
1476
- }, {
1477
- customResolvers: {
1478
- 'custom.buildRef': (v, d) => `${d.sku}-${d.location}`
1479
- }
1480
- });
1481
-
1482
- const mapped = [];
1483
- for (const rec of records) {
1484
- const result = await mapper.map(rec);
1485
- if (result.success) mapped.push(result.data);
1486
- }
1487
-
1488
- // Create job
1489
- const job = await client.createJob({
1490
- name: `Daily Inventory - ${fileKey}`,
1491
- retailerId: '2'
1492
- });
1493
-
1494
- // Send batches
1495
- const BATCH_SIZE = 100;
1496
- for (let i = 0; i < mapped.length; i += BATCH_SIZE) {
1497
- await client.sendBatch(job.id, {
1498
- entities: mapped.slice(i, i + BATCH_SIZE)
1499
- });
1500
- }
1501
-
1502
- // Poll completion
1503
- let status = await client.getJobStatus(job.id);
1504
- while (status.status === 'PENDING' || status.status === 'PROCESSING') {
1505
- await new Promise(r => setTimeout(r, 30000));
1506
- status = await client.getJobStatus(job.id);
1507
- }
1508
-
1509
- if (status.status === 'COMPLETED') {
1510
- await s3.moveFile(fileKey, fileKey.replace('daily/', 'archive/'));
1511
- log.info(`✓ Processed ${fileKey}`);
1512
- } else {
1513
- throw new Error(`Job failed: ${status.status}`);
1514
- }
1515
-
1516
- } catch (error: any) {
1517
- log.error(`Failed to process ${fileKey}`, error);
1518
- await s3.moveFile(fileKey, fileKey.replace('daily/', 'errors/'));
1519
- }
1520
- }
1521
- ```
1522
-
1523
- ---
1524
-
1525
- ## Next Steps
1526
-
1527
- Now that you understand batch processing, you're ready to learn delta sync for incremental updates!
1528
-
1529
- **Continue to:** [Module 3: Delta Sync →](./integration-patterns-03-delta-sync.md)
1530
-
1531
- Or explore:
1532
- - [Module 5: Error Handling](./integration-patterns-05-error-handling.md) - Resilience strategies
1533
- - [Complete Example: S3 CSV Batch API](../../../01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-csv-inventory-batch.md)
1534
- - [Complete Example: Versori Scheduled CSV](../../../01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-csv-inventory-batch.md)
1535
-
1536
- ---
1537
-
1538
- ## Additional Resources
1539
-
1540
- - [Fluent Batch API Documentation](https://docs.fluentcommerce.com/)
1541
- - [Universal Mapping Guide](../../../02-CORE-GUIDES/advanced-services/advanced-services-readme.md)
1542
- - [S3DataSource API Reference](../../../02-CORE-GUIDES/data-sources/modules/data-sources-02-s3-operations.md)
1543
- - [CSVParserService API Reference](../../../02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-02-csv-parser.md)
1544
-
1545
- ---
1546
-
1547
- [← Back to Index](../../../02-CORE-GUIDES/advanced-services/advanced-services-readme.md) | [← Previous: Real-Time](./integration-patterns-01-real-time-processing.md) | [Next: Delta Sync →](./integration-patterns-03-delta-sync.md)
1
+ # Module 2: Batch Processing
2
+
3
+ > **Learning Objective:** Master batch processing patterns for high-volume data synchronization using the Fluent Batch API and SDK orchestration services.
4
+ >
5
+ > **Level:** Intermediate
6
+
7
+ ## Table of Contents
8
+
9
+ 1. [What is Batch Processing?](#what-is-batch-processing)
10
+ 2. [When to Use Batch Processing](#when-to-use-batch-processing)
11
+ 3. [Fluent Batch API Overview](#fluent-batch-api-overview)
12
+ 4. [SDK Batch Components](#sdk-batch-components)
13
+ 5. [Basic Batch Workflow](#basic-batch-workflow)
14
+ 6. [Job Creation Strategies](#job-creation-strategies)
15
+ 7. [Batch Size Optimization](#batch-size-optimization)
16
+ 8. [Status Polling and Completion](#status-polling-and-completion)
17
+ 9. [Error Handling in Batches](#error-handling-in-batches)
18
+ 10. [Complete Implementation Example](#complete-implementation-example)
19
+ 11. [Next Steps](#next-steps)
20
+
21
+ ---
22
+
23
+ ## What is Batch Processing?
24
+
25
+ **Batch processing** means grouping multiple records together and processing them as a single unit, optimized for throughput over latency.
26
+
27
+ ### Key Characteristics
28
+
29
+ | Characteristic | Description | Example |
30
+ |----------------|-------------|---------|
31
+ | **High Volume** | Process thousands of records | 50,000 inventory positions |
32
+ | **Scheduled** | Runs at specific times | Daily at 2 AM |
33
+ | **Asynchronous** | Submit job, poll for completion | Job completes in 5-10 minutes |
34
+ | **Bulk Operations** | Optimize for throughput | 100 records per API call |
35
+
36
+ ### How It Works
37
+
38
+ ```
39
+ CSV File (50K records) → Parse → Transform → Batch API → Job → Poll Status → Complete
40
+ ↓ ↓ ↓ ↓ ↓ ↓ ↓
41
+ S3 bucket Parse chunks Map fields Create job Submit Check every Archive
42
+ batches 30 seconds file
43
+ ```
44
+
45
+ ### Visual Flow
46
+
47
+ ```
48
+ ┌─────────────────┐
49
+ │ S3 CSV File │
50
+ │ (50,000 records)│
51
+ └────────┬────────┘
52
+
53
+ │ ①Download & Parse
54
+
55
+ ┌─────────────────┐
56
+ │ CSVParserService │
57
+ │ Stream chunks │
58
+ └────────┬────────┘
59
+
60
+ │ ②Transform fields
61
+
62
+ ┌─────────────────┐
63
+ │ UniversalMapper │
64
+ │ Field mapping │
65
+ └────────┬────────┘
66
+
67
+ │ ③Create Batch job
68
+
69
+ ┌─────────────────┐
70
+ │ FluentClient │
71
+ │ createJob() │
72
+ └────────┬────────┘
73
+
74
+ │ ④Send batches (100 records each)
75
+
76
+ ┌─────────────────┐
77
+ │ sendBatch() │ ────┐
78
+ │ 500 batches │ │ Parallel processing
79
+ │ of 100 records │ ────┤ on Fluent servers
80
+ └────────┬────────┘ │
81
+ │ │
82
+ │ ⑤Poll status every 30s
83
+
84
+ ┌─────────────────┐
85
+ │ getJobStatus() │
86
+ │ PROCESSING... │
87
+ │ COMPLETED ✓ │
88
+ └────────┬────────┘
89
+
90
+ │ ⑥Archive file
91
+
92
+ ┌─────────────────┐
93
+ │ S3 Archive │
94
+ │ Success log │
95
+ └─────────────────┘
96
+ ```
97
+
98
+ **Total time**: 5-10 minutes for 50,000 records (vs 2-3 hours for sequential GraphQL mutations).
99
+
100
+ ---
101
+
102
+ ## When to Use Batch Processing
103
+
104
+ ### ✅ Use Batch Processing When:
105
+
106
+ | Scenario | Why Batch? | SDK Pattern |
107
+ |----------|------------|-------------|
108
+ | **Large Files** | 1K+ records | Batch API with streaming |
109
+ | **Daily Sync** | Full inventory sync | Scheduled batch job |
110
+ | **High Volume** | > 1,000 events/hour | Batch API (not webhooks) |
111
+ | **Bulk Updates** | Mass price changes | Single job, multiple batches |
112
+ | **CSV/Parquet Files** | Structured file formats | S3DataSource + FluentClient |
113
+
114
+ ### ❌ Avoid Batch Processing When:
115
+
116
+ | Scenario | Why Not Batch? | Use Instead |
117
+ |----------|----------------|-------------|
118
+ | **Immediate Updates** | Need < 5 second latency | Real-time (Module 1) |
119
+ | **Single Records** | 1-10 records | Direct GraphQL mutation |
120
+ | **Event-Driven** | External webhook triggers | Real-time webhook |
121
+ | **Critical Orders** | Customer waiting for confirmation | Real-time processing |
122
+
123
+ ---
124
+
125
+ ## Fluent Batch API Overview
126
+
127
+ ### What is Batch API?
128
+
129
+ The Fluent Batch API is a specialized GraphQL endpoint for bulk data operations:
130
+
131
+ - **Asynchronous**: Submit job, get ID, poll for completion
132
+ - **High-throughput**: 10,000+ records in minutes
133
+ - **Fault-tolerant**: Partial failures don't block entire job
134
+ - **Entity-specific**: Currently supports `InventoryQuantity` only
135
+
136
+ ### Supported Operations
137
+
138
+ | Entity | Operations | Max per Batch |
139
+ |--------|-----------|---------------|
140
+ | `InventoryQuantity` | Create, Update | 100-250 records |
141
+
142
+ **IMPORTANT**: Batch API currently **only supports InventoryQuantity**. For other entities (Order, Product, Customer), use standard GraphQL mutations.
143
+
144
+ ### Batch API Workflow
145
+
146
+ ```graphql
147
+ # Step 1: Create job
148
+ mutation CreateJob {
149
+ createJob(input: {
150
+ name: "Daily Inventory Sync"
151
+ retailerId: "2"
152
+ }) {
153
+ id
154
+ status
155
+ }
156
+ }
157
+
158
+ # Step 2: Send batches
159
+ mutation SendBatch($jobId: ID!, $entities: [InventoryQuantityInput!]!) {
160
+ sendBatch(jobId: $jobId, entities: $entities) {
161
+ id
162
+ status
163
+ recordCount
164
+ }
165
+ }
166
+
167
+ # Step 3: Poll status
168
+ query GetJobStatus($jobId: ID!) {
169
+ job(id: $jobId) {
170
+ id
171
+ status # PENDING, PROCESSING, COMPLETED, FAILED
172
+ totalBatches
173
+ completedBatches
174
+ errorSummary {
175
+ totalErrors
176
+ errorTypes
177
+ }
178
+ }
179
+ }
180
+ ```
181
+
182
+ ### Job Lifecycle
183
+
184
+ ```
185
+ CREATE_JOB → PENDING → SEND_BATCHES → PROCESSING → COMPLETED
186
+ ↓ ↓
187
+ (can add more batches) (can check errors)
188
+ ```
189
+
190
+ ---
191
+
192
+ ## SDK Batch Components
193
+
194
+ ### Component 1: FluentClient Batch Methods
195
+
196
+ The SDK provides batch methods directly on `FluentClient` (there is no separate `FluentBatchManager` class):
197
+
198
+ ```typescript
199
+ import { createClient } from '@fluentcommerce/fc-connect-sdk';
200
+
201
+ const client = await createClient({
202
+ config: {
203
+ baseUrl: 'https://api.fluentcommerce.com',
204
+ clientId: process.env.FLUENT_CLIENT_ID,
205
+ clientSecret: process.env.FLUENT_CLIENT_SECRET,
206
+ retailerId: process.env.FLUENT_RETAILER_ID
207
+ }
208
+ });
209
+
210
+ // Create job
211
+ const job = await client.createJob({
212
+ name: 'Daily Inventory Sync',
213
+ retailerId: '2'
214
+ });
215
+
216
+ // Send batch
217
+ const batch = await client.sendBatch(job.id, {
218
+ entities: inventoryRecords
219
+ });
220
+
221
+ // Get status
222
+ const status = await client.getJobStatus(job.id);
223
+
224
+ // Get detailed status with batches
225
+ const jobDetail = await client.getBatchStatus(job.id, batch.id);
226
+ ```
227
+
228
+ **Available methods**:
229
+ - `createJob(input)` - Create new Batch job
230
+ - `sendBatch(jobId, data)` - Submit batch of records
231
+ - `getJobStatus(jobId)` - Get job status and summary
232
+ - `getBatchStatus(jobId, batchId)` - Get individual batch details
233
+
234
+ ### Component 2: `S3DataSource`
235
+
236
+ **Purpose**: Read and write files from S3 with streaming support
237
+
238
+ ```typescript
239
+ import { S3DataSource } from '@fluentcommerce/fc-connect-sdk';
240
+
241
+ const s3 = new S3DataSource({
242
+ type: 'S3_CSV',
243
+ connectionId: 'my-s3',
244
+ name: 'My S3 Source',
245
+ s3Config: {
246
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
247
+ secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
248
+ region: 'us-east-1',
249
+ bucket: 'inventory-bucket'
250
+ }
251
+ }, logger);
252
+
253
+ // List files
254
+ const files = await s3.listFiles('inventory/updates/');
255
+
256
+ // Download file
257
+ const fileContent = await s3.downloadFile('inventory/updates/inventory.csv');
258
+
259
+ // Download large file as Buffer (for files >100MB)
260
+ const buffer = await s3.downloadFile('inventory/updates/large-file.csv', { encoding: 'binary' });
261
+
262
+ // Upload file
263
+ await s3.uploadFile('inventory/archive/processed.csv', content);
264
+
265
+ // Move file
266
+ await s3.moveFile('inventory/updates/file.csv', 'inventory/archive/file.csv');
267
+ ```
268
+
269
+ **Key features**:
270
+ - Streaming support for large files (memory-efficient)
271
+ - Automatic retry on network errors
272
+ - Progress callbacks for uploads/downloads
273
+
274
+ ### Component 3: `CSVParserService`
275
+
276
+ **Purpose**: Parse CSV files with validation and streaming
277
+
278
+ ```typescript
279
+ import { CSVParserService } from '@fluentcommerce/fc-connect-sdk';
280
+
281
+ const parser = new CSVParserService({
282
+ headers: true, // First row is headers
283
+ skipEmptyLines: true, // Ignore blank lines
284
+ delimiter: ',', // CSV delimiter
285
+ quote: '"', // Quote character
286
+ escape: '\\' // Escape character
287
+ });
288
+
289
+ // Parse entire file (for small files < 10MB)
290
+ const records = await parser.parse(csvContent);
291
+
292
+ // Stream parse (for large files)
293
+ const recordStream = parser.streamParse(csvStream);
294
+ for await (const record of recordStream) {
295
+ // Process record
296
+ }
297
+ ```
298
+
299
+ **Validation options**:
300
+ - Required columns
301
+ - Type validation
302
+ - Custom validators
303
+
304
+ ### Component 4: `UniversalMapper`
305
+
306
+ **Purpose**: Transform CSV/JSON fields to Fluent schema
307
+
308
+ ```typescript
309
+ import { UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
310
+
311
+ const mappingConfig = {
312
+ fields: {
313
+ ref: {
314
+ source: 'sku',
315
+ resolver: 'custom.buildRef' // Combine sku + location
316
+ },
317
+ type: {
318
+ value: 'INVENTORY' // Static value
319
+ },
320
+ status: {
321
+ source: 'status',
322
+ resolver: 'sdk.uppercase'
323
+ },
324
+ productRef: {
325
+ source: 'sku',
326
+ required: true
327
+ },
328
+ locationRef: {
329
+ source: 'warehouse_code',
330
+ required: true
331
+ },
332
+ onHand: {
333
+ source: 'quantity',
334
+ resolver: 'sdk.parseInt'
335
+ }
336
+ }
337
+ };
338
+
339
+ const mapper = new UniversalMapper(mappingConfig, {
340
+ customResolvers: {
341
+ 'custom.buildRef': (value, data) => {
342
+ return `${data.sku}-${data.warehouse_code}`;
343
+ }
344
+ }
345
+ });
346
+
347
+ const result = await mapper.map(csvRecord);
348
+ // result.data = { ref: 'SKU001-WH01', type: 'INVENTORY', ... }
349
+ ```
350
+
351
+ ### Workflow Composition Pattern
352
+
353
+ **Purpose**: Compose SDK services for complete ingestion workflows
354
+
355
+ Instead of using a single orchestrator, compose the above components into your custom workflow:
356
+
357
+ ```typescript
358
+ import {
359
+ createClient,
360
+ S3DataSource,
361
+ CSVParserService,
362
+ UniversalMapper,
363
+ StateService,
364
+ createConsoleLogger,
365
+ toStructuredLogger
366
+ } from '@fluentcommerce/fc-connect-sdk';
367
+
368
+ async function processInventoryFiles() {
369
+ const logger = toStructuredLogger(createConsoleLogger(), { logLevel: 'info' });
370
+
371
+ // Initialize components
372
+ const client = await createClient({ config });
373
+ const s3 = new S3DataSource(s3Config, logger);
374
+ const parser = new CSVParserService();
375
+ const mapper = new UniversalMapper(mappingConfig);
376
+ const stateService = new StateService(logger);
377
+
378
+ // List and process files
379
+ const files = await s3.listFiles({ prefix: 'inventory/updates/' });
380
+
381
+ for (const file of files) {
382
+ if (await stateService.isFileProcessed(file.name)) continue;
383
+
384
+ try {
385
+ // 1. Download and parse
386
+ const content = await s3.downloadFile(file.name);
387
+ const records = await parser.parse(content);
388
+
389
+ // 2. Map fields
390
+ const inventory = [];
391
+ for (const record of records) {
392
+ const result = await mapper.map(record);
393
+ if (result.success) inventory.push(result.data);
394
+ }
395
+
396
+ // 3. Create job and send batches
397
+ const job = await client.createJob({
398
+ name: `Inventory - ${file.name}`,
399
+ retailerId: '1'
400
+ });
401
+
402
+ const batches = chunkArray(inventory, 100);
403
+ for (const batch of batches) {
404
+ await client.sendBatch(job.id, {
405
+ action: 'UPSERT',
406
+ entityType: 'INVENTORY',
407
+ entities: batch
408
+ });
409
+ }
410
+
411
+ // 4. Archive and mark processed
412
+ await s3.moveFile(file.name, `inventory/archive/${file.name}`);
413
+ await stateService.markFileProcessed(file.name);
414
+
415
+ } catch (error) {
416
+ logger.error(`Failed to process ${file.name}`, error);
417
+ await s3.moveFile(file.name, `inventory/errors/${file.name}`);
418
+ }
419
+ }
420
+ }
421
+ ```
422
+
423
+ **Benefits of building block composition**:
424
+ 1. ✅ Full control over workflow logic
425
+ 2. ✅ Custom error handling
426
+ 3. ✅ Easy to test individual components
427
+ 4. ✅ Flexible batch sizing and job strategies
428
+ 5. ✅ Composable with standard GraphQL mutations for entity types the Batch API does not support
429
+ 6. ✅ Easy to add custom business rules
430
+
431
+ ---
432
+
433
+ ## Basic Batch Workflow
434
+
435
+ ### Step-by-Step Pattern
436
+
437
+ ```typescript
438
+ /**
439
+ * Basic Batch Processing Workflow
440
+ *
441
+ * Steps:
442
+ * 1. Create Fluent client
443
+ * 2. Download and parse CSV file
444
+ * 3. Transform records to Fluent schema
445
+ * 4. Create Batch job
446
+ * 5. Send records in batches
447
+ * 6. Poll status until complete
448
+ * 7. Handle results
449
+ */
450
+
451
+ import { createClient, S3DataSource, CSVParserService, UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
452
+
453
+ async function batchInventorySync() {
454
+ // Step 1: Create client
455
+ const client = await createClient({
456
+ baseUrl: process.env.FLUENT_BASE_URL,
457
+ clientId: process.env.FLUENT_CLIENT_ID,
458
+ clientSecret: process.env.FLUENT_CLIENT_SECRET,
459
+ retailerId: process.env.FLUENT_RETAILER_ID
460
+ });
461
+
462
+ // Step 2: Download CSV
463
+ const s3 = new S3DataSource({
464
+ type: 'S3_CSV',
465
+ connectionId: 'my-s3',
466
+ name: 'My S3 Source',
467
+ s3Config: {
468
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
469
+ secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
470
+ region: process.env.AWS_REGION,
471
+ bucket: process.env.AWS_BUCKET
472
+ }
473
+ }, console);
474
+
475
+ const csvContent = await s3.downloadFile('inventory/daily-update.csv');
476
+
477
+ // Step 3: Parse CSV
478
+ const parser = new CSVParserService({ headers: true });
479
+ const records = await parser.parse(csvContent);
480
+
481
+ console.log(`Parsed ${records.length} records`);
482
+
483
+ // Step 4: Transform records
484
+ const mapper = new UniversalMapper({
485
+ fields: {
486
+ ref: { source: 'sku', resolver: 'custom.buildRef' },
487
+ type: { value: 'INVENTORY' },
488
+ productRef: { source: 'sku', required: true },
489
+ locationRef: { source: 'location', required: true },
490
+ onHand: { source: 'qty', resolver: 'sdk.parseInt' }
491
+ }
492
+ }, {
493
+ customResolvers: {
494
+ 'custom.buildRef': (value, data) => `${data.sku}-${data.location}`
495
+ }
496
+ });
497
+
498
+ const mappedRecords = [];
499
+ for (const record of records) {
500
+ const result = await mapper.map(record);
501
+ if (result.success) {
502
+ mappedRecords.push(result.data);
503
+ } else {
504
+ console.error('Mapping error:', result.errors);
505
+ }
506
+ }
507
+
508
+ console.log(`Mapped ${mappedRecords.length} records`);
509
+
510
+ // Step 5: Create Batch job
511
+ const job = await client.createJob({
512
+ name: `Inventory Sync ${new Date().toISOString()}`,
513
+ retailerId: '2'
514
+ });
515
+
516
+ console.log(`Created job ${job.id}`);
517
+
518
+ // Step 6: Send batches
519
+ const BATCH_SIZE = 100;
520
+ for (let i = 0; i < mappedRecords.length; i += BATCH_SIZE) {
521
+ const chunk = mappedRecords.slice(i, i + BATCH_SIZE);
522
+
523
+ const batch = await client.sendBatch(job.id, {
524
+ entities: chunk
525
+ });
526
+
527
+ console.log(`Sent batch ${batch.id} (${chunk.length} records)`);
528
+ }
529
+
530
+ // Step 7: Poll status
531
+ let status = await client.getJobStatus(job.id);
532
+ while (status.status === 'PENDING' || status.status === 'PROCESSING') {
533
+ console.log(`Job status: ${status.status} (${status.completedBatches}/${status.totalBatches} batches)`);
534
+
535
+ await new Promise(resolve => setTimeout(resolve, 30000)); // Wait 30 seconds
536
+
537
+ status = await client.getJobStatus(job.id);
538
+ }
539
+
540
+ // Step 8: Handle results
541
+ if (status.status === 'COMPLETED') {
542
+ console.log('✓ Job completed successfully');
543
+ console.log(`Total records: ${status.totalRecords}`);
544
+ console.log(`Errors: ${status.errorSummary?.totalErrors || 0}`);
545
+
546
+ // Archive file
547
+ await s3.moveFile('inventory/daily-update.csv', 'inventory/archive/daily-update.csv');
548
+ } else {
549
+ console.error('✗ Job failed:', status.status);
550
+
551
+ // Move to error folder
552
+ await s3.moveFile('inventory/daily-update.csv', 'inventory/errors/daily-update.csv');
553
+ }
554
+ }
555
+
556
+ batchInventorySync().catch(console.error);
557
+ ```
558
+
559
+ ---
560
+
561
+ ## NEW: Job Lifecycle Tracking (v0.1.10+)
562
+
563
+ **Track job state, metadata, and lifecycle** across your integration workflows.
564
+
565
+ The SDK provides `JobTracker` service for managing job lifecycle, tracking status, and storing job metadata.
566
+
567
+ ### JobTracker Overview
568
+
569
+ ```typescript
570
+ import { JobTracker, VersoriKVAdapter } from '@fluentcommerce/fc-connect-sdk';
571
+ // ✅ CORRECT: Access openKv from Versori context
572
+ // import { openKv } from '@versori/run'; // ❌ WRONG - Not a direct export
573
+
574
+ // In Versori workflow handler:
575
+ const { openKv } = ctx;
576
+ const kvAdapter = new VersoriKVAdapter(openKv(':project:'));
577
+ const tracker = new JobTracker(kvAdapter, logger);
578
+
579
+ // Create job
580
+ const jobId = `scheduled_${Date.now()}`;
581
+
582
+ await tracker.createJob(jobId, {
583
+ triggeredBy: 'schedule',
584
+ stage: 'initialization',
585
+ details: {
586
+ catalogueRef: 'DEFAULT:1',
587
+ fileName: 'inventory.csv'
588
+ }
589
+ });
590
+
591
+ // Update progress
592
+ await tracker.updateJob(jobId, {
593
+ status: 'processing',
594
+ stage: 'extraction',
595
+ message: 'Extracting records from S3'
596
+ });
597
+
598
+ await tracker.updateJob(jobId, {
599
+ stage: 'transformation',
600
+ message: 'Mapping 1000 records',
601
+ details: { recordCount: 1000 }
602
+ });
603
+
604
+ // Mark as completed
605
+ await tracker.markCompleted(jobId, {
606
+ recordCount: 1000,
607
+ successCount: 998,
608
+ failedCount: 2
609
+ });
610
+
611
+ // Or mark as failed
612
+ try {
613
+ // ... job logic ...
614
+ } catch (error) {
615
+ await tracker.markFailed(jobId, error);
616
+ }
617
+ ```
618
+
619
+ ### Complete Example with Versori
620
+
621
+ ```typescript
622
+ import {
623
+ createClient,
624
+ JobTracker,
625
+ VersoriKVAdapter,
626
+ } from '@fluentcommerce/fc-connect-sdk';
627
+ import { schedule } from '@versori/run';
628
+
629
+ /**
630
+ * Versori workflow with complete job tracking
631
+ */
632
+
633
+ export const dailyInventorySync = schedule('daily-inventory', '0 2 * * *')
634
+ .execute(async ({ log, connections, vars, kv }) => {
635
+ const jobId = `inventory_${Date.now()}`;
636
+ const tracker = new JobTracker(new VersoriKVAdapter(kv), log);
637
+
638
+ try {
639
+ // Create job
640
+ await tracker.createJob(jobId, {
641
+ triggeredBy: 'schedule',
642
+ stage: 'start',
643
+ details: { schedule: 'daily 2am' }
644
+ });
645
+
646
+ // Stage 1: Extraction
647
+ await tracker.updateJob(jobId, {
648
+ status: 'processing',
649
+ stage: 'extraction',
650
+ message: 'Querying virtual positions'
651
+ });
652
+
653
+ const data = await extractFromFluent();
654
+
655
+ // Stage 2: Transformation
656
+ await tracker.updateJob(jobId, {
657
+ stage: 'transformation',
658
+ message: `Processing ${data.length} records`
659
+ });
660
+
661
+ const transformed = await transformData(data);
662
+
663
+ // Stage 3: Upload
664
+ await tracker.updateJob(jobId, {
665
+ stage: 'upload',
666
+ message: 'Uploading to SFTP'
667
+ });
668
+
669
+ await uploadToSFTP(transformed);
670
+
671
+ // Completed
672
+ await tracker.markCompleted(jobId, {
673
+ recordCount: data.length,
674
+ fileName: `inventory_${jobId}.xml`
675
+ });
676
+
677
+ log.info('Job completed successfully', { jobId });
678
+
679
+ } catch (error) {
680
+ await tracker.markFailed(jobId, error);
681
+ log.error('Job failed', error);
682
+ throw error;
683
+ }
684
+ });
685
+ ```
686
+
687
+ ### Querying Job Status
688
+
689
+ ```typescript
690
+ // Get job status
691
+ const status = await tracker.getJob(jobId);
692
+
693
+ if (status) {
694
+ console.log(`Job ${jobId}:`, {
695
+ status: status.status,
696
+ stage: status.stage,
697
+ message: status.message,
698
+ createdAt: status.createdAt,
699
+ completedAt: status.completedAt
700
+ });
701
+ }
702
+
703
+ // Check if job is still running
704
+ if (status.status === 'processing') {
705
+ console.log(`Job in progress: ${status.stage}`);
706
+ }
707
+
708
+ // Check for errors
709
+ if (status.status === 'failed') {
710
+ console.error('Job failed:', {
711
+ error: status.error,
712
+ stack: status.errorStack
713
+ });
714
+ }
715
+ ```
716
+
717
+ ### Custom TTL Configuration
718
+
719
+ ```typescript
720
+ // Default TTL: 7 days
721
+ const tracker = new JobTracker(kvAdapter, logger);
722
+
723
+ // Custom TTL: 24 hours
724
+ const shortTracker = new JobTracker(
725
+ kvAdapter,
726
+ logger,
727
+ 86400 // 24 hours in seconds
728
+ );
729
+
730
+ // Custom TTL: 30 days
731
+ const longTracker = new JobTracker(
732
+ kvAdapter,
733
+ logger,
734
+ 2592000 // 30 days in seconds
735
+ );
736
+ ```
737
+
738
+ ### JobTracker API Reference
739
+
740
+ | Method | Description | Parameters | Example |
741
+ |--------|-------------|------------|---------|
742
+ | `createJob(jobId, metadata)` | Create new job with 'queued' status | jobId, metadata | `await tracker.createJob('job_123', { triggeredBy: 'schedule' })` |
743
+ | `updateJob(jobId, updates)` | Update job metadata/status/stage | jobId, updates | `await tracker.updateJob('job_123', { status: 'processing' })` |
744
+ | `getJob(jobId)` | Get job by ID | jobId | `const job = await tracker.getJob('job_123')` |
745
+ | `markCompleted(jobId, details)` | Mark job complete | jobId, details | `await tracker.markCompleted('job_123', { recordCount: 1000 })` |
746
+ | `markFailed(jobId, error)` | Mark job failed | jobId, error | `await tracker.markFailed('job_123', error)` |
747
+
748
+ **Constructor:**
749
+ ```typescript
750
+ new JobTracker(kvAdapter: KVAdapter, logger: StructuredLogger, ttl?: number)
751
+ ```
752
+
753
+ ### When to Use JobTracker
754
+
755
+ ✅ **Use when**:
756
+ - Need job history and audit trail
757
+ - Monitoring multiple concurrent jobs
758
+ - Debugging job failures
759
+ - Tracking job duration and performance
760
+ - Building dashboards or reports
761
+
762
+ ❌ **Skip when**:
763
+ - Simple one-off scripts
764
+ - No state persistence available
765
+ - Memory-constrained environments
766
+
767
+ ---
768
+
769
+ ## Job Creation Strategies
770
+
771
+ ### Strategy 1: Single Job Per File (Recommended)
772
+
773
+ **When to use**: Most common pattern for daily/scheduled files
774
+
775
+ ```typescript
776
+ // Process one file
777
+ const job = await client.createJob({
778
+ name: `Daily Inventory - ${fileName}`,
779
+ retailerId: '2'
780
+ });
781
+
782
+ // Send all batches for this file
783
+ for (const batch of batches) {
784
+ await client.sendBatch(job.id, { entities: batch });
785
+ }
786
+
787
+ // Poll until complete
788
+ await pollJobCompletion(job.id);
789
+ ```
790
+
791
+ **Pros**:
792
+ - Simple error tracking (one job = one file)
793
+ - Easy archival (job complete = archive file)
794
+ - Clear audit trail
795
+
796
+ **Cons**:
797
+ - More jobs in system
798
+ - Can't combine multiple files
799
+
800
+ ### Strategy 2: Single Job Per Day
801
+
802
+ **When to use**: Multiple small files processed together
803
+
804
+ ```typescript
805
+ // Create one job for entire day
806
+ const job = await client.createJob({
807
+ name: `Daily Inventory - ${new Date().toISOString().split('T')[0]}`,
808
+ retailerId: '2'
809
+ });
810
+
811
+ // Process multiple files
812
+ const files = await s3.listFiles('inventory/updates/');
813
+ for (const file of files) {
814
+ const records = await processFile(file);
815
+
816
+ // Send batches for this file
817
+ for (const batch of chunkArray(records, 100)) {
818
+ await client.sendBatch(job.id, { entities: batch });
819
+ }
820
+ }
821
+
822
+ // Poll once for entire day's work
823
+ await pollJobCompletion(job.id);
824
+ ```
825
+
826
+ **Pros**:
827
+ - Fewer jobs
828
+ - Combine multiple sources
829
+ - Single status check
830
+
831
+ **Cons**:
832
+ - Harder to track individual file errors
833
+ - One file's failures are mixed into the combined job's results
834
+ - More complex archival logic
835
+
836
+ ### Strategy 3: Long-Running Job (Advanced)
837
+
838
+ **When to use**: Continuous processing (hourly micro-batches)
839
+
840
+ ```typescript
841
+ // Create job once
842
+ const job = await client.createJob({
843
+ name: `Inventory Stream - ${new Date().toISOString().split('T')[0]}`,
844
+ retailerId: '2'
845
+ });
846
+
847
+ // Send batches throughout the day
848
+ setInterval(async () => {
849
+ const newRecords = await fetchLatestUpdates();
850
+
851
+ if (newRecords.length > 0) {
852
+ await client.sendBatch(job.id, { entities: newRecords });
853
+ }
854
+ }, 3600000); // Every hour
855
+
856
+ // Check status at end of day
857
+ ```
858
+
859
+ **Pros**:
860
+ - Minimize job creation overhead
861
+ - Continuous processing
862
+
863
+ **Cons**:
864
+ - Job can stay open for hours
865
+ - Harder to determine "completion"
866
+ - Risk of very large jobs
867
+
868
+ ---
869
+
870
+ ## Batch Size Optimization
871
+
872
+ ### Recommended Batch Sizes
873
+
874
+ | Total Records | Batch Size | Batches | Processing Time |
875
+ |---------------|------------|---------|-----------------|
876
+ | < 1,000 | 100 | 1-10 | 1-2 minutes |
877
+ | 1,000-10,000 | 100-200 | 10-100 | 3-10 minutes |
878
+ | 10,000-50,000 | 200 | 50-250 | 5-15 minutes |
879
+ | > 50,000 | 250 | 200+ | 10-30 minutes |
880
+
881
+ **Max batch size**: 250 records (Fluent API limit)
882
+
883
+ ### Batch Size Calculator
884
+
885
+ ```typescript
886
+ /**
887
+ * Calculate optimal batch size based on total records
888
+ */
889
+ function calculateBatchSize(totalRecords: number): number {
890
+ if (totalRecords < 1000) {
891
+ return 100;
892
+ } else if (totalRecords < 10000) {
893
+ return 150;
894
+ } else {
895
+ return 250; // Max allowed
896
+ }
897
+ }
898
+
899
+ // Usage
900
+ const batchSize = calculateBatchSize(records.length);
901
+ const batches = chunkArray(records, batchSize);
902
+ ```
903
+
904
+ ### Chunking Utility
905
+
906
+ ```typescript
907
+ /**
908
+ * Split array into chunks
909
+ */
910
+ function chunkArray<T>(array: T[], size: number): T[][] {
911
+ const chunks: T[][] = [];
912
+ for (let i = 0; i < array.length; i += size) {
913
+ chunks.push(array.slice(i, i + size));
914
+ }
915
+ return chunks;
916
+ }
917
+
918
+ // Usage
919
+ const batches = chunkArray(mappedRecords, 100);
920
+ // [[record1, record2, ...], [record101, record102, ...], ...]
921
+ ```
922
+
923
+ ### Parallel Batch Sending (Advanced)
924
+
925
+ ```typescript
926
+ /**
927
+ * Send multiple batches in parallel
928
+ * WARNING: Use with caution - can hit rate limits
929
+ */
930
+ async function sendBatchesParallel(
931
+ client: FluentClient,
932
+ jobId: string,
933
+ batches: any[][],
934
+ concurrency = 5
935
+ ) {
936
+ const results = [];
937
+
938
+ for (let i = 0; i < batches.length; i += concurrency) {
939
+ const chunk = batches.slice(i, i + concurrency);
940
+
941
+ // Send up to 'concurrency' batches at once
942
+ const promises = chunk.map(batch =>
943
+ client.sendBatch(jobId, { entities: batch })
944
+ );
945
+
946
+ const chunkResults = await Promise.all(promises);
947
+ results.push(...chunkResults);
948
+
949
+ console.log(`Sent batches ${i + 1} to ${i + chunk.length}`);
950
+ }
951
+
952
+ return results;
953
+ }
954
+
955
+ // Usage (send 5 batches at a time)
956
+ await sendBatchesParallel(client, job.id, batches, 5);
957
+ ```
958
+
959
+ **Caution**: Parallel sending can trigger rate limits. Start with concurrency=5, monitor, and adjust.
960
+
961
+ ---
962
+
963
+ ## Status Polling and Completion
964
+
965
+ ### Basic Polling Pattern
966
+
967
+ ```typescript
968
+ /**
969
+ * Poll job status until complete
970
+ */
971
+ async function pollJobCompletion(
972
+ client: FluentClient,
973
+ jobId: string,
974
+ options = {
975
+ interval: 30000, // 30 seconds
976
+ timeout: 3600000, // 1 hour
977
+ onProgress: undefined // Callback
978
+ }
979
+ ) {
980
+ const startTime = Date.now();
981
+
982
+ while (true) {
983
+ const status = await client.getJobStatus(jobId);
984
+
985
+ // Check terminal states
986
+ if (status.status === 'COMPLETED') {
987
+ console.log('✓ Job completed successfully');
988
+ return status;
989
+ }
990
+
991
+ if (status.status === 'FAILED') {
992
+ throw new Error(`Job failed: ${status.errorSummary?.message || 'Unknown error'}`);
993
+ }
994
+
995
+ // Check timeout
996
+ if (Date.now() - startTime > options.timeout) {
997
+ throw new Error(`Job timeout after ${options.timeout}ms`);
998
+ }
999
+
1000
+ // Progress callback
1001
+ if (options.onProgress) {
1002
+ options.onProgress(status);
1003
+ }
1004
+
1005
+ // Log progress
1006
+ console.log(
1007
+ `Job ${status.status}: ${status.completedBatches}/${status.totalBatches} batches ` +
1008
+ `(${Math.round((status.completedBatches / status.totalBatches) * 100)}%)`
1009
+ );
1010
+
1011
+ // Wait before next poll
1012
+ await new Promise(resolve => setTimeout(resolve, options.interval));
1013
+ }
1014
+ }
1015
+
1016
+ // Usage
1017
+ const finalStatus = await pollJobCompletion(client, job.id, {
1018
+ interval: 30000,
1019
+ timeout: 3600000,
1020
+ onProgress: (status) => {
1021
+ console.log(`Progress: ${status.completedBatches}/${status.totalBatches}`);
1022
+ }
1023
+ });
1024
+ ```
1025
+
1026
+ ### Advanced: Adaptive Polling
1027
+
1028
+ ```typescript
1029
+ /**
1030
+ * Adjust polling interval based on job size
1031
+ */
1032
+ function getPollingInterval(totalBatches: number): number {
1033
+ if (totalBatches < 10) {
1034
+ return 10000; // 10 seconds for small jobs
1035
+ } else if (totalBatches < 100) {
1036
+ return 30000; // 30 seconds for medium jobs
1037
+ } else {
1038
+ return 60000; // 1 minute for large jobs
1039
+ }
1040
+ }
1041
+
1042
+ // Usage
1043
+ const interval = getPollingInterval(batches.length);
1044
+ await pollJobCompletion(client, job.id, { interval });
1045
+ ```
1046
+
1047
+ ---
1048
+
1049
+ ## Error Handling in Batches
1050
+
1051
+ ### Error Types
1052
+
1053
+ | Error Level | Scope | Example | Recovery |
1054
+ |-------------|-------|---------|----------|
1055
+ | **File Error** | Entire file | Invalid CSV format | Move to error folder |
1056
+ | **Record Error** | Single record | Missing required field | Log, continue with others |
1057
+ | **Batch Error** | 100-250 records | API validation error | Logged by Fluent, continue |
1058
+ | **Job Error** | Entire job | Authentication failure | Retry entire job |
1059
+ | **Partial Batch Failure** | Some batches succeed, some fail | Network interruption | **NEW: PartialBatchRecovery** |
1060
+
1061
+ ### NEW: Partial Batch Recovery (v0.1.10+)
1062
+
1063
+ **Problem**: What happens when you send 500 batches and batch #250 fails due to a network error? You don't want to resend the first 249 successful batches.
1064
+
1065
+ **Solution**: The SDK provides `PartialBatchRecovery` service to track batch progress and resume from failure point.
1066
+
1067
+ #### How It Works
1068
+
1069
+ ```typescript
1070
+ import { PartialBatchRecovery, createClient } from '@fluentcommerce/fc-connect-sdk';
1071
+
1072
+ /**
1073
+ * Partial Batch Recovery Pattern
1074
+ *
1075
+ * Tracks per-record success/failure in batch operations and enables:
1076
+ * - Retrying only failed records instead of entire batch
1077
+ * - Checkpoint/resume functionality
1078
+ * - Detailed error reporting per record
1079
+ */
1080
+
1081
+ async function resilientBatchSync() {
1082
+ const client = await createClient({ config });
1083
+ const recovery = new PartialBatchRecovery(logger);
1084
+
1085
+ // Step 1: Create job
1086
+ const job = await client.createJob({
1087
+ name: 'Daily Inventory Sync',
1088
+ retailerId: '2',
1089
+ });
1090
+
1091
+ // Step 2: Process batch with automatic recovery
1092
+ const result = await recovery.processBatchWithRecovery(
1093
+ records,
1094
+ async (batch) => {
1095
+ // Your batch processing logic
1096
+ return await client.sendBatch(job.id, {
1097
+ action: 'UPSERT',
1098
+ entityType: 'INVENTORY',
1099
+ entities: batch
1100
+ });
1101
+ },
1102
+ {
1103
+ maxRetries: 3,
1104
+ retryOnlyFailed: true, // Only retry failed records
1105
+ retryDelayMs: 1000, // Start with 1 second
1106
+ retryBatchSize: 100, // Process 100 at a time
1107
+ checkpointKey: 'inventory-sync-2025-01-24'
1108
+ }
1109
+ );
1110
+
1111
+ console.log(`✓ Success: ${result.successCount}/${result.totalRecords}`);
1112
+ console.log(`✗ Failed: ${result.failedCount} records`);
1113
+
1114
+ if (result.failedCount > 0) {
1115
+ console.error('Failed records:', result.failedRecords);
1116
+ console.log(`Checkpoint saved: ${result.checkpointId}`);
1117
+ }
1118
+ }
1119
+ ```
1120
+
1121
+ #### Integration with Batch API
1122
+
1123
+ ```typescript
1124
+ import {
1125
+ createClient,
1126
+ PartialBatchRecovery
1127
+ } from '@fluentcommerce/fc-connect-sdk';
1128
+
1129
+ async function batchIngestionWithRecovery(records: any[]) {
1130
+ const client = await createClient({ config });
1131
+ const recovery = new PartialBatchRecovery(logger);
1132
+
1133
+ // Create job
1134
+ const job = await client.createJob({
1135
+ name: 'Inventory Ingestion with Recovery',
1136
+ retailerId: 'my-retailer'
1137
+ });
1138
+
1139
+ // Process with recovery
1140
+ const result = await recovery.processBatchWithRecovery(
1141
+ records,
1142
+ async (batch) => {
1143
+ const response = await client.sendBatch(job.id, {
1144
+ action: 'UPSERT',
1145
+ entityType: 'INVENTORY',
1146
+ entities: batch
1147
+ });
1148
+
1149
+ logger.info('Batch sent', {
1150
+ batchId: response.id,
1151
+ recordCount: batch.length
1152
+ });
1153
+
1154
+ return response;
1155
+ },
1156
+ {
1157
+ maxRetries: 3,
1158
+ retryOnlyFailed: true,
1159
+ checkpointKey: `job-${job.id}`
1160
+ }
1161
+ );
1162
+
1163
+ // Check job status
1164
+ const status = await client.getJobStatus(job.id);
1165
+
1166
+ return {
1167
+ jobId: job.id,
1168
+ jobStatus: status.status,
1169
+ ...result
1170
+ };
1171
+ }
1172
+ ```
1173
+
1174
+ #### Checkpoint and Resume
1175
+
1176
+ ```typescript
1177
+ // Process batch and save checkpoint
1178
+ const result = await recovery.processBatchWithRecovery(
1179
+ records,
1180
+ processBatch,
1181
+ {
1182
+ maxRetries: 3,
1183
+ checkpointKey: 'daily-inventory-sync'
1184
+ }
1185
+ );
1186
+
1187
+ if (result.failedCount > 0) {
1188
+ console.log(`Checkpoint created: ${result.checkpointId}`);
1189
+ console.log(`Failed records saved for later retry`);
1190
+ }
1191
+
1192
+ // Later: Resume from checkpoint
1193
+ const checkpointId = result.checkpointId;
1194
+ const resumeResult = await recovery.resumeFromCheckpoint(
1195
+ checkpointId,
1196
+ processBatch,
1197
+ {
1198
+ maxRetries: 5 // More retries on resume
1199
+ }
1200
+ );
1201
+
1202
+ console.log(`Resume: ${resumeResult.successCount} recovered`);
1203
+ ```
1204
+
1205
+ #### Custom Retry Logic
1206
+
1207
+ ```typescript
1208
+ const result = await recovery.processBatchWithRecovery(
1209
+ records,
1210
+ processBatch,
1211
+ {
1212
+ maxRetries: 5,
1213
+ retryDelayMs: 2000,
1214
+ // Custom retry decision
1215
+ shouldRetry: (error, attemptCount) => {
1216
+ // Don't retry validation errors
1217
+ if (error.message.includes('validation')) {
1218
+ return false;
1219
+ }
1220
+
1221
+ // Don't retry after 3 attempts for rate limits
1222
+ if (error.message.includes('rate limit') && attemptCount > 3) {
1223
+ return false;
1224
+ }
1225
+
1226
+ // Retry all other errors
1227
+ return true;
1228
+ }
1229
+ }
1230
+ );
1231
+ ```
1232
+
1233
+ #### Record Failure Details
1234
+
1235
+ ```typescript
1236
+ // Access detailed failure information
1237
+ if (result.failedCount > 0) {
1238
+ result.failedRecords.forEach(failure => {
1239
+ console.error(`Record ${failure.index} failed:`, {
1240
+ record: failure.record,
1241
+ error: failure.error.message,
1242
+ attempts: failure.attemptCount,
1243
+ timestamp: failure.timestamp
1244
+ });
1245
+ });
1246
+
1247
+ // Export failures for manual review
1248
+ await fs.writeFile(
1249
+ 'failed-records.json',
1250
+ JSON.stringify(result.failedRecords, null, 2)
1251
+ );
1252
+ }
1253
+ ```
1254
+
1255
+ #### Key Features
1256
+
1257
+ | Feature | Description | Benefit |
1258
+ |---------|-------------|---------|
1259
+ | **Per-record tracking** | Tracks each record individually | Know exactly which records failed |
1260
+ | **Selective retry** | Retry only failures, not successes | Efficient retry logic |
1261
+ | **Checkpoint support** | Resume from failure point | Handle interruptions |
1262
+ | **Exponential backoff** | Configurable retry delays | Avoid overwhelming API |
1263
+ | **Custom retry logic** | Override retry decisions | Fine-grained control |
1264
+
1265
+ #### API Reference
1266
+
1267
+ **Constructor:**
1268
+ ```typescript
1269
+ new PartialBatchRecovery(logger?: StructuredLogger)
1270
+ ```
1271
+
1272
+ **Methods:**
1273
+
1274
+ | Method | Description | Parameters |
1275
+ |--------|-------------|------------|
1276
+ | `processBatchWithRecovery()` | Process batch with recovery | records, processor, options |
1277
+ | `resumeFromCheckpoint()` | Resume from saved checkpoint | checkpointId, processor, options |
1278
+
1279
+ #### When to Use Partial Batch Recovery
1280
+
1281
+ ✅ **Use when**:
1282
+ - Sending 50+ batches (high failure risk)
1283
+ - Network is unstable
1284
+ - Long-running jobs (> 10 minutes)
1285
+ - Scheduled workflows that may be interrupted
1286
+ - Critical data that must complete
1287
+
1288
+ ❌ **Skip when**:
1289
+ - < 10 batches (low failure risk)
1290
+ - Fast operations (< 2 minutes)
1291
+ - Non-critical data (can rerun from scratch)
1292
+ - Memory-constrained environments
1293
+
1294
+ ### File-Level Error Handling
1295
+
1296
+ ```typescript
1297
+ try {
1298
+ // Download file
1299
+ const content = await s3.downloadFile(fileKey);
1300
+
1301
+ // Parse CSV
1302
+ const records = await parser.parse(content);
1303
+
1304
+ // Process batches
1305
+ await processBatches(records);
1306
+
1307
+ // Archive on success
1308
+ await s3.moveFile(fileKey, archiveKey);
1309
+
1310
+ } catch (error: any) {
1311
+ console.error(`File processing failed: ${error.message}`);
1312
+
1313
+ // Move to error folder
1314
+ await s3.moveFile(fileKey, errorKey);
1315
+
1316
+ // Log error details
1317
+ await s3.uploadFile(errorKey + '.error.log', JSON.stringify({
1318
+ file: fileKey,
1319
+ error: error.message,
1320
+ stack: error.stack,
1321
+ timestamp: new Date().toISOString()
1322
+ }, null, 2));
1323
+
1324
+ // Continue with next file (don't throw)
1325
+ }
1326
+ ```
1327
+
1328
+ ### Record-Level Error Handling
1329
+
1330
+ ```typescript
1331
+ const mappedRecords = [];
1332
+ const mappingErrors = [];
1333
+
1334
+ for (const record of records) {
1335
+ const result = await mapper.map(record);
1336
+
1337
+ if (result.success) {
1338
+ mappedRecords.push(result.data);
1339
+ } else {
1340
+ mappingErrors.push({
1341
+ record,
1342
+ errors: result.errors
1343
+ });
1344
+ }
1345
+ }
1346
+
1347
+ console.log(`Mapped ${mappedRecords.length} records, ${mappingErrors.length} errors`);
1348
+
1349
+ // Write error report
1350
+ if (mappingErrors.length > 0) {
1351
+ await s3.uploadFile(
1352
+ 'inventory/errors/mapping-errors.json',
1353
+ JSON.stringify(mappingErrors, null, 2)
1354
+ );
1355
+ }
1356
+
1357
+ // Continue with successful records
1358
+ ```
1359
+
1360
+ ### Batch API Error Reporting
1361
+
1362
+ ```typescript
1363
+ // After job completes, check for errors
1364
+ const status = await client.getJobStatus(job.id);
1365
+
1366
+ if (status.errorSummary && status.errorSummary.totalErrors > 0) {
1367
+ console.warn(`Job completed with ${status.errorSummary.totalErrors} errors`);
1368
+
1369
+ // Get error details
1370
+ const errorDetails = await client.graphql({
1371
+ query: `
1372
+ query GetJobErrors($jobId: ID!) {
1373
+ job(id: $jobId) {
1374
+ batches(first: 100) {
1375
+ edges {
1376
+ node {
1377
+ id
1378
+ status
1379
+ errors {
1380
+ recordRef
1381
+ errorType
1382
+ errorMessage
1383
+ }
1384
+ }
1385
+ }
1386
+ }
1387
+ }
1388
+ }
1389
+ `,
1390
+ variables: { jobId: job.id }
1391
+ });
1392
+
1393
+ // Write error report
1394
+ await s3.uploadFile(
1395
+ 'inventory/errors/batch-errors.json',
1396
+ JSON.stringify(errorDetails, null, 2)
1397
+ );
1398
+ }
1399
+ ```
1400
+
1401
+ ---
1402
+
1403
+ ## Complete Implementation Example
1404
+
1405
+ See **[S3 CSV Batch API Guide](../../../01-TEMPLATES/standalone/s3-csv-batch-api.md)** for production-ready implementation with:
1406
+
1407
+ - Complete TypeScript code
1408
+ - Environment configuration
1409
+ - Error handling strategies
1410
+ - File archival patterns
1411
+ - Scheduling with cron
1412
+ - Monitoring and logging
1413
+
1414
+ ### Quick Example: Scheduled Versori Batch
1415
+
1416
+ ```typescript
1417
+ /**
1418
+ * Versori Scheduled Workflow: Daily Inventory Sync
1419
+ *
1420
+ * Trigger: Cron (daily at 2 AM)
1421
+ * Process: Download CSV from S3, send to Batch API
1422
+ */
1423
+
1424
+ import { createClient, S3DataSource, CSVParserService, UniversalMapper } from '@fluentcommerce/fc-connect-sdk';
1425
+
1426
+ export default async function dailyInventorySync(activation: any, log: any, connections: any) {
1427
+ try {
1428
+ log.info('Starting daily inventory sync');
1429
+
1430
+ // Create client
1431
+ const client = await createClient({
1432
+ connection: connections.fluent_commerce,
1433
+ logger: log
1434
+ });
1435
+
1436
+ // Create S3 source
1437
+ const s3 = new S3DataSource({
1438
+ connection: connections.aws_s3
1439
+ }, log);
1440
+
1441
+ // List files
1442
+ const files = await s3.listFiles('inventory/daily/');
1443
+ log.info(`Found ${files.length} files to process`);
1444
+
1445
+ // Process each file
1446
+ for (const file of files) {
1447
+ await processFile(client, s3, file.key, log);
1448
+ }
1449
+
1450
+ return { status: 200, body: { success: true, filesProcessed: files.length } };
1451
+
1452
+ } catch (error: any) {
1453
+ log.error('Daily sync failed', error);
1454
+ return { status: 500, body: { success: false, error: error.message } };
1455
+ }
1456
+ }
1457
+
1458
+ async function processFile(client: any, s3: any, fileKey: string, log: any) {
1459
+ try {
1460
+ // Download
1461
+ const content = await s3.downloadFile(fileKey);
1462
+
1463
+ // Parse
1464
+ const parser = new CSVParserService({ headers: true });
1465
+ const records = await parser.parse(content);
1466
+
1467
+ // Map
1468
+ const mapper = new UniversalMapper({
1469
+ fields: {
1470
+ ref: { source: 'sku', resolver: 'custom.buildRef' },
1471
+ type: { value: 'INVENTORY' },
1472
+ productRef: { source: 'sku', required: true },
1473
+ locationRef: { source: 'location', required: true },
1474
+ onHand: { source: 'qty', resolver: 'sdk.parseInt' }
1475
+ }
1476
+ }, {
1477
+ customResolvers: {
1478
+ 'custom.buildRef': (v, d) => `${d.sku}-${d.location}`
1479
+ }
1480
+ });
1481
+
1482
+ const mapped = [];
1483
+ for (const rec of records) {
1484
+ const result = await mapper.map(rec);
1485
+ if (result.success) mapped.push(result.data);
1486
+ }
1487
+
1488
+ // Create job
1489
+ const job = await client.createJob({
1490
+ name: `Daily Inventory - ${fileKey}`,
1491
+ retailerId: '2'
1492
+ });
1493
+
1494
+ // Send batches
1495
+ const BATCH_SIZE = 100;
1496
+ for (let i = 0; i < mapped.length; i += BATCH_SIZE) {
1497
+ await client.sendBatch(job.id, {
1498
+ entities: mapped.slice(i, i + BATCH_SIZE)
1499
+ });
1500
+ }
1501
+
1502
+ // Poll completion
1503
+ let status = await client.getJobStatus(job.id);
1504
+ while (status.status === 'PENDING' || status.status === 'PROCESSING') {
1505
+ await new Promise(r => setTimeout(r, 30000));
1506
+ status = await client.getJobStatus(job.id);
1507
+ }
1508
+
1509
+ if (status.status === 'COMPLETED') {
1510
+ await s3.moveFile(fileKey, fileKey.replace('daily/', 'archive/'));
1511
+ log.info(`✓ Processed ${fileKey}`);
1512
+ } else {
1513
+ throw new Error(`Job failed: ${status.status}`);
1514
+ }
1515
+
1516
+ } catch (error: any) {
1517
+ log.error(`Failed to process ${fileKey}`, error);
1518
+ await s3.moveFile(fileKey, fileKey.replace('daily/', 'errors/'));
1519
+ }
1520
+ }
1521
+ ```
1522
+
1523
+ ---
1524
+
1525
+ ## Next Steps
1526
+
1527
+ Now that you understand batch processing, you're ready to learn delta sync for incremental updates!
1528
+
1529
+ **Continue to:** [Module 3: Delta Sync →](./integration-patterns-03-delta-sync.md)
1530
+
1531
+ Or explore:
1532
+ - [Module 5: Error Handling](./integration-patterns-05-error-handling.md) - Resilience strategies
1533
+ - [Complete Example: S3 CSV Batch API](../../../01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-csv-inventory-batch.md)
1534
+ - [Complete Example: Versori Scheduled CSV](../../../01-TEMPLATES/versori/workflows/ingestion/batch-api/template-ingestion-s3-csv-inventory-batch.md)
1535
+
1536
+ ---
1537
+
1538
+ ## Additional Resources
1539
+
1540
+ - [Fluent Batch API Documentation](https://docs.fluentcommerce.com/)
1541
+ - [Universal Mapping Guide](../../../02-CORE-GUIDES/advanced-services/advanced-services-readme.md)
1542
+ - [S3DataSource API Reference](../../../02-CORE-GUIDES/data-sources/modules/data-sources-02-s3-operations.md)
1543
+ - [CSVParserService API Reference](../../../02-CORE-GUIDES/parsers/modules/02-core-guides-parsers-02-csv-parser.md)
1544
+
1545
+ ---
1546
+
1547
+ [← Back to Index](../../../02-CORE-GUIDES/advanced-services/advanced-services-readme.md) | [Previous: Real-Time →](./integration-patterns-01-real-time-processing.md) | [Next: Delta Sync →](./integration-patterns-03-delta-sync.md)