kreuzberg 4.0.0.pre.rc.8 → 4.0.0.pre.rc.13
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.gitignore +14 -14
- data/.rspec +3 -3
- data/.rubocop.yaml +1 -1
- data/.rubocop.yml +538 -538
- data/Gemfile +8 -8
- data/Gemfile.lock +4 -104
- data/README.md +454 -432
- data/Rakefile +25 -25
- data/Steepfile +47 -47
- data/examples/async_patterns.rb +341 -341
- data/ext/kreuzberg_rb/extconf.rb +45 -45
- data/ext/kreuzberg_rb/native/.cargo/config.toml +2 -2
- data/ext/kreuzberg_rb/native/Cargo.lock +6941 -6721
- data/ext/kreuzberg_rb/native/Cargo.toml +54 -54
- data/ext/kreuzberg_rb/native/README.md +425 -425
- data/ext/kreuzberg_rb/native/build.rs +15 -15
- data/ext/kreuzberg_rb/native/include/ieeefp.h +11 -11
- data/ext/kreuzberg_rb/native/include/msvc_compat/strings.h +14 -14
- data/ext/kreuzberg_rb/native/include/strings.h +20 -20
- data/ext/kreuzberg_rb/native/include/unistd.h +47 -47
- data/ext/kreuzberg_rb/native/src/lib.rs +3158 -3135
- data/extconf.rb +28 -28
- data/kreuzberg.gemspec +214 -182
- data/lib/kreuzberg/api_proxy.rb +142 -142
- data/lib/kreuzberg/cache_api.rb +81 -46
- data/lib/kreuzberg/cli.rb +55 -55
- data/lib/kreuzberg/cli_proxy.rb +127 -127
- data/lib/kreuzberg/config.rb +724 -724
- data/lib/kreuzberg/error_context.rb +80 -32
- data/lib/kreuzberg/errors.rb +118 -118
- data/lib/kreuzberg/extraction_api.rb +340 -85
- data/lib/kreuzberg/mcp_proxy.rb +186 -186
- data/lib/kreuzberg/ocr_backend_protocol.rb +113 -113
- data/lib/kreuzberg/post_processor_protocol.rb +86 -86
- data/lib/kreuzberg/result.rb +279 -279
- data/lib/kreuzberg/setup_lib_path.rb +80 -80
- data/lib/kreuzberg/validator_protocol.rb +89 -89
- data/lib/kreuzberg/version.rb +5 -5
- data/lib/kreuzberg.rb +109 -103
- data/lib/pdfium.dll +0 -0
- data/sig/kreuzberg/internal.rbs +184 -184
- data/sig/kreuzberg.rbs +546 -537
- data/spec/binding/cache_spec.rb +227 -227
- data/spec/binding/cli_proxy_spec.rb +85 -85
- data/spec/binding/cli_spec.rb +55 -55
- data/spec/binding/config_spec.rb +345 -345
- data/spec/binding/config_validation_spec.rb +283 -283
- data/spec/binding/error_handling_spec.rb +213 -213
- data/spec/binding/errors_spec.rb +66 -66
- data/spec/binding/plugins/ocr_backend_spec.rb +307 -307
- data/spec/binding/plugins/postprocessor_spec.rb +269 -269
- data/spec/binding/plugins/validator_spec.rb +274 -274
- data/spec/fixtures/config.toml +39 -39
- data/spec/fixtures/config.yaml +41 -41
- data/spec/fixtures/invalid_config.toml +4 -4
- data/spec/smoke/package_spec.rb +178 -178
- data/spec/spec_helper.rb +42 -42
- data/vendor/Cargo.toml +45 -0
- data/vendor/kreuzberg/Cargo.toml +61 -38
- data/vendor/kreuzberg/README.md +230 -221
- data/vendor/kreuzberg/benches/otel_overhead.rs +48 -48
- data/vendor/kreuzberg/build.rs +843 -891
- data/vendor/kreuzberg/src/api/error.rs +81 -81
- data/vendor/kreuzberg/src/api/handlers.rs +199 -199
- data/vendor/kreuzberg/src/api/mod.rs +79 -79
- data/vendor/kreuzberg/src/api/server.rs +353 -353
- data/vendor/kreuzberg/src/api/types.rs +170 -170
- data/vendor/kreuzberg/src/cache/mod.rs +1167 -1167
- data/vendor/kreuzberg/src/chunking/mod.rs +1877 -1877
- data/vendor/kreuzberg/src/chunking/processor.rs +220 -220
- data/vendor/kreuzberg/src/core/batch_mode.rs +95 -95
- data/vendor/kreuzberg/src/core/config.rs +1080 -1080
- data/vendor/kreuzberg/src/core/extractor.rs +1156 -1156
- data/vendor/kreuzberg/src/core/io.rs +329 -329
- data/vendor/kreuzberg/src/core/mime.rs +605 -605
- data/vendor/kreuzberg/src/core/mod.rs +47 -47
- data/vendor/kreuzberg/src/core/pipeline.rs +1184 -1171
- data/vendor/kreuzberg/src/embeddings.rs +500 -432
- data/vendor/kreuzberg/src/error.rs +431 -431
- data/vendor/kreuzberg/src/extraction/archive.rs +954 -954
- data/vendor/kreuzberg/src/extraction/docx.rs +398 -398
- data/vendor/kreuzberg/src/extraction/email.rs +854 -854
- data/vendor/kreuzberg/src/extraction/excel.rs +688 -688
- data/vendor/kreuzberg/src/extraction/html.rs +601 -569
- data/vendor/kreuzberg/src/extraction/image.rs +491 -491
- data/vendor/kreuzberg/src/extraction/libreoffice.rs +574 -562
- data/vendor/kreuzberg/src/extraction/markdown.rs +213 -213
- data/vendor/kreuzberg/src/extraction/mod.rs +81 -81
- data/vendor/kreuzberg/src/extraction/office_metadata/app_properties.rs +398 -398
- data/vendor/kreuzberg/src/extraction/office_metadata/core_properties.rs +247 -247
- data/vendor/kreuzberg/src/extraction/office_metadata/custom_properties.rs +240 -240
- data/vendor/kreuzberg/src/extraction/office_metadata/mod.rs +130 -130
- data/vendor/kreuzberg/src/extraction/office_metadata/odt_properties.rs +284 -284
- data/vendor/kreuzberg/src/extraction/pptx.rs +3100 -3100
- data/vendor/kreuzberg/src/extraction/structured.rs +490 -490
- data/vendor/kreuzberg/src/extraction/table.rs +328 -328
- data/vendor/kreuzberg/src/extraction/text.rs +269 -269
- data/vendor/kreuzberg/src/extraction/xml.rs +333 -333
- data/vendor/kreuzberg/src/extractors/archive.rs +447 -447
- data/vendor/kreuzberg/src/extractors/bibtex.rs +470 -470
- data/vendor/kreuzberg/src/extractors/docbook.rs +504 -504
- data/vendor/kreuzberg/src/extractors/docx.rs +400 -400
- data/vendor/kreuzberg/src/extractors/email.rs +157 -157
- data/vendor/kreuzberg/src/extractors/epub.rs +708 -708
- data/vendor/kreuzberg/src/extractors/excel.rs +345 -345
- data/vendor/kreuzberg/src/extractors/fictionbook.rs +492 -492
- data/vendor/kreuzberg/src/extractors/html.rs +407 -407
- data/vendor/kreuzberg/src/extractors/image.rs +219 -219
- data/vendor/kreuzberg/src/extractors/jats.rs +1054 -1054
- data/vendor/kreuzberg/src/extractors/jupyter.rs +368 -368
- data/vendor/kreuzberg/src/extractors/latex.rs +653 -653
- data/vendor/kreuzberg/src/extractors/markdown.rs +701 -701
- data/vendor/kreuzberg/src/extractors/mod.rs +429 -429
- data/vendor/kreuzberg/src/extractors/odt.rs +628 -628
- data/vendor/kreuzberg/src/extractors/opml.rs +635 -635
- data/vendor/kreuzberg/src/extractors/orgmode.rs +529 -529
- data/vendor/kreuzberg/src/extractors/pdf.rs +749 -673
- data/vendor/kreuzberg/src/extractors/pptx.rs +267 -267
- data/vendor/kreuzberg/src/extractors/rst.rs +577 -577
- data/vendor/kreuzberg/src/extractors/rtf.rs +809 -809
- data/vendor/kreuzberg/src/extractors/security.rs +484 -484
- data/vendor/kreuzberg/src/extractors/security_tests.rs +367 -367
- data/vendor/kreuzberg/src/extractors/structured.rs +142 -142
- data/vendor/kreuzberg/src/extractors/text.rs +265 -265
- data/vendor/kreuzberg/src/extractors/typst.rs +651 -651
- data/vendor/kreuzberg/src/extractors/xml.rs +147 -147
- data/vendor/kreuzberg/src/image/dpi.rs +164 -164
- data/vendor/kreuzberg/src/image/mod.rs +6 -6
- data/vendor/kreuzberg/src/image/preprocessing.rs +417 -417
- data/vendor/kreuzberg/src/image/resize.rs +89 -89
- data/vendor/kreuzberg/src/keywords/config.rs +154 -154
- data/vendor/kreuzberg/src/keywords/mod.rs +237 -237
- data/vendor/kreuzberg/src/keywords/processor.rs +275 -275
- data/vendor/kreuzberg/src/keywords/rake.rs +293 -293
- data/vendor/kreuzberg/src/keywords/types.rs +68 -68
- data/vendor/kreuzberg/src/keywords/yake.rs +163 -163
- data/vendor/kreuzberg/src/language_detection/mod.rs +985 -985
- data/vendor/kreuzberg/src/language_detection/processor.rs +219 -219
- data/vendor/kreuzberg/src/lib.rs +113 -113
- data/vendor/kreuzberg/src/mcp/mod.rs +35 -35
- data/vendor/kreuzberg/src/mcp/server.rs +2076 -2076
- data/vendor/kreuzberg/src/ocr/cache.rs +469 -469
- data/vendor/kreuzberg/src/ocr/error.rs +37 -37
- data/vendor/kreuzberg/src/ocr/hocr.rs +216 -216
- data/vendor/kreuzberg/src/ocr/mod.rs +58 -58
- data/vendor/kreuzberg/src/ocr/processor.rs +863 -863
- data/vendor/kreuzberg/src/ocr/table/mod.rs +4 -4
- data/vendor/kreuzberg/src/ocr/table/tsv_parser.rs +144 -144
- data/vendor/kreuzberg/src/ocr/tesseract_backend.rs +452 -452
- data/vendor/kreuzberg/src/ocr/types.rs +393 -393
- data/vendor/kreuzberg/src/ocr/utils.rs +47 -47
- data/vendor/kreuzberg/src/ocr/validation.rs +206 -206
- data/vendor/kreuzberg/src/panic_context.rs +154 -154
- data/vendor/kreuzberg/src/pdf/bindings.rs +44 -0
- data/vendor/kreuzberg/src/pdf/bundled.rs +346 -328
- data/vendor/kreuzberg/src/pdf/error.rs +130 -130
- data/vendor/kreuzberg/src/pdf/images.rs +139 -139
- data/vendor/kreuzberg/src/pdf/metadata.rs +489 -489
- data/vendor/kreuzberg/src/pdf/mod.rs +68 -66
- data/vendor/kreuzberg/src/pdf/rendering.rs +368 -368
- data/vendor/kreuzberg/src/pdf/table.rs +420 -417
- data/vendor/kreuzberg/src/pdf/text.rs +240 -240
- data/vendor/kreuzberg/src/plugins/extractor.rs +1044 -1044
- data/vendor/kreuzberg/src/plugins/mod.rs +212 -212
- data/vendor/kreuzberg/src/plugins/ocr.rs +639 -639
- data/vendor/kreuzberg/src/plugins/processor.rs +650 -650
- data/vendor/kreuzberg/src/plugins/registry.rs +1339 -1339
- data/vendor/kreuzberg/src/plugins/traits.rs +258 -258
- data/vendor/kreuzberg/src/plugins/validator.rs +967 -967
- data/vendor/kreuzberg/src/stopwords/mod.rs +1470 -1470
- data/vendor/kreuzberg/src/text/mod.rs +25 -25
- data/vendor/kreuzberg/src/text/quality.rs +697 -697
- data/vendor/kreuzberg/src/text/quality_processor.rs +219 -219
- data/vendor/kreuzberg/src/text/string_utils.rs +217 -217
- data/vendor/kreuzberg/src/text/token_reduction/cjk_utils.rs +164 -164
- data/vendor/kreuzberg/src/text/token_reduction/config.rs +100 -100
- data/vendor/kreuzberg/src/text/token_reduction/core.rs +796 -796
- data/vendor/kreuzberg/src/text/token_reduction/filters.rs +902 -902
- data/vendor/kreuzberg/src/text/token_reduction/mod.rs +160 -160
- data/vendor/kreuzberg/src/text/token_reduction/semantic.rs +619 -619
- data/vendor/kreuzberg/src/text/token_reduction/simd_text.rs +147 -147
- data/vendor/kreuzberg/src/types.rs +1055 -1055
- data/vendor/kreuzberg/src/utils/mod.rs +17 -17
- data/vendor/kreuzberg/src/utils/quality.rs +959 -959
- data/vendor/kreuzberg/src/utils/string_utils.rs +381 -381
- data/vendor/kreuzberg/stopwords/af_stopwords.json +53 -53
- data/vendor/kreuzberg/stopwords/ar_stopwords.json +482 -482
- data/vendor/kreuzberg/stopwords/bg_stopwords.json +261 -261
- data/vendor/kreuzberg/stopwords/bn_stopwords.json +400 -400
- data/vendor/kreuzberg/stopwords/br_stopwords.json +1205 -1205
- data/vendor/kreuzberg/stopwords/ca_stopwords.json +280 -280
- data/vendor/kreuzberg/stopwords/cs_stopwords.json +425 -425
- data/vendor/kreuzberg/stopwords/da_stopwords.json +172 -172
- data/vendor/kreuzberg/stopwords/de_stopwords.json +622 -622
- data/vendor/kreuzberg/stopwords/el_stopwords.json +849 -849
- data/vendor/kreuzberg/stopwords/en_stopwords.json +1300 -1300
- data/vendor/kreuzberg/stopwords/eo_stopwords.json +175 -175
- data/vendor/kreuzberg/stopwords/es_stopwords.json +734 -734
- data/vendor/kreuzberg/stopwords/et_stopwords.json +37 -37
- data/vendor/kreuzberg/stopwords/eu_stopwords.json +100 -100
- data/vendor/kreuzberg/stopwords/fa_stopwords.json +801 -801
- data/vendor/kreuzberg/stopwords/fi_stopwords.json +849 -849
- data/vendor/kreuzberg/stopwords/fr_stopwords.json +693 -693
- data/vendor/kreuzberg/stopwords/ga_stopwords.json +111 -111
- data/vendor/kreuzberg/stopwords/gl_stopwords.json +162 -162
- data/vendor/kreuzberg/stopwords/gu_stopwords.json +226 -226
- data/vendor/kreuzberg/stopwords/ha_stopwords.json +41 -41
- data/vendor/kreuzberg/stopwords/he_stopwords.json +196 -196
- data/vendor/kreuzberg/stopwords/hi_stopwords.json +227 -227
- data/vendor/kreuzberg/stopwords/hr_stopwords.json +181 -181
- data/vendor/kreuzberg/stopwords/hu_stopwords.json +791 -791
- data/vendor/kreuzberg/stopwords/hy_stopwords.json +47 -47
- data/vendor/kreuzberg/stopwords/id_stopwords.json +760 -760
- data/vendor/kreuzberg/stopwords/it_stopwords.json +634 -634
- data/vendor/kreuzberg/stopwords/ja_stopwords.json +136 -136
- data/vendor/kreuzberg/stopwords/kn_stopwords.json +84 -84
- data/vendor/kreuzberg/stopwords/ko_stopwords.json +681 -681
- data/vendor/kreuzberg/stopwords/ku_stopwords.json +64 -64
- data/vendor/kreuzberg/stopwords/la_stopwords.json +51 -51
- data/vendor/kreuzberg/stopwords/lt_stopwords.json +476 -476
- data/vendor/kreuzberg/stopwords/lv_stopwords.json +163 -163
- data/vendor/kreuzberg/stopwords/ml_stopwords.json +1 -1
- data/vendor/kreuzberg/stopwords/mr_stopwords.json +101 -101
- data/vendor/kreuzberg/stopwords/ms_stopwords.json +477 -477
- data/vendor/kreuzberg/stopwords/ne_stopwords.json +490 -490
- data/vendor/kreuzberg/stopwords/nl_stopwords.json +415 -415
- data/vendor/kreuzberg/stopwords/no_stopwords.json +223 -223
- data/vendor/kreuzberg/stopwords/pl_stopwords.json +331 -331
- data/vendor/kreuzberg/stopwords/pt_stopwords.json +562 -562
- data/vendor/kreuzberg/stopwords/ro_stopwords.json +436 -436
- data/vendor/kreuzberg/stopwords/ru_stopwords.json +561 -561
- data/vendor/kreuzberg/stopwords/si_stopwords.json +193 -193
- data/vendor/kreuzberg/stopwords/sk_stopwords.json +420 -420
- data/vendor/kreuzberg/stopwords/sl_stopwords.json +448 -448
- data/vendor/kreuzberg/stopwords/so_stopwords.json +32 -32
- data/vendor/kreuzberg/stopwords/st_stopwords.json +33 -33
- data/vendor/kreuzberg/stopwords/sv_stopwords.json +420 -420
- data/vendor/kreuzberg/stopwords/sw_stopwords.json +76 -76
- data/vendor/kreuzberg/stopwords/ta_stopwords.json +129 -129
- data/vendor/kreuzberg/stopwords/te_stopwords.json +54 -54
- data/vendor/kreuzberg/stopwords/th_stopwords.json +118 -118
- data/vendor/kreuzberg/stopwords/tl_stopwords.json +149 -149
- data/vendor/kreuzberg/stopwords/tr_stopwords.json +506 -506
- data/vendor/kreuzberg/stopwords/uk_stopwords.json +75 -75
- data/vendor/kreuzberg/stopwords/ur_stopwords.json +519 -519
- data/vendor/kreuzberg/stopwords/vi_stopwords.json +647 -647
- data/vendor/kreuzberg/stopwords/yo_stopwords.json +62 -62
- data/vendor/kreuzberg/stopwords/zh_stopwords.json +796 -796
- data/vendor/kreuzberg/stopwords/zu_stopwords.json +31 -31
- data/vendor/kreuzberg/tests/api_extract_multipart.rs +52 -52
- data/vendor/kreuzberg/tests/api_tests.rs +966 -966
- data/vendor/kreuzberg/tests/archive_integration.rs +545 -545
- data/vendor/kreuzberg/tests/batch_orchestration.rs +556 -556
- data/vendor/kreuzberg/tests/batch_processing.rs +318 -318
- data/vendor/kreuzberg/tests/bibtex_parity_test.rs +421 -421
- data/vendor/kreuzberg/tests/concurrency_stress.rs +533 -533
- data/vendor/kreuzberg/tests/config_features.rs +612 -612
- data/vendor/kreuzberg/tests/config_loading_tests.rs +416 -416
- data/vendor/kreuzberg/tests/core_integration.rs +510 -510
- data/vendor/kreuzberg/tests/csv_integration.rs +414 -414
- data/vendor/kreuzberg/tests/docbook_extractor_tests.rs +500 -500
- data/vendor/kreuzberg/tests/docx_metadata_extraction_test.rs +122 -122
- data/vendor/kreuzberg/tests/docx_vs_pandoc_comparison.rs +370 -370
- data/vendor/kreuzberg/tests/email_integration.rs +327 -327
- data/vendor/kreuzberg/tests/epub_native_extractor_tests.rs +275 -275
- data/vendor/kreuzberg/tests/error_handling.rs +402 -402
- data/vendor/kreuzberg/tests/fictionbook_extractor_tests.rs +228 -228
- data/vendor/kreuzberg/tests/format_integration.rs +164 -161
- data/vendor/kreuzberg/tests/helpers/mod.rs +142 -142
- data/vendor/kreuzberg/tests/html_table_test.rs +551 -551
- data/vendor/kreuzberg/tests/image_integration.rs +255 -255
- data/vendor/kreuzberg/tests/instrumentation_test.rs +139 -139
- data/vendor/kreuzberg/tests/jats_extractor_tests.rs +639 -639
- data/vendor/kreuzberg/tests/jupyter_extractor_tests.rs +704 -704
- data/vendor/kreuzberg/tests/keywords_integration.rs +479 -479
- data/vendor/kreuzberg/tests/keywords_quality.rs +509 -509
- data/vendor/kreuzberg/tests/latex_extractor_tests.rs +496 -496
- data/vendor/kreuzberg/tests/markdown_extractor_tests.rs +490 -490
- data/vendor/kreuzberg/tests/mime_detection.rs +429 -429
- data/vendor/kreuzberg/tests/ocr_configuration.rs +514 -514
- data/vendor/kreuzberg/tests/ocr_errors.rs +698 -698
- data/vendor/kreuzberg/tests/ocr_quality.rs +629 -629
- data/vendor/kreuzberg/tests/ocr_stress.rs +469 -469
- data/vendor/kreuzberg/tests/odt_extractor_tests.rs +674 -674
- data/vendor/kreuzberg/tests/opml_extractor_tests.rs +616 -616
- data/vendor/kreuzberg/tests/orgmode_extractor_tests.rs +822 -822
- data/vendor/kreuzberg/tests/pdf_integration.rs +45 -45
- data/vendor/kreuzberg/tests/pdfium_linking.rs +374 -374
- data/vendor/kreuzberg/tests/pipeline_integration.rs +1436 -1436
- data/vendor/kreuzberg/tests/plugin_ocr_backend_test.rs +776 -776
- data/vendor/kreuzberg/tests/plugin_postprocessor_test.rs +560 -560
- data/vendor/kreuzberg/tests/plugin_system.rs +927 -927
- data/vendor/kreuzberg/tests/plugin_validator_test.rs +783 -783
- data/vendor/kreuzberg/tests/registry_integration_tests.rs +587 -587
- data/vendor/kreuzberg/tests/rst_extractor_tests.rs +694 -694
- data/vendor/kreuzberg/tests/rtf_extractor_tests.rs +775 -775
- data/vendor/kreuzberg/tests/security_validation.rs +416 -416
- data/vendor/kreuzberg/tests/stopwords_integration_test.rs +888 -888
- data/vendor/kreuzberg/tests/test_fastembed.rs +631 -631
- data/vendor/kreuzberg/tests/typst_behavioral_tests.rs +1260 -1260
- data/vendor/kreuzberg/tests/typst_extractor_tests.rs +648 -648
- data/vendor/kreuzberg/tests/xlsx_metadata_extraction_test.rs +87 -87
- data/vendor/kreuzberg-ffi/Cargo.toml +63 -0
- data/vendor/kreuzberg-ffi/README.md +851 -0
- data/vendor/kreuzberg-ffi/build.rs +176 -0
- data/vendor/kreuzberg-ffi/cbindgen.toml +27 -0
- data/vendor/kreuzberg-ffi/kreuzberg-ffi-install.pc +12 -0
- data/vendor/kreuzberg-ffi/kreuzberg-ffi.pc.in +12 -0
- data/vendor/kreuzberg-ffi/kreuzberg.h +1087 -0
- data/vendor/kreuzberg-ffi/src/lib.rs +3616 -0
- data/vendor/kreuzberg-ffi/src/panic_shield.rs +247 -0
- data/vendor/kreuzberg-ffi/tests.disabled/README.md +48 -0
- data/vendor/kreuzberg-ffi/tests.disabled/config_loading_tests.rs +299 -0
- data/vendor/kreuzberg-ffi/tests.disabled/config_tests.rs +346 -0
- data/vendor/kreuzberg-ffi/tests.disabled/extractor_tests.rs +232 -0
- data/vendor/kreuzberg-ffi/tests.disabled/plugin_registration_tests.rs +470 -0
- data/vendor/kreuzberg-tesseract/.commitlintrc.json +13 -0
- data/vendor/kreuzberg-tesseract/.crate-ignore +2 -0
- data/vendor/kreuzberg-tesseract/Cargo.lock +2933 -0
- data/vendor/kreuzberg-tesseract/Cargo.toml +48 -0
- data/vendor/kreuzberg-tesseract/LICENSE +22 -0
- data/vendor/kreuzberg-tesseract/README.md +399 -0
- data/vendor/kreuzberg-tesseract/build.rs +1354 -0
- data/vendor/kreuzberg-tesseract/patches/README.md +71 -0
- data/vendor/kreuzberg-tesseract/patches/tesseract.diff +199 -0
- data/vendor/kreuzberg-tesseract/src/api.rs +1371 -0
- data/vendor/kreuzberg-tesseract/src/choice_iterator.rs +77 -0
- data/vendor/kreuzberg-tesseract/src/enums.rs +297 -0
- data/vendor/kreuzberg-tesseract/src/error.rs +81 -0
- data/vendor/kreuzberg-tesseract/src/lib.rs +145 -0
- data/vendor/kreuzberg-tesseract/src/monitor.rs +57 -0
- data/vendor/kreuzberg-tesseract/src/mutable_iterator.rs +197 -0
- data/vendor/kreuzberg-tesseract/src/page_iterator.rs +253 -0
- data/vendor/kreuzberg-tesseract/src/result_iterator.rs +286 -0
- data/vendor/kreuzberg-tesseract/src/result_renderer.rs +183 -0
- data/vendor/kreuzberg-tesseract/tests/integration_test.rs +211 -0
- data/vendor/rb-sys/.cargo_vcs_info.json +5 -5
- data/vendor/rb-sys/Cargo.lock +393 -393
- data/vendor/rb-sys/Cargo.toml +70 -70
- data/vendor/rb-sys/Cargo.toml.orig +57 -57
- data/vendor/rb-sys/LICENSE-APACHE +190 -190
- data/vendor/rb-sys/LICENSE-MIT +21 -21
- data/vendor/rb-sys/build/features.rs +111 -111
- data/vendor/rb-sys/build/main.rs +286 -286
- data/vendor/rb-sys/build/stable_api_config.rs +155 -155
- data/vendor/rb-sys/build/version.rs +50 -50
- data/vendor/rb-sys/readme.md +36 -36
- data/vendor/rb-sys/src/bindings.rs +21 -21
- data/vendor/rb-sys/src/hidden.rs +11 -11
- data/vendor/rb-sys/src/lib.rs +35 -35
- data/vendor/rb-sys/src/macros.rs +371 -371
- data/vendor/rb-sys/src/memory.rs +53 -53
- data/vendor/rb-sys/src/ruby_abi_version.rs +38 -38
- data/vendor/rb-sys/src/special_consts.rs +31 -31
- data/vendor/rb-sys/src/stable_api/compiled.c +179 -179
- data/vendor/rb-sys/src/stable_api/compiled.rs +257 -257
- data/vendor/rb-sys/src/stable_api/ruby_2_7.rs +324 -324
- data/vendor/rb-sys/src/stable_api/ruby_3_0.rs +332 -332
- data/vendor/rb-sys/src/stable_api/ruby_3_1.rs +325 -325
- data/vendor/rb-sys/src/stable_api/ruby_3_2.rs +323 -323
- data/vendor/rb-sys/src/stable_api/ruby_3_3.rs +339 -339
- data/vendor/rb-sys/src/stable_api/ruby_3_4.rs +339 -339
- data/vendor/rb-sys/src/stable_api.rs +260 -260
- data/vendor/rb-sys/src/symbol.rs +31 -31
- data/vendor/rb-sys/src/tracking_allocator.rs +330 -330
- data/vendor/rb-sys/src/utils.rs +89 -89
- data/vendor/rb-sys/src/value_type.rs +7 -7
- metadata +44 -81
- data/vendor/rb-sys/bin/release.sh +0 -21
@@ -1,545 +1,545 @@
-//! Archive extraction integration tests.
-//!
-//! Tests for ZIP, TAR, TAR.GZ, and 7z archive extraction.
-//! Validates metadata extraction, content extraction, nested archives, and error handling.
-
-#![cfg(feature = "archives")]
-
-use kreuzberg::core::config::ExtractionConfig;
-use kreuzberg::core::extractor::{extract_bytes, extract_bytes_sync};
-use std::io::{Cursor, Write};
-use tar::Builder as TarBuilder;
-use zip::write::{FileOptions, ZipWriter};
-
-mod helpers;
-
-/// Test basic ZIP extraction with single file.
-#[tokio::test]
-async fn test_zip_basic_extraction() {
-    let config = ExtractionConfig::default();
-
-    let zip_bytes = create_simple_zip();
-
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract ZIP successfully");
-
-    assert_eq!(result.mime_type, "application/zip");
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("ZIP Archive"));
-    assert!(result.content.contains("test.txt"));
-    assert!(result.content.contains("Hello from ZIP!"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.format, "ZIP");
-    assert_eq!(archive_meta.file_count, 1);
-    assert_eq!(archive_meta.file_list.len(), 1);
-    assert_eq!(archive_meta.file_list[0], "test.txt");
-}
-
-/// Test ZIP with multiple files.
-#[tokio::test]
-async fn test_zip_multiple_files() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.start_file("file1.txt", options).unwrap();
-        zip.write_all(b"Content 1").unwrap();
-
-        zip.start_file("file2.md", options).unwrap();
-        zip.write_all(b"# Content 2").unwrap();
-
-        zip.start_file("file3.json", options).unwrap();
-        zip.write_all(b"{\"key\": \"value\"}").unwrap();
-
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract multi-file ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("file1.txt"));
-    assert!(result.content.contains("file2.md"));
-    assert!(result.content.contains("file3.json"));
-
-    assert!(result.content.contains("Content 1"));
-    assert!(result.content.contains("Content 2"));
-    assert!(result.content.contains("value"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 3, "Should have 3 files");
-    assert_eq!(archive_meta.file_list.len(), 3, "file_list should contain 3 entries");
-    assert!(archive_meta.file_list.contains(&"file1.txt".to_string()));
-    assert!(archive_meta.file_list.contains(&"file2.md".to_string()));
-    assert!(archive_meta.file_list.contains(&"file3.json".to_string()));
-}
-
-/// Test ZIP with nested directory structure.
-#[tokio::test]
-async fn test_zip_nested_directories() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.add_directory("dir1/", options).unwrap();
-        zip.add_directory("dir1/subdir/", options).unwrap();
-
-        zip.start_file("dir1/file.txt", options).unwrap();
-        zip.write_all(b"File in dir1").unwrap();
-
-        zip.start_file("dir1/subdir/nested.txt", options).unwrap();
-        zip.write_all(b"Nested file").unwrap();
-
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract nested ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("dir1/"));
-    assert!(result.content.contains("dir1/file.txt"));
-    assert!(result.content.contains("dir1/subdir/nested.txt"));
-
-    assert!(result.content.contains("File in dir1"));
-    assert!(result.content.contains("Nested file"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert!(
-        archive_meta.file_count >= 2,
-        "Should have at least 2 files (excluding empty dirs)"
-    );
-    assert!(archive_meta.file_list.iter().any(|f| f.contains("dir1/file.txt")));
-    assert!(
-        archive_meta
-            .file_list
-            .iter()
-            .any(|f| f.contains("dir1/subdir/nested.txt"))
-    );
-}
-
-/// Test TAR extraction.
-#[tokio::test]
-async fn test_tar_extraction() {
-    let config = ExtractionConfig::default();
-
-    let tar_bytes = create_simple_tar();
-
-    let result = extract_bytes(&tar_bytes, "application/x-tar", &config)
-        .await
-        .expect("Should extract TAR successfully");
-
-    assert_eq!(result.mime_type, "application/x-tar");
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("TAR Archive"));
-    assert!(result.content.contains("test.txt"));
-    assert!(result.content.contains("Hello from TAR!"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.format, "TAR");
-    assert_eq!(archive_meta.file_count, 1);
-}
-
-/// Test TAR.GZ extraction (compressed TAR).
-///
-/// Note: TAR.GZ requires decompression before extraction.
-/// This test validates TAR extraction which is the underlying format.
-#[tokio::test]
-async fn test_tar_gz_extraction() {
-    let config = ExtractionConfig::default();
-
-    let tar_bytes = create_simple_tar();
-
-    let result = extract_bytes(&tar_bytes, "application/x-tar", &config)
-        .await
-        .expect("Should extract TAR");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("TAR Archive"));
-    assert!(result.content.contains("test.txt"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.format, "TAR");
-    assert_eq!(archive_meta.file_count, 1);
-
-    let result2 = extract_bytes(&tar_bytes, "application/tar", &config)
-        .await
-        .expect("Should extract with alternative MIME type");
-
-    assert!(
-        result2.chunks.is_none(),
-        "Chunks should be None without chunking config"
-    );
-    assert!(result2.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result2.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result2.content.contains("TAR Archive"));
-    assert!(result2.metadata.format.is_some());
-}
-
-/// Test 7z extraction.
-#[tokio::test]
-async fn test_7z_extraction() {
-    println!("7z test requires real 7z file - skipping programmatic creation");
-}
-
-/// Test nested archive (ZIP inside ZIP).
-#[tokio::test]
-async fn test_nested_archive() {
-    let config = ExtractionConfig::default();
-
-    let inner_zip = create_simple_zip();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.start_file("inner.zip", options).unwrap();
-        zip.write_all(&inner_zip).unwrap();
-
-        zip.start_file("readme.txt", options).unwrap();
-        zip.write_all(b"This archive contains another archive").unwrap();
-
-        zip.finish().unwrap();
-    }
-
-    let outer_zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&outer_zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract nested ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("inner.zip"));
-    assert!(result.content.contains("readme.txt"));
-    assert!(result.content.contains("This archive contains another archive"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 2, "Should have 2 files in outer archive");
-    assert!(archive_meta.file_list.contains(&"inner.zip".to_string()));
-    assert!(archive_meta.file_list.contains(&"readme.txt".to_string()));
-}
-
-/// Test archive with mixed file formats (PDF, DOCX, images).
-#[tokio::test]
-async fn test_archive_mixed_formats() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.start_file("document.txt", options).unwrap();
-        zip.write_all(b"Text document").unwrap();
-
-        zip.start_file("readme.md", options).unwrap();
-        zip.write_all(b"# README").unwrap();
-
-        zip.start_file("image.png", options).unwrap();
-        zip.write_all(&[0x89, 0x50, 0x4E, 0x47]).unwrap();
-
-        zip.start_file("document.pdf", options).unwrap();
-        zip.write_all(b"%PDF-1.4").unwrap();
-
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract mixed-format ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("document.txt"));
-    assert!(result.content.contains("readme.md"));
-    assert!(result.content.contains("image.png"));
-    assert!(result.content.contains("document.pdf"));
-
-    assert!(result.content.contains("Text document"));
-    assert!(result.content.contains("# README"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 4, "Should have 4 files");
-    assert_eq!(archive_meta.file_list.len(), 4, "file_list should contain 4 entries");
-    assert!(archive_meta.file_list.contains(&"document.txt".to_string()));
-    assert!(archive_meta.file_list.contains(&"readme.md".to_string()));
-    assert!(archive_meta.file_list.contains(&"image.png".to_string()));
-    assert!(archive_meta.file_list.contains(&"document.pdf".to_string()));
-}
-
-/// Test password-protected archive (should fail gracefully).
-#[tokio::test]
-async fn test_password_protected_archive() {
-    let config = ExtractionConfig::default();
-
-    let invalid_zip = vec![0x50, 0x4B, 0x03, 0x04];
-
-    let result = extract_bytes(&invalid_zip, "application/zip", &config).await;
-
-    assert!(result.is_err(), "Should fail on invalid/encrypted ZIP");
-}
-
-/// Test corrupted archive.
-#[tokio::test]
-async fn test_corrupted_archive() {
-    let config = ExtractionConfig::default();
-
-    let corrupted_zip = vec![0x50, 0x4B, 0x03, 0x04, 0xFF, 0xFF, 0xFF, 0xFF];
-
-    let result = extract_bytes(&corrupted_zip, "application/zip", &config).await;
-
-    assert!(result.is_err(), "Should fail on corrupted ZIP");
-
-    let mut corrupted_tar = vec![0xFF; 512];
-    corrupted_tar[0..5].copy_from_slice(b"file\0");
-
-    let result = extract_bytes(&corrupted_tar, "application/x-tar", &config).await;
-    assert!(
-        result.is_ok() || result.is_err(),
-        "Should handle corrupted TAR gracefully"
-    );
-}
-
-/// Test large archive (100+ files).
-#[tokio::test]
-async fn test_large_archive() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        for i in 0..100 {
-            zip.start_file(format!("file_{}.txt", i), options).unwrap();
-            zip.write_all(format!("Content {}", i).as_bytes()).unwrap();
-        }
-
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract large ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 100, "Should have 100 files");
-    assert_eq!(
-        archive_meta.file_list.len(),
-        100,
-        "file_list should contain 100 entries"
-    );
-
-    assert!(result.content.contains("file_0.txt"));
-    assert!(result.content.contains("file_99.txt"));
-    assert!(archive_meta.file_list.contains(&"file_0.txt".to_string()));
-    assert!(archive_meta.file_list.contains(&"file_50.txt".to_string()));
-    assert!(archive_meta.file_list.contains(&"file_99.txt".to_string()));
-}
-
-/// Test archive with special characters and Unicode filenames.
-#[tokio::test]
-async fn test_archive_with_special_characters() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.start_file("测试文件.txt", options).unwrap();
-        zip.write_all("Unicode content".as_bytes()).unwrap();
-
-        zip.start_file("file with spaces.txt", options).unwrap();
-        zip.write_all(b"Spaces in filename").unwrap();
-
-        zip.start_file("file-with-dashes.txt", options).unwrap();
-        zip.write_all(b"Dashes").unwrap();
-
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract ZIP with special characters");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("测试文件.txt") || result.content.contains("txt"));
-    assert!(result.content.contains("file with spaces.txt"));
-    assert!(result.content.contains("file-with-dashes.txt"));
-
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 3, "Should have 3 files");
-    assert_eq!(archive_meta.file_list.len(), 3, "file_list should contain 3 entries");
-    assert!(archive_meta.file_list.iter().any(|f| f.contains("txt")));
-    assert!(archive_meta.file_list.contains(&"file with spaces.txt".to_string()));
-    assert!(archive_meta.file_list.contains(&"file-with-dashes.txt".to_string()));
-}
-
-/// Test empty archive.
-#[tokio::test]
-async fn test_empty_archive() {
-    let config = ExtractionConfig::default();
-
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let zip = ZipWriter::new(&mut cursor);
-        zip.finish().unwrap();
-    }
-
-    let zip_bytes = cursor.into_inner();
-    let result = extract_bytes(&zip_bytes, "application/zip", &config)
-        .await
-        .expect("Should extract empty ZIP");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("ZIP Archive"));
-    assert!(result.metadata.format.is_some());
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.file_count, 0, "Empty archive should have 0 files");
-    assert_eq!(archive_meta.total_size, 0, "Empty archive should have 0 total size");
-    assert!(archive_meta.file_list.is_empty(), "file_list should be empty");
-}
-
-/// Test synchronous archive extraction.
-#[test]
-fn test_archive_extraction_sync() {
-    let config = ExtractionConfig::default();
-
-    let zip_bytes = create_simple_zip();
-    let result = extract_bytes_sync(&zip_bytes, "application/zip", &config).expect("Should extract ZIP synchronously");
-
-    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
-    assert!(result.detected_languages.is_none(), "Language detection not enabled");
-    assert!(result.tables.is_empty(), "Archive should not have tables");
-
-    assert!(result.content.contains("ZIP Archive"));
-    assert!(result.content.contains("test.txt"));
-    assert!(result.content.contains("Hello from ZIP!"));
-
-    assert!(result.metadata.format.is_some(), "Should have archive metadata");
-    let archive_meta = match result.metadata.format.as_ref().unwrap() {
-        kreuzberg::FormatMetadata::Archive(meta) => meta,
-        _ => panic!("Expected Archive metadata"),
-    };
-    assert_eq!(archive_meta.format, "ZIP");
-    assert_eq!(archive_meta.file_count, 1);
-    assert_eq!(archive_meta.file_list.len(), 1);
-    assert_eq!(archive_meta.file_list[0], "test.txt");
-}
-
-fn create_simple_zip() -> Vec<u8> {
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut zip = ZipWriter::new(&mut cursor);
-        let options = FileOptions::<'_, ()>::default();
-
-        zip.start_file("test.txt", options).unwrap();
-        zip.write_all(b"Hello from ZIP!").unwrap();
-
-        zip.finish().unwrap();
-    }
-    cursor.into_inner()
-}
-
-fn create_simple_tar() -> Vec<u8> {
-    let mut cursor = Cursor::new(Vec::new());
-    {
-        let mut tar = TarBuilder::new(&mut cursor);
-
-        let data = b"Hello from TAR!";
-        let mut header = tar::Header::new_gnu();
-        header.set_path("test.txt").unwrap();
-        header.set_size(data.len() as u64);
-        header.set_cksum();
-        tar.append(&header, &data[..]).unwrap();
-
-        tar.finish().unwrap();
-    }
-    cursor.into_inner()
-}
+//! Archive extraction integration tests.
+//!
+//! Tests for ZIP, TAR, TAR.GZ, and 7z archive extraction.
+//! Validates metadata extraction, content extraction, nested archives, and error handling.
+
+#![cfg(feature = "archives")]
+
+use kreuzberg::core::config::ExtractionConfig;
+use kreuzberg::core::extractor::{extract_bytes, extract_bytes_sync};
+use std::io::{Cursor, Write};
+use tar::Builder as TarBuilder;
+use zip::write::{FileOptions, ZipWriter};
+
+mod helpers;
+
+/// Test basic ZIP extraction with single file.
+#[tokio::test]
+async fn test_zip_basic_extraction() {
+    let config = ExtractionConfig::default();
+
+    let zip_bytes = create_simple_zip();
+
+    let result = extract_bytes(&zip_bytes, "application/zip", &config)
+        .await
+        .expect("Should extract ZIP successfully");
+
+    assert_eq!(result.mime_type, "application/zip");
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("ZIP Archive"));
+    assert!(result.content.contains("test.txt"));
+    assert!(result.content.contains("Hello from ZIP!"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.format, "ZIP");
+    assert_eq!(archive_meta.file_count, 1);
+    assert_eq!(archive_meta.file_list.len(), 1);
+    assert_eq!(archive_meta.file_list[0], "test.txt");
+}
+
+/// Test ZIP with multiple files.
+#[tokio::test]
+async fn test_zip_multiple_files() {
+    let config = ExtractionConfig::default();
+
+    let mut cursor = Cursor::new(Vec::new());
+    {
+        let mut zip = ZipWriter::new(&mut cursor);
+        let options = FileOptions::<'_, ()>::default();
+
+        zip.start_file("file1.txt", options).unwrap();
+        zip.write_all(b"Content 1").unwrap();
+
+        zip.start_file("file2.md", options).unwrap();
+        zip.write_all(b"# Content 2").unwrap();
+
+        zip.start_file("file3.json", options).unwrap();
+        zip.write_all(b"{\"key\": \"value\"}").unwrap();
+
+        zip.finish().unwrap();
+    }
+
+    let zip_bytes = cursor.into_inner();
+    let result = extract_bytes(&zip_bytes, "application/zip", &config)
+        .await
+        .expect("Should extract multi-file ZIP");
+
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("file1.txt"));
+    assert!(result.content.contains("file2.md"));
+    assert!(result.content.contains("file3.json"));
+
+    assert!(result.content.contains("Content 1"));
+    assert!(result.content.contains("Content 2"));
+    assert!(result.content.contains("value"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.file_count, 3, "Should have 3 files");
+    assert_eq!(archive_meta.file_list.len(), 3, "file_list should contain 3 entries");
+    assert!(archive_meta.file_list.contains(&"file1.txt".to_string()));
+    assert!(archive_meta.file_list.contains(&"file2.md".to_string()));
+    assert!(archive_meta.file_list.contains(&"file3.json".to_string()));
+}
+
+/// Test ZIP with nested directory structure.
+#[tokio::test]
+async fn test_zip_nested_directories() {
+    let config = ExtractionConfig::default();
+
+    let mut cursor = Cursor::new(Vec::new());
+    {
+        let mut zip = ZipWriter::new(&mut cursor);
+        let options = FileOptions::<'_, ()>::default();
+
+        zip.add_directory("dir1/", options).unwrap();
+        zip.add_directory("dir1/subdir/", options).unwrap();
+
+        zip.start_file("dir1/file.txt", options).unwrap();
+        zip.write_all(b"File in dir1").unwrap();
+
+        zip.start_file("dir1/subdir/nested.txt", options).unwrap();
+        zip.write_all(b"Nested file").unwrap();
+
+        zip.finish().unwrap();
+    }
+
+    let zip_bytes = cursor.into_inner();
+    let result = extract_bytes(&zip_bytes, "application/zip", &config)
+        .await
+        .expect("Should extract nested ZIP");
+
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("dir1/"));
+    assert!(result.content.contains("dir1/file.txt"));
+    assert!(result.content.contains("dir1/subdir/nested.txt"));
+
+    assert!(result.content.contains("File in dir1"));
+    assert!(result.content.contains("Nested file"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert!(
+        archive_meta.file_count >= 2,
+        "Should have at least 2 files (excluding empty dirs)"
+    );
+    assert!(archive_meta.file_list.iter().any(|f| f.contains("dir1/file.txt")));
+    assert!(
+        archive_meta
+            .file_list
+            .iter()
+            .any(|f| f.contains("dir1/subdir/nested.txt"))
+    );
+}
+
+/// Test TAR extraction.
+#[tokio::test]
+async fn test_tar_extraction() {
+    let config = ExtractionConfig::default();
+
+    let tar_bytes = create_simple_tar();
+
+    let result = extract_bytes(&tar_bytes, "application/x-tar", &config)
+        .await
+        .expect("Should extract TAR successfully");
+
+    assert_eq!(result.mime_type, "application/x-tar");
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("TAR Archive"));
+    assert!(result.content.contains("test.txt"));
+    assert!(result.content.contains("Hello from TAR!"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.format, "TAR");
+    assert_eq!(archive_meta.file_count, 1);
+}
+
+/// Test TAR.GZ extraction (compressed TAR).
+///
+/// Note: TAR.GZ requires decompression before extraction.
+/// This test validates TAR extraction which is the underlying format.
+#[tokio::test]
+async fn test_tar_gz_extraction() {
+    let config = ExtractionConfig::default();
+
+    let tar_bytes = create_simple_tar();
+
+    let result = extract_bytes(&tar_bytes, "application/x-tar", &config)
+        .await
+        .expect("Should extract TAR");
+
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("TAR Archive"));
+    assert!(result.content.contains("test.txt"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.format, "TAR");
+    assert_eq!(archive_meta.file_count, 1);
+
+    let result2 = extract_bytes(&tar_bytes, "application/tar", &config)
+        .await
+        .expect("Should extract with alternative MIME type");
+
+    assert!(
+        result2.chunks.is_none(),
+        "Chunks should be None without chunking config"
+    );
+    assert!(result2.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result2.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result2.content.contains("TAR Archive"));
+    assert!(result2.metadata.format.is_some());
+}
+
+/// Test 7z extraction.
+#[tokio::test]
+async fn test_7z_extraction() {
+    println!("7z test requires real 7z file - skipping programmatic creation");
+}
+
+/// Test nested archive (ZIP inside ZIP).
+#[tokio::test]
+async fn test_nested_archive() {
+    let config = ExtractionConfig::default();
+
+    let inner_zip = create_simple_zip();
+
+    let mut cursor = Cursor::new(Vec::new());
+    {
+        let mut zip = ZipWriter::new(&mut cursor);
+        let options = FileOptions::<'_, ()>::default();
+
+        zip.start_file("inner.zip", options).unwrap();
+        zip.write_all(&inner_zip).unwrap();
+
+        zip.start_file("readme.txt", options).unwrap();
+        zip.write_all(b"This archive contains another archive").unwrap();
+
+        zip.finish().unwrap();
+    }
+
+    let outer_zip_bytes = cursor.into_inner();
+    let result = extract_bytes(&outer_zip_bytes, "application/zip", &config)
+        .await
+        .expect("Should extract nested ZIP");
+
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("inner.zip"));
+    assert!(result.content.contains("readme.txt"));
+    assert!(result.content.contains("This archive contains another archive"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.file_count, 2, "Should have 2 files in outer archive");
+    assert!(archive_meta.file_list.contains(&"inner.zip".to_string()));
+    assert!(archive_meta.file_list.contains(&"readme.txt".to_string()));
+}
+
+/// Test archive with mixed file formats (PDF, DOCX, images).
+#[tokio::test]
+async fn test_archive_mixed_formats() {
+    let config = ExtractionConfig::default();
+
+    let mut cursor = Cursor::new(Vec::new());
+    {
+        let mut zip = ZipWriter::new(&mut cursor);
+        let options = FileOptions::<'_, ()>::default();
+
+        zip.start_file("document.txt", options).unwrap();
+        zip.write_all(b"Text document").unwrap();
+
+        zip.start_file("readme.md", options).unwrap();
+        zip.write_all(b"# README").unwrap();
+
+        zip.start_file("image.png", options).unwrap();
+        zip.write_all(&[0x89, 0x50, 0x4E, 0x47]).unwrap();
+
+        zip.start_file("document.pdf", options).unwrap();
+        zip.write_all(b"%PDF-1.4").unwrap();
+
+        zip.finish().unwrap();
+    }
+
+    let zip_bytes = cursor.into_inner();
+    let result = extract_bytes(&zip_bytes, "application/zip", &config)
+        .await
+        .expect("Should extract mixed-format ZIP");
+
+    assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
+    assert!(result.detected_languages.is_none(), "Language detection not enabled");
+    assert!(result.tables.is_empty(), "Archive should not have tables");
+
+    assert!(result.content.contains("document.txt"));
+    assert!(result.content.contains("readme.md"));
+    assert!(result.content.contains("image.png"));
+    assert!(result.content.contains("document.pdf"));
+
+    assert!(result.content.contains("Text document"));
+    assert!(result.content.contains("# README"));
+
+    assert!(result.metadata.format.is_some());
+    let archive_meta = match result.metadata.format.as_ref().unwrap() {
+        kreuzberg::FormatMetadata::Archive(meta) => meta,
+        _ => panic!("Expected Archive metadata"),
+    };
+    assert_eq!(archive_meta.file_count, 4, "Should have 4 files");
+    assert_eq!(archive_meta.file_list.len(), 4, "file_list should contain 4 entries");
+    assert!(archive_meta.file_list.contains(&"document.txt".to_string()));
+    assert!(archive_meta.file_list.contains(&"readme.md".to_string()));
|
|
328
|
+
assert!(archive_meta.file_list.contains(&"image.png".to_string()));
|
|
329
|
+
assert!(archive_meta.file_list.contains(&"document.pdf".to_string()));
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
/// Test password-protected archive (should fail gracefully).
|
|
333
|
+
#[tokio::test]
|
|
334
|
+
async fn test_password_protected_archive() {
|
|
335
|
+
let config = ExtractionConfig::default();
|
|
336
|
+
|
|
337
|
+
let invalid_zip = vec![0x50, 0x4B, 0x03, 0x04];
|
|
338
|
+
|
|
339
|
+
let result = extract_bytes(&invalid_zip, "application/zip", &config).await;
|
|
340
|
+
|
|
341
|
+
assert!(result.is_err(), "Should fail on invalid/encrypted ZIP");
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
/// Test corrupted archive.
|
|
345
|
+
#[tokio::test]
|
|
346
|
+
async fn test_corrupted_archive() {
|
|
347
|
+
let config = ExtractionConfig::default();
|
|
348
|
+
|
|
349
|
+
let corrupted_zip = vec![0x50, 0x4B, 0x03, 0x04, 0xFF, 0xFF, 0xFF, 0xFF];
|
|
350
|
+
|
|
351
|
+
let result = extract_bytes(&corrupted_zip, "application/zip", &config).await;
|
|
352
|
+
|
|
353
|
+
assert!(result.is_err(), "Should fail on corrupted ZIP");
|
|
354
|
+
|
|
355
|
+
let mut corrupted_tar = vec![0xFF; 512];
|
|
356
|
+
corrupted_tar[0..5].copy_from_slice(b"file\0");
|
|
357
|
+
|
|
358
|
+
let result = extract_bytes(&corrupted_tar, "application/x-tar", &config).await;
|
|
359
|
+
assert!(
|
|
360
|
+
result.is_ok() || result.is_err(),
|
|
361
|
+
"Should handle corrupted TAR gracefully"
|
|
362
|
+
);
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
/// Test large archive (100+ files).
|
|
366
|
+
#[tokio::test]
|
|
367
|
+
async fn test_large_archive() {
|
|
368
|
+
let config = ExtractionConfig::default();
|
|
369
|
+
|
|
370
|
+
let mut cursor = Cursor::new(Vec::new());
|
|
371
|
+
{
|
|
372
|
+
let mut zip = ZipWriter::new(&mut cursor);
|
|
373
|
+
let options = FileOptions::<'_, ()>::default();
|
|
374
|
+
|
|
375
|
+
for i in 0..100 {
|
|
376
|
+
zip.start_file(format!("file_{}.txt", i), options).unwrap();
|
|
377
|
+
zip.write_all(format!("Content {}", i).as_bytes()).unwrap();
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
zip.finish().unwrap();
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
let zip_bytes = cursor.into_inner();
|
|
384
|
+
let result = extract_bytes(&zip_bytes, "application/zip", &config)
|
|
385
|
+
.await
|
|
386
|
+
.expect("Should extract large ZIP");
|
|
387
|
+
|
|
388
|
+
assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
|
|
389
|
+
assert!(result.detected_languages.is_none(), "Language detection not enabled");
|
|
390
|
+
assert!(result.tables.is_empty(), "Archive should not have tables");
|
|
391
|
+
|
|
392
|
+
assert!(result.metadata.format.is_some());
|
|
393
|
+
let archive_meta = match result.metadata.format.as_ref().unwrap() {
|
|
394
|
+
kreuzberg::FormatMetadata::Archive(meta) => meta,
|
|
395
|
+
_ => panic!("Expected Archive metadata"),
|
|
396
|
+
};
|
|
397
|
+
assert_eq!(archive_meta.file_count, 100, "Should have 100 files");
|
|
398
|
+
assert_eq!(
|
|
399
|
+
archive_meta.file_list.len(),
|
|
400
|
+
100,
|
|
401
|
+
"file_list should contain 100 entries"
|
|
402
|
+
);
|
|
403
|
+
|
|
404
|
+
assert!(result.content.contains("file_0.txt"));
|
|
405
|
+
assert!(result.content.contains("file_99.txt"));
|
|
406
|
+
assert!(archive_meta.file_list.contains(&"file_0.txt".to_string()));
|
|
407
|
+
assert!(archive_meta.file_list.contains(&"file_50.txt".to_string()));
|
|
408
|
+
assert!(archive_meta.file_list.contains(&"file_99.txt".to_string()));
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
/// Test archive with special characters and Unicode filenames.
|
|
412
|
+
#[tokio::test]
|
|
413
|
+
async fn test_archive_with_special_characters() {
|
|
414
|
+
let config = ExtractionConfig::default();
|
|
415
|
+
|
|
416
|
+
let mut cursor = Cursor::new(Vec::new());
|
|
417
|
+
{
|
|
418
|
+
let mut zip = ZipWriter::new(&mut cursor);
|
|
419
|
+
let options = FileOptions::<'_, ()>::default();
|
|
420
|
+
|
|
421
|
+
zip.start_file("测试文件.txt", options).unwrap();
|
|
422
|
+
zip.write_all("Unicode content".as_bytes()).unwrap();
|
|
423
|
+
|
|
424
|
+
zip.start_file("file with spaces.txt", options).unwrap();
|
|
425
|
+
zip.write_all(b"Spaces in filename").unwrap();
|
|
426
|
+
|
|
427
|
+
zip.start_file("file-with-dashes.txt", options).unwrap();
|
|
428
|
+
zip.write_all(b"Dashes").unwrap();
|
|
429
|
+
|
|
430
|
+
zip.finish().unwrap();
|
|
431
|
+
}
|
|
432
|
+
|
|
433
|
+
let zip_bytes = cursor.into_inner();
|
|
434
|
+
let result = extract_bytes(&zip_bytes, "application/zip", &config)
|
|
435
|
+
.await
|
|
436
|
+
.expect("Should extract ZIP with special characters");
|
|
437
|
+
|
|
438
|
+
assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
|
|
439
|
+
assert!(result.detected_languages.is_none(), "Language detection not enabled");
|
|
440
|
+
assert!(result.tables.is_empty(), "Archive should not have tables");
|
|
441
|
+
|
|
442
|
+
assert!(result.content.contains("测试文件.txt") || result.content.contains("txt"));
|
|
443
|
+
assert!(result.content.contains("file with spaces.txt"));
|
|
444
|
+
assert!(result.content.contains("file-with-dashes.txt"));
|
|
445
|
+
|
|
446
|
+
assert!(result.metadata.format.is_some());
|
|
447
|
+
let archive_meta = match result.metadata.format.as_ref().unwrap() {
|
|
448
|
+
kreuzberg::FormatMetadata::Archive(meta) => meta,
|
|
449
|
+
_ => panic!("Expected Archive metadata"),
|
|
450
|
+
};
|
|
451
|
+
assert_eq!(archive_meta.file_count, 3, "Should have 3 files");
|
|
452
|
+
assert_eq!(archive_meta.file_list.len(), 3, "file_list should contain 3 entries");
|
|
453
|
+
assert!(archive_meta.file_list.iter().any(|f| f.contains("txt")));
|
|
454
|
+
assert!(archive_meta.file_list.contains(&"file with spaces.txt".to_string()));
|
|
455
|
+
assert!(archive_meta.file_list.contains(&"file-with-dashes.txt".to_string()));
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
/// Test empty archive.
|
|
459
|
+
#[tokio::test]
|
|
460
|
+
async fn test_empty_archive() {
|
|
461
|
+
let config = ExtractionConfig::default();
|
|
462
|
+
|
|
463
|
+
let mut cursor = Cursor::new(Vec::new());
|
|
464
|
+
{
|
|
465
|
+
let zip = ZipWriter::new(&mut cursor);
|
|
466
|
+
zip.finish().unwrap();
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
let zip_bytes = cursor.into_inner();
|
|
470
|
+
let result = extract_bytes(&zip_bytes, "application/zip", &config)
|
|
471
|
+
.await
|
|
472
|
+
.expect("Should extract empty ZIP");
|
|
473
|
+
|
|
474
|
+
assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
|
|
475
|
+
assert!(result.detected_languages.is_none(), "Language detection not enabled");
|
|
476
|
+
assert!(result.tables.is_empty(), "Archive should not have tables");
|
|
477
|
+
|
|
478
|
+
assert!(result.content.contains("ZIP Archive"));
|
|
479
|
+
assert!(result.metadata.format.is_some());
|
|
480
|
+
let archive_meta = match result.metadata.format.as_ref().unwrap() {
|
|
481
|
+
kreuzberg::FormatMetadata::Archive(meta) => meta,
|
|
482
|
+
_ => panic!("Expected Archive metadata"),
|
|
483
|
+
};
|
|
484
|
+
assert_eq!(archive_meta.file_count, 0, "Empty archive should have 0 files");
|
|
485
|
+
assert_eq!(archive_meta.total_size, 0, "Empty archive should have 0 total size");
|
|
486
|
+
assert!(archive_meta.file_list.is_empty(), "file_list should be empty");
|
|
487
|
+
}
|
|
488
|
+
|
|
489
|
+
/// Test synchronous archive extraction.
|
|
490
|
+
#[test]
|
|
491
|
+
fn test_archive_extraction_sync() {
|
|
492
|
+
let config = ExtractionConfig::default();
|
|
493
|
+
|
|
494
|
+
let zip_bytes = create_simple_zip();
|
|
495
|
+
let result = extract_bytes_sync(&zip_bytes, "application/zip", &config).expect("Should extract ZIP synchronously");
|
|
496
|
+
|
|
497
|
+
assert!(result.chunks.is_none(), "Chunks should be None without chunking config");
|
|
498
|
+
assert!(result.detected_languages.is_none(), "Language detection not enabled");
|
|
499
|
+
assert!(result.tables.is_empty(), "Archive should not have tables");
|
|
500
|
+
|
|
501
|
+
assert!(result.content.contains("ZIP Archive"));
|
|
502
|
+
assert!(result.content.contains("test.txt"));
|
|
503
|
+
assert!(result.content.contains("Hello from ZIP!"));
|
|
504
|
+
|
|
505
|
+
assert!(result.metadata.format.is_some(), "Should have archive metadata");
|
|
506
|
+
let archive_meta = match result.metadata.format.as_ref().unwrap() {
|
|
507
|
+
kreuzberg::FormatMetadata::Archive(meta) => meta,
|
|
508
|
+
_ => panic!("Expected Archive metadata"),
|
|
509
|
+
};
|
|
510
|
+
assert_eq!(archive_meta.format, "ZIP");
|
|
511
|
+
assert_eq!(archive_meta.file_count, 1);
|
|
512
|
+
assert_eq!(archive_meta.file_list.len(), 1);
|
|
513
|
+
assert_eq!(archive_meta.file_list[0], "test.txt");
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
fn create_simple_zip() -> Vec<u8> {
|
|
517
|
+
let mut cursor = Cursor::new(Vec::new());
|
|
518
|
+
{
|
|
519
|
+
let mut zip = ZipWriter::new(&mut cursor);
|
|
520
|
+
let options = FileOptions::<'_, ()>::default();
|
|
521
|
+
|
|
522
|
+
zip.start_file("test.txt", options).unwrap();
|
|
523
|
+
zip.write_all(b"Hello from ZIP!").unwrap();
|
|
524
|
+
|
|
525
|
+
zip.finish().unwrap();
|
|
526
|
+
}
|
|
527
|
+
cursor.into_inner()
|
|
528
|
+
}
|
|
529
|
+
|
|
530
|
+
fn create_simple_tar() -> Vec<u8> {
|
|
531
|
+
let mut cursor = Cursor::new(Vec::new());
|
|
532
|
+
{
|
|
533
|
+
let mut tar = TarBuilder::new(&mut cursor);
|
|
534
|
+
|
|
535
|
+
let data = b"Hello from TAR!";
|
|
536
|
+
let mut header = tar::Header::new_gnu();
|
|
537
|
+
header.set_path("test.txt").unwrap();
|
|
538
|
+
header.set_size(data.len() as u64);
|
|
539
|
+
header.set_cksum();
|
|
540
|
+
tar.append(&header, &data[..]).unwrap();
|
|
541
|
+
|
|
542
|
+
tar.finish().unwrap();
|
|
543
|
+
}
|
|
544
|
+
cursor.into_inner()
|
|
545
|
+
}
|