crawlee 1.1.0__tar.gz → 1.1.1b7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlee has been flagged as potentially problematic; see the package registry's advisory page for more details.
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/build_and_deploy_docs.yaml +1 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/templates_e2e_tests.yaml +1 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/CHANGELOG.md +12 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/PKG-INFO +1 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/pyproject.toml +2 -2
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_request.py +1 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_types.py +20 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/_basic_crawler.py +62 -46
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_dataset_client.py +2 -2
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_key_value_store_client.py +3 -3
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_request_queue_client.py +3 -3
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_autoscaling/test_autoscaled_pool.py +2 -4
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_basic/test_basic_crawler.py +44 -3
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/server.py +2 -1
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/test_dataset.py +17 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/test_key_value_store.py +18 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/test_request_queue.py +19 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/uv.lock +151 -153
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/yarn.lock +200 -254
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.editorconfig +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/CODEOWNERS +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/pull_request_template.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/check_pr_title.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/pre_release.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/release.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/run_code_checks.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.github/workflows/update_new_issue.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.gitignore +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.markdownlint.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/.pre-commit-config.yaml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/CONTRIBUTING.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/LICENSE +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/Makefile +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/README.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/apify_platform.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/apify/crawler_as_actor_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/apify/get_public_url.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/apify/log_with_config_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/apify/proxy_advanced_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/apify/proxy_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/google/cloud_run_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/code_examples/google/google_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/google_cloud.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/deployment/google_cloud_run.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/add_data_to_dataset.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/beautifulsoup_crawler.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/capture_screenshot_using_playwright.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/capturing_page_snapshots_with_error_snapshotter.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/adaptive_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/add_data_to_dataset_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/add_data_to_dataset_dataset.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/add_data_to_dataset_pw.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/beautifulsoup_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/beautifulsoup_crawler_keep_alive.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/beautifulsoup_crawler_stop.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/capture_screenshot_using_playwright.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/configure_json_logging.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_all_links_on_website_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_all_links_on_website_pw.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_multiple_urls_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_multiple_urls_pw.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_specific_links_on_website_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_specific_links_on_website_pw.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_website_with_relative_links_all_links.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_domain.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_hostname.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_origin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/export_entire_dataset_to_file_csv.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/export_entire_dataset_to_file_json.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/extract_and_add_specific_links_on_website_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/extract_and_add_specific_links_on_website_pw.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/fill_and_submit_web_form_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/fill_and_submit_web_form_request.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/parsel_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/parsel_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/playwright_block_requests.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/playwright_crawler_with_camoufox.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/playwright_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/playwright_crawler_with_fingerprint_generator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/respect_robots_on_skipped_request.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/respect_robots_txt_file.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/resuming_paused_crawl.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/using_browser_profiles_chrome.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/using_browser_profiles_firefox.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/code_examples/using_sitemap_request_loader.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawl_all_links_on_website.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawl_multiple_urls.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawl_specific_links_on_website.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawl_website_with_relative_links.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawler_keep_alive.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/crawler_stop.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/export_entire_dataset_to_file.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/fill_and_submit_web_form.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/json_logging.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/parsel_crawler.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/playwright_crawler.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/playwright_crawler_with_block_requests.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/playwright_crawler_with_camoufox.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/playwright_crawler_with_fingerprint_generator.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/respect_robots_txt_file.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/resuming_paused_crawl.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/using_browser_profile.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/examples/using_sitemap_request_loader.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/architecture_overview.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/avoid_blocking.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/avoid_blocking/playwright_with_fingerprint_generator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/creating_web_archive/manual_archiving_parsel_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/creating_web_archive/manual_archiving_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/creating_web_archive/simple_pw_through_proxy_pywb_server.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/error_handling/change_handle_error_status.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/error_handling/disable_retry.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/error_handling/handle_proxy_error.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_clients/parsel_curl_impersonate_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_clients/parsel_httpx_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_clients/parsel_impit_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_crawlers/beautifulsoup_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_crawlers/custom_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_crawlers/http_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/http_crawlers/parsel_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/login_crawler/http_login.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/login_crawler/playwright_login.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler/browser_configuration_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler/multiple_launch_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler/plugin_browser_configuration_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler/pre_navigation_hook_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_adaptive/handler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_beautifulsoup.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_parsel.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_prediction.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_adaptive/pre_nav_hooks.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_stagehand/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_stagehand/browser_classes.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_stagehand/stagehand_run.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/playwright_crawler_stagehand/support_classes.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/inspecting_bs_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/inspecting_pw_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/integration_bs_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/integration_pw_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/quick_start_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/session_bs_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/session_pw_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/tiers_bs_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/proxy_management/tiers_pw_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/rl_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/rl_basic_example_with_persist.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/rl_tandem_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/rl_tandem_example_explicit.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/sitemap_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/sitemap_example_with_persist.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/sitemap_tandem_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_loaders/sitemap_tandem_example_explicit.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/adaptive_crawler_handlers.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/basic_request_handlers.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/custom_router_default_only.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/error_handler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/failed_request_handler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/http_pre_navigation.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/playwright_pre_navigation.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/request_router/simple_default_handler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/running_in_web_server/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/running_in_web_server/crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/running_in_web_server/server.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/scaling_crawlers/max_tasks_per_minute_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/scaling_crawlers/min_and_max_concurrency_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_conflicts.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_crawler_configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_crawler_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_crawler_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_locator_configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_locator_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_locator_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_storage_configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/service_locator/service_storage_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/multi_sessions_http.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/one_session_http.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_basic.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_beautifulsoup.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_http.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_parsel.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_playwright.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/session_management/sm_standalone.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/custom_storage_client_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/file_system_storage_client_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/file_system_storage_client_configuration_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/memory_storage_client_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/redis_storage_client_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/redis_storage_client_configuration_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/registering_storage_clients_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/sql_storage_client_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storage_clients/sql_storage_client_configuration_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/cleaning_do_not_purge_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/cleaning_purge_explicitly_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/dataset_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/dataset_with_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/dataset_with_crawler_explicit_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/helper_add_requests_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/helper_enqueue_links_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/kvs_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/kvs_with_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/kvs_with_crawler_explicit_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/opening.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/rq_basic_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/rq_with_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/storages/rq_with_crawler_explicit_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/code_examples/trace_and_monitor_crawlers/instrument_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/crawler_login.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/creating_web_archive.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/error_handling.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/http_clients.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/http_crawlers.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/playwright_crawler.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/playwright_crawler_stagehand.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/proxy_management.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/request_loaders.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/request_router.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/running_in_web_server.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/scaling_crawlers.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/service_locator.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/session_management.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/storage_clients.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/storages.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/guides/trace_and_monitor_crawlers.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/01_setting_up.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/02_first_crawler.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/03_adding_more_urls.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/04_real_world_project.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/05_crawling.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/06_scraping.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/07_saving_data.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/08_refactoring.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/09_running_in_cloud.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/02_bs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/02_bs_better.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/02_request_queue.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/03_enqueue_strategy.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/03_finding_new_links.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/03_globs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/03_original_code.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/03_transform_request.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/04_sanity_check.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/05_crawling_detail.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/05_crawling_listing.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/06_scraping.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/07_final_code.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/07_first_code.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/08_main.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/08_routes.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/09_apify_sdk.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/code_examples/routes.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/introduction/index.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/pyproject.toml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/quick-start/code_examples/beautifulsoup_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/quick-start/code_examples/parsel_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/quick-start/code_examples/playwright_crawler_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/quick-start/code_examples/playwright_crawler_headful_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/quick-start/index.mdx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/upgrading/upgrading_to_v0x.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/docs/upgrading/upgrading_to_v1.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/renovate.json +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/autoscaled_pool.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/snapshotter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_autoscaling/system_status.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_browserforge_workaround.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_cli.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_consts.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_log_config.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_service_locator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/blocked.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/byte_size.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/console.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/crypto.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/docs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/file.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/globs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/html_to_text.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/recoverable_state.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/recurring_task.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/requests.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/robots.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/sitemap.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/system.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/time.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/try_import.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/urls.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/wait.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/_utils/web.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_browser_controller.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_browser_plugin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_browser_pool.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_playwright_browser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_playwright_browser_controller.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_playwright_browser_plugin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/browsers/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_abstract_http/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_abstract_http/_abstract_http_parser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_abstract_http/_http_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_abstract_http/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler_statistics.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_rendering_type_predictor.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_result_comparator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_adaptive_playwright/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/_basic_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/_context_pipeline.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/_logging_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_basic/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_parser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_beautifulsoup/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_http/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_http/_http_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_http/_http_parser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_parsel/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_parsel/_parsel_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_parsel/_parsel_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_parsel/_parsel_parser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_parsel/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_playwright_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_playwright_http_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_playwright_pre_nav_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_playwright/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/crawlers/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/errors.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/events/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/events/_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/events/_local_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/events/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/events/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/_browserforge_adapter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/_consts.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/_fingerprint_generator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/_header_generator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/_types.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/fingerprint_suite/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/http_clients/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/http_clients/_base.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/http_clients/_curl_impersonate.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/http_clients/_httpx.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/http_clients/_impit.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/otel/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/otel/crawler_instrumentor.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/cookiecutter.json +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/hooks/post_gen_project.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/hooks/pre_gen_project.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/main.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/main_beautifulsoup.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/main_parsel.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/main_playwright.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/main_playwright_camoufox.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/routes_beautifulsoup.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/routes_camoufox.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/routes_parsel.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/routes_playwright.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/templates/routes_playwright_camoufox.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/.dockerignore +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/Dockerfile +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/README.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/pyproject.toml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/requirements.txt +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__main__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/main.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/routes.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/proxy_configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/_request_list.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/_request_loader.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/_request_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/_request_manager_tandem.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/request_loaders/_sitemap_request_loader.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/router.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/_cookies.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/_models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/_session.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/_session_pool.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/sessions/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/statistics/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/statistics/_error_snapshotter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/statistics/_error_tracker.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/statistics/_models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/statistics/_statistics.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/_key_value_store_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/_request_queue_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_base/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/_key_value_store_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/_request_queue_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_memory/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_client_mixin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_key_value_store_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_request_queue_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/lua_scripts/atomic_bloom_add_requests.lua +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/lua_scripts/atomic_fetch_request.lua +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/lua_scripts/atomic_set_add_requests.lua +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/lua_scripts/reclaim_stale_requests.lua +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_redis/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_client_mixin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_db_models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_key_value_store_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_request_queue_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/_storage_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_sql/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_base.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_dataset.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_key_value_store.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_request_queue.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_storage_instance_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/_utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storages/py.typed +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/e2e/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/e2e/conftest.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/e2e/project_template/test_static_crawlers_templates.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/e2e/project_template/utils.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/README.md +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/__init__.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_autoscaling/test_snapshotter.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_autoscaling/test_system_status.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_statistics/test_error_tracker.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_statistics/test_periodic_logging.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_statistics/test_persistence.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_statistics/test_request_processing_record.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_byte_size.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_console.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_crypto.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_file.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_globs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_html_to_text.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_measure_time.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_recurring_task.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_requests.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_robots.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_sitemap.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_system.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_timedelata_ms.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/_utils/test_urls.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/browsers/test_browser_pool.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/browsers/test_playwright_browser.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/browsers/test_playwright_browser_controller.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/browsers/test_playwright_browser_plugin.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/conftest.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler_statistics.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_adaptive_playwright/test_predictor.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_basic/test_context_pipeline.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_beautifulsoup/test_beautifulsoup_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_http/test_http_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_parsel/test_parsel_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/crawlers/_playwright/test_playwright_crawler.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/events/test_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/events/test_local_event_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/fingerprint_suite/test_adapters.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/fingerprint_suite/test_header_generator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/http_clients/test_http_clients.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/http_clients/test_httpx.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/otel/test_crawler_instrumentor.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/proxy_configuration/test_new_proxy_info.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/proxy_configuration/test_tiers.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/request_loaders/test_request_list.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/request_loaders/test_sitemap_request_loader.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/server_endpoints.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/sessions/test_cookies.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/sessions/test_models.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/sessions/test_session.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/sessions/test_session_pool.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_file_system/test_fs_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_file_system/test_fs_kvs_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_file_system/test_fs_rq_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_memory/test_memory_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_memory/test_memory_kvs_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_memory/test_memory_rq_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_redis/test_redis_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_redis/test_redis_kvs_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_redis/test_redis_rq_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_sql/test_sql_dataset_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_sql/test_sql_kvs_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storage_clients/_sql/test_sql_rq_client.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/conftest.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/test_request_manager_tandem.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/storages/test_storage_instance_manager.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/test_cli.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/test_configuration.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/test_log_config.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/test_router.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/tests/unit/test_service_locator.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/.eslintrc.json +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/.yarnrc.yml +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/babel.config.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/build_api_reference.sh +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/docusaurus.config.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/generate_module_shortcuts.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/package.json +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/patches/@docusaurus+core+3.4.0.patch +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/patches/@docusaurus+core+3.5.2.patch +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/roa-loader/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/roa-loader/package.json +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/sidebars.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/ApiLink.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Button.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Button.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/CopyButton.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/CopyButton.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Gradients.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Highlights.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Highlights.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageCliExample.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageCliExample.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageCtaSection.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageCtaSection.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageHeroSection.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/HomepageHeroSection.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/LanguageInfoWidget.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/LanguageInfoWidget.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/LanguageSwitch.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/LanguageSwitch.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/RiverSection.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/RiverSection.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/ThreeCardsWithIcon.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/ThreeCardsWithIcon.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/animated-crawlee-logo-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/Homepage/animated-crawlee-logo-light.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/RunnableCodeBlock.jsx +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/components/RunnableCodeBlock.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/css/custom.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/pages/home_page_example.py +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/pages/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/pages/index.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/ColorModeToggle/dark-mode-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/ColorModeToggle/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/ColorModeToggle/light-mode-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/ColorModeToggle/styles.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/DocItem/Layout/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/DocItem/Layout/styles.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Footer/LinkItem/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Footer/LinkItem/index.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Footer/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Footer/index.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/MDXComponents/A.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/Content/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/Content/styles.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/Logo/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/Logo/index.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/MobileSidebar/Header/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/MobileSidebar/Header/index.module.css +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/MobileSidebar/Layout/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/MobileSidebar/PrimaryMenu/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/Navbar/MobileSidebar/index.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/src/theme/NavbarItem/ComponentTypes.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/.nojekyll +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/font/lota.woff +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/font/lota.woff2 +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/API.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/arrow_right.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/auto-scaling-dark.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/auto-scaling-light.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/check.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/chrome-scrape-dark.gif +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/chrome-scrape-light.gif +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/cloud_icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/community-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/community-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-dark-new.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-javascript-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-javascript-light.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-light-new.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-light.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-logo-monocolor.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-logo.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-python-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-python-light.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/crawlee-python-og.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/defaults-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/defaults-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/discord-brand-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/discord-brand.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/docusaurus.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/external-link.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/favicon.ico +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/favorite-tools-dark.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/favorite-tools-light.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/auto-scaling.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/automate-everything.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/fingerprints.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/node-requests.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/runs-on-py.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/storage.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/features/works-everywhere.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/fill-and-submit-web-form/00.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/fill-and-submit-web-form/01.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/fill-and-submit-web-form/02.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/fill-and-submit-web-form/03.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/current-price.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/scraping-practice.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/select-an-element.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/selected-element.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/sku.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/getting-started/title.jpg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/github-brand-dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/github-brand.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/guides/jaeger_otel_search_view_example.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/guides/jaeger_otel_trace_example.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/hearth copy.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/hearth.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/javascript_logo.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/js_file.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/logo-big.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/logo-blur.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/logo-blur.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/logo-zoom.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/menu-arrows.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/oss_logo.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/puppeteer-live-view-dashboard.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/puppeteer-live-view-detail.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/queue-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/queue-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/resuming-paused-crawl/00.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/resuming-paused-crawl/01.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/robot.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/routing-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/routing-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/scraping-utils-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/scraping-utils-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/smart-proxy-dark.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/smart-proxy-light.webp +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/source_code.png +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/system.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/triangles_dark.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/triangles_light.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/workflow.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/zero-setup-dark-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/img/zero-setup-light-icon.svg +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/js/custom.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/static/robots.txt +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/docs-prettier.config.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/utils/externalLink.js +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/website_gif/chrome-scrape-dark.gif +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/website_gif/chrome-scrape-dark.mp4 +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/website_gif/chrome-scrape-light.gif +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/website_gif/chrome-scrape-light.mp4 +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tools/website_gif/website_gif.mjs +0 -0
- {crawlee-1.1.0 → crawlee-1.1.1b7}/website/tsconfig.eslint.json +0 -0
|
@@ -2,6 +2,18 @@
|
|
|
2
2
|
|
|
3
3
|
All notable changes to this project will be documented in this file.
|
|
4
4
|
|
|
5
|
+
<!-- git-cliff-unreleased-start -->
|
|
6
|
+
## 1.1.1 - **not yet released**
|
|
7
|
+
|
|
8
|
+
### 🐛 Bug Fixes
|
|
9
|
+
|
|
10
|
+
- Unify separators in `unique_key` construction ([#1569](https://github.com/apify/crawlee-python/pull/1569)) ([af46a37](https://github.com/apify/crawlee-python/commit/af46a3733b059a8052489296e172f005def953f7)) by [@vdusek](https://github.com/vdusek), closes [#1512](https://github.com/apify/crawlee-python/issues/1512)
|
|
11
|
+
- Fix `same-domain` strategy ignoring public suffix ([#1572](https://github.com/apify/crawlee-python/pull/1572)) ([3d018b2](https://github.com/apify/crawlee-python/commit/3d018b21a28a4bee493829783057188d6106a69b)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1571](https://github.com/apify/crawlee-python/issues/1571)
|
|
12
|
+
- Make context helpers work in `FailedRequestHandler` and `ErrorHandler` ([#1570](https://github.com/apify/crawlee-python/pull/1570)) ([b830019](https://github.com/apify/crawlee-python/commit/b830019350830ac33075316061659e2854f7f4a5)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1532](https://github.com/apify/crawlee-python/issues/1532)
|
|
13
|
+
- Fix non-ASCII character corruption in `FileSystemStorageClient` on systems without UTF-8 default encoding ([#1580](https://github.com/apify/crawlee-python/pull/1580)) ([f179f86](https://github.com/apify/crawlee-python/commit/f179f8671b0b6af9264450e4fef7e49d1cecd2bd)) by [@Mantisus](https://github.com/Mantisus), closes [#1579](https://github.com/apify/crawlee-python/issues/1579)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
<!-- git-cliff-unreleased-end -->
|
|
5
17
|
## [1.1.0](https://github.com/apify/crawlee-python/releases/tag/v1.1.0) (2025-11-18)
|
|
6
18
|
|
|
7
19
|
### 🚀 Features
|
|
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "crawlee"
|
|
7
|
-
version = "1.1.
|
|
7
|
+
version = "1.1.1b7"
|
|
8
8
|
description = "Crawlee for Python"
|
|
9
9
|
authors = [{ name = "Apify Technologies s.r.o.", email = "support@apify.com" }]
|
|
10
10
|
license = { file = "LICENSE" }
|
|
@@ -117,7 +117,7 @@ dev = [
|
|
|
117
117
|
"types-colorama<1.0.0",
|
|
118
118
|
"types-psutil<8.0.0",
|
|
119
119
|
"types-python-dateutil<3.0.0",
|
|
120
|
-
"uvicorn[standard]
|
|
120
|
+
"uvicorn[standard]<1.0.0",
|
|
121
121
|
]
|
|
122
122
|
|
|
123
123
|
[tool.hatch.build.targets.wheel]
|
|
@@ -15,7 +15,7 @@ if TYPE_CHECKING:
|
|
|
15
15
|
import re
|
|
16
16
|
from collections.abc import Callable, Coroutine, Sequence
|
|
17
17
|
|
|
18
|
-
from typing_extensions import NotRequired, Required, Unpack
|
|
18
|
+
from typing_extensions import NotRequired, Required, Self, Unpack
|
|
19
19
|
|
|
20
20
|
from crawlee import Glob, Request
|
|
21
21
|
from crawlee._request import RequestOptions
|
|
@@ -643,6 +643,25 @@ class BasicCrawlingContext:
|
|
|
643
643
|
"""Return hash of the context. Each context is considered unique."""
|
|
644
644
|
return id(self)
|
|
645
645
|
|
|
646
|
+
def create_modified_copy(
|
|
647
|
+
self,
|
|
648
|
+
push_data: PushDataFunction | None = None,
|
|
649
|
+
add_requests: AddRequestsFunction | None = None,
|
|
650
|
+
get_key_value_store: GetKeyValueStoreFromRequestHandlerFunction | None = None,
|
|
651
|
+
) -> Self:
|
|
652
|
+
"""Create a modified copy of the crawling context with specified changes."""
|
|
653
|
+
original_fields = {field.name: getattr(self, field.name) for field in dataclasses.fields(self)}
|
|
654
|
+
modified_fields = {
|
|
655
|
+
key: value
|
|
656
|
+
for key, value in {
|
|
657
|
+
'push_data': push_data,
|
|
658
|
+
'add_requests': add_requests,
|
|
659
|
+
'get_key_value_store': get_key_value_store,
|
|
660
|
+
}.items()
|
|
661
|
+
if value
|
|
662
|
+
}
|
|
663
|
+
return self.__class__(**{**original_fields, **modified_fields})
|
|
664
|
+
|
|
646
665
|
|
|
647
666
|
class GetDataKwargs(TypedDict):
|
|
648
667
|
"""Keyword arguments for dataset's `get_data` method."""
|
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
from __future__ import annotations
|
|
3
3
|
|
|
4
4
|
import asyncio
|
|
5
|
+
import functools
|
|
5
6
|
import logging
|
|
6
7
|
import signal
|
|
7
8
|
import sys
|
|
@@ -14,7 +15,7 @@ from contextlib import AsyncExitStack, suppress
|
|
|
14
15
|
from datetime import timedelta
|
|
15
16
|
from functools import partial
|
|
16
17
|
from pathlib import Path
|
|
17
|
-
from typing import TYPE_CHECKING, Any, Generic, Literal, cast
|
|
18
|
+
from typing import TYPE_CHECKING, Any, Generic, Literal, ParamSpec, cast
|
|
18
19
|
from urllib.parse import ParseResult, urlparse
|
|
19
20
|
from weakref import WeakKeyDictionary
|
|
20
21
|
|
|
@@ -96,6 +97,9 @@ if TYPE_CHECKING:
|
|
|
96
97
|
TCrawlingContext = TypeVar('TCrawlingContext', bound=BasicCrawlingContext, default=BasicCrawlingContext)
|
|
97
98
|
TStatisticsState = TypeVar('TStatisticsState', bound=StatisticsState, default=StatisticsState)
|
|
98
99
|
TRequestIterator = TypeVar('TRequestIterator', str, Request)
|
|
100
|
+
TParams = ParamSpec('TParams')
|
|
101
|
+
T = TypeVar('T')
|
|
102
|
+
|
|
99
103
|
ErrorHandler = Callable[[TCrawlingContext, Exception], Awaitable[Request | None]]
|
|
100
104
|
FailedRequestHandler = Callable[[TCrawlingContext, Exception], Awaitable[None]]
|
|
101
105
|
SkippedRequestCallback = Callable[[str, SkippedReason], Awaitable[None]]
|
|
@@ -520,6 +524,24 @@ class BasicCrawler(Generic[TCrawlingContext, TStatisticsState]):
|
|
|
520
524
|
self._logger.info(f'Crawler.stop() was called with following reason: {reason}.')
|
|
521
525
|
self._unexpected_stop = True
|
|
522
526
|
|
|
527
|
+
def _wrap_handler_with_error_context(
|
|
528
|
+
self, handler: Callable[[TCrawlingContext | BasicCrawlingContext, Exception], Awaitable[T]]
|
|
529
|
+
) -> Callable[[TCrawlingContext | BasicCrawlingContext, Exception], Awaitable[T]]:
|
|
530
|
+
"""Decorate error handlers to make their context helpers usable."""
|
|
531
|
+
|
|
532
|
+
@functools.wraps(handler)
|
|
533
|
+
async def wrapped_handler(context: TCrawlingContext | BasicCrawlingContext, exception: Exception) -> T:
|
|
534
|
+
# Original context helpers that are from `RequestHandlerRunResult` will not be commited as the request
|
|
535
|
+
# failed. Modified context provides context helpers with direct access to the storages.
|
|
536
|
+
error_context = context.create_modified_copy(
|
|
537
|
+
push_data=self._push_data,
|
|
538
|
+
get_key_value_store=self.get_key_value_store,
|
|
539
|
+
add_requests=functools.partial(self._add_requests, context),
|
|
540
|
+
)
|
|
541
|
+
return await handler(error_context, exception)
|
|
542
|
+
|
|
543
|
+
return wrapped_handler
|
|
544
|
+
|
|
523
545
|
def _stop_if_max_requests_count_exceeded(self) -> None:
|
|
524
546
|
"""Call `stop` when the maximum number of requests to crawl has been reached."""
|
|
525
547
|
if self._max_requests_per_crawl is None:
|
|
@@ -618,7 +640,7 @@ class BasicCrawler(Generic[TCrawlingContext, TStatisticsState]):
|
|
|
618
640
|
|
|
619
641
|
The error handler is invoked after a request handler error occurs and before a retry attempt.
|
|
620
642
|
"""
|
|
621
|
-
self._error_handler = handler
|
|
643
|
+
self._error_handler = self._wrap_handler_with_error_context(handler)
|
|
622
644
|
return handler
|
|
623
645
|
|
|
624
646
|
def failed_request_handler(
|
|
@@ -628,7 +650,7 @@ class BasicCrawler(Generic[TCrawlingContext, TStatisticsState]):
|
|
|
628
650
|
|
|
629
651
|
The failed request handler is invoked when a request has failed all retry attempts.
|
|
630
652
|
"""
|
|
631
|
-
self._failed_request_handler = handler
|
|
653
|
+
self._failed_request_handler = self._wrap_handler_with_error_context(handler)
|
|
632
654
|
return handler
|
|
633
655
|
|
|
634
656
|
def on_skipped_request(self, callback: SkippedRequestCallback) -> SkippedRequestCallback:
|
|
@@ -1043,8 +1065,8 @@ class BasicCrawler(Generic[TCrawlingContext, TStatisticsState]):
|
|
|
1043
1065
|
return target_url.hostname == origin_url.hostname
|
|
1044
1066
|
|
|
1045
1067
|
if strategy == 'same-domain':
|
|
1046
|
-
origin_domain = self._tld_extractor.extract_str(origin_url.hostname).
|
|
1047
|
-
target_domain = self._tld_extractor.extract_str(target_url.hostname).
|
|
1068
|
+
origin_domain = self._tld_extractor.extract_str(origin_url.hostname).top_domain_under_public_suffix
|
|
1069
|
+
target_domain = self._tld_extractor.extract_str(target_url.hostname).top_domain_under_public_suffix
|
|
1048
1070
|
return origin_domain == target_domain
|
|
1049
1071
|
|
|
1050
1072
|
if strategy == 'same-origin':
|
|
@@ -1256,52 +1278,46 @@ class BasicCrawler(Generic[TCrawlingContext, TStatisticsState]):
|
|
|
1256
1278
|
else:
|
|
1257
1279
|
yield Request.from_url(url)
|
|
1258
1280
|
|
|
1259
|
-
async def
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1265
|
-
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
storage_client=self._service_locator.get_storage_client(),
|
|
1280
|
-
configuration=self._service_locator.get_configuration(),
|
|
1281
|
-
)
|
|
1282
|
-
else:
|
|
1283
|
-
request_manager = base_request_manager
|
|
1284
|
-
|
|
1285
|
-
requests = list[Request]()
|
|
1286
|
-
|
|
1287
|
-
base_url = url if (url := add_requests_call.get('base_url')) else origin
|
|
1288
|
-
|
|
1289
|
-
requests_iterator = self._convert_url_to_request_iterator(add_requests_call['requests'], base_url)
|
|
1281
|
+
async def _add_requests(
|
|
1282
|
+
self,
|
|
1283
|
+
context: BasicCrawlingContext,
|
|
1284
|
+
requests: Sequence[str | Request],
|
|
1285
|
+
rq_id: str | None = None,
|
|
1286
|
+
rq_name: str | None = None,
|
|
1287
|
+
rq_alias: str | None = None,
|
|
1288
|
+
**kwargs: Unpack[EnqueueLinksKwargs],
|
|
1289
|
+
) -> None:
|
|
1290
|
+
"""Add requests method aware of the crawling context."""
|
|
1291
|
+
if rq_id or rq_name or rq_alias:
|
|
1292
|
+
request_manager: RequestManager = await RequestQueue.open(
|
|
1293
|
+
id=rq_id,
|
|
1294
|
+
name=rq_name,
|
|
1295
|
+
alias=rq_alias,
|
|
1296
|
+
storage_client=self._service_locator.get_storage_client(),
|
|
1297
|
+
configuration=self._service_locator.get_configuration(),
|
|
1298
|
+
)
|
|
1299
|
+
else:
|
|
1300
|
+
request_manager = await self.get_request_manager()
|
|
1290
1301
|
|
|
1291
|
-
|
|
1302
|
+
context_aware_requests = list[Request]()
|
|
1303
|
+
base_url = kwargs.get('base_url') or context.request.loaded_url or context.request.url
|
|
1304
|
+
requests_iterator = self._convert_url_to_request_iterator(requests, base_url)
|
|
1305
|
+
filter_requests_iterator = self._enqueue_links_filter_iterator(requests_iterator, context.request.url, **kwargs)
|
|
1306
|
+
for dst_request in filter_requests_iterator:
|
|
1307
|
+
# Update the crawl depth of the request.
|
|
1308
|
+
dst_request.crawl_depth = context.request.crawl_depth + 1
|
|
1292
1309
|
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
)
|
|
1310
|
+
if self._max_crawl_depth is None or dst_request.crawl_depth <= self._max_crawl_depth:
|
|
1311
|
+
context_aware_requests.append(dst_request)
|
|
1296
1312
|
|
|
1297
|
-
|
|
1298
|
-
# Update the crawl depth of the request.
|
|
1299
|
-
dst_request.crawl_depth = context.request.crawl_depth + 1
|
|
1313
|
+
return await request_manager.add_requests(context_aware_requests)
|
|
1300
1314
|
|
|
1301
|
-
|
|
1302
|
-
|
|
1315
|
+
async def _commit_request_handler_result(self, context: BasicCrawlingContext) -> None:
|
|
1316
|
+
"""Commit request handler result for the input `context`. Result is taken from `_context_result_map`."""
|
|
1317
|
+
result = self._context_result_map[context]
|
|
1303
1318
|
|
|
1304
|
-
|
|
1319
|
+
for add_requests_call in result.add_requests_calls:
|
|
1320
|
+
await self._add_requests(context, **add_requests_call)
|
|
1305
1321
|
|
|
1306
1322
|
for push_data_call in result.push_data_calls:
|
|
1307
1323
|
await self._push_data(**push_data_call)
|
{crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_dataset_client.py
RENAMED
|
@@ -134,7 +134,7 @@ class FileSystemDatasetClient(DatasetClient):
|
|
|
134
134
|
continue
|
|
135
135
|
|
|
136
136
|
try:
|
|
137
|
-
file = await asyncio.to_thread(path_to_metadata.open)
|
|
137
|
+
file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8')
|
|
138
138
|
try:
|
|
139
139
|
file_content = json.load(file)
|
|
140
140
|
metadata = DatasetMetadata(**file_content)
|
|
@@ -163,7 +163,7 @@ class FileSystemDatasetClient(DatasetClient):
|
|
|
163
163
|
|
|
164
164
|
# If the dataset directory exists, reconstruct the client from the metadata file.
|
|
165
165
|
if path_to_dataset.exists() and path_to_metadata.exists():
|
|
166
|
-
file = await asyncio.to_thread(open, path_to_metadata)
|
|
166
|
+
file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8')
|
|
167
167
|
try:
|
|
168
168
|
file_content = json.load(file)
|
|
169
169
|
finally:
|
|
@@ -133,7 +133,7 @@ class FileSystemKeyValueStoreClient(KeyValueStoreClient):
|
|
|
133
133
|
continue
|
|
134
134
|
|
|
135
135
|
try:
|
|
136
|
-
file = await asyncio.to_thread(path_to_metadata.open)
|
|
136
|
+
file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8')
|
|
137
137
|
try:
|
|
138
138
|
file_content = json.load(file)
|
|
139
139
|
metadata = KeyValueStoreMetadata(**file_content)
|
|
@@ -162,7 +162,7 @@ class FileSystemKeyValueStoreClient(KeyValueStoreClient):
|
|
|
162
162
|
|
|
163
163
|
# If the key-value store directory exists, reconstruct the client from the metadata file.
|
|
164
164
|
if path_to_kvs.exists() and path_to_metadata.exists():
|
|
165
|
-
file = await asyncio.to_thread(open, path_to_metadata)
|
|
165
|
+
file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8')
|
|
166
166
|
try:
|
|
167
167
|
file_content = json.load(file)
|
|
168
168
|
finally:
|
|
@@ -239,7 +239,7 @@ class FileSystemKeyValueStoreClient(KeyValueStoreClient):
|
|
|
239
239
|
# Read the metadata file
|
|
240
240
|
async with self._lock:
|
|
241
241
|
try:
|
|
242
|
-
file = await asyncio.to_thread(open, record_metadata_filepath)
|
|
242
|
+
file = await asyncio.to_thread(open, record_metadata_filepath, 'r', encoding='utf-8')
|
|
243
243
|
except FileNotFoundError:
|
|
244
244
|
logger.warning(f'Metadata file disappeared for key "{key}", aborting get_value')
|
|
245
245
|
return None
|
{crawlee-1.1.0 → crawlee-1.1.1b7}/src/crawlee/storage_clients/_file_system/_request_queue_client.py
RENAMED
|
@@ -197,7 +197,7 @@ class FileSystemRequestQueueClient(RequestQueueClient):
|
|
|
197
197
|
continue
|
|
198
198
|
|
|
199
199
|
try:
|
|
200
|
-
file = await asyncio.to_thread(path_to_metadata.open)
|
|
200
|
+
file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8')
|
|
201
201
|
try:
|
|
202
202
|
file_content = json.load(file)
|
|
203
203
|
metadata = RequestQueueMetadata(**file_content)
|
|
@@ -232,7 +232,7 @@ class FileSystemRequestQueueClient(RequestQueueClient):
|
|
|
232
232
|
|
|
233
233
|
# If the RQ directory exists, reconstruct the client from the metadata file.
|
|
234
234
|
if path_to_rq.exists() and path_to_metadata.exists():
|
|
235
|
-
file = await asyncio.to_thread(open, path_to_metadata)
|
|
235
|
+
file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8')
|
|
236
236
|
try:
|
|
237
237
|
file_content = json.load(file)
|
|
238
238
|
finally:
|
|
@@ -775,7 +775,7 @@ class FileSystemRequestQueueClient(RequestQueueClient):
|
|
|
775
775
|
"""
|
|
776
776
|
# Open the request file.
|
|
777
777
|
try:
|
|
778
|
-
file = await asyncio.to_thread(open, file_path)
|
|
778
|
+
file = await asyncio.to_thread(open, file_path, 'r', encoding='utf-8')
|
|
779
779
|
except FileNotFoundError:
|
|
780
780
|
logger.warning(f'Request file "{file_path}" not found.')
|
|
781
781
|
return None
|
|
@@ -310,14 +310,14 @@ async def test_allows_multiple_run_calls(system_status: SystemStatus | Mock) ->
|
|
|
310
310
|
done_count = 0
|
|
311
311
|
|
|
312
312
|
async def run() -> None:
|
|
313
|
-
await asyncio.sleep(0.1)
|
|
314
313
|
nonlocal done_count
|
|
315
314
|
done_count += 1
|
|
315
|
+
await asyncio.sleep(0.1)
|
|
316
316
|
|
|
317
317
|
pool = AutoscaledPool(
|
|
318
318
|
system_status=system_status,
|
|
319
319
|
run_task_function=run,
|
|
320
|
-
is_task_ready_function=lambda: future(
|
|
320
|
+
is_task_ready_function=lambda: future(done_count < 4),
|
|
321
321
|
is_finished_function=lambda: future(done_count >= 4),
|
|
322
322
|
concurrency_settings=ConcurrencySettings(
|
|
323
323
|
min_concurrency=4,
|
|
@@ -330,8 +330,6 @@ async def test_allows_multiple_run_calls(system_status: SystemStatus | Mock) ->
|
|
|
330
330
|
assert done_count == 4
|
|
331
331
|
|
|
332
332
|
done_count = 0
|
|
333
|
-
await asyncio.sleep(0.2) # Allow any lingering callbacks to complete
|
|
334
|
-
done_count = 0 # Reset again to ensure clean state
|
|
335
333
|
|
|
336
334
|
await pool.run()
|
|
337
335
|
assert done_count == 4
|
|
@@ -284,6 +284,46 @@ async def test_calls_failed_request_handler() -> None:
|
|
|
284
284
|
assert isinstance(calls[0][1], RuntimeError)
|
|
285
285
|
|
|
286
286
|
|
|
287
|
+
@pytest.mark.parametrize('handler', ['failed_request_handler', 'error_handler'])
|
|
288
|
+
async def test_handlers_use_context_helpers(tmp_path: Path, handler: str) -> None:
|
|
289
|
+
"""Test that context helpers used in `failed_request_handler` and in `error_handler` have effect."""
|
|
290
|
+
# Prepare crawler
|
|
291
|
+
storage_client = FileSystemStorageClient()
|
|
292
|
+
crawler = BasicCrawler(
|
|
293
|
+
max_request_retries=1, storage_client=storage_client, configuration=Configuration(storage_dir=str(tmp_path))
|
|
294
|
+
)
|
|
295
|
+
# Test data
|
|
296
|
+
rq_alias = 'other'
|
|
297
|
+
test_data = {'some': 'data'}
|
|
298
|
+
test_key = 'key'
|
|
299
|
+
test_value = 'value'
|
|
300
|
+
test_request = Request.from_url('https://d.placeholder.com')
|
|
301
|
+
|
|
302
|
+
# Request handler with injected error
|
|
303
|
+
@crawler.router.default_handler
|
|
304
|
+
async def request_handler(context: BasicCrawlingContext) -> None:
|
|
305
|
+
raise RuntimeError('Arbitrary crash for testing purposes')
|
|
306
|
+
|
|
307
|
+
# Apply one of the handlers
|
|
308
|
+
@getattr(crawler, handler) # type:ignore[misc] # Untyped decorator is ok to make the test concise
|
|
309
|
+
async def handler_implementation(context: BasicCrawlingContext, error: Exception) -> None:
|
|
310
|
+
await context.push_data(test_data)
|
|
311
|
+
await context.add_requests(requests=[test_request], rq_alias=rq_alias)
|
|
312
|
+
kvs = await context.get_key_value_store()
|
|
313
|
+
await kvs.set_value(test_key, test_value)
|
|
314
|
+
|
|
315
|
+
await crawler.run(['https://b.placeholder.com'])
|
|
316
|
+
|
|
317
|
+
# Verify that the context helpers used in handlers had effect on used storages
|
|
318
|
+
dataset = await Dataset.open(storage_client=storage_client)
|
|
319
|
+
kvs = await KeyValueStore.open(storage_client=storage_client)
|
|
320
|
+
rq = await RequestQueue.open(alias=rq_alias, storage_client=storage_client)
|
|
321
|
+
|
|
322
|
+
assert test_value == await kvs.get_value(test_key)
|
|
323
|
+
assert [test_data] == (await dataset.get_data()).items
|
|
324
|
+
assert test_request == await rq.fetch_next_request()
|
|
325
|
+
|
|
326
|
+
|
|
287
327
|
async def test_handles_error_in_failed_request_handler() -> None:
|
|
288
328
|
crawler = BasicCrawler(max_request_retries=3)
|
|
289
329
|
|
|
@@ -347,6 +387,7 @@ STRATEGY_TEST_URLS = (
|
|
|
347
387
|
'https://blog.someplace.com/index.html',
|
|
348
388
|
'https://redirect.someplace.com',
|
|
349
389
|
'https://other.place.com/index.html',
|
|
390
|
+
'https://someplace.jp/',
|
|
350
391
|
)
|
|
351
392
|
|
|
352
393
|
INCLUDE_TEST_URLS = (
|
|
@@ -401,7 +442,7 @@ INCLUDE_TEST_URLS = (
|
|
|
401
442
|
AddRequestsTestInput(
|
|
402
443
|
start_url=STRATEGY_TEST_URLS[0],
|
|
403
444
|
loaded_url=STRATEGY_TEST_URLS[0],
|
|
404
|
-
requests=STRATEGY_TEST_URLS
|
|
445
|
+
requests=STRATEGY_TEST_URLS,
|
|
405
446
|
kwargs=EnqueueLinksKwargs(strategy='same-domain'),
|
|
406
447
|
expected_urls=STRATEGY_TEST_URLS[1:4],
|
|
407
448
|
),
|
|
@@ -411,7 +452,7 @@ INCLUDE_TEST_URLS = (
|
|
|
411
452
|
AddRequestsTestInput(
|
|
412
453
|
start_url=STRATEGY_TEST_URLS[0],
|
|
413
454
|
loaded_url=STRATEGY_TEST_URLS[0],
|
|
414
|
-
requests=STRATEGY_TEST_URLS
|
|
455
|
+
requests=STRATEGY_TEST_URLS,
|
|
415
456
|
kwargs=EnqueueLinksKwargs(strategy='same-hostname'),
|
|
416
457
|
expected_urls=[STRATEGY_TEST_URLS[1]],
|
|
417
458
|
),
|
|
@@ -421,7 +462,7 @@ INCLUDE_TEST_URLS = (
|
|
|
421
462
|
AddRequestsTestInput(
|
|
422
463
|
start_url=STRATEGY_TEST_URLS[0],
|
|
423
464
|
loaded_url=STRATEGY_TEST_URLS[0],
|
|
424
|
-
requests=STRATEGY_TEST_URLS
|
|
465
|
+
requests=STRATEGY_TEST_URLS,
|
|
425
466
|
kwargs=EnqueueLinksKwargs(strategy='same-origin'),
|
|
426
467
|
expected_urls=[],
|
|
427
468
|
),
|
|
@@ -470,8 +470,9 @@ class TestServer(Server):
|
|
|
470
470
|
# Set the event loop policy in thread with server for Windows and Python 3.12+.
|
|
471
471
|
# This is necessary because there are problems with closing connections when using `ProactorEventLoop`
|
|
472
472
|
if sys.version_info >= (3, 12) and sys.platform == 'win32':
|
|
473
|
-
asyncio.
|
|
473
|
+
return asyncio.run(self.serve(sockets=sockets), loop_factory=asyncio.SelectorEventLoop)
|
|
474
474
|
super().run(sockets=sockets)
|
|
475
|
+
return None
|
|
475
476
|
|
|
476
477
|
|
|
477
478
|
def serve_in_thread(server: TestServer) -> Iterator[TestServer]:
|
|
@@ -1082,3 +1082,20 @@ async def test_validate_name(storage_client: StorageClient, name: str, *, is_val
|
|
|
1082
1082
|
else:
|
|
1083
1083
|
with pytest.raises(ValueError, match=rf'Invalid storage name "{name}".*'):
|
|
1084
1084
|
await Dataset.open(name=name, storage_client=storage_client)
|
|
1085
|
+
|
|
1086
|
+
|
|
1087
|
+
async def test_record_with_noascii_chars(dataset: Dataset) -> None:
|
|
1088
|
+
"""Test handling record with non-ASCII characters."""
|
|
1089
|
+
init_value = {
|
|
1090
|
+
'record_1': 'Supermaxi El Jardín',
|
|
1091
|
+
'record_2': 'záznam dva',
|
|
1092
|
+
'record_3': '記録三',
|
|
1093
|
+
}
|
|
1094
|
+
|
|
1095
|
+
# Save the record to the dataset
|
|
1096
|
+
await dataset.push_data(init_value)
|
|
1097
|
+
|
|
1098
|
+
# Get the record and verify
|
|
1099
|
+
value = await dataset.get_data()
|
|
1100
|
+
assert value is not None
|
|
1101
|
+
assert value.items[0] == init_value
|
|
@@ -1132,3 +1132,21 @@ async def test_get_auto_saved_value_various_global_clients(
|
|
|
1132
1132
|
await kvs.persist_autosaved_values()
|
|
1133
1133
|
|
|
1134
1134
|
assert await kvs.get_value(test_key) == autosaved_value_kvs
|
|
1135
|
+
|
|
1136
|
+
|
|
1137
|
+
async def test_record_with_noascii_chars(kvs: KeyValueStore) -> None:
|
|
1138
|
+
"""Test storing and retrieving a record with non-ASCII characters."""
|
|
1139
|
+
init_value = {
|
|
1140
|
+
'record_1': 'Supermaxi El Jardín',
|
|
1141
|
+
'record_2': 'záznam dva',
|
|
1142
|
+
'record_3': '記録三',
|
|
1143
|
+
}
|
|
1144
|
+
key = 'non_ascii_key'
|
|
1145
|
+
|
|
1146
|
+
# Save the record in the key-value store
|
|
1147
|
+
await kvs.set_value(key, init_value)
|
|
1148
|
+
|
|
1149
|
+
# Get the record and verify
|
|
1150
|
+
value = await kvs.get_value(key)
|
|
1151
|
+
assert value is not None
|
|
1152
|
+
assert value == init_value
|
|
@@ -1348,3 +1348,22 @@ async def test_reclaim_request_with_change_state(rq: RequestQueue) -> None:
|
|
|
1348
1348
|
assert reclaimed_request is not None
|
|
1349
1349
|
assert reclaimed_request.url == 'https://example.com/original'
|
|
1350
1350
|
assert reclaimed_request.user_data['state'] == 'modified'
|
|
1351
|
+
|
|
1352
|
+
|
|
1353
|
+
async def test_request_with_noascii_chars(rq: RequestQueue) -> None:
|
|
1354
|
+
"""Test handling requests with non-ASCII characters in user data."""
|
|
1355
|
+
data_with_special_chars = {
|
|
1356
|
+
'record_1': 'Supermaxi El Jardín',
|
|
1357
|
+
'record_2': 'záznam dva',
|
|
1358
|
+
'record_3': '記録三',
|
|
1359
|
+
}
|
|
1360
|
+
init_request = Request.from_url('https://crawlee.dev', user_data=data_with_special_chars)
|
|
1361
|
+
|
|
1362
|
+
# Add a request with special user data
|
|
1363
|
+
await rq.add_request(init_request)
|
|
1364
|
+
|
|
1365
|
+
# Get the request and verify
|
|
1366
|
+
request = await rq.fetch_next_request()
|
|
1367
|
+
assert request is not None
|
|
1368
|
+
assert request.url == 'https://crawlee.dev'
|
|
1369
|
+
assert request.user_data == init_request.user_data
|