crawlee 1.0.5b3.tar.gz → 1.0.5b7.tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of crawlee might be problematic.
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/CHANGELOG.md +1 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/PKG-INFO +1 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/pyproject.toml +1 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/pyproject.toml +1 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/robots.py +17 -5
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/_error_snapshotter.py +1 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py +3 -3
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_beautifulsoup/test_beautifulsoup_crawler.py +35 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_http/test_http_crawler.py +2 -2
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_parsel/test_parsel_crawler.py +35 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_playwright/test_playwright_crawler.py +34 -1
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/server.py +10 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/server_endpoints.py +10 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/uv.lock +131 -121
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/package.json +4 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/yarn.lock +345 -325
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.editorconfig +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/CODEOWNERS +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/pull_request_template.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/build_and_deploy_docs.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/check_pr_title.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/pre_release.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/release.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/run_code_checks.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/templates_e2e_tests.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.github/workflows/update_new_issue.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.gitignore +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.markdownlint.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/.pre-commit-config.yaml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/CONTRIBUTING.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/LICENSE +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/Makefile +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/README.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/apify_platform.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/apify/crawler_as_actor_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/apify/get_public_url.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/apify/log_with_config_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/apify/proxy_advanced_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/apify/proxy_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/google/cloud_run_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/code_examples/google/google_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/google_cloud.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/deployment/google_cloud_run.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/add_data_to_dataset.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/beautifulsoup_crawler.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/capture_screenshot_using_playwright.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/capturing_page_snapshots_with_error_snapshotter.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/adaptive_playwright_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/add_data_to_dataset_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/add_data_to_dataset_dataset.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/add_data_to_dataset_pw.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/beautifulsoup_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/beautifulsoup_crawler_keep_alive.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/beautifulsoup_crawler_stop.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/capture_screenshot_using_playwright.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/configure_json_logging.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_all_links_on_website_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_all_links_on_website_pw.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_multiple_urls_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_multiple_urls_pw.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_specific_links_on_website_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_specific_links_on_website_pw.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_website_with_relative_links_all_links.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_domain.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_hostname.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/crawl_website_with_relative_links_same_origin.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/export_entire_dataset_to_file_csv.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/export_entire_dataset_to_file_json.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/extract_and_add_specific_links_on_website_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/extract_and_add_specific_links_on_website_pw.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/fill_and_submit_web_form_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/fill_and_submit_web_form_request.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/parsel_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/parsel_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/playwright_block_requests.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/playwright_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/playwright_crawler_with_camoufox.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/playwright_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/playwright_crawler_with_fingerprint_generator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/respect_robots_on_skipped_request.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/respect_robots_txt_file.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/resuming_paused_crawl.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/using_browser_profiles_chrome.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/code_examples/using_browser_profiles_firefox.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawl_all_links_on_website.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawl_multiple_urls.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawl_specific_links_on_website.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawl_website_with_relative_links.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawler_keep_alive.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/crawler_stop.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/export_entire_dataset_to_file.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/fill_and_submit_web_form.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/json_logging.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/parsel_crawler.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/playwright_crawler.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/playwright_crawler_with_block_requests.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/playwright_crawler_with_camoufox.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/playwright_crawler_with_fingerprint_generator.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/respect_robots_txt_file.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/resuming_paused_crawl.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/examples/using_browser_profile.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/architecture_overview.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/avoid_blocking.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/avoid_blocking/playwright_with_fingerprint_generator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/creating_web_archive/manual_archiving_parsel_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/creating_web_archive/manual_archiving_playwright_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/creating_web_archive/simple_pw_through_proxy_pywb_server.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/error_handling/change_handle_error_status.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/error_handling/disable_retry.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/error_handling/handle_proxy_error.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_clients/parsel_curl_impersonate_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_clients/parsel_httpx_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_clients/parsel_impit_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_crawlers/beautifulsoup_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_crawlers/custom_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_crawlers/http_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/http_crawlers/parsel_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/login_crawler/http_login.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/login_crawler/playwright_login.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler/browser_configuration_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler/multiple_launch_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler/plugin_browser_configuration_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler/pre_navigation_hook_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_adaptive/handler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_beautifulsoup.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_parsel.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_adaptive/init_prediction.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_adaptive/pre_nav_hooks.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_stagehand/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_stagehand/browser_classes.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_stagehand/stagehand_run.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/playwright_crawler_stagehand/support_classes.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/inspecting_bs_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/inspecting_pw_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/integration_bs_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/integration_pw_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/quick_start_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/session_bs_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/session_pw_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/tiers_bs_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/proxy_management/tiers_pw_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/rl_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/rl_basic_example_with_persist.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/rl_tandem_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/rl_tandem_example_explicit.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/sitemap_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/sitemap_example_with_persist.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/sitemap_tandem_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_loaders/sitemap_tandem_example_explicit.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/adaptive_crawler_handlers.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/basic_request_handlers.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/custom_router_default_only.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/error_handler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/failed_request_handler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/http_pre_navigation.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/playwright_pre_navigation.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/request_router/simple_default_handler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/running_in_web_server/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/running_in_web_server/crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/running_in_web_server/server.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/scaling_crawlers/max_tasks_per_minute_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/scaling_crawlers/min_and_max_concurrency_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_conflicts.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_crawler_configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_crawler_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_crawler_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_locator_configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_locator_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_locator_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_storage_configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/service_locator/service_storage_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/multi_sessions_http.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/one_session_http.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_basic.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_beautifulsoup.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_http.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_parsel.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_playwright.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/session_management/sm_standalone.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/custom_storage_client_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/file_system_storage_client_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/file_system_storage_client_configuration_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/memory_storage_client_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/registering_storage_clients_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/sql_storage_client_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storage_clients/sql_storage_client_configuration_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/cleaning_do_not_purge_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/cleaning_purge_explicitly_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/dataset_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/dataset_with_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/dataset_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/helper_add_requests_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/helper_enqueue_links_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/kvs_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/kvs_with_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/kvs_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/opening.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/rq_basic_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/rq_with_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/storages/rq_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/code_examples/trace_and_monitor_crawlers/instrument_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/crawler_login.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/creating_web_archive.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/error_handling.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/http_clients.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/http_crawlers.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/playwright_crawler.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/playwright_crawler_stagehand.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/proxy_management.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/request_loaders.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/request_router.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/running_in_web_server.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/scaling_crawlers.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/service_locator.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/session_management.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/storage_clients.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/storages.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/guides/trace_and_monitor_crawlers.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/01_setting_up.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/02_first_crawler.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/03_adding_more_urls.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/04_real_world_project.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/05_crawling.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/06_scraping.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/07_saving_data.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/08_refactoring.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/09_running_in_cloud.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/02_bs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/02_bs_better.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/02_request_queue.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/03_enqueue_strategy.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/03_finding_new_links.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/03_globs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/03_original_code.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/03_transform_request.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/04_sanity_check.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/05_crawling_detail.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/05_crawling_listing.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/06_scraping.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/07_final_code.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/07_first_code.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/08_main.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/08_routes.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/09_apify_sdk.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/code_examples/routes.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/introduction/index.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/quick-start/code_examples/beautifulsoup_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/quick-start/code_examples/parsel_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/quick-start/code_examples/playwright_crawler_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/quick-start/code_examples/playwright_crawler_headful_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/quick-start/index.mdx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/upgrading/upgrading_to_v0x.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/upgrading/upgrading_to_v1.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/renovate.json +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/autoscaled_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/snapshotter.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_autoscaling/system_status.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_browserforge_workaround.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_cli.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_consts.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_log_config.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_request.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_service_locator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/blocked.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/byte_size.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/console.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/crypto.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/docs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/file.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/globs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/html_to_text.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/recoverable_state.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/recurring_task.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/requests.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/sitemap.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/system.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/time.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/try_import.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/urls.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/wait.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/web.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_browser_controller.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_browser_plugin.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_browser_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_playwright_browser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_playwright_browser_controller.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_playwright_browser_plugin.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/browsers/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_abstract_http/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_abstract_http/_abstract_http_parser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_abstract_http/_http_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_abstract_http/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler_statistics.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_rendering_type_predictor.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_result_comparator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_adaptive_playwright/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/_basic_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/_basic_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/_context_pipeline.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/_logging_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_basic/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_parser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_beautifulsoup/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_http/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_http/_http_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_http/_http_parser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_parsel/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_parsel/_parsel_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_parsel/_parsel_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_parsel/_parsel_parser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_parsel/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_playwright_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_playwright_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_playwright_http_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_playwright_pre_nav_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_playwright/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/crawlers/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/errors.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/events/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/events/_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/events/_local_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/events/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/events/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/_browserforge_adapter.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/_consts.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/_fingerprint_generator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/_header_generator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/_types.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/fingerprint_suite/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/http_clients/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/http_clients/_base.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/http_clients/_curl_impersonate.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/http_clients/_httpx.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/http_clients/_impit.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/otel/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/otel/crawler_instrumentor.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/cookiecutter.json +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/hooks/post_gen_project.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/hooks/pre_gen_project.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/main.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/main_beautifulsoup.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/main_parsel.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/main_playwright.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/main_playwright_camoufox.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/routes_beautifulsoup.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/routes_camoufox.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/routes_parsel.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/routes_playwright.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/templates/routes_playwright_camoufox.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/.dockerignore +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/Dockerfile +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/README.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/pyproject.toml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/requirements.txt +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__main__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/main.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/routes.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/proxy_configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/_request_list.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/_request_loader.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/_request_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/_request_manager_tandem.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/request_loaders/_sitemap_request_loader.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/router.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/_cookies.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/_models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/_session.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/_session_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/sessions/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/_error_tracker.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/_models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/_statistics.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/_key_value_store_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/_request_queue_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_base/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/_key_value_store_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/_request_queue_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_file_system/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/_key_value_store_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/_request_queue_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_memory/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_client_mixin.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_db_models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_key_value_store_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_request_queue_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/_storage_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/_sql/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storage_clients/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_base.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_dataset.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_key_value_store.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_request_queue.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_storage_instance_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/_utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/storages/py.typed +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/e2e/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/e2e/conftest.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/e2e/project_template/test_static_crawlers_templates.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/e2e/project_template/utils.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/README.md +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/__init__.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_autoscaling/test_autoscaled_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_autoscaling/test_snapshotter.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_autoscaling/test_system_status.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_statistics/test_error_tracker.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_statistics/test_periodic_logging.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_statistics/test_persistence.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_statistics/test_request_processing_record.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_byte_size.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_console.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_crypto.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_file.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_globs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_html_to_text.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_measure_time.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_recurring_task.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_requests.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_robots.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_sitemap.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_system.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_timedelata_ms.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/_utils/test_urls.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/browsers/test_browser_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/browsers/test_playwright_browser.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/browsers/test_playwright_browser_controller.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/browsers/test_playwright_browser_plugin.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/conftest.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler_statistics.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_adaptive_playwright/test_predictor.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_basic/test_basic_crawler.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_basic/test_context_pipeline.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/events/test_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/events/test_local_event_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/fingerprint_suite/test_adapters.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/fingerprint_suite/test_header_generator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/http_clients/test_http_clients.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/http_clients/test_httpx.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/otel/test_crawler_instrumentor.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/proxy_configuration/test_new_proxy_info.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/proxy_configuration/test_tiers.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/request_loaders/test_request_list.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/request_loaders/test_sitemap_request_loader.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/sessions/test_cookies.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/sessions/test_models.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/sessions/test_session.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/sessions/test_session_pool.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_file_system/test_fs_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_file_system/test_fs_kvs_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_file_system/test_fs_rq_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_memory/test_memory_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_memory/test_memory_kvs_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_memory/test_memory_rq_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_sql/test_sql_dataset_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_sql/test_sql_kvs_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storage_clients/_sql/test_sql_rq_client.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/conftest.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/test_dataset.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/test_key_value_store.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/test_request_manager_tandem.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/test_request_queue.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/storages/test_storage_instance_manager.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/test_cli.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/test_configuration.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/test_log_config.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/test_router.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/test_service_locator.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/.eslintrc.json +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/.yarnrc.yml +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/babel.config.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/build_api_reference.sh +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/docusaurus.config.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/generate_module_shortcuts.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/patches/@docusaurus+core+3.4.0.patch +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/patches/@docusaurus+core+3.5.2.patch +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/roa-loader/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/roa-loader/package.json +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/sidebars.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/ApiLink.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Button.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Button.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/CopyButton.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/CopyButton.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Gradients.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Highlights.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Highlights.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageCliExample.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageCliExample.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageCtaSection.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageCtaSection.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageHeroSection.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/HomepageHeroSection.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/LanguageInfoWidget.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/LanguageInfoWidget.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/LanguageSwitch.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/LanguageSwitch.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/RiverSection.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/RiverSection.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/ThreeCardsWithIcon.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/ThreeCardsWithIcon.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/animated-crawlee-logo-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/Homepage/animated-crawlee-logo-light.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/RunnableCodeBlock.jsx +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/components/RunnableCodeBlock.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/css/custom.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/pages/home_page_example.py +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/pages/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/pages/index.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/ColorModeToggle/dark-mode-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/ColorModeToggle/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/ColorModeToggle/light-mode-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/ColorModeToggle/styles.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/DocItem/Layout/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/DocItem/Layout/styles.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Footer/LinkItem/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Footer/LinkItem/index.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Footer/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Footer/index.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/MDXComponents/A.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/Content/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/Content/styles.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/Logo/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/Logo/index.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/MobileSidebar/Header/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/MobileSidebar/Header/index.module.css +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/MobileSidebar/Layout/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/MobileSidebar/PrimaryMenu/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/Navbar/MobileSidebar/index.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/src/theme/NavbarItem/ComponentTypes.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/.nojekyll +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/font/lota.woff +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/font/lota.woff2 +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/API.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/apify_logo.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/apify_og_SDK.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/apify_sdk.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/apify_sdk_white.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/arrow_right.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/auto-scaling-dark.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/auto-scaling-light.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/check.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/chrome-scrape-dark.gif +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/chrome-scrape-light.gif +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/cloud_icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/community-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/community-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-dark-new.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-javascript-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-javascript-light.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-light-new.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-light.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-logo-monocolor.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-logo.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-python-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-python-light.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/crawlee-python-og.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/defaults-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/defaults-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/discord-brand-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/discord-brand.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/docusaurus.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/external-link.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/favicon.ico +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/favorite-tools-dark.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/favorite-tools-light.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/auto-scaling.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/automate-everything.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/fingerprints.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/node-requests.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/runs-on-py.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/storage.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/features/works-everywhere.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/fill-and-submit-web-form/00.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/fill-and-submit-web-form/01.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/fill-and-submit-web-form/02.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/fill-and-submit-web-form/03.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/current-price.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/scraping-practice.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/select-an-element.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/selected-element.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/sku.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/getting-started/title.jpg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/github-brand-dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/github-brand.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/guides/jaeger_otel_search_view_example.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/guides/jaeger_otel_trace_example.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/hearth copy.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/hearth.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/javascript_logo.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/js_file.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/logo-big.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/logo-blur.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/logo-blur.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/logo-zoom.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/menu-arrows.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/oss_logo.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/puppeteer-live-view-dashboard.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/puppeteer-live-view-detail.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/queue-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/queue-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/resuming-paused-crawl/00.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/resuming-paused-crawl/01.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/robot.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/routing-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/routing-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/scraping-utils-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/scraping-utils-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/smart-proxy-dark.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/smart-proxy-light.webp +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/source_code.png +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/system.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/triangles_dark.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/triangles_light.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/workflow.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/zero-setup-dark-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/img/zero-setup-light-icon.svg +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/js/custom.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/static/robots.txt +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/docs-prettier.config.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/utils/externalLink.js +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/website_gif/chrome-scrape-dark.gif +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/website_gif/chrome-scrape-dark.mp4 +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/website_gif/chrome-scrape-light.gif +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/website_gif/chrome-scrape-light.mp4 +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tools/website_gif/website_gif.mjs +0 -0
- {crawlee-1.0.5b3 → crawlee-1.0.5b7}/website/tsconfig.eslint.json +0 -0
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/CHANGELOG.md
RENAMED
@@ -12,6 +12,7 @@ All notable changes to this project will be documented in this file.
 ### 🐛 Bug Fixes
 
 - Improve indexing of the `request_queue_records` table for `SqlRequestQueueClient` ([#1527](https://github.com/apify/crawlee-python/pull/1527)) ([6509534](https://github.com/apify/crawlee-python/commit/65095346a9d8b703b10c91e0510154c3c48a4176)) by [@Mantisus](https://github.com/Mantisus), closes [#1526](https://github.com/apify/crawlee-python/issues/1526)
+- Improve error handling for `RobotsTxtFile.load` ([#1524](https://github.com/apify/crawlee-python/pull/1524)) ([596a311](https://github.com/apify/crawlee-python/commit/596a31184914a254b3e7a81fd2f48ea8eda7db49)) by [@Mantisus](https://github.com/Mantisus)
 
 
 <!-- git-cliff-unreleased-end -->
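The `RobotsTxtFile.load` fix above is most visible at the crawler level: with `respect_robots_txt_file=True`, a run no longer aborts when a linked host's robots.txt cannot be fetched. A minimal sketch of that usage, based only on options that appear elsewhere in this diff (the start URL is a placeholder, not part of the change):

```python
import asyncio

from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext


async def main() -> None:
    # Robots.txt is consulted before each request; hosts whose robots.txt cannot be
    # fetched are now treated as allow-all instead of failing the whole crawl.
    crawler = BeautifulSoupCrawler(respect_robots_txt_file=True, max_request_retries=0)

    @crawler.router.default_handler
    async def handler(context: BeautifulSoupCrawlingContext) -> None:
        context.log.info(f'Visited {context.request.url}')
        await context.enqueue_links(strategy='all')

    await crawler.run(['https://crawlee.dev'])


if __name__ == '__main__':
    asyncio.run(main())
```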
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/docs/pyproject.toml
RENAMED
@@ -1,4 +1,4 @@
-# Line
+# Line length different from the rest of the code to make sure that the example codes visualised on the generated
 # documentation webpages are shown without vertical slider to make them more readable.
 
 [tool.ruff]
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/_utils/robots.py
RENAMED
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from logging import getLogger
 from typing import TYPE_CHECKING
 
 from protego import Protego
@@ -15,6 +16,9 @@ if TYPE_CHECKING:
     from crawlee.proxy_configuration import ProxyInfo
 
 
+logger = getLogger(__name__)
+
+
 class RobotsTxtFile:
     def __init__(
         self, url: str, robots: Protego, http_client: HttpClient | None = None, proxy_info: ProxyInfo | None = None
@@ -56,12 +60,20 @@
             http_client: The `HttpClient` instance used to perform the network request for fetching the robots.txt file.
             proxy_info: Optional `ProxyInfo` to be used when fetching the robots.txt file. If None, no proxy is used.
         """
-        response = await http_client.send_request(url, proxy_info=proxy_info)
-        body = (
-            b'User-agent: *\nAllow: /' if is_status_code_client_error(response.status_code) else await response.read()
-        )
+        try:
+            response = await http_client.send_request(url, proxy_info=proxy_info)
+
+            body = (
+                b'User-agent: *\nAllow: /'
+                if is_status_code_client_error(response.status_code)
+                else await response.read()
+            )
+            robots = Protego.parse(body.decode('utf-8'))
+
+        except Exception as e:
+            logger.warning(f'Failed to fetch from robots.txt from "{url}" with error: "{e}"')
 
-        robots = Protego.parse(body.decode('utf-8'))
+            robots = Protego.parse('User-agent: *\nAllow: /')
 
         return cls(url, robots, http_client=http_client, proxy_info=proxy_info)
 
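For context, a minimal sketch of how the new fallback behaves, assuming the `RobotsTxtFile.load` classmethod keeps the shape implied by this diff and that an `is_allowed` helper exists on the class (an assumption); the unreachable host is a placeholder, `HttpxHttpClient` is just one of the bundled HTTP clients, and importing from the private `_utils` module is for illustration only:

```python
import asyncio

from crawlee._utils.robots import RobotsTxtFile
from crawlee.http_clients import HttpxHttpClient


async def main() -> None:
    # Before this change, a network failure here propagated out of load();
    # now it is logged as a warning and an allow-all robots.txt is used instead.
    robots = await RobotsTxtFile.load(
        'https://unreachable-host.invalid/robots.txt',
        http_client=HttpxHttpClient(),
    )
    # With the allow-all fallback, every URL on that host is reported as crawlable.
    print(robots.is_allowed('https://unreachable-host.invalid/some/page'))


if __name__ == '__main__':
    asyncio.run(main())
```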
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/src/crawlee/statistics/_error_snapshotter.py
RENAMED
@@ -32,7 +32,7 @@ class ErrorSnapshotter:
         """Capture error snapshot and save it to key value store.
 
         It saves the error snapshot directly to a key value store. It can't use `context.get_key_value_store` because
-        it returns `KeyValueStoreChangeRecords` which is
+        it returns `KeyValueStoreChangeRecords` which is committed to the key value store only if the `RequestHandler`
         returned without an exception. ErrorSnapshotter is on the contrary active only when `RequestHandler` fails with
         an exception.
 
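As an illustration of the distinction the corrected docstring draws, here is a minimal sketch (assuming only the public `KeyValueStore` API; the key and content are placeholders) of writing a snapshot straight to a key-value store, which persists immediately rather than being committed only after the request handler succeeds:

```python
from crawlee.storages import KeyValueStore


async def save_error_snapshot(key: str, html: str) -> None:
    # Opening the default key-value store and writing to it directly persists the
    # value right away, even if the surrounding request handler later raises.
    kvs = await KeyValueStore.open()
    await kvs.set_value(key, html, content_type='text/html')
```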
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
RENAMED
@@ -292,7 +292,7 @@ async def test_playwright_only_hook(test_urls: list[str]) -> None:
 
     await crawler.run(test_urls[:1])
 
-    # Default behavior. Hook is called
+    # Default behavior. Hook is called every time, both static sub crawler and playwright sub crawler.
     pre_nav_hook_common.assert_has_calls([call(test_urls[0]), call(test_urls[0])])
     # Hook is called only by playwright sub crawler.
     pre_nav_hook_playwright.assert_called_once_with('about:blank')
@@ -433,13 +433,13 @@ async def test_adaptive_crawling_statistics(test_urls: list[str]) -> None:
     ],
 )
 async def test_adaptive_crawler_exceptions_in_sub_crawlers(*, error_in_pw_crawler: bool, test_urls: list[str]) -> None:
-    """Test that correct results are
+    """Test that correct results are committed when exceptions are raised in sub crawlers.
 
     Exception in bs sub crawler will be logged and pw sub crawler used instead.
     Any result from bs sub crawler will be discarded, result form pw crawler will be saved instead.
     (But global state modifications through `use_state` will not be reverted!!!)
 
-    Exception in pw sub crawler will prevent any result from being
+    Exception in pw sub crawler will prevent any result from being committed. Even if `push_data` was called before
     the exception
     """
     static_only_no_detection_predictor = _SimpleRenderingTypePredictor(detection_probability_recommendation=cycle([0]))
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_beautifulsoup/test_beautifulsoup_crawler.py
RENAMED
@@ -6,7 +6,7 @@ from unittest import mock
 import pytest
 
 from crawlee import ConcurrencySettings, Glob, HttpHeaders, RequestTransformAction, SkippedReason
-from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext
+from crawlee.crawlers import BasicCrawlingContext, BeautifulSoupCrawler, BeautifulSoupCrawlingContext
 from crawlee.storages import RequestQueue
 
 if TYPE_CHECKING:
@@ -167,6 +167,40 @@ async def test_respect_robots_txt(server_url: URL, http_client: HttpClient) -> None:
     }
 
 
+async def test_respect_robots_txt_with_problematic_links(server_url: URL, http_client: HttpClient) -> None:
+    """Test checks the crawler behavior with links that may cause problems when attempting to retrieve robots.txt."""
+    visit = mock.Mock()
+    fail = mock.Mock()
+    crawler = BeautifulSoupCrawler(
+        http_client=http_client,
+        respect_robots_txt_file=True,
+        max_request_retries=0,
+    )
+
+    @crawler.router.default_handler
+    async def request_handler(context: BeautifulSoupCrawlingContext) -> None:
+        visit(context.request.url)
+        await context.enqueue_links(strategy='all')
+
+    @crawler.failed_request_handler
+    async def error_handler(context: BasicCrawlingContext, _error: Exception) -> None:
+        fail(context.request.url)
+
+    await crawler.run([str(server_url / 'problematic_links')])
+
+    visited = {call[0][0] for call in visit.call_args_list}
+    failed = {call[0][0] for call in fail.call_args_list}
+
+    # Email must be skipped
+    # https://avatars.githubusercontent.com/apify does not get robots.txt, but is correct for the crawler.
+    assert visited == {str(server_url / 'problematic_links'), 'https://avatars.githubusercontent.com/apify'}
+
+    # The budplaceholder.com does not exist.
+    assert failed == {
+        'https://budplaceholder.com/',
+    }
+
+
 async def test_on_skipped_request(server_url: URL, http_client: HttpClient) -> None:
     crawler = BeautifulSoupCrawler(http_client=http_client, respect_robots_txt_file=True)
     skip = mock.Mock()
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_http/test_http_crawler.py
RENAMED
@@ -126,9 +126,9 @@ async def test_handles_client_errors(
         pytest.param([], True, 4, 1, id='default_behavior'),
         # error without retry for all 4xx statuses
         pytest.param([], False, 0, 1, id='default_behavior_without_session_pool'),
-        # take as successful status codes from the `ignore_http_error_status_codes` list with
+        # take as successful status codes from the `ignore_http_error_status_codes` list with Session Pool
         pytest.param([403], True, 0, 0, id='ignore_error_status_codes'),
-        # take as successful status codes from the `ignore_http_error_status_codes` list without
+        # take as successful status codes from the `ignore_http_error_status_codes` list without Session Pool
         pytest.param([403], False, 0, 0, id='ignore_error_status_codes_without_session_pool'),
     ],
 )
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_parsel/test_parsel_crawler.py
RENAMED
@@ -14,7 +14,7 @@ if TYPE_CHECKING:
     from yarl import URL
 
     from crawlee._request import RequestOptions
-    from crawlee.crawlers import ParselCrawlingContext
+    from crawlee.crawlers import BasicCrawlingContext, ParselCrawlingContext
     from crawlee.http_clients._base import HttpClient
 
 
@@ -261,6 +261,40 @@ async def test_respect_robots_txt(server_url: URL, http_client: HttpClient) -> None:
     }
 
 
+async def test_respect_robots_txt_with_problematic_links(server_url: URL, http_client: HttpClient) -> None:
+    """Test checks the crawler behavior with links that may cause problems when attempting to retrieve robots.txt."""
+    visit = mock.Mock()
+    fail = mock.Mock()
+    crawler = ParselCrawler(
+        http_client=http_client,
+        respect_robots_txt_file=True,
+        max_request_retries=0,
+    )
+
+    @crawler.router.default_handler
+    async def request_handler(context: ParselCrawlingContext) -> None:
+        visit(context.request.url)
+        await context.enqueue_links(strategy='all')
+
+    @crawler.failed_request_handler
+    async def error_handler(context: BasicCrawlingContext, _error: Exception) -> None:
+        fail(context.request.url)
+
+    await crawler.run([str(server_url / 'problematic_links')])
+
+    visited = {call[0][0] for call in visit.call_args_list}
+    failed = {call[0][0] for call in fail.call_args_list}
+
+    # Email must be skipped
+    # https://avatars.githubusercontent.com/apify does not get robots.txt, but is correct for the crawler.
+    assert visited == {str(server_url / 'problematic_links'), 'https://avatars.githubusercontent.com/apify'}
+
+    # The budplaceholder.com does not exist.
+    assert failed == {
+        'https://budplaceholder.com/',
+    }
+
+
 async def test_on_skipped_request(server_url: URL, http_client: HttpClient) -> None:
     crawler = ParselCrawler(http_client=http_client, respect_robots_txt_file=True)
     skip = mock.Mock()
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/crawlers/_playwright/test_playwright_crawler.py
RENAMED
@@ -48,7 +48,7 @@ if TYPE_CHECKING:
     from crawlee._request import RequestOptions
     from crawlee._types import HttpMethod, HttpPayload
     from crawlee.browsers._types import BrowserType
-    from crawlee.crawlers import PlaywrightCrawlingContext, PlaywrightPreNavCrawlingContext
+    from crawlee.crawlers import BasicCrawlingContext, PlaywrightCrawlingContext, PlaywrightPreNavCrawlingContext
 
 
 @pytest.mark.parametrize(
@@ -671,6 +671,39 @@ async def test_respect_robots_txt(server_url: URL) -> None:
     }
 
 
+async def test_respect_robots_txt_with_problematic_links(server_url: URL) -> None:
+    """Test checks the crawler behavior with links that may cause problems when attempting to retrieve robots.txt."""
+    visit = mock.Mock()
+    fail = mock.Mock()
+    crawler = PlaywrightCrawler(
+        respect_robots_txt_file=True,
+        max_request_retries=0,
+    )
+
+    @crawler.router.default_handler
+    async def request_handler(context: PlaywrightCrawlingContext) -> None:
+        visit(context.request.url)
+        await context.enqueue_links(strategy='all')
+
+    @crawler.failed_request_handler
+    async def error_handler(context: BasicCrawlingContext, _error: Exception) -> None:
+        fail(context.request.url)
+
+    await crawler.run([str(server_url / 'problematic_links')])
+
+    visited = {call[0][0] for call in visit.call_args_list}
+    failed = {call[0][0] for call in fail.call_args_list}
+
+    # Email must be skipped
+    # https://avatars.githubusercontent.com/apify does not get robots.txt, but is correct for the crawler.
+    assert visited == {str(server_url / 'problematic_links'), 'https://avatars.githubusercontent.com/apify'}
+
+    # The budplaceholder.com does not exist.
+    assert failed == {
+        'https://budplaceholder.com/',
+    }
+
+
 async def test_on_skipped_request(server_url: URL) -> None:
     crawler = PlaywrightCrawler(respect_robots_txt_file=True)
     skip = mock.Mock()
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/server.py
RENAMED
@@ -18,6 +18,7 @@ from tests.unit.server_endpoints import (
     GENERIC_RESPONSE,
     HELLO_WORLD,
     INCAPSULA,
+    PROBLEMATIC_LINKS,
     ROBOTS_TXT,
     SECONDARY_INDEX,
     START_ENQUEUE,
@@ -102,6 +103,7 @@ async def app(scope: dict[str, Any], receive: Receive, send: Send) -> None:
         'page_1': generic_response_endpoint,
         'page_2': generic_response_endpoint,
         'page_3': generic_response_endpoint,
+        'problematic_links': problematic_links_endpoint,
         'set_cookies': set_cookies,
         'set_complex_cookies': set_complex_cookies,
         'cookies': get_cookies,
@@ -287,6 +289,14 @@ async def generic_response_endpoint(_scope: dict[str, Any], _receive: Receive, send: Send) -> None:
     )
 
 
+async def problematic_links_endpoint(_scope: dict[str, Any], _receive: Receive, send: Send) -> None:
+    """Handle requests with a page containing problematic links."""
+    await send_html_response(
+        send,
+        PROBLEMATIC_LINKS,
+    )
+
+
 async def redirect_to_url(scope: dict[str, Any], _receive: Receive, send: Send) -> None:
     """Handle requests that should redirect to a specified full URL."""
     query_params = get_query_params(scope.get('query_string', b''))
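The new endpoint reuses the test server's existing `send_html_response` helper, whose body is not part of this diff. Purely as a hypothetical sketch of what such an ASGI helper typically looks like (name and signature taken from the call above, everything else assumed):

```python
from collections.abc import Awaitable, Callable
from typing import Any

Send = Callable[[dict[str, Any]], Awaitable[None]]


async def send_html_response(send: Send, body: bytes, status: int = 200) -> None:
    # Start the HTTP response with an HTML content type, then send the body bytes.
    await send(
        {
            'type': 'http.response.start',
            'status': status,
            'headers': [(b'content-type', b'text/html; charset=utf-8')],
        }
    )
    await send({'type': 'http.response.body', 'body': body})
```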
{crawlee-1.0.5b3 → crawlee-1.0.5b7}/tests/unit/server_endpoints.py
RENAMED
@@ -35,6 +35,16 @@ INCAPSULA = b"""\
 </iframe>
 </body></html>"""
 
+PROBLEMATIC_LINKS = b"""\
+<html><head>
+    <title>Hello</title>
+</head>
+<body>
+    <a href="https://budplaceholder.com/">Placeholder</a>
+    <a href="mailto:test@test.com">test@test.com</a>
+    <a href=https://avatars.githubusercontent.com/apify>Apify avatar</a>
+</body></html>"""
+
 GENERIC_RESPONSE = b"""\
 <html><head>
     <title>Hello</title>