crawlee-1.0.2b4.tar.gz → crawlee-1.0.5b21.tar.gz
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
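The headline change in this range is a new Redis-backed storage client (`src/crawlee/storage_clients/_redis/`, roughly 1,800 added lines including Lua scripts for atomic request handling), together with new documentation examples and unit tests. As a rough sketch only: the snippet below shows how such a client would presumably be plugged into a crawler, following the pattern of the existing storage clients and the new `redis_storage_client_basic_example.py` doc file. The `RedisStorageClient` name matches the newly added module, but the `connection_string` parameter shown here is an assumption based on the file listing, not confirmed API.

```python
import asyncio

from crawlee.crawlers import ParselCrawler, ParselCrawlingContext
from crawlee.storage_clients import RedisStorageClient  # newly exported in this release range


async def main() -> None:
    # Assumed constructor argument: a Redis connection string pointing at a local instance.
    storage_client = RedisStorageClient(connection_string='redis://localhost:6379')

    # Crawlers accept a storage client; the request queue, dataset and key-value store
    # would then be backed by Redis instead of the default file-system client.
    crawler = ParselCrawler(storage_client=storage_client)

    @crawler.router.default_handler
    async def handler(context: ParselCrawlingContext) -> None:
        await context.push_data({
            'url': context.request.url,
            'title': context.selector.css('title::text').get(),
        })

    await crawler.run(['https://crawlee.dev'])


if __name__ == '__main__':
    asyncio.run(main())
```

The file list for the full release range follows.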
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/build_and_deploy_docs.yaml +3 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/release.yaml +3 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/run_code_checks.yaml +4 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/templates_e2e_tests.yaml +3 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.gitignore +1 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/CHANGELOG.md +41 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/CONTRIBUTING.md +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/Makefile +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/PKG-INFO +9 -5
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/apify_platform.mdx +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/using_browser_profiles_chrome.py +2 -4
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/playwright_crawler_with_fingerprint_generator.mdx +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/using_browser_profile.mdx +0 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/architecture_overview.mdx +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/avoid_blocking.mdx +1 -1
- crawlee-1.0.5b21/docs/guides/code_examples/storage_clients/redis_storage_client_basic_example.py +10 -0
- crawlee-1.0.5b21/docs/guides/code_examples/storage_clients/redis_storage_client_configuration_example.py +27 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/request_loaders.mdx +8 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/storage_clients.mdx +175 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/trace_and_monitor_crawlers.mdx +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/09_running_in_cloud.mdx +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/pyproject.toml +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/pyproject.toml +25 -22
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_request.py +31 -20
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_service_locator.py +4 -4
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_types.py +10 -16
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/recoverable_state.py +32 -8
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/recurring_task.py +15 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/robots.py +17 -5
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/sitemap.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/urls.py +9 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_browser_pool.py +4 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_playwright_browser_controller.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_playwright_browser_plugin.py +17 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_types.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/configuration.py +3 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py +3 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py +33 -13
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/_basic_crawler.py +19 -11
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_playwright_crawler.py +11 -4
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/events/_event_manager.py +3 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/_header_generator.py +2 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/otel/crawler_instrumentor.py +3 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/_sitemap_request_loader.py +5 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/_session_pool.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/statistics/_error_snapshotter.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/statistics/_models.py +32 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/statistics/_statistics.py +24 -33
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/__init__.py +4 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/_request_queue_client.py +24 -6
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/__init__.py +6 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_client_mixin.py +295 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_dataset_client.py +325 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_key_value_store_client.py +264 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_request_queue_client.py +586 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_storage_client.py +146 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/_utils.py +23 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/lua_scripts/atomic_bloom_add_requests.lua +36 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/lua_scripts/atomic_fetch_request.lua +49 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/lua_scripts/atomic_set_add_requests.lua +37 -0
- crawlee-1.0.5b21/src/crawlee/storage_clients/_redis/lua_scripts/reclaim_stale_requests.lua +34 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_db_models.py +1 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_key_value_store_client.py +3 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_request_queue_client.py +18 -4
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_storage_client.py +10 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_key_value_store.py +5 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/e2e/project_template/utils.py +3 -2
- crawlee-1.0.5b21/tests/unit/_autoscaling/test_snapshotter.py +353 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_sitemap.py +0 -6
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_system.py +8 -6
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/browsers/test_playwright_browser_plugin.py +10 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/conftest.py +25 -10
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py +107 -7
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_basic/test_basic_crawler.py +90 -6
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_beautifulsoup/test_beautifulsoup_crawler.py +37 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_http/test_http_crawler.py +2 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_parsel/test_parsel_crawler.py +37 -3
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_playwright/test_playwright_crawler.py +34 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/events/test_event_manager.py +12 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/otel/test_crawler_instrumentor.py +8 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/server.py +10 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/server_endpoints.py +11 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_file_system/test_fs_dataset_client.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_file_system/test_fs_kvs_client.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_file_system/test_fs_rq_client.py +11 -3
- crawlee-1.0.5b21/tests/unit/storage_clients/_redis/test_redis_dataset_client.py +146 -0
- crawlee-1.0.5b21/tests/unit/storage_clients/_redis/test_redis_kvs_client.py +217 -0
- crawlee-1.0.5b21/tests/unit/storage_clients/_redis/test_redis_rq_client.py +257 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_sql/test_sql_dataset_client.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_sql/test_sql_kvs_client.py +1 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_sql/test_sql_rq_client.py +1 -1
- crawlee-1.0.5b21/tests/unit/storages/conftest.py +39 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storages/test_dataset.py +3 -6
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storages/test_key_value_store.py +44 -7
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storages/test_request_queue.py +63 -6
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storages/test_storage_instance_manager.py +7 -20
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/test_configuration.py +30 -13
- crawlee-1.0.5b21/uv.lock +4382 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/docusaurus.config.js +2 -2
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/package.json +6 -2
- crawlee-1.0.5b21/website/src/components/RunnableCodeBlock.jsx +42 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/css/custom.css +0 -1
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/pages/home_page_example.py +14 -9
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/pages/index.js +1 -1
- crawlee-1.0.5b21/website/static/.nojekyll +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/yarn.lock +770 -798
- crawlee-1.0.2b4/tests/unit/_autoscaling/test_snapshotter.py +0 -333
- crawlee-1.0.2b4/tests/unit/storages/conftest.py +0 -18
- crawlee-1.0.2b4/uv.lock +0 -3966
- crawlee-1.0.2b4/website/src/components/RunnableCodeBlock.jsx +0 -40
- crawlee-1.0.2b4/website/static/img/apify_logo.svg +0 -5
- crawlee-1.0.2b4/website/static/img/apify_og_SDK.png +0 -0
- crawlee-1.0.2b4/website/static/img/apify_sdk.svg +0 -13
- crawlee-1.0.2b4/website/static/img/apify_sdk_white.svg +0 -13
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.editorconfig +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/CODEOWNERS +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/pull_request_template.md +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/check_pr_title.yaml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/pre_release.yaml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.github/workflows/update_new_issue.yaml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.markdownlint.yaml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/.pre-commit-config.yaml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/LICENSE +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/README.md +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/apify/crawler_as_actor_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/apify/get_public_url.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/apify/log_with_config_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/apify/proxy_advanced_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/apify/proxy_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/google/cloud_run_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/code_examples/google/google_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/google_cloud.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/deployment/google_cloud_run.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/add_data_to_dataset.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/beautifulsoup_crawler.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/capture_screenshot_using_playwright.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/capturing_page_snapshots_with_error_snapshotter.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/adaptive_playwright_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/add_data_to_dataset_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/add_data_to_dataset_dataset.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/add_data_to_dataset_pw.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/beautifulsoup_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/beautifulsoup_crawler_keep_alive.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/beautifulsoup_crawler_stop.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/capture_screenshot_using_playwright.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/configure_json_logging.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_all_links_on_website_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_all_links_on_website_pw.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_multiple_urls_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_multiple_urls_pw.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_specific_links_on_website_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_specific_links_on_website_pw.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_website_with_relative_links_all_links.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_website_with_relative_links_same_domain.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_website_with_relative_links_same_hostname.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/crawl_website_with_relative_links_same_origin.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/export_entire_dataset_to_file_csv.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/export_entire_dataset_to_file_json.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/extract_and_add_specific_links_on_website_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/extract_and_add_specific_links_on_website_pw.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/fill_and_submit_web_form_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/fill_and_submit_web_form_request.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/parsel_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/parsel_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/playwright_block_requests.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/playwright_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/playwright_crawler_with_camoufox.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/playwright_crawler_with_error_snapshotter.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/playwright_crawler_with_fingerprint_generator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/respect_robots_on_skipped_request.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/respect_robots_txt_file.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/resuming_paused_crawl.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/using_browser_profiles_firefox.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawl_all_links_on_website.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawl_multiple_urls.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawl_specific_links_on_website.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawl_website_with_relative_links.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawler_keep_alive.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/crawler_stop.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/export_entire_dataset_to_file.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/fill_and_submit_web_form.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/json_logging.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/parsel_crawler.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/playwright_crawler.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/playwright_crawler_with_block_requests.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/playwright_crawler_with_camoufox.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/respect_robots_txt_file.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/resuming_paused_crawl.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/avoid_blocking/playwright_with_fingerprint_generator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/creating_web_archive/manual_archiving_parsel_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/creating_web_archive/manual_archiving_playwright_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/creating_web_archive/simple_pw_through_proxy_pywb_server.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/error_handling/change_handle_error_status.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/error_handling/disable_retry.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/error_handling/handle_proxy_error.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_clients/parsel_curl_impersonate_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_clients/parsel_httpx_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_clients/parsel_impit_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_crawlers/beautifulsoup_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_crawlers/custom_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_crawlers/http_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/http_crawlers/parsel_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/login_crawler/http_login.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/login_crawler/playwright_login.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler/browser_configuration_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler/multiple_launch_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler/plugin_browser_configuration_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler/pre_navigation_hook_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_adaptive/handler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_adaptive/init_beautifulsoup.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_adaptive/init_parsel.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_adaptive/init_prediction.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_adaptive/pre_nav_hooks.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_stagehand/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_stagehand/browser_classes.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_stagehand/stagehand_run.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/playwright_crawler_stagehand/support_classes.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/inspecting_bs_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/inspecting_pw_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/integration_bs_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/integration_pw_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/quick_start_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/session_bs_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/session_pw_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/tiers_bs_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/proxy_management/tiers_pw_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/rl_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/rl_basic_example_with_persist.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/rl_tandem_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/rl_tandem_example_explicit.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/sitemap_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/sitemap_example_with_persist.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/sitemap_tandem_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_loaders/sitemap_tandem_example_explicit.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/adaptive_crawler_handlers.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/basic_request_handlers.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/custom_router_default_only.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/error_handler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/failed_request_handler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/http_pre_navigation.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/playwright_pre_navigation.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/request_router/simple_default_handler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/running_in_web_server/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/running_in_web_server/crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/running_in_web_server/server.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/scaling_crawlers/max_tasks_per_minute_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/scaling_crawlers/min_and_max_concurrency_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_conflicts.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_crawler_configuration.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_crawler_event_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_crawler_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_locator_configuration.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_locator_event_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_locator_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_storage_configuration.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/service_locator/service_storage_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/multi_sessions_http.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/one_session_http.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_basic.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_beautifulsoup.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_http.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_parsel.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_playwright.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/session_management/sm_standalone.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/custom_storage_client_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/file_system_storage_client_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/file_system_storage_client_configuration_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/memory_storage_client_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/registering_storage_clients_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/sql_storage_client_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storage_clients/sql_storage_client_configuration_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/cleaning_do_not_purge_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/cleaning_purge_explicitly_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/dataset_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/dataset_with_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/dataset_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/helper_add_requests_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/helper_enqueue_links_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/kvs_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/kvs_with_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/kvs_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/opening.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/rq_basic_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/rq_with_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/storages/rq_with_crawler_explicit_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/code_examples/trace_and_monitor_crawlers/instrument_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/crawler_login.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/creating_web_archive.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/error_handling.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/http_clients.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/http_crawlers.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/playwright_crawler.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/playwright_crawler_adaptive.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/playwright_crawler_stagehand.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/proxy_management.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/request_router.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/running_in_web_server.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/scaling_crawlers.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/service_locator.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/session_management.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/guides/storages.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/01_setting_up.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/02_first_crawler.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/03_adding_more_urls.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/04_real_world_project.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/05_crawling.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/06_scraping.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/07_saving_data.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/08_refactoring.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/02_bs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/02_bs_better.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/02_request_queue.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/03_enqueue_strategy.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/03_finding_new_links.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/03_globs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/03_original_code.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/03_transform_request.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/04_sanity_check.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/05_crawling_detail.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/05_crawling_listing.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/06_scraping.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/07_final_code.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/07_first_code.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/08_main.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/08_routes.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/09_apify_sdk.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/code_examples/routes.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/introduction/index.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/quick-start/code_examples/beautifulsoup_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/quick-start/code_examples/parsel_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/quick-start/code_examples/playwright_crawler_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/quick-start/code_examples/playwright_crawler_headful_example.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/quick-start/index.mdx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/upgrading/upgrading_to_v0x.md +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/upgrading/upgrading_to_v1.md +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/renovate.json +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/_types.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/autoscaled_pool.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/snapshotter.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_autoscaling/system_status.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_browserforge_workaround.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_cli.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_consts.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_log_config.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/blocked.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/byte_size.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/console.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/crypto.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/docs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/file.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/globs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/html_to_text.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/models.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/requests.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/system.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/time.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/try_import.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/wait.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/_utils/web.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_browser_controller.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_browser_plugin.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/_playwright_browser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/browsers/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_abstract_http/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_abstract_http/_abstract_http_parser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_abstract_http/_http_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_abstract_http/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler_statistics.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_rendering_type_predictor.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_result_comparator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_adaptive_playwright/_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/_basic_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/_context_pipeline.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/_logging_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_basic/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/_beautifulsoup_parser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_beautifulsoup/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_http/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_http/_http_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_http/_http_parser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_parsel/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_parsel/_parsel_crawler.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_parsel/_parsel_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_parsel/_parsel_parser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_parsel/_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_playwright_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_playwright_http_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_playwright_pre_nav_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_types.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_playwright/_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/_types.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/crawlers/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/errors.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/events/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/events/_local_event_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/events/_types.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/events/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/_browserforge_adapter.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/_consts.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/_fingerprint_generator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/_types.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/fingerprint_suite/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/http_clients/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/http_clients/_base.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/http_clients/_curl_impersonate.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/http_clients/_httpx.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/http_clients/_impit.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/otel/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/cookiecutter.json +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/hooks/post_gen_project.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/hooks/pre_gen_project.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/main.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/main_beautifulsoup.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/main_parsel.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/main_playwright.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/main_playwright_camoufox.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/routes_beautifulsoup.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/routes_camoufox.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/routes_parsel.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/routes_playwright.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/templates/routes_playwright_camoufox.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/.dockerignore +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/Dockerfile +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/README.md +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/pyproject.toml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/requirements.txt +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/__main__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/main.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/project_template/{{cookiecutter.project_name}}/{{cookiecutter.__package_name}}/routes.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/proxy_configuration.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/_request_list.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/_request_loader.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/_request_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/request_loaders/_request_manager_tandem.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/router.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/_cookies.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/_models.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/_session.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/sessions/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/statistics/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/statistics/_error_tracker.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/_dataset_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/_key_value_store_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/_request_queue_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_base/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/_dataset_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/_key_value_store_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/_utils.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_file_system/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/_dataset_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/_key_value_store_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/_request_queue_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/_storage_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_memory/py.typed +0 -0
- {crawlee-1.0.2b4/src/crawlee/storage_clients/_sql → crawlee-1.0.5b21/src/crawlee/storage_clients/_redis}/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_client_mixin.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/_sql/_dataset_client.py +0 -0
- {crawlee-1.0.2b4/src/crawlee/storage_clients → crawlee-1.0.5b21/src/crawlee/storage_clients/_sql}/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storage_clients/models.py +0 -0
- {crawlee-1.0.2b4/src/crawlee/storages → crawlee-1.0.5b21/src/crawlee/storage_clients}/py.typed +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_base.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_dataset.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_request_queue.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_storage_instance_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/src/crawlee/storages/_utils.py +0 -0
- /crawlee-1.0.2b4/tests/__init__.py → /crawlee-1.0.5b21/src/crawlee/storages/py.typed +0 -0
- {crawlee-1.0.2b4/tests/e2e → crawlee-1.0.5b21/tests}/__init__.py +0 -0
- {crawlee-1.0.2b4/tests/unit → crawlee-1.0.5b21/tests/e2e}/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/e2e/conftest.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/e2e/project_template/test_static_crawlers_templates.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/README.md +0 -0
- /crawlee-1.0.2b4/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawling_context.py → /crawlee-1.0.5b21/tests/unit/__init__.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_autoscaling/test_autoscaled_pool.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_autoscaling/test_system_status.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_statistics/test_error_tracker.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_statistics/test_periodic_logging.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_statistics/test_persistence.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_statistics/test_request_processing_record.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_byte_size.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_console.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_crypto.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_file.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_globs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_html_to_text.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_measure_time.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_raise_if_too_many_kwargs.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_recurring_task.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_requests.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_robots.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_timedelata_ms.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/_utils/test_urls.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/browsers/test_browser_pool.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/browsers/test_playwright_browser.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/browsers/test_playwright_browser_controller.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler_statistics.py +0 -0
- /crawlee-1.0.2b4/website/static/.nojekyll → /crawlee-1.0.5b21/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawling_context.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_adaptive_playwright/test_predictor.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/crawlers/_basic/test_context_pipeline.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/events/test_local_event_manager.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/fingerprint_suite/test_adapters.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/fingerprint_suite/test_header_generator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/http_clients/test_http_clients.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/http_clients/test_httpx.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/proxy_configuration/test_new_proxy_info.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/proxy_configuration/test_tiers.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/request_loaders/test_request_list.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/request_loaders/test_sitemap_request_loader.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/sessions/test_cookies.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/sessions/test_models.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/sessions/test_session.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/sessions/test_session_pool.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_memory/test_memory_dataset_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_memory/test_memory_kvs_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storage_clients/_memory/test_memory_rq_client.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/storages/test_request_manager_tandem.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/test_cli.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/test_log_config.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/test_router.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/tests/unit/test_service_locator.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/.eslintrc.json +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/.yarnrc.yml +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/babel.config.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/build_api_reference.sh +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/generate_module_shortcuts.py +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/patches/@docusaurus+core+3.4.0.patch +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/patches/@docusaurus+core+3.5.2.patch +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/roa-loader/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/roa-loader/package.json +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/sidebars.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/ApiLink.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Button.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Button.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/CopyButton.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/CopyButton.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Gradients.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Highlights.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Highlights.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageCliExample.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageCliExample.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageCtaSection.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageCtaSection.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageHeroSection.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/HomepageHeroSection.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/LanguageInfoWidget.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/LanguageInfoWidget.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/LanguageSwitch.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/LanguageSwitch.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/RiverSection.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/RiverSection.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/ThreeCardsWithIcon.jsx +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/ThreeCardsWithIcon.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/animated-crawlee-logo-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/Homepage/animated-crawlee-logo-light.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/components/RunnableCodeBlock.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/pages/index.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/ColorModeToggle/dark-mode-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/ColorModeToggle/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/ColorModeToggle/light-mode-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/ColorModeToggle/styles.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/DocItem/Layout/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/DocItem/Layout/styles.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Footer/LinkItem/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Footer/LinkItem/index.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Footer/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Footer/index.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/MDXComponents/A.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/Content/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/Content/styles.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/Logo/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/Logo/index.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/MobileSidebar/Header/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/MobileSidebar/Header/index.module.css +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/MobileSidebar/Layout/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/MobileSidebar/PrimaryMenu/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/Navbar/MobileSidebar/index.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/src/theme/NavbarItem/ComponentTypes.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/font/lota.woff +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/font/lota.woff2 +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/API.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/arrow_right.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/auto-scaling-dark.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/auto-scaling-light.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/check.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/chrome-scrape-dark.gif +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/chrome-scrape-light.gif +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/cloud_icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/community-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/community-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-dark-new.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-javascript-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-javascript-light.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-light-new.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-light.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-logo-monocolor.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-logo.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-python-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-python-light.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/crawlee-python-og.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/defaults-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/defaults-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/discord-brand-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/discord-brand.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/docusaurus.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/external-link.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/favicon.ico +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/favorite-tools-dark.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/favorite-tools-light.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/auto-scaling.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/automate-everything.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/fingerprints.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/node-requests.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/runs-on-py.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/storage.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/features/works-everywhere.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/fill-and-submit-web-form/00.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/fill-and-submit-web-form/01.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/fill-and-submit-web-form/02.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/fill-and-submit-web-form/03.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/current-price.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/scraping-practice.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/select-an-element.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/selected-element.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/sku.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/getting-started/title.jpg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/github-brand-dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/github-brand.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/guides/jaeger_otel_search_view_example.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/guides/jaeger_otel_trace_example.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/hearth copy.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/hearth.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/javascript_logo.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/js_file.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/logo-big.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/logo-blur.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/logo-blur.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/logo-zoom.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/menu-arrows.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/oss_logo.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/puppeteer-live-view-dashboard.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/puppeteer-live-view-detail.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/queue-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/queue-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/resuming-paused-crawl/00.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/resuming-paused-crawl/01.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/robot.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/routing-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/routing-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/scraping-utils-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/scraping-utils-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/smart-proxy-dark.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/smart-proxy-light.webp +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/source_code.png +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/system.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/triangles_dark.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/triangles_light.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/workflow.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/zero-setup-dark-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/img/zero-setup-light-icon.svg +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/js/custom.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/static/robots.txt +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/docs-prettier.config.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/utils/externalLink.js +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/website_gif/chrome-scrape-dark.gif +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/website_gif/chrome-scrape-dark.mp4 +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/website_gif/chrome-scrape-light.gif +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/website_gif/chrome-scrape-light.mp4 +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tools/website_gif/website_gif.mjs +0 -0
- {crawlee-1.0.2b4 → crawlee-1.0.5b21}/website/tsconfig.eslint.json +0 -0
@@ -10,7 +10,7 @@ on:
 
 env:
   NODE_VERSION: 20
-  PYTHON_VERSION: 3.
+  PYTHON_VERSION: 3.14
 
 jobs:
   build_and_deploy_docs:
@@ -30,7 +30,7 @@ jobs:
           ref: ${{ github.event_name == 'workflow_call' && inputs.ref || github.ref }}
 
       - name: Set up Node
-        uses: actions/setup-node@
+        uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}
 
@@ -40,7 +40,7 @@ jobs:
          python-version: ${{ env.PYTHON_VERSION }}
 
      - name: Set up uv package manager
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
        with:
          python-version: ${{ env.PYTHON_VERSION }}
 
@@ -47,13 +47,13 @@ jobs:
     name: Lint check
     uses: apify/workflows/.github/workflows/python_lint_check.yaml@main
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   type_check:
     name: Type check
     uses: apify/workflows/.github/workflows/python_type_check.yaml@main
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   unit_tests:
     name: Unit tests
@@ -61,7 +61,7 @@ jobs:
     secrets:
       httpbin_url: ${{ secrets.APIFY_HTTPBIN_TOKEN && format('https://httpbin.apify.actor?token={0}', secrets.APIFY_HTTPBIN_TOKEN) || 'https://httpbin.org'}}
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   update_changelog:
     name: Update changelog
@@ -21,13 +21,13 @@ jobs:
     name: Lint check
     uses: apify/workflows/.github/workflows/python_lint_check.yaml@main
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   type_check:
     name: Type check
     uses: apify/workflows/.github/workflows/python_type_check.yaml@main
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   unit_tests:
     name: Unit tests
@@ -35,8 +35,9 @@ jobs:
     secrets:
       httpbin_url: ${{ secrets.APIFY_HTTPBIN_TOKEN && format('https://httpbin.apify.actor?token={0}', secrets.APIFY_HTTPBIN_TOKEN) || 'https://httpbin.org'}}
     with:
-      python-versions: '["3.10", "3.11", "3.12", "3.13"]'
+      python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
 
   docs_check:
     name: Docs check
     uses: apify/workflows/.github/workflows/python_docs_check.yaml@main
+    secrets: inherit
@@ -7,7 +7,7 @@ on:
 
 env:
   NODE_VERSION: 22
-  PYTHON_VERSION: 3.
+  PYTHON_VERSION: 3.14
 
 jobs:
   end_to_end_tests:
@@ -27,7 +27,7 @@ jobs:
        uses: actions/checkout@v5
 
      - name: Setup node
-        uses: actions/setup-node@
+        uses: actions/setup-node@v6
        with:
          node-version: ${{ env.NODE_VERSION }}
 
@@ -44,7 +44,7 @@ jobs:
        run: pipx install poetry
 
      - name: Set up uv package manager
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
        with:
          python-version: ${{ env.PYTHON_VERSION }}
 
@@ -3,15 +3,53 @@
 All notable changes to this project will be documented in this file.
 
 <!-- git-cliff-unreleased-start -->
-## 1.0.
+## 1.0.5 - **not yet released**
+
+### 🚀 Features
+
+- Add `chrome` `BrowserType` for `PlaywrightCrawler` to use the Chrome browser ([#1487](https://github.com/apify/crawlee-python/pull/1487)) ([b06937b](https://github.com/apify/crawlee-python/commit/b06937bbc3afe3c936b554bfc503365c1b2c526b)) by [@Mantisus](https://github.com/Mantisus), closes [#1071](https://github.com/apify/crawlee-python/issues/1071)
+- Add `RedisStorageClient` based on Redis v8.0+ ([#1406](https://github.com/apify/crawlee-python/pull/1406)) ([d08d13d](https://github.com/apify/crawlee-python/commit/d08d13d39203c24ab61fe254b0956d6744db3b5f)) by [@Mantisus](https://github.com/Mantisus)
+- Add support for Python 3.14 ([#1553](https://github.com/apify/crawlee-python/pull/1553)) ([89e9130](https://github.com/apify/crawlee-python/commit/89e9130cabee0fbc974b29c26483b7fa0edf627c)) by [@Mantisus](https://github.com/Mantisus)
+
+### 🐛 Bug Fixes
+
+- Improve indexing of the `request_queue_records` table for `SqlRequestQueueClient` ([#1527](https://github.com/apify/crawlee-python/pull/1527)) ([6509534](https://github.com/apify/crawlee-python/commit/65095346a9d8b703b10c91e0510154c3c48a4176)) by [@Mantisus](https://github.com/Mantisus), closes [#1526](https://github.com/apify/crawlee-python/issues/1526)
+- Improve error handling for `RobotsTxtFile.load` ([#1524](https://github.com/apify/crawlee-python/pull/1524)) ([596a311](https://github.com/apify/crawlee-python/commit/596a31184914a254b3e7a81fd2f48ea8eda7db49)) by [@Mantisus](https://github.com/Mantisus)
+- Fix `crawler_runtime` not being updated during run and only in the end ([#1540](https://github.com/apify/crawlee-python/pull/1540)) ([0d6c3f6](https://github.com/apify/crawlee-python/commit/0d6c3f6d3337ddb6cab4873747c28cf95605d550)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1541](https://github.com/apify/crawlee-python/issues/1541)
+- Ensure persist state event emission when exiting `EventManager` context ([#1562](https://github.com/apify/crawlee-python/pull/1562)) ([6a44f17](https://github.com/apify/crawlee-python/commit/6a44f172600cbcacebab899082d6efc9105c4e03)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1560](https://github.com/apify/crawlee-python/issues/1560)
+
+
+<!-- git-cliff-unreleased-end -->
+## [1.0.4](https://github.com/apify/crawlee-python/releases/tag/v1.0.4) (2025-10-24)
+
+### 🐛 Bug Fixes
+
+- Respect `enqueue_strategy` in `enqueue_links` ([#1505](https://github.com/apify/crawlee-python/pull/1505)) ([6ee04bc](https://github.com/apify/crawlee-python/commit/6ee04bc08c50a70f2e956a79d4ce5072a726c3a8)) by [@Mantisus](https://github.com/Mantisus), closes [#1504](https://github.com/apify/crawlee-python/issues/1504)
+- Exclude incorrect links before checking `robots.txt` ([#1502](https://github.com/apify/crawlee-python/pull/1502)) ([3273da5](https://github.com/apify/crawlee-python/commit/3273da5fee62ec9254666b376f382474c3532a56)) by [@Mantisus](https://github.com/Mantisus), closes [#1499](https://github.com/apify/crawlee-python/issues/1499)
+- Resolve compatibility issue between `SqlStorageClient` and `AdaptivePlaywrightCrawler` ([#1496](https://github.com/apify/crawlee-python/pull/1496)) ([ce172c4](https://github.com/apify/crawlee-python/commit/ce172c425a8643a1d4c919db4f5e5a6e47e91deb)) by [@Mantisus](https://github.com/Mantisus), closes [#1495](https://github.com/apify/crawlee-python/issues/1495)
+- Fix `BasicCrawler` statistics persistence ([#1490](https://github.com/apify/crawlee-python/pull/1490)) ([1eb1c19](https://github.com/apify/crawlee-python/commit/1eb1c19aa6f9dda4a0e3f7eda23f77a554f95076)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1501](https://github.com/apify/crawlee-python/issues/1501)
+- Save context state in result for `AdaptivePlaywrightCrawler` after isolated processing in `SubCrawler` ([#1488](https://github.com/apify/crawlee-python/pull/1488)) ([62b7c70](https://github.com/apify/crawlee-python/commit/62b7c70b54085fc65a660062028014f4502beba9)) by [@Mantisus](https://github.com/Mantisus), closes [#1483](https://github.com/apify/crawlee-python/issues/1483)
+
+
+## [1.0.3](https://github.com/apify/crawlee-python/releases/tag/v1.0.3) (2025-10-17)
+
+### 🐛 Bug Fixes
+
+- Add support for Pydantic v2.12 ([#1471](https://github.com/apify/crawlee-python/pull/1471)) ([35c1108](https://github.com/apify/crawlee-python/commit/35c110878c2f445a2866be2522ea8703e9b371dd)) by [@Mantisus](https://github.com/Mantisus), closes [#1464](https://github.com/apify/crawlee-python/issues/1464)
+- Fix database version warning message ([#1485](https://github.com/apify/crawlee-python/pull/1485)) ([18a545e](https://github.com/apify/crawlee-python/commit/18a545ee8add92e844acd0068f9cb8580a82e1c9)) by [@Mantisus](https://github.com/Mantisus)
+- Fix `reclaim_request` in `SqlRequestQueueClient` to correctly update the request state ([#1486](https://github.com/apify/crawlee-python/pull/1486)) ([1502469](https://github.com/apify/crawlee-python/commit/150246957f8f7f1ceb77bb77e3a02a903c50cae1)) by [@Mantisus](https://github.com/Mantisus), closes [#1484](https://github.com/apify/crawlee-python/issues/1484)
+- Fix `KeyValueStore.auto_saved_value` failing in some scenarios ([#1438](https://github.com/apify/crawlee-python/pull/1438)) ([b35dee7](https://github.com/apify/crawlee-python/commit/b35dee78180e57161b826641d45a61b8d8f6ef51)) by [@Pijukatel](https://github.com/Pijukatel), closes [#1354](https://github.com/apify/crawlee-python/issues/1354)
+
+
+## [1.0.2](https://github.com/apify/crawlee-python/releases/tag/v1.0.2) (2025-10-08)
 
 ### 🐛 Bug Fixes
 
 - Use Self type in the open() method of storage clients ([#1462](https://github.com/apify/crawlee-python/pull/1462)) ([4ec6f6c](https://github.com/apify/crawlee-python/commit/4ec6f6c08f81632197f602ff99151338b3eba6e7)) by [@janbuchar](https://github.com/janbuchar)
 - Add storages name validation ([#1457](https://github.com/apify/crawlee-python/pull/1457)) ([84de11a](https://github.com/apify/crawlee-python/commit/84de11a3a603503076f5b7df487c9abab68a9015)) by [@Mantisus](https://github.com/Mantisus), closes [#1434](https://github.com/apify/crawlee-python/issues/1434)
+- Pin pydantic version to <2.12.0 to avoid compatibility issues ([#1467](https://github.com/apify/crawlee-python/pull/1467)) ([f11b86f](https://github.com/apify/crawlee-python/commit/f11b86f7ed57f98e83dc1b52f15f2017a919bf59)) by [@vdusek](https://github.com/vdusek)
 
 
-<!-- git-cliff-unreleased-end -->
 ## [1.0.1](https://github.com/apify/crawlee-python/releases/tag/v1.0.1) (2025-10-06)
 
 ### 🐛 Bug Fixes
@@ -248,7 +286,7 @@ All notable changes to this project will be documented in this file.
 
 ### 🐛 Bug Fixes
 
-- Fix session
+- Fix session management with retire ([#947](https://github.com/apify/crawlee-python/pull/947)) ([caee03f](https://github.com/apify/crawlee-python/commit/caee03fe3a43cc1d7a8d3f9e19b42df1bdb1c0aa)) by [@Mantisus](https://github.com/Mantisus)
 - Fix templates - poetry-plugin-export version and camoufox template name ([#952](https://github.com/apify/crawlee-python/pull/952)) ([7addea6](https://github.com/apify/crawlee-python/commit/7addea6605359cceba208e16ec9131724bdb3e9b)) by [@Pijukatel](https://github.com/Pijukatel), closes [#951](https://github.com/apify/crawlee-python/issues/951)
 - Fix convert relative link to absolute in `enqueue_links` for response with redirect ([#956](https://github.com/apify/crawlee-python/pull/956)) ([694102e](https://github.com/apify/crawlee-python/commit/694102e163bb9021a4830d2545d153f6f8f3de90)) by [@Mantisus](https://github.com/Mantisus), closes [#955](https://github.com/apify/crawlee-python/issues/955)
 - Fix `CurlImpersonateHttpClient` cookies handler ([#946](https://github.com/apify/crawlee-python/pull/946)) ([ed415c4](https://github.com/apify/crawlee-python/commit/ed415c433da2a40b0ee62534f0730d0737e991b8)) by [@Mantisus](https://github.com/Mantisus)
@@ -103,7 +103,7 @@ make run-docs
 Publishing new versions to [PyPI](https://pypi.org/project/crawlee) is automated through GitHub Actions.
 
 - **Beta releases**: On each commit to the master branch, a new beta release is automatically published. The version number is determined based on the latest release and conventional commits. The beta version suffix is incremented by 1 from the last beta release on PyPI.
-- **Stable releases**: A stable version release may be created by triggering the `release` GitHub Actions workflow. The version number is determined based on the latest release and conventional commits (`auto` release type), or it may be
+- **Stable releases**: A stable version release may be created by triggering the `release` GitHub Actions workflow. The version number is determined based on the latest release and conventional commits (`auto` release type), or it may be overridden using the `custom` release type.
 
 ### Publishing to PyPI manually
 
@@ -38,7 +38,7 @@ unit-tests-cov:
 	uv run pytest --numprocesses=auto -vv --cov=src/crawlee --cov-append --cov-report=html tests/unit -m "not run_alone"
 
 e2e-templates-tests $(args):
-	uv run pytest --numprocesses=$(E2E_TESTS_CONCURRENCY) -vv tests/e2e/project_template "$(args)"
+	uv run pytest --numprocesses=$(E2E_TESTS_CONCURRENCY) -vv tests/e2e/project_template "$(args)" --timeout=600
 
 format:
 	uv run ruff check --fix
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: crawlee
-Version: 1.0.
+Version: 1.0.5b21
 Summary: Crawlee for Python
 Project-URL: Apify Homepage, https://apify.com
 Project-URL: Changelog, https://crawlee.dev/python/docs/changelog
@@ -223,15 +223,16 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Classifier: Topic :: Software Development :: Libraries
 Requires-Python: >=3.10
 Requires-Dist: cachetools>=5.5.0
 Requires-Dist: colorama>=0.4.0
-Requires-Dist: impit>=0.
+Requires-Dist: impit>=0.8.0
 Requires-Dist: more-itertools>=10.2.0
 Requires-Dist: protego>=0.5.0
 Requires-Dist: psutil>=6.0.0
-Requires-Dist: pydantic-settings
+Requires-Dist: pydantic-settings>=2.12.0
 Requires-Dist: pydantic>=2.11.0
 Requires-Dist: pyee>=9.0.0
 Requires-Dist: tldextract>=5.1.0
@@ -246,7 +247,7 @@ Requires-Dist: scikit-learn>=1.6.0; extra == 'adaptive-crawler'
 Provides-Extra: all
 Requires-Dist: aiosqlite>=0.21.0; extra == 'all'
 Requires-Dist: apify-fingerprint-datapoints>=0.0.2; extra == 'all'
-Requires-Dist: asyncpg>=0.24.0; extra == 'all'
+Requires-Dist: asyncpg>=0.24.0; (python_version < '3.14') and extra == 'all'
 Requires-Dist: beautifulsoup4[lxml]>=4.12.0; extra == 'all'
 Requires-Dist: browserforge>=1.2.3; extra == 'all'
 Requires-Dist: cookiecutter>=2.6.0; extra == 'all'
@@ -263,6 +264,7 @@ Requires-Dist: opentelemetry-sdk>=1.34.1; extra == 'all'
 Requires-Dist: opentelemetry-semantic-conventions>=0.54; extra == 'all'
 Requires-Dist: parsel>=1.10.0; extra == 'all'
 Requires-Dist: playwright>=1.27.0; extra == 'all'
+Requires-Dist: redis[hiredis]>=7.0.0; extra == 'all'
 Requires-Dist: rich>=13.9.0; extra == 'all'
 Requires-Dist: scikit-learn>=1.6.0; extra == 'all'
 Requires-Dist: sqlalchemy[asyncio]<3.0.0,>=2.0.0; extra == 'all'
@@ -296,8 +298,10 @@ Provides-Extra: playwright
 Requires-Dist: apify-fingerprint-datapoints>=0.0.2; extra == 'playwright'
 Requires-Dist: browserforge>=1.2.3; extra == 'playwright'
 Requires-Dist: playwright>=1.27.0; extra == 'playwright'
+Provides-Extra: redis
+Requires-Dist: redis[hiredis]>=7.0.0; extra == 'redis'
 Provides-Extra: sql-postgres
-Requires-Dist: asyncpg>=0.24.0; extra == 'sql-postgres'
+Requires-Dist: asyncpg>=0.24.0; (python_version < '3.14') and extra == 'sql-postgres'
 Requires-Dist: sqlalchemy[asyncio]<3.0.0,>=2.0.0; extra == 'sql-postgres'
 Provides-Extra: sql-sqlite
 Requires-Dist: aiosqlite>=0.21.0; extra == 'sql-sqlite'
@@ -99,7 +99,7 @@ apify run
 For running Crawlee code as an Actor on [Apify platform](https://apify.com/actors) you need to wrap the body of the main function of your crawler with `async with Actor`.
 
 :::info NOTE
-Adding `async with Actor` is the only important thing needed to run it on Apify platform as an Actor. It is needed to initialize your Actor (e.g. to set the correct storage implementation) and to correctly handle
+Adding `async with Actor` is the only important thing needed to run it on Apify platform as an Actor. It is needed to initialize your Actor (e.g. to set the correct storage implementation) and to correctly handle exiting the process.
 :::
 
 Let's look at the `BeautifulSoupCrawler` example from the [Quick start](../quick-start) guide:
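In practice, the note above amounts to wrapping the crawler's main coroutine in `async with Actor`. A minimal sketch, where the crawler choice and start URL are placeholders rather than content of the changed page:

```python
import asyncio

from apify import Actor

from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext


async def main() -> None:
    # `async with Actor` initializes the Actor (e.g. the storage implementation)
    # and correctly handles exiting the process.
    async with Actor:
        crawler = BeautifulSoupCrawler()

        @crawler.router.default_handler
        async def handler(context: BeautifulSoupCrawlingContext) -> None:
            await context.push_data({'url': context.request.url})

        await crawler.run(['https://crawlee.dev'])


if __name__ == '__main__':
    asyncio.run(main())
```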
{crawlee-1.0.2b4 → crawlee-1.0.5b21}/docs/examples/code_examples/using_browser_profiles_chrome.py
RENAMED
@@ -27,15 +27,13 @@ async def main() -> None:
 
     crawler = PlaywrightCrawler(
         headless=False,
-        # Use
-        browser_type='
+        # Use the installed Chrome browser
+        browser_type='chrome',
         # Disable fingerprints to preserve profile identity
         fingerprint_generator=None,
         # Set user data directory to temp folder
         user_data_dir=tmp_profile_dir,
         browser_launch_options={
-            # Use installed Chrome browser
-            'channel': 'chrome',
             # Slow down actions to mimic human behavior
             'slow_mo': 200,
             'args': [
@@ -18,8 +18,6 @@ Using browser profiles allows you to leverage existing login sessions, saved pas
 
 To run <ApiLink to="class/PlaywrightCrawler">`PlaywrightCrawler`</ApiLink> with your Chrome profile, you need to know the path to your profile files. You can find this information by entering `chrome://version/` as a URL in your Chrome browser. If you have multiple profiles, pay attention to the profile name - if you only have one profile, it's always `Default`.
 
-You also need to use the [`channel`](https://playwright.dev/python/docs/api/class-browsertype#browser-type-launch-option-channel) parameter in `browser_launch_options` to use the Chrome browser installed on your system instead of Playwright's Chromium.
-
 :::warning Profile access limitation
 Due to [Chrome's security policies](https://developer.chrome.com/blog/remote-debugging-port), automation cannot use your main browsing profile directly. The example copies your profile to a temporary location as a workaround.
 :::
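The warning above implies copying the profile before the crawl starts. A minimal sketch of such a copy step, assuming the default Chrome profile location on Linux (substitute the path reported by `chrome://version/`):

```python
import shutil
import tempfile
from pathlib import Path

# Assumed default Chrome profile location on Linux; adjust to the path from chrome://version/.
chrome_profile = Path.home() / '.config' / 'google-chrome' / 'Default'

# Copy the profile into a temporary directory so automation never touches the original.
tmp_profile_dir = Path(tempfile.mkdtemp(prefix='crawlee-chrome-profile-'))
shutil.copytree(
    chrome_profile,
    tmp_profile_dir / 'Default',
    ignore=shutil.ignore_patterns('Singleton*'),  # skip Chrome's lock files
    dirs_exist_ok=True,
)

# `tmp_profile_dir` can then be passed as `user_data_dir` to PlaywrightCrawler.
```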
@@ -291,7 +291,7 @@ Request loaders provide a subset of <ApiLink to="class/RequestQueue">`RequestQue
 
 - <ApiLink to="class/RequestLoader">`RequestLoader`</ApiLink> - Base interface for read-only access to a stream of requests, with capabilities like fetching the next request, marking as handled, and status checking.
 - <ApiLink to="class/RequestList">`RequestList`</ApiLink> - Lightweight in-memory implementation of `RequestLoader` for managing static lists of URLs.
-- <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink> -
+- <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink> - A specialized loader that reads URLs from XML and plain-text sitemaps following the [Sitemaps protocol](https://www.sitemaps.org/protocol.html) with filtering capabilities.
 
 ### Request managers
 
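For the `RequestList` entry above, a short sketch of feeding a static list into a crawler; the `to_tandem()` conversion and the `request_manager` argument reflect the request-manager concept that follows, but the exact call shapes should be treated as assumptions:

```python
import asyncio

from crawlee.crawlers import ParselCrawler
from crawlee.request_loaders import RequestList


async def main() -> None:
    # A static, in-memory list of start URLs.
    request_list = RequestList(['https://crawlee.dev', 'https://apify.com'])

    # Wrap the read-only loader in a tandem with the default request queue,
    # so newly discovered links can still be enqueued.
    request_manager = await request_list.to_tandem()

    crawler = ParselCrawler(request_manager=request_manager)
    await crawler.run()


if __name__ == '__main__':
    asyncio.run(main())
```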
@@ -25,7 +25,7 @@ Changing browser fingerprints can be a tedious job. Luckily, Crawlee provides th
 {PlaywrightDefaultFingerprintGenerator}
 </RunnableCodeBlock>
 
-In certain cases we want to narrow down the fingerprints used - e.g. specify a certain operating system, locale or browser. This is also possible with Crawlee - the crawler can have the generation algorithm customized to reflect the particular browser version and many more. For description of fingerprint generation options please see <ApiLink to="class/HeaderGeneratorOptions">`HeaderGeneratorOptions`</ApiLink>, <ApiLink to="class/ScreenOptions">`ScreenOptions`</ApiLink> and <ApiLink to="class/BrowserforgeFingerprintGenerator#__init__">`DefaultFingerprintGenerator.__init__`</ApiLink> See the example
+In certain cases we want to narrow down the fingerprints used - e.g. specify a certain operating system, locale or browser. This is also possible with Crawlee - the crawler can have the generation algorithm customized to reflect the particular browser version and many more. For description of fingerprint generation options please see <ApiLink to="class/HeaderGeneratorOptions">`HeaderGeneratorOptions`</ApiLink>, <ApiLink to="class/ScreenOptions">`ScreenOptions`</ApiLink> and <ApiLink to="class/BrowserforgeFingerprintGenerator#__init__">`DefaultFingerprintGenerator.__init__`</ApiLink> See the example below:
 
 <CodeBlock className="language-python">
 {PlaywrightDefaultFingerprintGeneratorWithArgs}
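The referenced `{PlaywrightDefaultFingerprintGeneratorWithArgs}` snippet is not part of this diff; a rough sketch of narrowing fingerprint generation follows, where the `crawlee.fingerprint_suite` import path and the option names are assumptions based on the linked API pages:

```python
from crawlee.crawlers import PlaywrightCrawler
from crawlee.fingerprint_suite import (
    DefaultFingerprintGenerator,
    HeaderGeneratorOptions,
    ScreenOptions,
)

# Narrow the generated fingerprints to Chromium-like browsers on Windows
# with a minimum screen width.
fingerprint_generator = DefaultFingerprintGenerator(
    header_options=HeaderGeneratorOptions(browsers=['chromium'], operating_systems=['windows']),
    screen_options=ScreenOptions(min_width=1280),
)

crawler = PlaywrightCrawler(fingerprint_generator=fingerprint_generator)
```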
crawlee-1.0.5b21/docs/guides/code_examples/storage_clients/redis_storage_client_basic_example.py
ADDED
@@ -0,0 +1,10 @@
+from crawlee.crawlers import ParselCrawler
+from crawlee.storage_clients import RedisStorageClient
+
+# Create a new instance of storage client using connection string.
+# 'redis://localhost:6379' is just a placeholder, replace it with your actual
+# connection string.
+storage_client = RedisStorageClient(connection_string='redis://localhost:6379')
+
+# And pass it to the crawler.
+crawler = ParselCrawler(storage_client=storage_client)
crawlee-1.0.5b21/docs/guides/code_examples/storage_clients/redis_storage_client_configuration_example.py
ADDED
@@ -0,0 +1,27 @@
+from redis.asyncio import Redis
+
+from crawlee.configuration import Configuration
+from crawlee.crawlers import ParselCrawler
+from crawlee.storage_clients import RedisStorageClient
+
+# Create a new instance of storage client using a Redis client with custom settings.
+# Replace host and port with your actual Redis server configuration.
+# Other Redis client settings can be adjusted as needed.
+storage_client = RedisStorageClient(
+    redis=Redis(
+        host='localhost',
+        port=6379,
+        retry_on_timeout=True,
+        socket_keepalive=True,
+        socket_connect_timeout=10,
+    )
+)
+
+# Create a configuration with custom settings.
+configuration = Configuration(purge_on_start=False)
+
+# And pass them to the crawler.
+crawler = ParselCrawler(
+    storage_client=storage_client,
+    configuration=configuration,
+)
@@ -31,7 +31,7 @@ The [`request_loaders`](https://github.com/apify/crawlee-python/tree/master/src/
 And specific request loader implementations:
 
 - <ApiLink to="class/RequestList">`RequestList`</ApiLink>: A lightweight implementation for managing a static list of URLs.
-- <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink>: A specialized loader that reads URLs from XML sitemaps with filtering capabilities.
+- <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink>: A specialized loader that reads URLs from XML and plain-text sitemaps following the [Sitemaps protocol](https://www.sitemaps.org/protocol.html) with filtering capabilities.
 
 Below is a class diagram that illustrates the relationships between these components and the <ApiLink to="class/RequestQueue">`RequestQueue`</ApiLink>:
 
@@ -130,7 +130,13 @@ To enable persistence, provide `persist_state_key` and optionally `persist_reque
 
 ### Sitemap request loader
 
-The <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink> is a specialized request loader that reads URLs from
+The <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink> is a specialized request loader that reads URLs from sitemaps following the [Sitemaps protocol](https://www.sitemaps.org/protocol.html). It supports both XML and plain text sitemap formats. It's particularly useful when you want to crawl a website systematically by following its sitemap structure.
+
+:::note
+The `SitemapRequestLoader` is designed specifically for sitemaps that follow the standard Sitemaps protocol. HTML pages containing links are not supported by this loader - those should be handled by regular crawlers using the `enqueue_links` functionality.
+:::
+
+The loader supports filtering URLs using glob patterns and regular expressions, allowing you to include or exclude specific types of URLs. The <ApiLink to="class/SitemapRequestLoader">`SitemapRequestLoader`</ApiLink> provides streaming processing of sitemaps, ensuring efficient memory usage without loading the entire sitemap into memory.
 
 <RunnableCodeBlock className="language-python" language="python">
 {SitemapExample}
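The runnable `{SitemapExample}` snippet itself is not included in this diff; the sketch below illustrates the glob filtering and streaming consumption described above, with constructor arguments that should be treated as assumptions:

```python
import asyncio

from crawlee import Glob
from crawlee.http_clients import ImpitHttpClient
from crawlee.request_loaders import SitemapRequestLoader


async def main() -> None:
    # Stream the sitemap and keep only documentation URLs via a glob pattern.
    sitemap_loader = SitemapRequestLoader(
        sitemap_urls=['https://crawlee.dev/sitemap.xml'],
        http_client=ImpitHttpClient(),
        include=[Glob('https://crawlee.dev/docs/**')],
    )

    # Consume the loader like any other request loader.
    while request := await sitemap_loader.fetch_next_request():
        print(request.url)
        await sitemap_loader.mark_request_as_handled(request)


if __name__ == '__main__':
    asyncio.run(main())
```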
@@ -17,6 +17,8 @@ import CustomStorageClientExample from '!!raw-loader!roa-loader!./code_examples/
 import RegisteringStorageClientsExample from '!!raw-loader!roa-loader!./code_examples/storage_clients/registering_storage_clients_example.py';
 import SQLStorageClientBasicExample from '!!raw-loader!roa-loader!./code_examples/storage_clients/sql_storage_client_basic_example.py';
 import SQLStorageClientConfigurationExample from '!!raw-loader!./code_examples/storage_clients/sql_storage_client_configuration_example.py';
+import RedisStorageClientBasicExample from '!!raw-loader!./code_examples/storage_clients/redis_storage_client_basic_example.py';
+import RedisStorageClientConfigurationExample from '!!raw-loader!./code_examples/storage_clients/redis_storage_client_configuration_example.py';
 
 Storage clients provide a unified interface for interacting with <ApiLink to="class/Dataset">`Dataset`</ApiLink>, <ApiLink to="class/KeyValueStore">`KeyValueStore`</ApiLink>, and <ApiLink to="class/RequestQueue">`RequestQueue`</ApiLink>, regardless of the underlying implementation. They handle operations like creating, reading, updating, and deleting storage instances, as well as managing data persistence and cleanup. This abstraction makes it easy to switch between different environments, such as local development and cloud production setups.
 
@@ -26,7 +28,8 @@ Crawlee provides three main storage client implementations:
 
 - <ApiLink to="class/FileSystemStorageClient">`FileSystemStorageClient`</ApiLink> - Provides persistent file system storage with in-memory caching.
 - <ApiLink to="class/MemoryStorageClient">`MemoryStorageClient`</ApiLink> - Stores data in memory with no persistence.
-- <ApiLink to="class/SqlStorageClient">`SqlStorageClient`</ApiLink>
+- <ApiLink to="class/SqlStorageClient">`SqlStorageClient`</ApiLink> - Provides persistent storage using a SQL database ([SQLite](https://sqlite.org/) or [PostgreSQL](https://www.postgresql.org/)). Requires installing the extra dependency: `crawlee[sql_sqlite]` for SQLite or `crawlee[sql_postgres]` for PostgreSQL.
+- <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> - Provides persistent storage using a [Redis](https://redis.io/) database v8.0+. Requires installing the extra dependency `crawlee[redis]`.
 - [`ApifyStorageClient`](https://docs.apify.com/sdk/python/reference/class/ApifyStorageClient) - Manages storage on the [Apify platform](https://apify.com), implemented in the [Apify SDK](https://github.com/apify/apify-sdk-python).
 
 ```mermaid
@@ -56,6 +59,8 @@ class MemoryStorageClient
 
 class SqlStorageClient
 
+class RedisStorageClient
+
 class ApifyStorageClient
 
 %% ========================
@@ -65,6 +70,7 @@ class ApifyStorageClient
 StorageClient --|> FileSystemStorageClient
 StorageClient --|> MemoryStorageClient
 StorageClient --|> SqlStorageClient
+StorageClient --|> RedisStorageClient
 StorageClient --|> ApifyStorageClient
 ```
 
@@ -304,8 +310,8 @@ Configuration options for the <ApiLink to="class/SqlStorageClient">`SqlStorageCl
 
 Configuration options for the <ApiLink to="class/SqlStorageClient">`SqlStorageClient`</ApiLink> can be set via constructor arguments:
 
-- **`connection_string`** (default: SQLite in <ApiLink to="class/Configuration">`Configuration`</ApiLink> storage dir)
-- **`engine`**
+- **`connection_string`** (default: SQLite in <ApiLink to="class/Configuration">`Configuration`</ApiLink> storage dir) - SQLAlchemy connection string, e.g. `sqlite+aiosqlite:///my.db` or `postgresql+asyncpg://user:pass@host/db`.
+- **`engine`** - Pre-configured SQLAlchemy AsyncEngine (optional).
 
 For advanced scenarios, you can configure <ApiLink to="class/SqlStorageClient">`SqlStorageClient`</ApiLink> with a custom SQLAlchemy engine and additional options via the <ApiLink to="class/Configuration">`Configuration`</ApiLink> class. This is useful, for example, when connecting to an external PostgreSQL database or customizing connection pooling.
 
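A minimal sketch of the `connection_string` option described above, mirroring the Redis examples added elsewhere in this diff; the SQLite URL is a placeholder:

```python
from crawlee.crawlers import ParselCrawler
from crawlee.storage_clients import SqlStorageClient

# Point the client at a local SQLite file; the URL is a placeholder, and omitting
# `connection_string` falls back to the default SQLite database in the storage dir.
storage_client = SqlStorageClient(connection_string='sqlite+aiosqlite:///crawlee.db')

# And pass it to the crawler.
crawler = ParselCrawler(storage_client=storage_client)
```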
@@ -313,6 +319,172 @@ For advanced scenarios, you can configure <ApiLink to="class/SqlStorageClient">`
 {SQLStorageClientConfigurationExample}
 </CodeBlock>
 
+### Redis storage client
+
+:::warning Experimental feature
+The <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> is experimental. Its API and behavior may change in future releases.
+:::
+
+The <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> provides persistent storage using [Redis](https://redis.io/) database. It supports concurrent access from multiple independent clients or processes and uses Redis native data structures for efficient operations.
+
+:::note dependencies
+The <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> is not included in the core Crawlee package.
+To use it, you need to install Crawlee with the Redis extra dependency:
+
+<code>pip install 'crawlee[redis]'</code>
+
+Additionally, Redis version 8.0 or higher is required.
+:::
+
+:::note Redis persistence
+Data persistence in Redis depends on your [database configuration](https://redis.io/docs/latest/operate/oss_and_stack/management/persistence/).
+:::
+
+The client requires either a Redis connection string or a pre-configured Redis client instance. Use a pre-configured client when you need custom Redis settings such as connection pooling, timeouts, or SSL/TLS encryption.
+
+<CodeBlock className="language-python" language="python">
+{RedisStorageClientBasicExample}
+</CodeBlock>
+
+Data is organized using Redis key patterns. Below are the main data structures used for each storage type:
+
+```mermaid
+---
+config:
+  class:
+    hideEmptyMembersBox: true
+---
+
+classDiagram
+
+%% ========================
+%% Storage Client
+%% ========================
+
+class RedisDatasetClient {
+    <<Dataset>>
+}
+
+%% ========================
+%% Dataset Keys
+%% ========================
+
+class DatasetKeys {
+    datasets:[name]:items - JSON Array
+    datasets:[name]:metadata - JSON Object
+}
+
+class DatasetsIndexes {
+    datasets:id_to_name - Hash
+    datasets:name_to_id - Hash
+}
+
+%% ========================
+%% Client to Keys arrows
+%% ========================
+
+RedisDatasetClient --> DatasetKeys
+RedisDatasetClient --> DatasetsIndexes
+```
+
+```mermaid
+---
+config:
+  class:
+    hideEmptyMembersBox: true
+---
+
+classDiagram
+
+%% ========================
+%% Storage Clients
+%% ========================
+
+class RedisKeyValueStoreClient {
+    <<Key-value store>>
+}
+
+%% ========================
+%% Key-Value Store Keys
+%% ========================
+
+class KeyValueStoreKeys {
+    key_value_stores:[name]:items - Hash
+    key_value_stores:[name]:metadata_items - Hash
+    key_value_stores:[name]:metadata - JSON Object
+}
+
+class KeyValueStoresIndexes {
+    key_value_stores:id_to_name - Hash
+    key_value_stores:name_to_id - Hash
+}
+
+%% ========================
+%% Client to Keys arrows
+%% ========================
+
+RedisKeyValueStoreClient --> KeyValueStoreKeys
+RedisKeyValueStoreClient --> KeyValueStoresIndexes
+```
+
+```mermaid
+---
+config:
+  class:
+    hideEmptyMembersBox: true
+---
+
+classDiagram
+
+%% ========================
+%% Storage Clients
+%% ========================
+
+class RedisRequestQueueClient {
+    <<Request queue>>
+}
+
+%% ========================
+%% Request Queue Keys
+%% ========================
+
+class RequestQueueKeys{
+    request_queues:[name]:queue - List
+    request_queues:[name]:data - Hash
+    request_queues:[name]:in_progress - Hash
+    request_queues:[name]:added_bloom_filter - Bloom Filter | bloom queue_dedup_strategy
+    request_queues:[name]:handled_bloom_filter - Bloom Filter | bloom queue_dedup_strategy
+    request_queues:[name]:pending_set - Set | default queue_dedup_strategy
+    request_queues:[name]:handled_set - Set | default queue_dedup_strategy
+    request_queues:[name]:metadata - JSON Object
+}
+
+class RequestQueuesIndexes {
+    request_queues:id_to_name - Hash
+    request_queues:name_to_id - Hash
+}
+
+%% ========================
+%% Client to Keys arrows
+%% ========================
+
+RedisRequestQueueClient --> RequestQueueKeys
+RedisRequestQueueClient --> RequestQueuesIndexes
+```
+
+Configuration options for the <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> can be set through environment variables or the <ApiLink to="class/Configuration">`Configuration`</ApiLink> class:
+
+- **`purge_on_start`** (env: `CRAWLEE_PURGE_ON_START`, default: `True`) - Whether to purge default storages on start.
+
+Configuration options for the <ApiLink to="class/RedisStorageClient">`RedisStorageClient`</ApiLink> can be set via constructor arguments:
+
+- **`connection_string`** - Redis connection string, e.g. `redis://localhost:6379/0`.
+- **`redis`** - Pre-configured Redis client instance (optional).
+
+<CodeBlock className="language-python" language="python">
+{RedisStorageClientConfigurationExample}
+</CodeBlock>
+
 ## Creating a custom storage client
 
 A storage client consists of two parts: the storage client factory and individual storage type clients. The <ApiLink to="class/StorageClient">`StorageClient`</ApiLink> acts as a factory that creates specific clients (<ApiLink to="class/DatasetClient">`DatasetClient`</ApiLink>, <ApiLink to="class/KeyValueStoreClient">`KeyValueStoreClient`</ApiLink>, <ApiLink to="class/RequestQueueClient">`RequestQueueClient`</ApiLink>) where the actual storage logic is implemented.
@@ -45,7 +45,7 @@ You can use different tools to consume the OpenTelemetry data that might better
 
 ## Customize the instrumentation
 
-You can customize the <ApiLink to="class/CrawlerInstrumentor">`CrawlerInstrumentor`</ApiLink>. Depending on the arguments used during its initialization, the instrumentation will be applied to different parts
+You can customize the <ApiLink to="class/CrawlerInstrumentor">`CrawlerInstrumentor`</ApiLink>. Depending on the arguments used during its initialization, the instrumentation will be applied to different parts of the Crawlee code. By default, it instruments some functions that can give quite a good picture of each individual request handling. To turn this default instrumentation off, you can pass `request_handling_instrumentation=False` during initialization. You can also extend instrumentation by passing `instrument_classes=[...]` initialization argument that contains classes you want to be auto-instrumented. All their public methods will be automatically instrumented. Bear in mind that instrumentation has some runtime costs as well. The more instrumentation is used, the more overhead it will add to the crawler execution.
 
 You can also create your instrumentation by selecting only the methods you want to instrument. For more details, see the <ApiLink to="class/CrawlerInstrumentor">`CrawlerInstrumentor`</ApiLink> source code and the [Python documentation for OpenTelemetry](https://opentelemetry.io/docs/languages/python/).
 
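A rough sketch of the initialization arguments mentioned above; the `crawlee.otel` import path and the OpenTelemetry console exporter are assumptions standing in for a real Jaeger/OTLP setup:

```python
from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

from crawlee.otel import CrawlerInstrumentor
from crawlee.storages import Dataset

# Plain OpenTelemetry setup; the console exporter stands in for a real collector.
provider = TracerProvider(resource=Resource.create({'service.name': 'crawlee'}))
provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

# Keep the default per-request instrumentation and additionally auto-instrument
# all public methods of the listed classes.
CrawlerInstrumentor(
    request_handling_instrumentation=True,
    instrument_classes=[Dataset],
).instrument()
```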