ob-metaflow 2.11.13.1__py2.py3-none-any.whl → 2.19.7.1rc0__py2.py3-none-any.whl
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- metaflow/R.py +10 -7
- metaflow/__init__.py +40 -25
- metaflow/_vendor/imghdr/__init__.py +186 -0
- metaflow/_vendor/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/typeguard/__init__.py +48 -0
- metaflow/_vendor/typeguard/_checkers.py +1070 -0
- metaflow/_vendor/typeguard/_config.py +108 -0
- metaflow/_vendor/typeguard/_decorators.py +233 -0
- metaflow/_vendor/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/typeguard/_functions.py +308 -0
- metaflow/_vendor/typeguard/_importhook.py +213 -0
- metaflow/_vendor/typeguard/_memo.py +48 -0
- metaflow/_vendor/typeguard/_pytest_plugin.py +127 -0
- metaflow/_vendor/typeguard/_suppression.py +86 -0
- metaflow/_vendor/typeguard/_transformer.py +1229 -0
- metaflow/_vendor/typeguard/_union_transformer.py +55 -0
- metaflow/_vendor/typeguard/_utils.py +173 -0
- metaflow/_vendor/typeguard/py.typed +0 -0
- metaflow/_vendor/typing_extensions.py +3641 -0
- metaflow/_vendor/v3_7/importlib_metadata/__init__.py +1063 -0
- metaflow/_vendor/v3_7/importlib_metadata/_adapters.py +68 -0
- metaflow/_vendor/v3_7/importlib_metadata/_collections.py +30 -0
- metaflow/_vendor/v3_7/importlib_metadata/_compat.py +71 -0
- metaflow/_vendor/v3_7/importlib_metadata/_functools.py +104 -0
- metaflow/_vendor/v3_7/importlib_metadata/_itertools.py +73 -0
- metaflow/_vendor/v3_7/importlib_metadata/_meta.py +48 -0
- metaflow/_vendor/v3_7/importlib_metadata/_text.py +99 -0
- metaflow/_vendor/v3_7/importlib_metadata/py.typed +0 -0
- metaflow/_vendor/v3_7/typeguard/__init__.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_checkers.py +906 -0
- metaflow/_vendor/v3_7/typeguard/_config.py +108 -0
- metaflow/_vendor/v3_7/typeguard/_decorators.py +237 -0
- metaflow/_vendor/v3_7/typeguard/_exceptions.py +42 -0
- metaflow/_vendor/v3_7/typeguard/_functions.py +310 -0
- metaflow/_vendor/v3_7/typeguard/_importhook.py +213 -0
- metaflow/_vendor/v3_7/typeguard/_memo.py +48 -0
- metaflow/_vendor/v3_7/typeguard/_pytest_plugin.py +100 -0
- metaflow/_vendor/v3_7/typeguard/_suppression.py +88 -0
- metaflow/_vendor/v3_7/typeguard/_transformer.py +1207 -0
- metaflow/_vendor/v3_7/typeguard/_union_transformer.py +54 -0
- metaflow/_vendor/v3_7/typeguard/_utils.py +169 -0
- metaflow/_vendor/v3_7/typeguard/py.typed +0 -0
- metaflow/_vendor/v3_7/typing_extensions.py +3072 -0
- metaflow/_vendor/yaml/__init__.py +427 -0
- metaflow/_vendor/yaml/composer.py +139 -0
- metaflow/_vendor/yaml/constructor.py +748 -0
- metaflow/_vendor/yaml/cyaml.py +101 -0
- metaflow/_vendor/yaml/dumper.py +62 -0
- metaflow/_vendor/yaml/emitter.py +1137 -0
- metaflow/_vendor/yaml/error.py +75 -0
- metaflow/_vendor/yaml/events.py +86 -0
- metaflow/_vendor/yaml/loader.py +63 -0
- metaflow/_vendor/yaml/nodes.py +49 -0
- metaflow/_vendor/yaml/parser.py +589 -0
- metaflow/_vendor/yaml/reader.py +185 -0
- metaflow/_vendor/yaml/representer.py +389 -0
- metaflow/_vendor/yaml/resolver.py +227 -0
- metaflow/_vendor/yaml/scanner.py +1435 -0
- metaflow/_vendor/yaml/serializer.py +111 -0
- metaflow/_vendor/yaml/tokens.py +104 -0
- metaflow/cards.py +5 -0
- metaflow/cli.py +331 -785
- metaflow/cli_args.py +17 -0
- metaflow/cli_components/__init__.py +0 -0
- metaflow/cli_components/dump_cmd.py +96 -0
- metaflow/cli_components/init_cmd.py +52 -0
- metaflow/cli_components/run_cmds.py +546 -0
- metaflow/cli_components/step_cmd.py +334 -0
- metaflow/cli_components/utils.py +140 -0
- metaflow/client/__init__.py +1 -0
- metaflow/client/core.py +467 -73
- metaflow/client/filecache.py +75 -35
- metaflow/clone_util.py +7 -1
- metaflow/cmd/code/__init__.py +231 -0
- metaflow/cmd/develop/stub_generator.py +756 -288
- metaflow/cmd/develop/stubs.py +12 -28
- metaflow/cmd/main_cli.py +6 -4
- metaflow/cmd/make_wrapper.py +78 -0
- metaflow/datastore/__init__.py +1 -0
- metaflow/datastore/content_addressed_store.py +41 -10
- metaflow/datastore/datastore_set.py +11 -2
- metaflow/datastore/flow_datastore.py +156 -10
- metaflow/datastore/spin_datastore.py +91 -0
- metaflow/datastore/task_datastore.py +154 -39
- metaflow/debug.py +5 -0
- metaflow/decorators.py +404 -78
- metaflow/exception.py +8 -2
- metaflow/extension_support/__init__.py +527 -376
- metaflow/extension_support/_empty_file.py +2 -2
- metaflow/extension_support/plugins.py +49 -31
- metaflow/flowspec.py +482 -33
- metaflow/graph.py +210 -42
- metaflow/includefile.py +84 -40
- metaflow/lint.py +141 -22
- metaflow/meta_files.py +13 -0
- metaflow/{metadata → metadata_provider}/heartbeat.py +24 -8
- metaflow/{metadata → metadata_provider}/metadata.py +86 -1
- metaflow/metaflow_config.py +175 -28
- metaflow/metaflow_config_funcs.py +51 -3
- metaflow/metaflow_current.py +4 -10
- metaflow/metaflow_environment.py +139 -53
- metaflow/metaflow_git.py +115 -0
- metaflow/metaflow_profile.py +18 -0
- metaflow/metaflow_version.py +150 -66
- metaflow/mflog/__init__.py +4 -3
- metaflow/mflog/save_logs.py +2 -2
- metaflow/multicore_utils.py +31 -14
- metaflow/package/__init__.py +673 -0
- metaflow/packaging_sys/__init__.py +880 -0
- metaflow/packaging_sys/backend.py +128 -0
- metaflow/packaging_sys/distribution_support.py +153 -0
- metaflow/packaging_sys/tar_backend.py +99 -0
- metaflow/packaging_sys/utils.py +54 -0
- metaflow/packaging_sys/v1.py +527 -0
- metaflow/parameters.py +149 -28
- metaflow/plugins/__init__.py +74 -5
- metaflow/plugins/airflow/airflow.py +40 -25
- metaflow/plugins/airflow/airflow_cli.py +22 -5
- metaflow/plugins/airflow/airflow_decorator.py +1 -1
- metaflow/plugins/airflow/airflow_utils.py +5 -3
- metaflow/plugins/airflow/sensors/base_sensor.py +4 -4
- metaflow/plugins/airflow/sensors/external_task_sensor.py +2 -2
- metaflow/plugins/airflow/sensors/s3_sensor.py +2 -2
- metaflow/plugins/argo/argo_client.py +78 -33
- metaflow/plugins/argo/argo_events.py +6 -6
- metaflow/plugins/argo/argo_workflows.py +2410 -527
- metaflow/plugins/argo/argo_workflows_cli.py +571 -121
- metaflow/plugins/argo/argo_workflows_decorator.py +43 -12
- metaflow/plugins/argo/argo_workflows_deployer.py +106 -0
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +453 -0
- metaflow/plugins/argo/capture_error.py +73 -0
- metaflow/plugins/argo/conditional_input_paths.py +35 -0
- metaflow/plugins/argo/exit_hooks.py +209 -0
- metaflow/plugins/argo/jobset_input_paths.py +15 -0
- metaflow/plugins/argo/param_val.py +19 -0
- metaflow/plugins/aws/aws_client.py +10 -3
- metaflow/plugins/aws/aws_utils.py +55 -2
- metaflow/plugins/aws/batch/batch.py +72 -5
- metaflow/plugins/aws/batch/batch_cli.py +33 -10
- metaflow/plugins/aws/batch/batch_client.py +4 -3
- metaflow/plugins/aws/batch/batch_decorator.py +102 -35
- metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py +13 -10
- metaflow/plugins/aws/step_functions/dynamo_db_client.py +0 -3
- metaflow/plugins/aws/step_functions/production_token.py +1 -1
- metaflow/plugins/aws/step_functions/step_functions.py +65 -8
- metaflow/plugins/aws/step_functions/step_functions_cli.py +101 -7
- metaflow/plugins/aws/step_functions/step_functions_decorator.py +1 -2
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +97 -0
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +264 -0
- metaflow/plugins/azure/azure_exceptions.py +1 -1
- metaflow/plugins/azure/azure_secret_manager_secrets_provider.py +240 -0
- metaflow/plugins/azure/azure_tail.py +1 -1
- metaflow/plugins/azure/includefile_support.py +2 -0
- metaflow/plugins/cards/card_cli.py +66 -30
- metaflow/plugins/cards/card_creator.py +25 -1
- metaflow/plugins/cards/card_datastore.py +21 -49
- metaflow/plugins/cards/card_decorator.py +132 -8
- metaflow/plugins/cards/card_modules/basic.py +112 -17
- metaflow/plugins/cards/card_modules/bundle.css +1 -1
- metaflow/plugins/cards/card_modules/card.py +16 -1
- metaflow/plugins/cards/card_modules/chevron/renderer.py +1 -1
- metaflow/plugins/cards/card_modules/components.py +665 -28
- metaflow/plugins/cards/card_modules/convert_to_native_type.py +36 -7
- metaflow/plugins/cards/card_modules/json_viewer.py +232 -0
- metaflow/plugins/cards/card_modules/main.css +1 -0
- metaflow/plugins/cards/card_modules/main.js +68 -49
- metaflow/plugins/cards/card_modules/renderer_tools.py +1 -0
- metaflow/plugins/cards/card_modules/test_cards.py +26 -12
- metaflow/plugins/cards/card_server.py +39 -14
- metaflow/plugins/cards/component_serializer.py +2 -9
- metaflow/plugins/cards/metadata.py +22 -0
- metaflow/plugins/catch_decorator.py +9 -0
- metaflow/plugins/datastores/azure_storage.py +10 -1
- metaflow/plugins/datastores/gs_storage.py +6 -2
- metaflow/plugins/datastores/local_storage.py +12 -6
- metaflow/plugins/datastores/spin_storage.py +12 -0
- metaflow/plugins/datatools/local.py +2 -0
- metaflow/plugins/datatools/s3/s3.py +126 -75
- metaflow/plugins/datatools/s3/s3op.py +254 -121
- metaflow/plugins/env_escape/__init__.py +3 -3
- metaflow/plugins/env_escape/client_modules.py +102 -72
- metaflow/plugins/env_escape/server.py +7 -0
- metaflow/plugins/env_escape/stub.py +24 -5
- metaflow/plugins/events_decorator.py +343 -185
- metaflow/plugins/exit_hook/__init__.py +0 -0
- metaflow/plugins/exit_hook/exit_hook_decorator.py +46 -0
- metaflow/plugins/exit_hook/exit_hook_script.py +52 -0
- metaflow/plugins/gcp/__init__.py +1 -1
- metaflow/plugins/gcp/gcp_secret_manager_secrets_provider.py +11 -6
- metaflow/plugins/gcp/gs_tail.py +10 -6
- metaflow/plugins/gcp/includefile_support.py +3 -0
- metaflow/plugins/kubernetes/kube_utils.py +108 -0
- metaflow/plugins/kubernetes/kubernetes.py +411 -130
- metaflow/plugins/kubernetes/kubernetes_cli.py +168 -36
- metaflow/plugins/kubernetes/kubernetes_client.py +104 -2
- metaflow/plugins/kubernetes/kubernetes_decorator.py +246 -88
- metaflow/plugins/kubernetes/kubernetes_job.py +253 -581
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +1071 -0
- metaflow/plugins/kubernetes/spot_metadata_cli.py +69 -0
- metaflow/plugins/kubernetes/spot_monitor_sidecar.py +109 -0
- metaflow/plugins/logs_cli.py +359 -0
- metaflow/plugins/{metadata → metadata_providers}/local.py +144 -84
- metaflow/plugins/{metadata → metadata_providers}/service.py +103 -26
- metaflow/plugins/metadata_providers/spin.py +16 -0
- metaflow/plugins/package_cli.py +36 -24
- metaflow/plugins/parallel_decorator.py +128 -11
- metaflow/plugins/parsers.py +16 -0
- metaflow/plugins/project_decorator.py +51 -5
- metaflow/plugins/pypi/bootstrap.py +357 -105
- metaflow/plugins/pypi/conda_decorator.py +82 -81
- metaflow/plugins/pypi/conda_environment.py +187 -52
- metaflow/plugins/pypi/micromamba.py +157 -47
- metaflow/plugins/pypi/parsers.py +268 -0
- metaflow/plugins/pypi/pip.py +88 -13
- metaflow/plugins/pypi/pypi_decorator.py +37 -1
- metaflow/plugins/pypi/utils.py +48 -2
- metaflow/plugins/resources_decorator.py +2 -2
- metaflow/plugins/secrets/__init__.py +3 -0
- metaflow/plugins/secrets/secrets_decorator.py +26 -181
- metaflow/plugins/secrets/secrets_func.py +49 -0
- metaflow/plugins/secrets/secrets_spec.py +101 -0
- metaflow/plugins/secrets/utils.py +74 -0
- metaflow/plugins/tag_cli.py +4 -7
- metaflow/plugins/test_unbounded_foreach_decorator.py +41 -6
- metaflow/plugins/timeout_decorator.py +3 -3
- metaflow/plugins/uv/__init__.py +0 -0
- metaflow/plugins/uv/bootstrap.py +128 -0
- metaflow/plugins/uv/uv_environment.py +72 -0
- metaflow/procpoll.py +1 -1
- metaflow/pylint_wrapper.py +5 -1
- metaflow/runner/__init__.py +0 -0
- metaflow/runner/click_api.py +717 -0
- metaflow/runner/deployer.py +470 -0
- metaflow/runner/deployer_impl.py +201 -0
- metaflow/runner/metaflow_runner.py +714 -0
- metaflow/runner/nbdeploy.py +132 -0
- metaflow/runner/nbrun.py +225 -0
- metaflow/runner/subprocess_manager.py +650 -0
- metaflow/runner/utils.py +335 -0
- metaflow/runtime.py +1078 -260
- metaflow/sidecar/sidecar_worker.py +1 -1
- metaflow/system/__init__.py +5 -0
- metaflow/system/system_logger.py +85 -0
- metaflow/system/system_monitor.py +108 -0
- metaflow/system/system_utils.py +19 -0
- metaflow/task.py +521 -225
- metaflow/tracing/__init__.py +7 -7
- metaflow/tracing/span_exporter.py +31 -38
- metaflow/tracing/tracing_modules.py +38 -43
- metaflow/tuple_util.py +27 -0
- metaflow/user_configs/__init__.py +0 -0
- metaflow/user_configs/config_options.py +563 -0
- metaflow/user_configs/config_parameters.py +598 -0
- metaflow/user_decorators/__init__.py +0 -0
- metaflow/user_decorators/common.py +144 -0
- metaflow/user_decorators/mutable_flow.py +512 -0
- metaflow/user_decorators/mutable_step.py +424 -0
- metaflow/user_decorators/user_flow_decorator.py +264 -0
- metaflow/user_decorators/user_step_decorator.py +749 -0
- metaflow/util.py +243 -27
- metaflow/vendor.py +23 -7
- metaflow/version.py +1 -1
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Makefile +355 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/Tiltfile +726 -0
- ob_metaflow-2.19.7.1rc0.data/data/share/metaflow/devtools/pick_services.sh +105 -0
- ob_metaflow-2.19.7.1rc0.dist-info/METADATA +87 -0
- ob_metaflow-2.19.7.1rc0.dist-info/RECORD +445 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/WHEEL +1 -1
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/entry_points.txt +1 -0
- metaflow/_vendor/v3_5/__init__.py +0 -1
- metaflow/_vendor/v3_5/importlib_metadata/__init__.py +0 -644
- metaflow/_vendor/v3_5/importlib_metadata/_compat.py +0 -152
- metaflow/package.py +0 -188
- ob_metaflow-2.11.13.1.dist-info/METADATA +0 -85
- ob_metaflow-2.11.13.1.dist-info/RECORD +0 -308
- /metaflow/_vendor/{v3_5/zipp.py → zipp.py} +0 -0
- /metaflow/{metadata → metadata_provider}/__init__.py +0 -0
- /metaflow/{metadata → metadata_provider}/util.py +0 -0
- /metaflow/plugins/{metadata → metadata_providers}/__init__.py +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info/licenses}/LICENSE +0 -0
- {ob_metaflow-2.11.13.1.dist-info → ob_metaflow-2.19.7.1rc0.dist-info}/top_level.txt +0 -0
metaflow/extension_support/__init__.py
@@ -1,7 +1,6 @@
 from __future__ import print_function
 
 import importlib
-import json
 import os
 import re
 import sys
@@ -11,6 +10,12 @@ from collections import defaultdict, namedtuple
 
 from importlib.abc import MetaPathFinder, Loader
 from itertools import chain
+from pathlib import Path
+from typing import Any, Dict
+
+from metaflow.meta_files import read_info_file
+from metaflow.util import walk_without_cycles
+
 
 #
 # This file provides the support for Metaflow's extension mechanism which allows
@@ -59,6 +64,9 @@ __all__ = (
     "load_module",
     "get_modules",
     "dump_module_info",
+    "get_extensions_in_dir",
+    "extension_info",
+    "update_package_info",
     "get_aliased_modules",
     "package_mfext_package",
     "package_mfext_all",
@@ -76,13 +84,19 @@ EXT_CONFIG_REGEXP = re.compile(r"^mfextinit_[a-zA-Z0-9_-]+\.py$")
 EXT_META_REGEXP = re.compile(r"^mfextmeta_[a-zA-Z0-9_-]+\.py$")
 REQ_NAME = re.compile(r"^(([a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])|[a-zA-Z0-9]).*$")
 EXT_EXCLUDE_SUFFIXES = [".pyc"]
+FINDER_TRANS = str.maketrans(".-", "__")
 
 # To get verbose messages, set METAFLOW_DEBUG_EXT to 1
 DEBUG_EXT = os.environ.get("METAFLOW_DEBUG_EXT", False)
 
+# This is extracted only from environment variable and here separately from
+# metaflow_config to prevent nasty circular dependencies
+EXTENSIONS_SEARCH_DIRS = os.environ.get("METAFLOW_EXTENSIONS_SEARCH_DIRS", "").split(
+    os.pathsep
+)
 
 MFExtPackage = namedtuple("MFExtPackage", "package_name tl_package config_module")
-MFExtModule = namedtuple("MFExtModule", "tl_package module")
+MFExtModule = namedtuple("MFExtModule", "package_name tl_package module")
 
 
 def load_module(module_name):
@@ -92,9 +106,6 @@ def load_module(module_name):
 
 def get_modules(extension_point):
     modules_to_load = []
-    if not _mfext_supported:
-        _ext_debug("Not supported for your Python version -- 3.4+ is needed")
-        return []
     if extension_point not in _extension_points:
         raise RuntimeError(
             "Metaflow extension point '%s' not supported" % extension_point
@@ -113,17 +124,61 @@ def get_modules(extension_point):
     return modules_to_load
 
 
-def dump_module_info():
-
+def dump_module_info(all_packages=None, pkgs_per_extension_point=None):
+    if all_packages is None:
+        all_packages = _all_packages
+    if pkgs_per_extension_point is None:
+        pkgs_per_extension_point = _pkgs_per_extension_point
+
     sanitized_all_packages = dict()
     # Strip out root_paths (we don't need it and no need to expose user's dir structure)
-    for k, v in _all_packages.items():
+    for k, v in all_packages.items():
         sanitized_all_packages[k] = {
             "root_paths": None,
             "meta_module": v["meta_module"],
             "files": v["files"],
+            "full_path_files": None,
+            "version": v["version"],
+            "package_version": v.get("package_version", "<unk>"),
+            "extension_name": v.get("extension_name", "<unk>"),
         }
-    return "ext_info", [sanitized_all_packages, _pkgs_per_extension_point]
+    return "ext_info", [sanitized_all_packages, pkgs_per_extension_point]
+
+
+def get_extensions_in_dir(d):
+    return _get_extension_packages(ignore_info_file=True, restrict_to_directories=[d])
+
+
+def extension_info(packages=None):
+    if packages is None:
+        packages = _all_packages
+    # Returns information about installed extensions so it it can be stored in
+    # _graph_info.
+    return {
+        "installed": {
+            k: {
+                "dist_version": v["version"],
+                "package_version": v.get("package_version", "<unk>"),
+                "extension_name": v.get("extension_name", "<unk>"),
+            }
+            for k, v in packages.items()
+        },
+    }
+
+
+def update_package_info(pkg_to_update=None, package_name=None, **kwargs):
+    pkg = None
+    if pkg_to_update:
+        pkg = pkg_to_update
+    elif package_name:
+        pkg = _all_packages.get(package_name)
+    for k, v in kwargs.items():
+        if k in pkg:
+            raise ValueError(
+                "Trying to overwrite existing key '%s' for package %s" % (k, str(pkg))
+            )
+        pkg[k] = v
+    return pkg
 
 
 def get_aliased_modules():
@@ -134,18 +189,26 @@ def package_mfext_package(package_name):
     from metaflow.util import to_unicode
 
     _ext_debug("Packaging '%s'" % package_name)
-    _filter_files_package(package_name)
     pkg_info = _all_packages.get(package_name, None)
+
     if pkg_info and pkg_info.get("root_paths", None):
-        single_path = len(pkg_info["root_paths"]) == 1
-        for p in pkg_info["root_paths"]:
-            root_path = to_unicode(p)
-            for f in pkg_info["files"]:
-                f_unicode = to_unicode(f)
-                fp = os.path.join(root_path, f_unicode)
-                if single_path or os.path.isfile(fp):
-                    _ext_debug("    Adding '%s'" % fp)
-                    yield fp, os.path.join(EXT_PKG, f_unicode)
+        if pkg_info["full_path_files"]:
+            # Case for initial packaging
+            for f, short_name in zip(pkg_info["full_path_files"], pkg_info["files"]):
+                f_unicode = os.path.join(EXT_PKG, to_unicode(short_name))
+                _ext_debug("    Adding '%s' as '%s'" % (f, f_unicode))
+                yield f, f_unicode
+        else:
+            # When re-packaging (ie: packaging Metaflow from a Metaflow run):
+            single_path = len(pkg_info["root_paths"]) == 1
+            for p in pkg_info["root_paths"]:
+                root_path = to_unicode(p)
+                for f in pkg_info["files"]:
+                    f_unicode = to_unicode(f)
+                    fp = os.path.join(root_path, f_unicode)
+                    if single_path or os.path.isfile(fp):
+                        _ext_debug("    Adding '%s'" % fp)
+                        yield fp, os.path.join(EXT_PKG, f_unicode)
 
 
 def package_mfext_all():
@@ -153,13 +216,17 @@ def package_mfext_all():
     # the packaged metaflow_extensions directory "self-contained" so that
     # python doesn't go and search other parts of the system for more
     # metaflow_extensions.
-    yield os.path.join(
-        os.path.dirname(os.path.abspath(__file__)), "_empty_file.py"
-    ), os.path.join(EXT_PKG, "__init__.py")
+    if _all_packages:
+        yield os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), "_empty_file.py"
+        ), os.path.join(EXT_PKG, "__init__.py")
 
     for p in _all_packages:
-
-
+        yield from package_mfext_package(p)
+
+
+def package_mfext_all_descriptions():
+    return _all_packages
 
 
 def load_globals(module, dst_globals, extra_indent=False):
@@ -254,19 +321,16 @@ def multiload_all(modules, extension_point, dst_globals):
 
 
 _py_ver = sys.version_info[:2]
-_mfext_supported = False
 _aliased_modules = []
 
-
-    import importlib.util
+import importlib.util
 
-
-
-
-
-
-
-    _mfext_supported = True
+if _py_ver >= (3, 8):
+    from importlib import metadata
+elif _py_ver >= (3, 7):
+    from metaflow._vendor.v3_7 import importlib_metadata as metadata
+else:
+    from metaflow._vendor.v3_6 import importlib_metadata as metadata
 
 # Extension points are the directories that can be present in a EXT_PKG to
 # contribute to that extension point. For example, if you have
@@ -294,41 +358,41 @@ def _ext_debug(*args, **kwargs):
     print(init_str, *args, **kwargs)
 
 
-def _get_extension_packages():
-    if not _mfext_supported:
-        _ext_debug("Not supported for your Python version -- 3.4+ is needed")
-        return {}, {}
-
+def _get_extension_packages(ignore_info_file=False, restrict_to_directories=None):
    # If we have an INFO file with the appropriate information (if running from a saved
    # code package for example), we use that directly
    # Pre-compute on _extension_points
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    info_content = read_info_file()
+    if not ignore_info_file and info_content:
+        all_pkg, ext_to_pkg = info_content.get("ext_info", (None, None))
+        if all_pkg is not None and ext_to_pkg is not None:
+            _ext_debug("Loading pre-computed information from INFO file")
+            # We need to properly convert stuff in ext_to_pkg
+            for k, v in ext_to_pkg.items():
+                v = [MFExtPackage(*d) for d in v]
+                ext_to_pkg[k] = v
+            return all_pkg, ext_to_pkg
+
+    # Late import to prevent some circular nastiness
+    if restrict_to_directories is None and EXTENSIONS_SEARCH_DIRS != [""]:
+        restrict_to_directories = EXTENSIONS_SEARCH_DIRS
 
     # Check if we even have extensions
     try:
         extensions_module = importlib.import_module(EXT_PKG)
     except ImportError as e:
-
-
-
-
-
-                raise
+        # e.name is set to the name of the package that fails to load
+        # so don't error ONLY IF the error is importing this module (but do
+        # error if there is a transitive import error)
+        if not (isinstance(e, ModuleNotFoundError) and e.name == EXT_PKG):
+            raise
         return {}, {}
 
+    if restrict_to_directories:
+        restrict_to_directories = [
+            Path(p).resolve().as_posix() for p in restrict_to_directories
+        ]
+
     # There are two "types" of packages:
     # - those installed on the system (distributions)
     # - those present in the PYTHONPATH
@@ -341,8 +405,85 @@ def _get_extension_packages():
     # At this point, we look at all the paths and create a set. As we find distributions
     # that match it, we will remove from the set and then will be left with any
     # PYTHONPATH "packages"
-    all_paths = set(
+    all_paths = set()
+    # Records which finders provided which paths if applicable
+    # This is then later used to determine which paths belong
+    # to which distribution
+    finders_to_paths = dict()
+
+    # Temporary variables to support the loop below and make sure we loop through all
+    # the paths in the submodule_search_locations including calling the path hooks.
+    # We could skip calling things on the path hooks since the module was just imported
+    # by importlib so the values are probably already in submodule_search_locations but
+    # there may be cases where we need to call multiple times. This also allows us to tie
+    # the finders (ie: the path hooks) back to the distribution since they share a name.
+    # This is useful in knowing which paths we consider as belonging to a distribution so
+    # we know which order to load it in.
+    seen_path_values = set()
+    new_paths = extensions_module.__spec__.submodule_search_locations
+    _ext_debug("Found initial paths: %s" % str(new_paths))
+    while new_paths:
+        paths = new_paths
+        new_paths = []
+        for p in paths:
+            if p in seen_path_values:
+                continue
+            if os.path.isdir(p):
+                all_paths.add(Path(p).resolve().as_posix())
+            elif p in sys.path_importer_cache:
+                # We have a path hook that we likely need to call to get the actual path
+                addl_spec = sys.path_importer_cache[p].find_spec(EXT_PKG)
+                if addl_spec is not None and addl_spec.submodule_search_locations:
+                    new_paths.extend(addl_spec.submodule_search_locations)
+                    # Remove .__path_hook__ and add .py to match the name of the file
+                    # installed by the distribution
+                    finder_name = p[:-14].translate(FINDER_TRANS) + ".py"
+                    new_dirs = [
+                        d
+                        for d in addl_spec.submodule_search_locations
+                        if os.path.isdir(d)
+                    ]
+                    _ext_debug(
+                        "Finder %s added directories %s"
+                        % (finder_name, ", ".join(new_dirs))
+                    )
+                    finders_to_paths.setdefault(finder_name, []).extend(new_dirs)
+            else:
+                # This may not be as required since it is likely the importer cache has
+                # everything already but just in case, we will also go through the
+                # path hooks and see if we find another one
+                for path_hook in sys.path_hooks:
+                    try:
+                        finder = path_hook(p)
+                        addl_spec = finder.find_spec(EXT_PKG)
+                        if (
+                            addl_spec is not None
+                            and addl_spec.submodule_search_locations
+                        ):
+                            finder_name = p[:-14].translate(FINDER_TRANS) + ".py"
+                            new_dirs = [
+                                d
+                                for d in addl_spec.submodule_search_locations
+                                if os.path.isdir(d)
+                            ]
+                            _ext_debug(
+                                "Finder (through hooks) %s added directories %s"
+                                % (finder_name, ", ".join(new_dirs))
+                            )
+                            finders_to_paths.setdefault(finder_name, []).extend(
+                                new_dirs
+                            )
+                            new_paths.extend(addl_spec.submodule_search_locations)
+                            break
+                    except ImportError:
+                        continue
+            seen_path_values.add(p)
+
     _ext_debug("Found packages present at %s" % str(all_paths))
+    if restrict_to_directories:
+        _ext_debug(
+            "Processed packages will be restricted to %s" % str(restrict_to_directories)
+        )
 
     list_ext_points = [x.split(".") for x in _extension_points]
     init_ext_points = [x[0] for x in list_ext_points]
@@ -379,19 +520,158 @@ def _get_extension_packages():
     # Same as config_to_pkg for meta files
     meta_to_pkg = defaultdict(list)
 
+    # The file passed to process_file has EXT_PKG as the first component
+    # root_dir also has EXT_PKG as the last component
+    def process_file(state: Dict[str, Any], root_dir: str, file: str):
+        parts = file.split("/")
+
+        if len(parts) > 1 and parts[0] == EXT_PKG:
+            # Check for top-level files (ie: meta file which specifies how to package
+            # the extension and __init__.py file)
+            if len(parts) == 2:
+                # Ensure that we don't have a __init__.py to force this package to
+                # be a NS package
+                if parts[1] == "__init__.py":
+                    raise RuntimeError(
+                        "Package '%s' providing '%s' is not an implicit namespace "
+                        "package as required" % (state["name"], EXT_PKG)
+                    )
+                # Check for any metadata; we can only have one metadata per
+                # distribution at most
+                if EXT_META_REGEXP.match(parts[1]) is not None:
+                    potential_meta_module = ".".join([EXT_PKG, parts[1][:-3]])
+                    if state["meta_module"]:
+                        raise RuntimeError(
+                            "Package '%s' defines more than one meta configuration: "
+                            "'%s' and '%s' (at least)"
+                            % (
+                                state["name"],
+                                state["meta_module"],
+                                potential_meta_module,
+                            )
+                        )
+                    state["meta_module"] = potential_meta_module
+                    _ext_debug(
+                        "Found meta '%s' for '%s'"
+                        % (state["meta_module"], state["name"])
+                    )
+                    meta_to_pkg[state["meta_module"]].append(state["name"])
+
+            # Record the file as a candidate for inclusion when packaging if
+            # needed
+            if not any(parts[-1].endswith(suffix) for suffix in EXT_EXCLUDE_SUFFIXES):
+                # Strip out metaflow_extensions from the file
+                state["files"].append(os.path.join(*parts[1:]))
+                state["full_path_files"].append(os.path.join(root_dir, *parts[1:]))
+
+            if parts[1] in init_ext_points:
+                # This is most likely a problem as we need an intermediate
+                # "identifier"
+                raise RuntimeError(
+                    "Package '%s' should conform to '%s.X.%s' and not '%s.%s' where "
+                    "X is your organization's name for example"
+                    % (
+                        state["name"],
+                        EXT_PKG,
+                        parts[1],
+                        EXT_PKG,
+                        parts[1],
+                    )
+                )
+
+        if len(parts) > 3 and parts[0] == EXT_PKG:
+            # We go over _extension_points *in order* to make sure we get more
+            # specific paths first
+
+            # To give useful errors in case multiple top-level packages in
+            # one package
+            dist_full_name = "%s[%s]" % (state["name"], parts[1])
+            for idx, ext_list in enumerate(list_ext_points):
+                if (
+                    len(parts) > len(ext_list) + 2
+                    and parts[2 : 2 + len(ext_list)] == ext_list
+                ):
+                    # Check if this is an "init" file
+                    config_module = None
+
+                    if len(parts) == len(ext_list) + 3 and (
+                        EXT_CONFIG_REGEXP.match(parts[-1]) is not None
+                        or parts[-1] == "__init__.py"
+                    ):
+                        parts[-1] = parts[-1][:-3]  # Remove the .py
+                        config_module = ".".join(parts)

+                        config_to_pkg[config_module].append(dist_full_name)
+                    cur_pkg = (
+                        extension_points_to_pkg[_extension_points[idx]]
+                        .setdefault(state["name"], {})
+                        .get(parts[1])
+                    )
+                    if cur_pkg is not None:
+                        if (
+                            config_module is not None
+                            and cur_pkg.config_module is not None
+                        ):
+                            raise RuntimeError(
+                                "Package '%s' defines more than one "
+                                "configuration file for '%s': '%s' and '%s'"
+                                % (
+                                    dist_full_name,
+                                    _extension_points[idx],
+                                    config_module,
+                                    cur_pkg.config_module,
+                                )
+                            )
+                        if config_module is not None:
+                            _ext_debug(
+                                "    Top-level '%s' found config file '%s'"
+                                % (parts[1], config_module)
+                            )
+                            extension_points_to_pkg[_extension_points[idx]][
+                                state["name"]
+                            ][parts[1]] = MFExtPackage(
+                                package_name=state["name"],
+                                tl_package=parts[1],
+                                config_module=config_module,
+                            )
+                    else:
+                        _ext_debug(
+                            "    Top-level '%s' extends '%s' with config '%s'"
+                            % (parts[1], _extension_points[idx], config_module)
+                        )
+                        extension_points_to_pkg[_extension_points[idx]][state["name"]][
+                            parts[1]
+                        ] = MFExtPackage(
+                            package_name=state["name"],
+                            tl_package=parts[1],
+                            config_module=config_module,
+                        )
+                    break
+
     # 1st step: look for distributions (the common case)
     for dist in metadata.distributions():
         if any(
             [pkg == EXT_PKG for pkg in (dist.read_text("top_level.txt") or "").split()]
         ):
-            #
-            #
-            #
-            #
-
-
-
+            # Note that locate_file does not actually make sure the file exists. It just
+            # appends whatever you pass in to locate_file to the folder containing the
+            # metadata for the distribution. We will therefore check if we are actually
+            # seeing files in that directory using has_file_in_dist_root.
+            dist_root = dist.locate_file(EXT_PKG).resolve().as_posix()
+            all_roots = []
+            has_file_in_dist_root = False
             dist_name = dist.metadata["Name"]
+            dist_version = dist.metadata["Version"]
+            if restrict_to_directories:
+                parent_dirs = list(
+                    p.as_posix() for p in Path(dist_root).resolve().parents
+                )
+                if all(p not in parent_dirs for p in restrict_to_directories):
+                    _ext_debug(
+                        "Ignoring package at %s as it is not in the considered directories"
+                        % dist_root
+                    )
+                    continue
             if dist_name in mf_ext_packages:
                 _ext_debug(
                     "Ignoring duplicate package '%s' (duplicate paths in sys.path? (%s))"
@@ -399,143 +679,88 @@ def _get_extension_packages():
                 )
                 continue
             _ext_debug(
-                "Found extension package '%s' at '%s'..."
+                "Found extension package '%s' at presumptive path '%s'..."
+                % (dist_name, dist_root)
             )
 
-
-
-
+            state = {
+                "name": dist_name,
+                "files": [],
+                "full_path_files": [],
+                "meta_module": None,  # Meta information about the package (if applicable)
+            }
+            addl_dirs = []
             # At this point, we check to see what extension points this package
             # contributes to. This is to enable multiple namespace packages to contribute
             # to the same extension point (for example, you may have multiple packages
             # that have plugins)
-            for f in dist.files:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                            )
-                        )
-
-                    # Check for any metadata; we can only have one metadata per
-                    # distribution at most
-                    if EXT_META_REGEXP.match(parts[1]) is not None:
-                        potential_meta_module = ".".join([EXT_PKG, parts[1][:-3]])
-                        if meta_module:
-                            raise RuntimeError(
-                                "Package '%s' defines more than one meta configuration: "
-                                "'%s' and '%s' (at least)"
-                                % (
-                                    dist_name,
-                                    meta_module,
-                                    potential_meta_module,
-                                )
-                            )
-                        meta_module = potential_meta_module
+            for f in dist.files or []:
+                if f.suffix == ".pth":
+                    # This is a directory we need to walk to find the files
+                    d = f.read_text().strip()
+                    if os.path.isdir(d):
+                        _ext_debug("    Found additional directory '%s' from .pth" % d)
+                        addl_dirs.append(d)
+                elif str(f).startswith("__editable__"):
+                    # This is a finder file because we already checked for .pth
+                    _ext_debug(
+                        "    Added additional directories from finder '%s': %s"
+                        % (str(f), ", ".join(finders_to_paths.get(str(f), [])))
+                    )
+                    addl_dirs.extend(finders_to_paths.get(str(f), []))
+                elif f.parts[0] == EXT_PKG:
+                    has_file_in_dist_root = True
+                    process_file(state, dist_root, str(f))
+                else:
+                    # We ignore the file
+                    continue
+
+            if has_file_in_dist_root:
+                all_roots.append(dist_root)
+                all_paths.discard(dist_root)
+            # Now walk any additional directory for this distribution as well
+            for addl_dir in addl_dirs:
+                if restrict_to_directories:
+                    parent_dirs = list(
+                        p.as_posix() for p in Path(addl_dir).resolve().parents
+                    )
+                    if all(p not in parent_dirs for p in restrict_to_directories):
                         _ext_debug(
-                            "
+                            "Ignoring package at %s as it is not in the considered "
+                            "directories" % addl_dir
                         )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                            config_module = None
-
-                            if len(parts) == len(ext_list) + 3 and (
-                                EXT_CONFIG_REGEXP.match(parts[-1]) is not None
-                                or parts[-1] == "__init__.py"
-                            ):
-                                parts[-1] = parts[-1][:-3]  # Remove the .py
-                                config_module = ".".join(parts)
-
-                                config_to_pkg[config_module].append(dist_full_name)
-                            cur_pkg = (
-                                extension_points_to_pkg[_extension_points[idx]]
-                                .setdefault(dist_name, {})
-                                .get(parts[1])
-                            )
-                            if cur_pkg is not None:
-                                if (
-                                    config_module is not None
-                                    and cur_pkg.config_module is not None
-                                ):
-                                    raise RuntimeError(
-                                        "Package '%s' defines more than one "
-                                        "configuration file for '%s': '%s' and '%s'"
-                                        % (
-                                            dist_full_name,
-                                            _extension_points[idx],
-                                            config_module,
-                                            cur_pkg.config_module,
-                                        )
-                                    )
-                                if config_module is not None:
-                                    _ext_debug(
-                                        "    Top-level '%s' found config file '%s'"
-                                        % (parts[1], config_module)
-                                    )
-                                    extension_points_to_pkg[_extension_points[idx]][
-                                        dist_name
-                                    ][parts[1]] = MFExtPackage(
-                                        package_name=dist_name,
-                                        tl_package=parts[1],
-                                        config_module=config_module,
-                                    )
-                            else:
-                                _ext_debug(
-                                    "    Top-level '%s' extends '%s' with config '%s'"
-                                    % (parts[1], _extension_points[idx], config_module)
-                                )
-                                extension_points_to_pkg[_extension_points[idx]][
-                                    dist_name
-                                ][parts[1]] = MFExtPackage(
-                                    package_name=dist_name,
-                                    tl_package=parts[1],
-                                    config_module=config_module,
-                                )
-                            break
+                        continue
+                base_depth = len(addl_dir.split("/"))
+                # .pth files give addl_dirs that don't have EXT_PKG at the end but
+                # finders do so check this
+                if addl_dir.split("/")[-1] == EXT_PKG:
+                    base_depth -= 1
+                else:
+                    addl_dir = os.path.join(addl_dir, EXT_PKG)
+                all_roots.append(addl_dir)
+                all_paths.discard(addl_dir)
+                _ext_debug("    Walking additional directory '%s'" % addl_dir)
+                for root, _, files in walk_without_cycles(addl_dir):
+                    relative_root = "/".join(root.split("/")[base_depth:])
+                    for f in files:
+                        process_file(state, addl_dir, os.path.join(relative_root, f))
             mf_ext_packages[dist_name] = {
-                "root_paths":
-                "meta_module": meta_module,
-                "
+                "root_paths": all_roots,
+                "meta_module": state["meta_module"],
+                "full_path_files": state["full_path_files"],
+                "files": state["files"],
+                "version": dist_version,
             }
+            if addl_dirs:
+                # If we have additional directories, this means that we may need to filter
+                # the files based on the meta information about the module since we
+                # walked down the directories instead of relying simply on files that
+                # were packaged with the distribution. We do this now so we don't have to
+                # do it multiple times later for packaging. This is only useful if the
+                # distribution does not completely specify the files that need to be
+                # installed. In the case where the distribution completely specifies the
+                # files, we ignore the meta module
+                _filter_files_package(mf_ext_packages[dist_name])
     # At this point, we have all the packages that contribute to EXT_PKG,
     # we now check to see if there is an order to respect based on dependencies. We will
    # return an ordered list that respects that order and is ordered alphabetically in
@@ -596,13 +821,21 @@ def _get_extension_packages():
     all_paths_list.sort()
 
     # This block of code is the equivalent of the one above for distributions except
-    # for PYTHONPATH packages.
-    # different because we construct the file list instead of having it nicely provided
-    # to us.
+    # for PYTHONPATH packages.
     package_name_to_path = dict()
     if len(all_paths_list) > 0:
         _ext_debug("Non installed packages present at %s" % str(all_paths))
         for package_count, package_path in enumerate(all_paths_list):
+            if restrict_to_directories:
+                parent_dirs = list(
+                    p.as_posix() for p in Path(package_path).resolve().parents
+                )
+                if all(p not in parent_dirs for p in restrict_to_directories):
+                    _ext_debug(
+                        "Ignoring non-installed package at %s as it is not in "
+                        "the considered directories" % package_path
+                    )
+                    continue
             # We give an alternate name for the visible package name. It is
             # not exposed to the end user but used to refer to the package, and it
             # doesn't provide much additional information to have the full path
@@ -615,131 +848,32 @@ def _get_extension_packages():
             )
             package_name_to_path[package_name] = package_path
             base_depth = len(package_path.split("/"))
-
-
-
-
-
-                # relative_root strips out metaflow_extensions
-                relative_root = "/".join(parts[base_depth:])
-                relative_module = ".".join(parts[base_depth - 1 :])
-                files_to_include.extend(
-                    [
-                        "/".join([relative_root, f]) if relative_root else f
-                        for f in files
-                        if not any(
-                            [f.endswith(suffix) for suffix in EXT_EXCLUDE_SUFFIXES]
-                        )
-                    ]
-                )
-                if cur_depth == base_depth:
-                    if "__init__.py" in files:
-                        raise RuntimeError(
-                            "'%s' at '%s' is not an implicit namespace package as required"
-                            % (EXT_PKG, root)
-                        )
-                    for d in dirs:
-                        if d in init_ext_points:
-                            raise RuntimeError(
-                                "Package at '%s' should conform to' %s.X.%s' and not "
-                                "'%s.%s' where X is your organization's name for example"
-                                % (root, EXT_PKG, d, EXT_PKG, d)
-                            )
-                    # Check for meta files for this package
-                    meta_files = [
-                        x for x in map(EXT_META_REGEXP.match, files) if x is not None
-                    ]
-                    if meta_files:
-                        # We should have one meta file at most
-                        if len(meta_files) > 1:
-                            raise RuntimeError(
-                                "Package at '%s' defines more than one meta file: %s"
-                                % (
-                                    package_path,
-                                    ", and ".join(
-                                        ["'%s'" % x.group(0) for x in meta_files]
-                                    ),
-                                )
-                            )
-                        else:
-                            meta_module = ".".join(
-                                [relative_module, meta_files[0].group(0)[:-3]]
-                            )
-
-                elif cur_depth > base_depth + 1:
-                    # We want at least a top-level name and something under
-                    tl_name = parts[base_depth]
-                    tl_fullname = "%s[%s]" % (package_path, tl_name)
-                    prefix_match = parts[base_depth + 1 :]
-                    for idx, ext_list in enumerate(list_ext_points):
-                        if prefix_match == ext_list:
-                            # We check to see if this is an actual extension point
-                            # or if we just have a directory on the way to another
-                            # extension point. To do this, we check to see if we have
-                            # any files or directories that are *not* directly another
-                            # extension point
-                            skip_extension = len(files) == 0
-                            if skip_extension:
-                                next_dir_idx = len(list_ext_points[idx])
-                                ok_subdirs = [
-                                    list_ext_points[j][next_dir_idx]
-                                    for j in range(0, idx)
-                                    if len(list_ext_points[j]) > next_dir_idx
-                                ]
-                                skip_extension = set(dirs).issubset(set(ok_subdirs))
-
-                            if skip_extension:
-                                _ext_debug(
-                                    "    Skipping '%s' as no files/directory of interest"
-                                    % _extension_points[idx]
-                                )
-                                continue
-
-                            # Check for any "init" files
-                            init_files = [
-                                x.group(0)
-                                for x in map(EXT_CONFIG_REGEXP.match, files)
-                                if x is not None
-                            ]
-                            if "__init__.py" in files:
-                                init_files.append("__init__.py")
-
-                            config_module = None
-                            if len(init_files) > 1:
-                                raise RuntimeError(
-                                    "Package at '%s' defines more than one configuration "
-                                    "file for '%s': %s"
-                                    % (
-                                        tl_fullname,
-                                        ".".join(prefix_match),
-                                        ", and ".join(["'%s'" % x for x in init_files]),
-                                    )
-                                )
-                            elif len(init_files) == 1:
-                                config_module = ".".join(
-                                    [relative_module, init_files[0][:-3]]
-                                )
-                            config_to_pkg[config_module].append(tl_fullname)
-
-                            d = extension_points_to_pkg[_extension_points[idx]][
-                                package_name
-                            ] = dict()
-                            d[tl_name] = MFExtPackage(
-                                package_name=package_name,
-                                tl_package=tl_name,
-                                config_module=config_module,
-                            )
-                            _ext_debug(
-                                "    Extends '%s' with config '%s'"
-                                % (_extension_points[idx], config_module)
-                            )
-            mf_pkg_list.append(package_name)
-            mf_ext_packages[package_name] = {
-                "root_paths": [package_path],
-                "meta_module": meta_module,
-                "files": files_to_include,
+            state = {
+                "name": package_name,
+                "files": [],
+                "full_path_files": [],
+                "meta_module": None,
             }
 
+            for root, _, files in walk_without_cycles(package_path):
+                relative_root = "/".join(root.split("/")[base_depth - 1 :])
+                for f in files:
+                    process_file(state, package_path, os.path.join(relative_root, f))
+
+            if state["files"]:
+                mf_pkg_list.append(package_name)
+                mf_ext_packages[package_name] = {
+                    "root_paths": [package_path],
+                    "meta_module": state["meta_module"],
+                    "full_path_files": state["full_path_files"],
+                    "files": state["files"],
+                    "version": "_local_",
+                }
+                # Always filter here since we don't have any distribution information
+                _filter_files_package(mf_ext_packages[package_name])
+            else:
+                _ext_debug("Skipping package as no files found (empty dir?)")
+
     # Sanity check that we only have one package per configuration file.
     # This prevents multiple packages from providing the same named configuration
     # file which would result in one overwriting the other if they are both installed.
@@ -795,33 +929,83 @@ def _get_extension_packages():
     return mf_ext_packages, extension_points_to_pkg
 
 
-_all_packages, _pkgs_per_extension_point = _get_extension_packages()
-
-
 def _attempt_load_module(module_name):
     try:
         extension_module = importlib.import_module(module_name)
     except ImportError as e:
-
-
-
-
-
-
-
-
-
-
-
-
-
-                raise
+        # e.name is set to the name of the package that fails to load
+        # so don't error ONLY IF the error is importing this module (but do
+        # error if there is a transitive import error)
+        errored_names = [EXT_PKG]
+        parts = module_name.split(".")
+        for p in parts[1:]:
+            errored_names.append("%s.%s" % (errored_names[-1], p))
+        if not (isinstance(e, ModuleNotFoundError) and e.name in errored_names):
+            print(
+                "The following exception occurred while trying to load '%s' ('%s')"
+                % (EXT_PKG, module_name)
+            )
+            raise
         _ext_debug("    Unknown error when loading '%s': %s" % (module_name, e))
         return None
     else:
         return extension_module
 
 
+def _filter_files_package(pkg):
+    if pkg and pkg["root_paths"] and pkg["meta_module"]:
+        meta_module = _attempt_load_module(pkg["meta_module"])
+        if meta_module:
+            filter_function = meta_module.__dict__.get("filter_function")
+            include_suffixes = meta_module.__dict__.get("include_suffixes")
+            exclude_suffixes = meta_module.__dict__.get("exclude_suffixes")
+
+            # Behavior is as follows:
+            # - if nothing specified, include all files (so do nothing here)
+            # - if filter_function specified, call that function on the list of files
+            # and only include the files where the function returns True. Note that
+            # the function will always be passed a value that starts with
+            # metaflow_extensions/...
+            # - if include_suffixes, only include those suffixes
+            # - if *not* include_suffixes but exclude_suffixes, include everything *except*
+            # files ending with that suffix
+            new_files, new_full_path_files = [], []
+
+            if filter_function:
+                for short_file, full_file in zip(pkg["files"], pkg["full_path_files"]):
+                    try:
+                        if filter_function(os.path.join(EXT_PKG, short_file)):
+                            new_files.append(short_file)
+                            new_full_path_files.append(full_file)
+                    except Exception as e:
+                        _ext_debug(
+                            "    Exception '%s' when calling filter_function on "
+                            "'%s', ignoring file" % (e, short_file)
+                        )
+            elif include_suffixes:
+                for short_file, full_file in zip(pkg["files"], pkg["full_path_files"]):
+                    if any(
+                        [short_file.endswith(suffix) for suffix in include_suffixes]
+                    ):
+                        new_files.append(short_file)
+                        new_full_path_files.append(full_file)
+            elif exclude_suffixes:
+                for short_file, full_file in zip(pkg["files"], pkg["full_path_files"]):
+                    if not any(
+                        [short_file.endswith(suffix) for suffix in exclude_suffixes]
+                    ):
+                        new_files.append(short_file)
+                        new_full_path_files.append(full_file)
+            else:
+                new_files = pkg["files"]
+                new_full_path_files = pkg["full_path_files"]
+            pkg["files"] = new_files
+            pkg["full_path_files"] = new_full_path_files
+
+
+_all_packages, _pkgs_per_extension_point = _get_extension_packages()
+
+
 def _get_extension_config(distribution_name, tl_pkg, extension_point, config_module):
     if config_module is not None and not config_module.endswith("__init__"):
         module_name = config_module
@@ -866,45 +1050,12 @@ def _get_extension_config(distribution_name, tl_pkg, extension_point, config_mod
             _ext_debug("Package '%s' is rooted at %s" % (distribution_name, root_paths))
             _all_packages[distribution_name]["root_paths"] = root_paths
 
-        return MFExtModule(tl_package=tl_pkg, module=extension_module)
+        return MFExtModule(
+            package_name=distribution_name, tl_package=tl_pkg, module=extension_module
+        )
     return None
 
 
-def _filter_files_package(package_name):
-    pkg = _all_packages.get(package_name)
-    if pkg and pkg["root_paths"] and pkg["meta_module"]:
-        meta_module = _attempt_load_module(pkg["meta_module"])
-        if meta_module:
-            include_suffixes = meta_module.__dict__.get("include_suffixes")
-            exclude_suffixes = meta_module.__dict__.get("exclude_suffixes")
-
-            # Behavior is as follows:
-            # - if nothing specified, include all files (so do nothing here)
-            # - if include_suffixes, only include those suffixes
-            # - if *not* include_suffixes but exclude_suffixes, include everything *except*
-            # files ending with that suffix
-            if include_suffixes:
-                new_files = [
-                    f
-                    for f in pkg["files"]
-                    if any([f.endswith(suffix) for suffix in include_suffixes])
-                ]
-            elif exclude_suffixes:
-                new_files = [
-                    f
-                    for f in pkg["files"]
-                    if not any([f.endswith(suffix) for suffix in exclude_suffixes])
-                ]
-            else:
-                new_files = pkg["files"]
-            pkg["files"] = new_files
-
-
-def _filter_files_all():
-    for p in _all_packages:
-        _filter_files_package(p)
-
-
 class _AliasLoader(Loader):
     def __init__(self, alias, orig):
         self._alias = alias
@@ -978,9 +1129,9 @@ class _OrigLoader(Loader):
         if self._previously_loaded_module:
             sys.modules[self._orig_name] = self._previously_loaded_module
         if self._previously_loaded_parent_module:
-            sys.modules[
-                ".".join(self._orig_name.split(".")[:-1])
-            ] = self._previously_loaded_parent_module
+            sys.modules[".".join(self._orig_name.split(".")[:-1])] = (
+                self._previously_loaded_parent_module
+            )
 
 
 class _LazyFinder(MetaPathFinder):
|