pyflyby-1.10.4-cp311-cp311-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyflyby/__init__.py +61 -0
- pyflyby/__main__.py +9 -0
- pyflyby/_autoimp.py +2228 -0
- pyflyby/_cmdline.py +591 -0
- pyflyby/_comms.py +221 -0
- pyflyby/_dbg.py +1383 -0
- pyflyby/_dynimp.py +154 -0
- pyflyby/_fast_iter_modules.cpython-311-darwin.so +0 -0
- pyflyby/_file.py +771 -0
- pyflyby/_flags.py +230 -0
- pyflyby/_format.py +186 -0
- pyflyby/_idents.py +227 -0
- pyflyby/_import_sorting.py +165 -0
- pyflyby/_importclns.py +658 -0
- pyflyby/_importdb.py +535 -0
- pyflyby/_imports2s.py +643 -0
- pyflyby/_importstmt.py +723 -0
- pyflyby/_interactive.py +2113 -0
- pyflyby/_livepatch.py +793 -0
- pyflyby/_log.py +107 -0
- pyflyby/_modules.py +646 -0
- pyflyby/_parse.py +1396 -0
- pyflyby/_py.py +2165 -0
- pyflyby/_saveframe.py +1145 -0
- pyflyby/_saveframe_reader.py +471 -0
- pyflyby/_util.py +458 -0
- pyflyby/_version.py +8 -0
- pyflyby/autoimport.py +20 -0
- pyflyby/etc/pyflyby/canonical.py +10 -0
- pyflyby/etc/pyflyby/common.py +27 -0
- pyflyby/etc/pyflyby/forget.py +10 -0
- pyflyby/etc/pyflyby/mandatory.py +10 -0
- pyflyby/etc/pyflyby/numpy.py +156 -0
- pyflyby/etc/pyflyby/std.py +335 -0
- pyflyby/importdb.py +19 -0
- pyflyby/libexec/pyflyby/colordiff +34 -0
- pyflyby/libexec/pyflyby/diff-colorize +148 -0
- pyflyby/share/emacs/site-lisp/pyflyby.el +112 -0
- pyflyby-1.10.4.data/scripts/collect-exports +76 -0
- pyflyby-1.10.4.data/scripts/collect-imports +58 -0
- pyflyby-1.10.4.data/scripts/find-import +38 -0
- pyflyby-1.10.4.data/scripts/prune-broken-imports +34 -0
- pyflyby-1.10.4.data/scripts/pyflyby-diff +34 -0
- pyflyby-1.10.4.data/scripts/reformat-imports +27 -0
- pyflyby-1.10.4.data/scripts/replace-star-imports +37 -0
- pyflyby-1.10.4.data/scripts/saveframe +299 -0
- pyflyby-1.10.4.data/scripts/tidy-imports +170 -0
- pyflyby-1.10.4.data/scripts/transform-imports +47 -0
- pyflyby-1.10.4.dist-info/METADATA +605 -0
- pyflyby-1.10.4.dist-info/RECORD +53 -0
- pyflyby-1.10.4.dist-info/WHEEL +6 -0
- pyflyby-1.10.4.dist-info/entry_points.txt +4 -0
- pyflyby-1.10.4.dist-info/licenses/LICENSE.txt +19 -0
pyflyby/_importdb.py
ADDED
@@ -0,0 +1,535 @@
+# pyflyby/_importdb.py.
+# Copyright (C) 2011, 2012, 2013, 2014, 2015 Karl Chen.
+# License: MIT http://opensource.org/licenses/MIT
+
+from __future__ import annotations
+
+
+
+from collections import defaultdict
+import os
+import re
+import sys
+
+from pathlib import Path
+
+from typing import Any, Dict, List, Tuple, Union
+
+from pyflyby._file import (Filename, UnsafeFilenameError,
+                           expand_py_files_from_args)
+from pyflyby._idents import dotted_prefixes
+from pyflyby._importclns import ImportMap, ImportSet
+from pyflyby._importstmt import Import, ImportStatement
+from pyflyby._log import logger
+from pyflyby._parse import PythonBlock
+from pyflyby._util import cached_attribute, memoize, stable_unique
+
+if sys.version_info <= (3, 12):
+    from typing_extensions import Self
+else:
+    from typing import Self
+
+
+@memoize
+def _find_etc_dirs():
+    result = []
+    dirs = Filename(__file__).real.dir.ancestors[:-1]
+    for dir in dirs:
+        candidate = dir / "etc/pyflyby"
+        if candidate.isdir:
+            result.append(candidate)
+            break
+    global_dir = Filename("/etc/pyflyby")
+    if global_dir.exists:
+        result.append(global_dir)
+    return result
+
+
+def _get_env_var(env_var_name, default):
+    '''
+    Get an environment variable and split on ":", replacing ``-`` with the
+    default.
+    '''
+    assert re.match("^[A-Z_]+$", env_var_name)
+    assert isinstance(default, (tuple, list))
+    value = list(filter(None, os.environ.get(env_var_name, '').split(':')))
+    if not value:
+        return default
+    # Replace '-' with ``default``
+    try:
+        idx = value.index('-')
+    except ValueError:
+        pass
+    else:
+        value[idx:idx+1] = default
+    return value
+
+
+def _get_python_path(env_var_name, default_path, target_dirname):
+    '''
+    Expand an environment variable specifying pyflyby input config files.
+
+      - Default to ``default_path`` if the environment variable is undefined.
+      - Process colon delimiters.
+      - Replace "-" with ``default_path``.
+      - Expand triple dots.
+      - Recursively traverse directories.
+
+    :rtype:
+      ``tuple`` of ``Filename`` s
+    '''
+    pathnames = _get_env_var(env_var_name, default_path)
+    if pathnames == ["EMPTY"]:
+        # The special code PYFLYBY_PATH=EMPTY means we intentionally want to
+        # use an empty PYFLYBY_PATH (and don't fall back to the default path,
+        # nor warn about an empty path).
+        return ()
+    for p in pathnames:
+        if re.match("/|[.]/|[.][.][.]/|~/", p):
+            continue
+        raise ValueError(
+            "{env_var_name} components should start with / or ./ or ~/ or .../. "
+            "Use {env_var_name}=./{p} instead of {env_var_name}={p} if you really "
+            "want to use the current directory."
+            .format(env_var_name=env_var_name, p=p))
+    pathnames = [os.path.expanduser(p) for p in pathnames]
+    pathnames = _expand_tripledots(pathnames, target_dirname)
+    for fn in pathnames:
+        assert isinstance(fn, Filename)
+    pathnames = stable_unique(pathnames)
+    for p in pathnames:
+        assert isinstance(p, Filename)
+    pathnames = expand_py_files_from_args(pathnames)
+    if not pathnames:
+        logger.warning(
+            "No import libraries found (%s=%r, default=%r)"
+            % (env_var_name, os.environ.get(env_var_name), default_path))
+    return tuple(pathnames)
+
+
+# TODO: stop memoizing here after using StatCache.  Actually just inline into
+# _ancestors_on_same_partition
+@memoize
+def _get_st_dev(filename: Filename):
+    assert isinstance(filename, Filename)
+    try:
+        return os.stat(str(filename)).st_dev
+    except OSError:
+        return None
+
+
+def _ancestors_on_same_partition(filename):
+    """
+    Generate ancestors of ``filename`` that exist and are on the same partition
+    as the first existing ancestor of ``filename``.
+
+    For example, suppose a partition is mounted on /u/homer; /u is a different
+    partition.  Suppose /u/homer/aa exists but /u/homer/aa/bb does not exist.
+    Then::
+
+      >>> _ancestors_on_same_partition(Filename("/u/homer/aa/bb/cc"))  # doctest: +SKIP
+      [Filename("/u/homer", Filename("/u/homer/aa")]
+
+    :rtype:
+      ``list`` of ``Filename``
+    """
+    result = []
+    dev = None
+    for f in filename.ancestors:
+        this_dev = _get_st_dev(f)
+        if this_dev is None:
+            continue
+        if dev is None:
+            dev = this_dev
+        elif dev != this_dev:
+            break
+        result.append(f)
+    return result
+
+
+def _expand_tripledots(pathnames, target_dirname):
+    """
+    Expand pathnames of the form ``".../foo/bar"`` as "../../foo/bar",
+    "../foo/bar", "./foo/bar" etc., up to the oldest ancestor with the same
+    st_dev.
+
+    For example, suppose a partition is mounted on /u/homer; /u is a different
+    partition.  Then::
+
+      >>> _expand_tripledots(["/foo", ".../tt"], "/u/homer/aa")  # doctest: +SKIP
+      [Filename("/foo"), Filename("/u/homer/tt"), Filename("/u/homer/aa/tt")]
+
+    :type pathnames:
+      sequence of ``str`` (not ``Filename``)
+    :type target_dirname:
+      `Filename`
+    :rtype:
+      ``list`` of `Filename`
+    """
+    assert isinstance(target_dirname, Filename)
+    if not isinstance(pathnames, (tuple, list)):
+        pathnames = [pathnames]
+    result = []
+    for pathname in pathnames:
+        if not pathname.startswith(".../"):
+            result.append(Filename(pathname))
+            continue
+        suffix = pathname[4:]
+        expanded = []
+        for p in _ancestors_on_same_partition(target_dirname):
+            try:
+                expanded.append(p / suffix)
+            except UnsafeFilenameError:
+                continue
+        result.extend(expanded[::-1])
+    return result
+
+
+class ImportDB:
+    """
+    A database of known, mandatory, canonical imports.
+
+    @iattr known_imports:
+      Set of known imports.  For use by tidy-imports and autoimporter.
+    @iattr mandatory_imports:
+      Set of imports that must be added by tidy-imports.
+    @iattr canonical_imports:
+      Map of imports that tidy-imports transforms on every run.
+    @iattr forget_imports:
+      Set of imports to remove from known_imports, mandatory_imports,
+      canonical_imports.
+    """
+
+    forget_imports   : ImportSet
+    known_imports    : ImportSet
+    mandatory_imports: ImportSet
+    canonical_imports: ImportMap
+
+    _default_cache: Dict[Any, Any] = {}
+
+    def __new__(cls, *args):
+        if len(args) != 1:
+            raise TypeError
+        arg, = args
+        if isinstance(arg, cls):
+            return arg
+        if isinstance(arg, ImportSet):
+            return cls._from_data(arg, [], [], [])
+        return cls._from_code(arg)  # PythonBlock, Filename, etc
+
+
+
+
+    @classmethod
+    def clear_default_cache(cls):
+        """
+        Clear the class cache of default ImportDBs.
+
+        Subsequent calls to ImportDB.get_default() will not reuse previously
+        cached results.  Existing ImportDB instances are not affected by this
+        call.
+        """
+        if logger.debug_enabled:
+            allpyfiles = set()
+            for tup in cls._default_cache:
+                if tup[0] != 2:
+                    continue
+                for tup2 in tup[1:]:
+                    for f in tup2:
+                        assert isinstance(f, Filename)
+                        if f.ext == ".py":
+                            allpyfiles.add(f)
+            nfiles = len(allpyfiles)
+            logger.debug("ImportDB: Clearing default cache of %d files", nfiles)
+        cls._default_cache.clear()
+
+    @classmethod
+    def get_default(cls, target_filename: Union[Filename, str], /):
+        """
+        Return the default import library for the given target filename.
+
+        This will read various .../.pyflyby files as specified by
+        $PYFLYBY_PATH.
+
+        Memoized.
+
+        :param target_filename:
+          The target filename for which to get the import database.  Note that
+          the target filename itself is not read.  Instead, the target
+          filename is relevant because we look for .../.pyflyby based on the
+          target filename.
+        :rtype:
+          `ImportDB`
+        """
+        # We're going to canonicalize target_filename in a number of steps.
+        # At each step, see if we've seen the input so far.  We do the cache
+        # checking incrementally since the steps involve syscalls.  Since this
+        # is going to potentially be executed inside the IPython interactive
+        # loop, we cache as much as possible.
+        # TODO: Consider refreshing periodically.  Check if files have
+        # been touched, and if so, return new data.  Check file timestamps at
+        # most once every 60 seconds.
+        cache_keys:List[Tuple[Any,...]] = []
+        if target_filename is None:
+            target_filename = "."
+
+        if isinstance(target_filename, Filename):
+            target_filename = str(target_filename)
+
+        assert isinstance(target_filename, str), (
+            target_filename,
+            type(target_filename),
+        )
+
+        target_path = Path(target_filename).resolve()
+
+        parents: List[Path]
+        if target_path.is_dir():
+            parents = [target_path]
+        else:
+            parents = []
+
+        # filter safe parents
+        safe_parent = None
+        for p in parents + list(target_path.parents):
+            try:
+                safe_parent = Filename(str(p))
+                break
+            except UnsafeFilenameError:
+                pass
+        if safe_parent is None:
+            raise ValueError("No know path are safe")
+
+        target_dirname = safe_parent
+
+        if target_filename.startswith("/dev"):
+            try:
+                target_dirname = Filename(".")
+            except UnsafeFilenameError:
+                pass
+        # TODO: with StatCache
+        while True:
+            key = (
+                1,
+                target_dirname,
+                os.getenv("PYFLYBY_PATH"),
+            )
+            cache_keys.append(key)
+            if key in cls._default_cache:
+                return cls._default_cache[key]
+            if target_dirname.isdir:
+                break
+            target_dirname = target_dirname.dir
+        try:
+            target_dirname = target_dirname.real
+        except UnsafeFilenameError:
+            pass
+        if target_dirname != cache_keys[-1][0]:
+            cache_keys.append((1,
+                               target_dirname,
+                               os.getenv("PYFLYBY_PATH")))
+            try:
+                return cls._default_cache[cache_keys[-1]]
+            except KeyError:
+                pass
+        DEFAULT_PYFLYBY_PATH = []
+        DEFAULT_PYFLYBY_PATH += [str(p) for p in _find_etc_dirs()]
+        DEFAULT_PYFLYBY_PATH += [
+            ".../.pyflyby",
+            "~/.pyflyby",
+        ]
+        logger.debug("DEFAULT_PYFLYBY_PATH=%s", DEFAULT_PYFLYBY_PATH)
+        filenames = _get_python_path("PYFLYBY_PATH", DEFAULT_PYFLYBY_PATH,
+                                     target_dirname)
+        cache_keys.append((2, filenames))
+        try:
+            return cls._default_cache[cache_keys[-1]]
+        except KeyError:
+            pass
+        result = cls._from_code(filenames)
+        for k in cache_keys:
+            cls._default_cache[k] = result
+        return result
+
+    @classmethod
+    def interpret_arg(cls, arg, target_filename) -> ImportDB:
+        if arg is None:
+            return cls.get_default(target_filename)
+        else:
+            return cls(arg)
+
+    @classmethod
+    def _from_data(cls, known_imports, mandatory_imports,
+                   canonical_imports, forget_imports):
+        self = object.__new__(cls)
+        self.forget_imports    = ImportSet(forget_imports   )
+        self.known_imports     = ImportSet(known_imports    ).without_imports(forget_imports)
+        self.mandatory_imports = ImportSet(mandatory_imports).without_imports(forget_imports)
+        # TODO: provide more fine-grained control about canonical_imports.
+        self.canonical_imports = ImportMap(canonical_imports).without_imports(forget_imports)
+        return self
+
+    def __or__(self, other:'Self') -> 'Self':
+        assert isinstance(other, ImportDB)
+        return self._from_data(
+            known_imports     = self.known_imports     | other.known_imports,
+            mandatory_imports = self.mandatory_imports | other.mandatory_imports,
+            canonical_imports = self.canonical_imports | other.canonical_imports,
+            forget_imports    = self.forget_imports    | other.forget_imports
+        )
+
+
+    @classmethod
+    def _from_code(cls, blocks):
+        """
+        Load an import database from code.
+
+          >>> ImportDB._from_code('''
+          ...     import foo, bar as barf
+          ...     from xx import yy
+          ...     __mandatory_imports__ = ['__future__.division',
+          ...                              'import aa . bb . cc as dd']
+          ...     __forget_imports__ = ['xx.yy', 'from xx import zz']
+          ...     __canonical_imports__ = {'bad.baad': 'good.goood'}
+          ... ''')
+          ImportDB('''
+            import bar as barf
+            import foo
+          <BLANKLINE>
+            __mandatory_imports__ = [
+              'from __future__ import division',
+              'from aa.bb import cc as dd',
+            ]
+          <BLANKLINE>
+            __canonical_imports__ = {
+              'bad.baad': 'good.goood',
+            }
+          <BLANKLINE>
+            __forget_imports__ = [
+              'from xx import yy',
+              'from xx import zz',
+            ]
+          ''')
+
+        :rtype:
+          `ImportDB`
+        """
+        if not isinstance(blocks, (tuple, list)):
+            blocks = [blocks]
+        known_imports     = []
+        mandatory_imports = []
+        canonical_imports = []
+        forget_imports    = []
+        blocks = [PythonBlock(b) for b in blocks]
+        for block in blocks:
+            for statement in block.statements:
+                if statement.is_comment_or_blank:
+                    continue
+                if statement.is_import:
+                    known_imports.extend(ImportStatement(statement).imports)
+                    continue
+                try:
+                    name, value = statement.get_assignment_literal_value()
+                    if name == "__mandatory_imports__":
+                        mandatory_imports.append(cls._parse_import_set(value))
+                    elif name == "__canonical_imports__":
+                        canonical_imports.append(cls._parse_import_map(value))
+                    elif name == "__forget_imports__":
+                        forget_imports.append(cls._parse_import_set(value))
+                    else:
+                        raise ValueError(
+                            "Unknown assignment to %r (expected one of "
+                            "__mandatory_imports__, __canonical_imports__, "
+                            "__forget_imports__)" % (name,))
+                except ValueError as e:
+                    raise ValueError(
+                        "While parsing %s: error in %r: %s"
+                        % (block.filename, statement, e))
+        return cls._from_data(known_imports,
+                              mandatory_imports,
+                              canonical_imports,
+                              forget_imports)
+
+    @classmethod
+    def _parse_import_set(cls, arg):
+        if isinstance(arg, str):
+            arg = [arg]
+        if not isinstance(arg, (tuple, list)):
+            raise ValueError("Expected a list, not a %s" % (type(arg).__name__,))
+        for item in arg:
+            if not isinstance(item, str):
+                raise ValueError(
+                    "Expected a list of str, not %s" % (type(item).__name__,))
+        return ImportSet(arg)
+
+    @classmethod
+    def _parse_import_map(cls, arg):
+        if isinstance(arg, str):
+            arg = [arg]
+        if not isinstance(arg, dict):
+            raise ValueError("Expected a dict, not a %s" % (type(arg).__name__,))
+        for k, v in arg.items():
+            if not isinstance(k, str):
+                raise ValueError(
+                    "Expected a dict of str, not %s" % (type(k).__name__,))
+            if not isinstance(v, str):
+                raise ValueError(
+                    "Expected a dict of str, not %s" % (type(v).__name__,))
+        return ImportMap(arg)
+
+    @cached_attribute
+    def by_fullname_or_import_as(self) -> Dict[str, Tuple[Import, ...]]:
+        """
+        Map from ``fullname`` and ``import_as`` to `Import` s.
+
+          >>> import pprint
+          >>> db = ImportDB('from aa.bb import cc as dd')
+          >>> pprint.pprint(db.by_fullname_or_import_as)
+          {'aa': (Import('import aa'),),
+           'aa.bb': (Import('import aa.bb'),),
+           'dd': (Import('from aa.bb import cc as dd'),)}
+
+        :rtype:
+          ``dict`` mapping from ``str`` to tuple of `Import` s
+        """
+        # TODO: make known_imports take into account the below forget_imports,
+        # then move this function into ImportSet
+        d = defaultdict(set)
+        for imp in self.known_imports.imports:
+            # Given an import like "from foo.bar import quux as QUUX", add the
+            # following entries:
+            # - "QUUX" => "from foo.bar import quux as QUUX"
+            # - "foo.bar" => "import foo.bar"
+            # - "foo" => "import foo"
+            # We don't include an entry labeled "quux" because the user has
+            # implied he doesn't want to pollute the global namespace with
+            # "quux", only "QUUX".
+            d[imp.import_as].add(imp)
+            for prefix in dotted_prefixes(imp.fullname)[:-1]:
+                d[prefix].add(Import.from_parts(prefix, prefix))
+        return dict( (k, tuple(sorted(v - set(self.forget_imports.imports))))
+                     for k, v in d.items())
+
+    def __repr__(self):
+        printed = self.pretty_print()
+        lines = "".join("  "+line for line in printed.splitlines(True))
+        return "%s('''\n%s''')" % (type(self).__name__, lines)
+
+    def pretty_print(self):
+        s = self.known_imports.pretty_print()
+        if self.mandatory_imports:
+            s += "\n__mandatory_imports__ = [\n"
+            for imp in self.mandatory_imports.imports:
+                s += "  '%s',\n" % imp
+            s += "]\n"
+        if self.canonical_imports:
+            s += "\n__canonical_imports__ = {\n"
+            for k, v in sorted(self.canonical_imports.items()):
+                s += "  '%s': '%s',\n" % (k, v)
+            s += "}\n"
+        if self.forget_imports:
+            s += "\n__forget_imports__ = [\n"
+            for imp in self.forget_imports.imports:
+                s += "  '%s',\n" % imp
+            s += "]\n"
+        return s
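
For context, a minimal sketch of how the public surface of pyflyby/_importdb.py shown above is typically exercised. This is illustrative only and not part of the wheel contents; it assumes the wheel is installed so that pyflyby._importdb is importable, and the behavior described follows the docstrings in the listing.

# Illustrative sketch only -- not part of the packaged file above.
from pyflyby._importdb import ImportDB

# Build a database directly from .pyflyby-style configuration code,
# the same format parsed by ImportDB._from_code().
db = ImportDB('''
import foo, bar as barf
from xx import yy
__mandatory_imports__ = ['from __future__ import division']
__canonical_imports__ = {'bad.baad': 'good.goood'}
__forget_imports__ = ['from xx import zz']
''')

print(db.pretty_print())            # normalized text form of the database
print(db.by_fullname_or_import_as)  # e.g. {'foo': (Import('import foo'),), ...}

# Or load the default database for a target path, honoring $PYFLYBY_PATH
# (falling back to .../.pyflyby and ~/.pyflyby as in get_default() above).
default_db = ImportDB.get_default(".")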