npe2 0.7.9rc0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. npe2/_command_registry.py +6 -5
  2. npe2/_dynamic_plugin.py +25 -27
  3. npe2/_inspection/_compile.py +9 -8
  4. npe2/_inspection/_fetch.py +18 -30
  5. npe2/_inspection/_from_npe1.py +26 -32
  6. npe2/_inspection/_setuputils.py +14 -14
  7. npe2/_inspection/_visitors.py +26 -21
  8. npe2/_plugin_manager.py +45 -57
  9. npe2/_pydantic_util.py +53 -0
  10. npe2/_pytest_plugin.py +3 -4
  11. npe2/_setuptools_plugin.py +9 -9
  12. npe2/cli.py +25 -21
  13. npe2/implements.py +13 -10
  14. npe2/implements.pyi +3 -2
  15. npe2/io_utils.py +40 -44
  16. npe2/manifest/_bases.py +15 -14
  17. npe2/manifest/_npe1_adapter.py +3 -3
  18. npe2/manifest/_package_metadata.py +40 -47
  19. npe2/manifest/contributions/_commands.py +16 -14
  20. npe2/manifest/contributions/_configuration.py +22 -20
  21. npe2/manifest/contributions/_contributions.py +13 -14
  22. npe2/manifest/contributions/_icon.py +3 -5
  23. npe2/manifest/contributions/_json_schema.py +86 -89
  24. npe2/manifest/contributions/_keybindings.py +5 -6
  25. npe2/manifest/contributions/_menus.py +11 -9
  26. npe2/manifest/contributions/_readers.py +10 -8
  27. npe2/manifest/contributions/_sample_data.py +16 -15
  28. npe2/manifest/contributions/_submenu.py +2 -4
  29. npe2/manifest/contributions/_themes.py +18 -22
  30. npe2/manifest/contributions/_widgets.py +6 -5
  31. npe2/manifest/contributions/_writers.py +22 -18
  32. npe2/manifest/schema.py +82 -70
  33. npe2/manifest/utils.py +24 -28
  34. npe2/plugin_manager.py +17 -14
  35. npe2/types.py +16 -19
  36. {npe2-0.7.9rc0.dist-info → npe2-0.8.0.dist-info}/METADATA +13 -7
  37. npe2-0.8.0.dist-info/RECORD +49 -0
  38. {npe2-0.7.9rc0.dist-info → npe2-0.8.0.dist-info}/WHEEL +1 -1
  39. npe2/_pydantic_compat.py +0 -54
  40. npe2-0.7.9rc0.dist-info/RECORD +0 -49
  41. {npe2-0.7.9rc0.dist-info → npe2-0.8.0.dist-info}/entry_points.txt +0 -0
  42. {npe2-0.7.9rc0.dist-info → npe2-0.8.0.dist-info}/licenses/LICENSE +0 -0
npe2/cli.py CHANGED
@@ -1,10 +1,12 @@
  import builtins
  import warnings
+ from collections.abc import Iterator, Sequence
  from enum import Enum
  from pathlib import Path
- from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence
+ from typing import TYPE_CHECKING

  import typer
+ from pydantic import ValidationError

  from npe2 import PluginManager, PluginManifest, __version__

@@ -26,7 +28,7 @@ def _show_version_and_exit(value: bool) -> None:

  @app.callback()
  def _main(
- version: Optional[bool] = typer.Option(
+ version: bool | None = typer.Option(
  None,
  "-v",
  "--version",
@@ -132,13 +134,13 @@ def validate(
  ),
  ):
  """Validate manifest for a distribution name or manifest filepath."""
- err: Optional[Exception] = None
+ err: Exception | None = None
  try:
  pm = PluginManifest._from_package_or_name(name)
  msg = f"✔ Manifest for {(pm.display_name or pm.name)!r} valid!"
  if imports:
  pm.validate_imports()
- except PluginManifest.ValidationError as e:
+ except ValidationError as e:
  msg = f"🅇 Invalid! {e}"
  err = e
  except Exception as e:
@@ -171,14 +173,14 @@ def parse(
  format: Format = typer.Option(
  "yaml", "-f", "--format", help="Markdown format to use."
  ),
- indent: Optional[int] = typer.Option(
+ indent: int | None = typer.Option(
  None,
  "--indent",
  help="Number of spaces to indent (for json)",
  min=0,
  max=10,
  ),
- output: Optional[Path] = typer.Option(
+ output: Path | None = typer.Option(
  None,
  "-o",
  "--output",
@@ -197,7 +199,7 @@ def parse(
  _pprint_formatted(manifest_string, fmt)


- def _make_rows(pm_dict: dict, normed_fields: Sequence[str]) -> Iterator[List]:
+ def _make_rows(pm_dict: dict, normed_fields: Sequence[str]) -> Iterator[list]:
  """Cleanup output from pm.dict() into rows for table.

  outside of just extracting the fields we care about, this also:
@@ -231,8 +233,8 @@ def _make_rows(pm_dict: dict, normed_fields: Sequence[str]) -> Iterator[List]:
  yield row


- @app.command()
- def list(
+ @app.command(name="list")
+ def list_(
  fields: str = typer.Option(
  "name,version,npe2,contributions",
  help="Comma seperated list of fields to include in the output."
@@ -302,7 +304,7 @@ def list(

  # standard records format used for the other formats
  # [{column -> value}, ... , {column -> value}]
- data: List[dict] = [dict(zip(requested_fields, row)) for row in rows]
+ data: builtins.list[dict] = [
+ dict(zip(requested_fields, row, strict=False)) for row in rows
+ ]

  if format == ListFormat.json:
  import json
@@ -321,9 +325,9 @@ def list(

  @app.command()
  def fetch(
- name: List[str],
- version: Optional[str] = None,
- include_package_meta: Optional[bool] = typer.Option(
+ name: builtins.list[str],
+ version: str | None = None,
+ include_package_meta: bool | None = typer.Option(
  False,
  "-m",
  "--include-package-meta",
@@ -332,14 +336,14 @@ def fetch(
  format: Format = typer.Option(
  "yaml", "-f", "--format", help="Markdown format to use."
  ),
- indent: Optional[int] = typer.Option(
+ indent: int | None = typer.Option(
  None,
  "--indent",
  help="Number of spaces to indent (for json)",
  min=0,
  max=10,
  ),
- output: Optional[Path] = typer.Option(
+ output: Path | None = typer.Option(
  None,
  "-o",
  "--output",
@@ -379,7 +383,7 @@ def convert(
  "package is provided instead of a directory, the new manifest will simply be "
  "printed to stdout.",
  ),
- dry_run: Optional[bool] = typer.Option(
+ dry_run: bool | None = typer.Option(
  False,
  "--dry-runs",
  "-n",
@@ -433,16 +437,16 @@ def convert(

  @app.command()
  def cache(
- clear: Optional[bool] = typer.Option(
+ clear: bool | None = typer.Option(
  False, "--clear", "-d", help="Clear the npe1 adapter manifest cache"
  ),
- names: List[str] = typer.Argument(
+ names: builtins.list[str] = typer.Argument(
  None, help="Name(s) of distributions to list/delete"
  ),
- list_: Optional[bool] = typer.Option(
+ list_: bool | None = typer.Option(
  False, "--list", "-l", help="List cached manifests"
  ),
- verbose: Optional[bool] = typer.Option(False, "--verbose", "-v", help="verbose"),
+ verbose: bool | None = typer.Option(False, "--verbose", "-v", help="verbose"),
  ):
  """Cache utils"""
  from npe2.manifest._npe1_adapter import ADAPTER_CACHE, clear_cache
@@ -481,7 +485,7 @@ def cache(
  @app.command()
  def compile(
  src_dir: str,
- output: Optional[Path] = typer.Option(
+ output: Path | None = typer.Option(
  None,
  "-o",
  "--output",
npe2/implements.py CHANGED
@@ -1,8 +1,9 @@
  import contextlib
+ from collections.abc import Callable, Sequence
  from inspect import Parameter, Signature
- from typing import Any, Callable, List, Sequence, Type, TypeVar
+ from typing import Any, TypeVar

- from npe2._pydantic_compat import BaseModel
+ from pydantic import BaseModel

  from .manifest import contributions

@@ -22,7 +23,7 @@ T = TypeVar("T", bound=Callable[..., Any])
  CHECK_ARGS_PARAM = "ensure_args_valid"


- def _build_decorator(contrib: Type[BaseModel]) -> Callable:
+ def _build_decorator(contrib: type[BaseModel]) -> Callable:
  """Create a decorator (e.g. `@implements.reader`) to mark an object as a contrib.

  Parameters
@@ -32,24 +33,26 @@ def _build_decorator(contrib: Type[BaseModel]) -> Callable:
  """
  # build a signature based on the fields in this contribution type, mixed with
  # the fields in the CommandContribution
- contribs: Sequence[Type[BaseModel]] = (contributions.CommandContribution, contrib)
+ contribs: Sequence[type[BaseModel]] = (contributions.CommandContribution, contrib)
- params: List[Parameter] = []
+ params: list[Parameter] = []
  for contrib in contribs:
  # iterate over the fields in the contribution types
- for field in contrib.__fields__.values():
+ for name, field in contrib.model_fields.items():
  # we don't need python_name (since that will be gleaned from the function
  # we're decorating) ... and we don't need `command`, since that will just
  # be a string pointing to the contributions.commands entry that we are
  # creating here.
- if field.name not in {"python_name", "command"}:
+ if name not in {"python_name", "command"}:
  # ensure that required fields raise a TypeError if they are not provided
- default = Parameter.empty if field.required else field.get_default()
+ default = (
+ Parameter.empty if field.is_required() else field.get_default()
+ )
  # create the parameter and add it to the signature.
  param = Parameter(
- field.name,
+ name,
  Parameter.KEYWORD_ONLY,
  default=default,
- annotation=field.outer_type_ or field.type_,
+ annotation=field.annotation,
  )
  params.append(param)

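The implements.py hunk is the pydantic v1 → v2 field-introspection migration: `Model.__fields__` becomes `Model.model_fields` (a plain dict of field name → `FieldInfo`), and the per-field queries move to `FieldInfo.is_required()`, `FieldInfo.get_default()`, and `FieldInfo.annotation`. A condensed, self-contained sketch of that iteration pattern, using an illustrative `Example` model rather than an npe2 contribution type:

# Sketch of the pydantic v2 field-introspection pattern used above.
from inspect import Parameter

from pydantic import BaseModel


class Example(BaseModel):
    id: str                  # required -> Parameter.empty default
    title: str = "untitled"  # optional -> default carried over


params: list[Parameter] = []
for name, field in Example.model_fields.items():      # v1: Example.__fields__
    default = Parameter.empty if field.is_required() else field.get_default()
    params.append(
        Parameter(
            name,
            Parameter.KEYWORD_ONLY,
            default=default,
            annotation=field.annotation,               # v1: field.outer_type_
        )
    )

print([(p.name, p.default, p.annotation) for p in params])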
npe2/implements.pyi CHANGED
@@ -1,6 +1,7 @@
- from typing import Any, Callable, TypeVar
+ from collections.abc import Callable
+ from typing import Any, TypeVar

- from npe2._pydantic_compat import BaseModel as BaseModel
+ from pydantic import BaseModel as BaseModel

  from .manifest import PluginManifest as PluginManifest
  from .manifest import contributions as contributions
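
In the stub file, the redundant-looking `from pydantic import BaseModel as BaseModel` is deliberate: under PEP 484, the `import X as X` form marks a name as an explicit re-export, so type checkers keep allowing `BaseModel` to be imported from this module. A tiny illustrative stub (hypothetical module, not part of npe2):

# hypothetical _example.pyi -- the `as` alias marks BaseModel as re-exported,
# while Callable is imported only for use within the stub.
from collections.abc import Callable
from pydantic import BaseModel as BaseModel

def register(func: Callable[..., object]) -> Callable[..., object]: ...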
npe2/io_utils.py CHANGED
@@ -1,13 +1,9 @@
  from __future__ import annotations

+ from collections.abc import Sequence
  from typing import (
  TYPE_CHECKING,
- List,
  Literal,
- Optional,
- Sequence,
- Tuple,
- Union,
  cast,
  overload,
  )
@@ -23,8 +19,8 @@ if TYPE_CHECKING:


  def read(
- paths: List[str], *, stack: bool, plugin_name: Optional[str] = None
- ) -> List[LayerData]:
+ paths: list[str], *, stack: bool, plugin_name: str | None = None
+ ) -> list[LayerData]:
  """Try to read file at `path`, with plugins offering a ReaderContribution.

  Parameters
@@ -53,11 +49,11 @@ def read(


  def read_get_reader(
- path: Union[str, Sequence[str]],
+ path: str | Sequence[str],
  *,
- plugin_name: Optional[str] = None,
- stack: Optional[bool] = None,
- ) -> Tuple[List[LayerData], ReaderContribution]:
+ plugin_name: str | None = None,
+ stack: bool | None = None,
+ ) -> tuple[list[LayerData], ReaderContribution]:
  """Variant of `read` that also returns the `ReaderContribution` used."""
  if stack is None:
  # "npe1" old path
@@ -75,10 +71,10 @@ def read_get_reader(

  def write(
  path: str,
- layer_data: List[Union[FullLayerData, napari.layers.Layer]],
+ layer_data: list[FullLayerData | napari.layers.Layer],
  *,
- plugin_name: Optional[str] = None,
- ) -> List[str]:
+ plugin_name: str | None = None,
+ ) -> list[str]:
  """Write layer_data tuples to `path`.

  Parameters
@@ -107,10 +103,10 @@ def write(

  def write_get_writer(
  path: str,
- layer_data: List[Union[FullLayerData, napari.layers.Layer]],
+ layer_data: list[FullLayerData | napari.layers.Layer],
  *,
- plugin_name: Optional[str] = None,
- ) -> Tuple[List[str], WriterContribution]:
+ plugin_name: str | None = None,
+ ) -> tuple[list[str], WriterContribution]:
  """Variant of write that also returns the `WriterContribution` used."""
  return _write(path, layer_data, plugin_name=plugin_name, return_writer=True)

@@ -120,34 +116,34 @@ def write_get_writer(

  @overload
  def _read(
- paths: Union[str, Sequence[str]],
+ paths: str | Sequence[str],
  *,
  stack: bool,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_reader: Literal[False] = False,
  _pm=None,
- ) -> List[LayerData]: ...
+ ) -> list[LayerData]: ...


  @overload
  def _read(
- paths: Union[str, Sequence[str]],
+ paths: str | Sequence[str],
  *,
  stack: bool,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_reader: Literal[True],
  _pm=None,
- ) -> Tuple[List[LayerData], ReaderContribution]: ...
+ ) -> tuple[list[LayerData], ReaderContribution]: ...


  def _read(
- paths: Union[str, Sequence[str]],
+ paths: str | Sequence[str],
  *,
  stack: bool,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_reader: bool = False,
- _pm: Optional[PluginManager] = None,
- ) -> Union[Tuple[List[LayerData], ReaderContribution], List[LayerData]]:
+ _pm: PluginManager | None = None,
+ ) -> tuple[list[LayerData], ReaderContribution] | list[LayerData]:
  """Execute the `read...` functions above."""
  if _pm is None:
  _pm = PluginManager.instance()
@@ -157,9 +153,9 @@ def _read(
  chosen_compatible_readers = _get_compatible_readers_by_choice(
  plugin_name, paths, _pm
  )
- assert (
- chosen_compatible_readers
- ), "No readers to try. Expected an exception before this point."
+ assert chosen_compatible_readers, (
+ "No readers to try. Expected an exception before this point."
+ )

  for rdr in chosen_compatible_readers:
  read_func = rdr.exec(
@@ -179,7 +175,7 @@ def _read(


  def _get_compatible_readers_by_choice(
- plugin_name: Union[str, None], paths: Union[str, Sequence[str]], pm: PluginManager
+ plugin_name: str | None, paths: str | Sequence[str], pm: PluginManager
  ):
  """Returns compatible readers filtered by validated plugin choice.

@@ -263,39 +259,39 @@ def _get_compatible_readers_by_choice(
  @overload
  def _write(
  path: str,
- layer_data: List[Union[FullLayerData, napari.layers.Layer]],
+ layer_data: list[FullLayerData | napari.layers.Layer],
  *,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_writer: Literal[False] = False,
- _pm: Optional[PluginManager] = None,
- ) -> List[str]: ...
+ _pm: PluginManager | None = None,
+ ) -> list[str]: ...


  @overload
  def _write(
  path: str,
- layer_data: List[Union[FullLayerData, napari.layers.Layer]],
+ layer_data: list[FullLayerData | napari.layers.Layer],
  *,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_writer: Literal[True],
- _pm: Optional[PluginManager] = None,
- ) -> Tuple[List[str], WriterContribution]: ...
+ _pm: PluginManager | None = None,
+ ) -> tuple[list[str], WriterContribution]: ...


  def _write(
  path: str,
- layer_data: List[Union[FullLayerData, napari.layers.Layer]],
+ layer_data: list[FullLayerData | napari.layers.Layer],
  *,
- plugin_name: Optional[str] = None,
+ plugin_name: str | None = None,
  return_writer: bool = False,
- _pm: Optional[PluginManager] = None,
- ) -> Union[List[str], Tuple[List[str], WriterContribution]]:
+ _pm: PluginManager | None = None,
+ ) -> list[str] | tuple[list[str], WriterContribution]:
  if not layer_data:
  raise ValueError("Must provide layer data")
  if _pm is None:
  _pm = PluginManager.instance()

- _layer_tuples: List[FullLayerData] = [
+ _layer_tuples: list[FullLayerData] = [
  (
  cast("napari.layers.Layer", x).as_layer_data_tuple()
  if hasattr(x, "as_layer_data_tuple")
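
io_utils.py keeps its `@overload` pattern intact while the annotations move to built-in generics and `X | Y` unions (deferred here by the module's existing `from __future__ import annotations`). The overloads key the return type off a `Literal[True]`/`Literal[False]` flag, so callers passing `return_reader=True` statically get the `(layer_data, reader)` tuple. A small self-contained sketch of that flag-driven narrowing; the names are illustrative, not the npe2 signatures:

# Sketch: a Literal-typed boolean flag selects the overloaded return type,
# mirroring the _read/_write pattern above. Names are illustrative.
from __future__ import annotations

from typing import Literal, overload


@overload
def load(path: str, *, with_source: Literal[False] = False) -> list[int]: ...


@overload
def load(path: str, *, with_source: Literal[True]) -> tuple[list[int], str]: ...


def load(
    path: str, *, with_source: bool = False
) -> list[int] | tuple[list[int], str]:
    data = [1, 2, 3]  # stand-in for real reading logic
    return (data, path) if with_source else data


result = load("x.csv", with_source=True)  # checkers infer tuple[list[int], str]
print(result)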
npe2/manifest/_bases.py CHANGED
@@ -1,11 +1,10 @@
  import json
+ from collections.abc import Callable
  from contextlib import contextmanager
  from pathlib import Path
- from typing import Callable, Dict, Optional, Union

  import yaml
-
- from npe2._pydantic_compat import BaseModel, PrivateAttr
+ from pydantic import BaseModel, PrivateAttr


  class ImportExportModel(BaseModel):
@@ -17,7 +16,7 @@ class ImportExportModel(BaseModel):
  some_field: str = Field(..., always_export=True)
  """

- _source_file: Optional[Path] = PrivateAttr(None)
+ _source_file: Path | None = PrivateAttr(None)

  def toml(self, pyproject=False, **kwargs) -> str:
  """Generate serialized `toml` string for this model.
@@ -28,7 +27,7 @@ class ImportExportModel(BaseModel):
  If `True`, output will be in pyproject format, with all data under
  `tool.napari`, by default `False`.
  **kwargs
- passed to `BaseModel.json()`
+ passed to `BaseModel.model_dump_json()`
  """
  import tomli_w

@@ -43,12 +42,12 @@ class ImportExportModel(BaseModel):
  Parameters
  ----------
  **kwargs
- passed to `BaseModel.json()`
+ passed to `BaseModel.model_dump_json()`
  """
  return yaml.safe_dump(self._serialized_data(**kwargs), sort_keys=False)

  @classmethod
- def from_file(cls, path: Union[Path, str]):
+ def from_file(cls, path: Path | str):
  """Parse model from a metadata file.

  Parameters
@@ -101,20 +100,22 @@ class ImportExportModel(BaseModel):
  """using json encoders for all outputs"""
  kwargs.setdefault("exclude_unset", True)
  with self._required_export_fields_set():
- return json.loads(self.json(**kwargs))
+ return json.loads(self.model_dump_json(**kwargs))

  @contextmanager
  def _required_export_fields_set(self):
- fields = self.__fields__.items()
- required = {k for k, v in fields if v.field_info.extra.get("always_export")}
+ field_schemas = self.model_json_schema()["properties"]
+ required = {
+ k for k, v in field_schemas.items() if v.get("always_export", False)
+ }

- was_there: Dict[str, bool] = {}
+ was_there: dict[str, bool] = {}
  for f in required:
- was_there[f] = f in self.__fields_set__
- self.__fields_set__.add(f)
+ was_there[f] = f in self.model_fields_set
+ self.model_fields_set.add(f)
  try:
  yield
  finally:
  for f in required:
  if not was_there.get(f):
- self.__fields_set__.discard(f)
+ self.model_fields_set.discard(f)
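
The _bases.py changes collect the pydantic v2 renames used throughout this release: `self.json()` → `self.model_dump_json()`, `__fields_set__` → `model_fields_set`, and the `always_export` field flag is now read from `model_json_schema()` rather than `field_info.extra`. A small sketch of attaching such a flag via `json_schema_extra` and reading it back; the `Manifest` model is illustrative, not npe2's `ImportExportModel`:

# Sketch: carry an "always_export" marker through json_schema_extra and read it
# back from model_json_schema(). Illustrative model only.
import json

from pydantic import BaseModel, Field


class Manifest(BaseModel):
    name: str = Field(json_schema_extra={"always_export": True})
    description: str = ""


m = Manifest(name="my-plugin")

props = Manifest.model_json_schema()["properties"]        # v1: field_info.extra
always = {k for k, v in props.items() if v.get("always_export", False)}
print(always)                                              # {'name'}

# v1 -> v2 renames used in the hunk above:
#   self.json(...)      -> self.model_dump_json(...)
#   self.__fields_set__ -> self.model_fields_set
print(json.loads(m.model_dump_json(exclude_unset=True)))   # {'name': 'my-plugin'}
print(m.model_fields_set)                                  # {'name'}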
npe2/manifest/_npe1_adapter.py CHANGED
@@ -3,10 +3,10 @@ import logging
  import os
  import site
  import warnings
+ from collections.abc import Sequence
  from importlib import metadata
  from pathlib import Path
  from shutil import rmtree
- from typing import List, Sequence

  from platformdirs import user_cache_dir

@@ -20,7 +20,7 @@ ADAPTER_CACHE = Path(user_cache_dir("napari", "napari")) / "npe2" / "adapter_man
  NPE2_NOCACHE = "NPE2_NOCACHE"


- def clear_cache(names: Sequence[str] = ()) -> List[Path]:
+ def clear_cache(names: Sequence[str] = ()) -> list[Path]:
  """Clear cached NPE1Adapter manifests.

  Parameters
@@ -33,7 +33,7 @@ def clear_cache(names: Sequence[str] = ()) -> List[Path]:
  List[Path]
  List of filepaths cleared
  """
- _cleared: List[Path] = []
+ _cleared: list[Path] = []
  if ADAPTER_CACHE.exists():
  if names:
  for f in ADAPTER_CACHE.glob("*.yaml"):
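
This last hunk is typing-only, but the function it touches backs the `npe2 cache --clear` command shown in the cli.py diff. A rough, self-contained sketch of clearing cached YAML manifests and returning the removed paths as `list[Path]`; the cache directory and the name-matching rule below are illustrative assumptions, since the hunk ends before npe2's actual filter logic:

# Rough sketch only: remove cached *.yaml manifests, optionally filtered by name.
# CACHE_DIR and the startswith() filter are illustrative, not npe2's real values.
from collections.abc import Sequence
from pathlib import Path

CACHE_DIR = Path.home() / ".cache" / "example-adapter-manifests"


def clear_cached_manifests(names: Sequence[str] = ()) -> list[Path]:
    cleared: list[Path] = []
    if CACHE_DIR.exists():
        for f in CACHE_DIR.glob("*.yaml"):
            if not names or any(f.stem.startswith(n) for n in names):
                f.unlink()
                cleared.append(f)
    return cleared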