mikro-next 0.19.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mikro_next-0.19.0/.gitignore +147 -0
- mikro_next-0.19.0/PKG-INFO +181 -0
- mikro_next-0.19.0/README.md +156 -0
- mikro_next-0.19.0/mikro_next/__init__.py +32 -0
- mikro_next-0.19.0/mikro_next/api/__init__.py +1 -0
- mikro_next-0.19.0/mikro_next/api/project.json +215 -0
- mikro_next-0.19.0/mikro_next/api/schema.graphql +4499 -0
- mikro_next-0.19.0/mikro_next/api/schema.py +9068 -0
- mikro_next-0.19.0/mikro_next/arkitekt.py +117 -0
- mikro_next-0.19.0/mikro_next/contrib/__init__.py +0 -0
- mikro_next-0.19.0/mikro_next/contrib/fakts/__init__.py +0 -0
- mikro_next-0.19.0/mikro_next/contrib/fakts/datalayer.py +40 -0
- mikro_next-0.19.0/mikro_next/datalayer.py +76 -0
- mikro_next-0.19.0/mikro_next/deployments/test/configs/mikro_next.yaml +36 -0
- mikro_next-0.19.0/mikro_next/deployments/test/configs/minio.yaml +10 -0
- mikro_next-0.19.0/mikro_next/deployments/test/docker-compose.yml +45 -0
- mikro_next-0.19.0/mikro_next/deployments/test/public_key.pem +9 -0
- mikro_next-0.19.0/mikro_next/errors.py +14 -0
- mikro_next-0.19.0/mikro_next/funcs.py +68 -0
- mikro_next-0.19.0/mikro_next/io/__init__.py +0 -0
- mikro_next-0.19.0/mikro_next/io/download.py +158 -0
- mikro_next-0.19.0/mikro_next/io/errors.py +22 -0
- mikro_next-0.19.0/mikro_next/io/types.py +62 -0
- mikro_next-0.19.0/mikro_next/io/upload.py +216 -0
- mikro_next-0.19.0/mikro_next/links/__init__.py +6 -0
- mikro_next-0.19.0/mikro_next/links/errors.py +0 -0
- mikro_next-0.19.0/mikro_next/links/upload.py +309 -0
- mikro_next-0.19.0/mikro_next/mikro_next.py +33 -0
- mikro_next-0.19.0/mikro_next/rath.py +64 -0
- mikro_next-0.19.0/mikro_next/rekuest.py +145 -0
- mikro_next-0.19.0/mikro_next/scalars.py +804 -0
- mikro_next-0.19.0/mikro_next/traits.py +531 -0
- mikro_next-0.19.0/mikro_next/utils.py +51 -0
- mikro_next-0.19.0/mikro_next/widgets.py +76 -0
- mikro_next-0.19.0/pyproject.toml +114 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
# Byte-compiled / optimized / DLL files
|
|
2
|
+
__pycache__/
|
|
3
|
+
**/__pycache__
|
|
4
|
+
*.py[cod]
|
|
5
|
+
*$py.class
|
|
6
|
+
*.pyc
|
|
7
|
+
|
|
8
|
+
# C extensions
|
|
9
|
+
*.so
|
|
10
|
+
|
|
11
|
+
# Distribution / packaging
|
|
12
|
+
.Python
|
|
13
|
+
build/
|
|
14
|
+
develop-eggs/
|
|
15
|
+
dist/
|
|
16
|
+
downloads/
|
|
17
|
+
eggs/
|
|
18
|
+
.eggs/
|
|
19
|
+
lib/
|
|
20
|
+
lib64/
|
|
21
|
+
parts/
|
|
22
|
+
sdist/
|
|
23
|
+
var/
|
|
24
|
+
wheels/
|
|
25
|
+
pip-wheel-metadata/
|
|
26
|
+
share/python-wheels/
|
|
27
|
+
*.egg-info/
|
|
28
|
+
.installed.cfg
|
|
29
|
+
*.egg
|
|
30
|
+
|
|
31
|
+
# PyInstaller
|
|
32
|
+
# Usually these files are written by a python script from a template
|
|
33
|
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
34
|
+
|
|
35
|
+
*.spec
|
|
36
|
+
|
|
37
|
+
# Installer logs
|
|
38
|
+
pip-log.txt
|
|
39
|
+
pip-delete-this-directory.txt
|
|
40
|
+
|
|
41
|
+
# Unit test / coverage reports
|
|
42
|
+
htmlcov/
|
|
43
|
+
.tox/
|
|
44
|
+
.nox/
|
|
45
|
+
.coverage
|
|
46
|
+
.coverage.*
|
|
47
|
+
.cache
|
|
48
|
+
nosetests.xml
|
|
49
|
+
coverage.xml
|
|
50
|
+
*.cover
|
|
51
|
+
*.py,cover
|
|
52
|
+
.hypothesis/
|
|
53
|
+
.pytest_cache/
|
|
54
|
+
cover/
|
|
55
|
+
|
|
56
|
+
# Translations
|
|
57
|
+
*.mo
|
|
58
|
+
*.pot
|
|
59
|
+
|
|
60
|
+
# Django stuff:
|
|
61
|
+
*.log
|
|
62
|
+
local_settings.py
|
|
63
|
+
db.sqlite3
|
|
64
|
+
db.sqlite3-journal
|
|
65
|
+
|
|
66
|
+
# Flask stuff:
|
|
67
|
+
instance/
|
|
68
|
+
.webassets-cache
|
|
69
|
+
|
|
70
|
+
# Scrapy stuff:
|
|
71
|
+
.scrapy
|
|
72
|
+
|
|
73
|
+
# Sphinx documentation
|
|
74
|
+
docs/_build/
|
|
75
|
+
|
|
76
|
+
# PyBuilder
|
|
77
|
+
.pybuilder/
|
|
78
|
+
target/
|
|
79
|
+
|
|
80
|
+
# Jupyter Notebook
|
|
81
|
+
.ipynb_checkpoints
|
|
82
|
+
|
|
83
|
+
# IPython
|
|
84
|
+
profile_default/
|
|
85
|
+
ipython_config.py
|
|
86
|
+
|
|
87
|
+
# pyenv
|
|
88
|
+
# For a library or package, you might want to ignore these files since the code is
|
|
89
|
+
# intended to run in multiple environments; otherwise, check them in:
|
|
90
|
+
# .python-version
|
|
91
|
+
|
|
92
|
+
# pipenv
|
|
93
|
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
|
94
|
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
|
95
|
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
|
96
|
+
# install all needed dependencies.
|
|
97
|
+
#Pipfile.lock
|
|
98
|
+
|
|
99
|
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
|
100
|
+
__pypackages__/
|
|
101
|
+
|
|
102
|
+
# Celery stuff
|
|
103
|
+
celerybeat-schedule
|
|
104
|
+
celerybeat.pid
|
|
105
|
+
|
|
106
|
+
# SageMath parsed files
|
|
107
|
+
*.sage.py
|
|
108
|
+
|
|
109
|
+
# Environments
|
|
110
|
+
.env
|
|
111
|
+
.venv
|
|
112
|
+
env/
|
|
113
|
+
venv/
|
|
114
|
+
ENV/
|
|
115
|
+
env.bak/
|
|
116
|
+
venv.bak/
|
|
117
|
+
|
|
118
|
+
# Spyder project settings
|
|
119
|
+
.spyderproject
|
|
120
|
+
.spyproject
|
|
121
|
+
|
|
122
|
+
# Rope project settings
|
|
123
|
+
.ropeproject
|
|
124
|
+
|
|
125
|
+
# mkdocs documentation
|
|
126
|
+
/site
|
|
127
|
+
|
|
128
|
+
# mypy
|
|
129
|
+
.mypy_cache/
|
|
130
|
+
.dmypy.json
|
|
131
|
+
dmypy.json
|
|
132
|
+
|
|
133
|
+
# Pyre type checker
|
|
134
|
+
.pyre/
|
|
135
|
+
|
|
136
|
+
# pytype static type analyzer
|
|
137
|
+
.pytype/
|
|
138
|
+
|
|
139
|
+
# Cython debug symbols
|
|
140
|
+
cython_debug/
|
|
141
|
+
|
|
142
|
+
# static files generated from Django application using `collectstatic`
|
|
143
|
+
media
|
|
144
|
+
export
|
|
145
|
+
static_collected
|
|
146
|
+
data
|
|
147
|
+
token.temp
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mikro-next
|
|
3
|
+
Version: 0.19.0
|
|
4
|
+
Summary: next images for arkitekt
|
|
5
|
+
Author-email: jhnnsrs <jhnnsrs@gmail.com>
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Requires-Python: >=3.11
|
|
8
|
+
Requires-Dist: dask>=2024.12.1
|
|
9
|
+
Requires-Dist: fakts-next>=2
|
|
10
|
+
Requires-Dist: numcodecs>=0.16.1
|
|
11
|
+
Requires-Dist: numpy>=1.26; python_version ~= '3.12'
|
|
12
|
+
Requires-Dist: pandas>=2.2
|
|
13
|
+
Requires-Dist: pytest>=8.3.5
|
|
14
|
+
Requires-Dist: rath>=3.7
|
|
15
|
+
Requires-Dist: s3fs>=2024.10.0
|
|
16
|
+
Requires-Dist: websockets>=15.0.1
|
|
17
|
+
Requires-Dist: xarray>=2024.11.0
|
|
18
|
+
Requires-Dist: zarr>=3.1
|
|
19
|
+
Provides-Extra: complete
|
|
20
|
+
Requires-Dist: pyarrow>=12.0.1; extra == 'complete'
|
|
21
|
+
Provides-Extra: table
|
|
22
|
+
Requires-Dist: pyarrow>=12.0.1; extra == 'table'
|
|
23
|
+
Provides-Extra: turms
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
|
|
26
|
+
# mikro
|
|
27
|
+
|
|
28
|
+
[](https://codecov.io/gh/arkitektio/mikro-next)
|
|
29
|
+
[](https://pypi.org/project/mikro-next/)
|
|
30
|
+
[](https://pypi.org/project/mikro-next/)
|
|
31
|
+

|
|
32
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
33
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
34
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
35
|
+
|
|
36
|
+
mikro-next is the python client for the next version of the mikro-server environment.
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# Quick Start
|
|
40
|
+
|
|
41
|
+
Let's discover **mikro in less than 5 minutes**.
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
### Inspiration
|
|
45
|
+
|
|
46
|
+
Mikro is the client app for the mikro-server, a graphql compliant server for hosting your microscopy data. Mikro tries to
|
|
47
|
+
facilitate a transition to use modern technologies for the storage and retrieval of microscopy data. It emphasizes the importance
|
|
48
|
+
of relations within your data and tries to make them accessible through a GraphQL Interface.
|
|
49
|
+
|
|
50
|
+
### Installation
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
pip install mikro-next
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
### Design
|
|
57
|
+
|
|
58
|
+
Mikro is just a client and therefore only concerns itself with the querying (retrieval) and mutation (altering) of data on
|
|
59
|
+
the central server. Therefore its only composes two major components:
|
|
60
|
+
|
|
61
|
+
- Rath: A graphql client to query complex relationships in your data through simple queries.
|
|
62
|
+
- Datalayer: A way of accessing and retrieving binary data (image arrays, big tables,...) through known python apis like xarray and numpy
|
|
63
|
+
|
|
64
|
+
Under the hood Mikro is build on the growing ecosystem of graphql and pydantic as well as the amazing toolstack
|
|
65
|
+
of zarr, dask and xarray for scientific computation.
|
|
66
|
+
|
|
67
|
+
### Features
|
|
68
|
+
|
|
69
|
+
- Easy to extend with custom graphql logic (together with turms can generate APIs for very complex relationship)
|
|
70
|
+
- Interoperable and standardization (has bindings for Dataframes and Numpy arrays)
|
|
71
|
+
- Fully Typed and Validated(uses pydantic for validation)
|
|
72
|
+
|
|
73
|
+
### Prerequisits
|
|
74
|
+
|
|
75
|
+
You need a fully configured mikro-server running in your lab, that mikro can connect to. The easiest way to do this is to
|
|
76
|
+
use the [arkitekt.live](https://arkitekt.live) platform, which provides a fully managed mikro-server for your lab. Just
|
|
77
|
+
follow the instructions on the website to get started. If you just want a local test service, check out the
|
|
78
|
+
tests/integration/docker-compose.yml file, which contains a docker-compose file to start a mikro-server
|
|
79
|
+
locally with a postgres database and a minio object storage.
|
|
80
|
+
|
|
81
|
+
## Example Use case
|
|
82
|
+
|
|
83
|
+
The API of Mikro is best explained on this example:
|
|
84
|
+
|
|
85
|
+
```python
|
|
86
|
+
from arkitekt_next import easy
|
|
87
|
+
from mikro_next.api.schema import get_random_image
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
with easy("my-app") as app:
|
|
91
|
+
g = get_random_image()
|
|
92
|
+
|
|
93
|
+
maximum_intensity_l = g.data.max()
|
|
94
|
+
maximum_intensity = maximum_intensity_l.compute()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
1. **First we construct an App**:
|
|
98
|
+
App is the entrypoint of every client accessing the mikro service,
|
|
99
|
+
in a more complex example here you would define the configuration of
|
|
100
|
+
the connection. In this example we use the `easy` function to
|
|
101
|
+
construct an arkitekt-app with a default configuration.
|
|
102
|
+
|
|
103
|
+
2. **Entering the Context**:
|
|
104
|
+
This is the most important concept to learn, every interaction you have with
|
|
105
|
+
mikro needs to happen within a context. This is needed because mikro uses
|
|
106
|
+
asynchronous programming to retrieve and save data efficiently. The context
|
|
107
|
+
ensures that every connection gets cleaned up efficiently and safely.
|
|
108
|
+
|
|
109
|
+
3. **Retrieving Model**:
|
|
110
|
+
On calling `get_random_image` we are calling the graphql server and retrieve
|
|
111
|
+
the metadata of a random image. This function just
|
|
112
|
+
executes a default graphqlquery and constructs a typed python model out of it.
|
|
113
|
+
|
|
114
|
+
4. **Retrieving Data**:
|
|
115
|
+
Here we are actually doing operations on the image data. Every Image
|
|
116
|
+
has a `data` attribute. This data attribute resolves to a lazily loaded
|
|
117
|
+
xarray that connects to a zarr store on the s3 datalayer. What that means for you
|
|
118
|
+
is that you can use this as a normal xarray with dask array.
|
|
119
|
+
|
|
120
|
+
5. **Computing Data**
|
|
121
|
+
Only on Computing Data is the data actually downloaded from the datalayer. If you
|
|
122
|
+
only act on partial data, only partial data is downloaded. This is the magic of
|
|
123
|
+
zarr and xarray.
|
|
124
|
+
|
|
125
|
+
## Other usage options
|
|
126
|
+
|
|
127
|
+
If you dont want to use a context manager you can also choose to
|
|
128
|
+
use the connect/disconnect methods:
|
|
129
|
+
|
|
130
|
+
```python
|
|
131
|
+
from arkitekt_next import easy
|
|
132
|
+
from mikro_next.api.schema import get_image
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
app = easy()
|
|
136
|
+
app.enter()
|
|
137
|
+
|
|
138
|
+
g = get_image(107)
|
|
139
|
+
|
|
140
|
+
maximum_intensity = g.data.max().compute()
|
|
141
|
+
|
|
142
|
+
#later
|
|
143
|
+
app.exit()
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
```
|
|
147
|
+
:::warning
|
|
148
|
+
If you choose this approach, make sure that you call disconnect in your code at some
|
|
149
|
+
stage. Especially when using asynchronous links/transports (supporting subscriptions) in a sync
|
|
150
|
+
environment,as only on disconnect we will close the threaded loop that these transports required
|
|
151
|
+
to operate. Otherwise this connection will stay open.
|
|
152
|
+
:::
|
|
153
|
+
|
|
154
|
+
# Async Usage:
|
|
155
|
+
|
|
156
|
+
If you love asyncio, the way we do, you can also take full control over what happens in your app
|
|
157
|
+
within an asynchronous loop. Actually this is the API we would recommend.
|
|
158
|
+
|
|
159
|
+
```python
|
|
160
|
+
from mikro import MikroApp, aget_representation
|
|
161
|
+
from fakts import Fakts
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
app = MikroApp()
|
|
165
|
+
|
|
166
|
+
async with app:
|
|
167
|
+
g = await aget_representation(107)
|
|
168
|
+
|
|
169
|
+
maximum_intensity = g.data.max() # DO NOT DO THIS IN YOUR ASYNC LOOP
|
|
170
|
+
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
:::warning
|
|
174
|
+
|
|
175
|
+
In this scenario we are using the asyncio event loop and do not spawn a seperate thread, so calling
|
|
176
|
+
g.data.max() actually calculates the array (e.g downloads everything blockingly in this loop)
|
|
177
|
+
|
|
178
|
+
:::
|
|
179
|
+
|
|
180
|
+
If you want to know more about why we use apps, composition and how we handle threads, check out koil
|
|
181
|
+
(mikros async-sync-helper library)
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
# mikro
|
|
2
|
+
|
|
3
|
+
[](https://codecov.io/gh/arkitektio/mikro-next)
|
|
4
|
+
[](https://pypi.org/project/mikro-next/)
|
|
5
|
+
[](https://pypi.org/project/mikro-next/)
|
|
6
|
+

|
|
7
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
8
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
9
|
+
[](https://pypi.python.org/pypi/mikro-next/)
|
|
10
|
+
|
|
11
|
+
mikro-next is the python client for the next version of the mikro-server environment.
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# Quick Start
|
|
15
|
+
|
|
16
|
+
Let's discover **mikro in less than 5 minutes**.
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
### Inspiration
|
|
20
|
+
|
|
21
|
+
Mikro is the client app for the mikro-server, a graphql compliant server for hosting your microscopy data. Mikro tries to
|
|
22
|
+
facilitate a transition to use modern technologies for the storage and retrieval of microscopy data. It emphasizes the importance
|
|
23
|
+
of relations within your data and tries to make them accessible through a GraphQL Interface.
|
|
24
|
+
|
|
25
|
+
### Installation
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
pip install mikro-next
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
### Design
|
|
32
|
+
|
|
33
|
+
Mikro is just a client and therefore only concerns itself with the querying (retrieval) and mutation (altering) of data on
|
|
34
|
+
the central server. Therefore its only composes two major components:
|
|
35
|
+
|
|
36
|
+
- Rath: A graphql client to query complex relationships in your data through simple queries.
|
|
37
|
+
- Datalayer: A way of accessing and retrieving binary data (image arrays, big tables,...) through known python apis like xarray and numpy
|
|
38
|
+
|
|
39
|
+
Under the hood Mikro is build on the growing ecosystem of graphql and pydantic as well as the amazing toolstack
|
|
40
|
+
of zarr, dask and xarray for scientific computation.
|
|
41
|
+
|
|
42
|
+
### Features
|
|
43
|
+
|
|
44
|
+
- Easy to extend with custom graphql logic (together with turms can generate APIs for very complex relationship)
|
|
45
|
+
- Interoperable and standardization (has bindings for Dataframes and Numpy arrays)
|
|
46
|
+
- Fully Typed and Validated(uses pydantic for validation)
|
|
47
|
+
|
|
48
|
+
### Prerequisites
|
|
49
|
+
|
|
50
|
+
You need a fully configured mikro-server running in your lab, that mikro can connect to. The easiest way to do this is to
|
|
51
|
+
use the [arkitekt.live](https://arkitekt.live) platform, which provides a fully managed mikro-server for your lab. Just
|
|
52
|
+
follow the instructions on the website to get started. If you just want a local test service, check out the
|
|
53
|
+
tests/integration/docker-compose.yml file, which contains a docker-compose file to start a mikro-server
|
|
54
|
+
locally with a postgres database and a minio object storage.
|
|
55
|
+
|
|
56
|
+
## Example Use case
|
|
57
|
+
|
|
58
|
+
The API of Mikro is best explained on this example:
|
|
59
|
+
|
|
60
|
+
```python
|
|
61
|
+
from arkitekt_next import easy
|
|
62
|
+
from mikro_next.api.schema import get_random_image
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
with easy("my-app") as app:
|
|
66
|
+
g = get_random_image()
|
|
67
|
+
|
|
68
|
+
maximum_intensity_l = g.data.max()
|
|
69
|
+
maximum_intensity = maximum_intensity.compute()
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
1. **First we construct an App**:
|
|
73
|
+
App is the entrypoint of every client accessing the mikro service,
|
|
74
|
+
in a more complex example here you would define the configuration of
|
|
75
|
+
the connection. In this example we use the `easy` function to
|
|
76
|
+
construct an arkitekt-app with a default configuration.
|
|
77
|
+
|
|
78
|
+
2. **Entering the Context**:
|
|
79
|
+
This is the most important concept to learn, every interaction you have with
|
|
80
|
+
mikro needs to happen within a context. This is needed because mikro uses
|
|
81
|
+
asynchronous programming to retrieve and save data efficiently. The context
|
|
82
|
+
ensures that every connection gets cleaned up efficiently and safely.
|
|
83
|
+
|
|
84
|
+
3. **Retrieving Model**:
|
|
85
|
+
On calling `get_random_image` we are calling the graphql server and retrieve
|
|
86
|
+
the metadata of a random image. This function just
|
|
87
|
+
executes a default graphqlquery and constructs a typed python model out of it.
|
|
88
|
+
|
|
89
|
+
4. **Retrieving Data**:
|
|
90
|
+
Here we are actually doing operations on the image data. Every Image
|
|
91
|
+
has a `data` attribute. This data attribute resolves to a lazily loaded
|
|
92
|
+
xarray that connects to a zarr store on the s3 datalayer. What that means for you
|
|
93
|
+
is that you can use this as a normal xarray with dask array.
|
|
94
|
+
|
|
95
|
+
5. **Computing Data**
|
|
96
|
+
Only on Computing Data is the data actually downloaded from the datalayer. If you
|
|
97
|
+
only act on partial data, only partial data is downloaded. This is the magic of
|
|
98
|
+
zarr and xarray.
|
|
99
|
+
|
|
100
|
+
## Other usage options
|
|
101
|
+
|
|
102
|
+
If you dont want to use a context manager you can also choose to
|
|
103
|
+
use the connect/disconnect methods:
|
|
104
|
+
|
|
105
|
+
```python
|
|
106
|
+
from arkitekt_next import easy
|
|
107
|
+
from mikro_next.api.schema import get_image
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
app = easy()
|
|
111
|
+
app.enter()
|
|
112
|
+
|
|
113
|
+
g = get_image(107)
|
|
114
|
+
|
|
115
|
+
maximum_intensity = g.data.max().compute()
|
|
116
|
+
|
|
117
|
+
#later
|
|
118
|
+
app.exit()
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
```
|
|
122
|
+
:::warning
|
|
123
|
+
If you choose this approach, make sure that you call disconnect in your code at some
|
|
124
|
+
stage. Especially when using asynchronous links/transports (supporting subscriptions) in a sync
|
|
125
|
+
environment,as only on disconnect we will close the threaded loop that these transports required
|
|
126
|
+
to operate. Otherwise this connection will stay open.
|
|
127
|
+
:::
|
|
128
|
+
|
|
129
|
+
# Async Usage:
|
|
130
|
+
|
|
131
|
+
If you love asyncio, the way we do, you can also take full control over what happens in your app
|
|
132
|
+
within an asynchronous loop. Actually this is the API we would recommend.
|
|
133
|
+
|
|
134
|
+
```python
|
|
135
|
+
from mikro import MikroApp, aget_representation
|
|
136
|
+
from fakts import Fakts
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
app = MikroApp()
|
|
140
|
+
|
|
141
|
+
async with app:
|
|
142
|
+
g = await aget_representation(107)
|
|
143
|
+
|
|
144
|
+
maximum_intensity = g.data.max() # DO NOT DO THIS IN YOUR ASYNC LOOP
|
|
145
|
+
|
|
146
|
+
```
|
|
147
|
+
|
|
148
|
+
:::warning
|
|
149
|
+
|
|
150
|
+
In this scenario we are using the asyncio event loop and do not spawn a seperate thread, so calling
|
|
151
|
+
g.data.max() actually calculates the array (e.g downloads everything blockingly in this loop)
|
|
152
|
+
|
|
153
|
+
:::
|
|
154
|
+
|
|
155
|
+
If you want to know more about why we use apps, composition and how we handle threads, check out koil
|
|
156
|
+
(mikros async-sync-helper library)
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"""Public package interface for mikro_next.

Re-exports the core client (:class:`MikroNext`, :func:`rechunk`) and the
optional integrations (the arkitekt ``MikroService`` and the rekuest
``structure_reg``) when their dependencies are installed.
"""

import logging

from .mikro_next import MikroNext
from .utils import rechunk

logger = logging.getLogger(__name__)

try:
    from .arkitekt import MikroService
except ImportError as e:
    # The arkitekt integration is optional.  Distinguish two cases:
    # "arkitekt is simply not installed" (silently skip the integration)
    # vs. "arkitekt is installed but MikroService still failed to import"
    # (a real problem that must surface).
    try:
        import arkitekt  # noqa: F401  # probe only; presence check
    except ImportError:
        # arkitekt itself is absent -> the optional service is unavailable,
        # which is expected and fine.
        pass
    else:
        # BUG FIX: the original raised this ImportError *inside* the inner
        # `try`, so it was immediately swallowed by the inner
        # `except ImportError: pass` and never reached the user.  Raising
        # from the `else` clause lets the diagnostic actually propagate.
        raise ImportError(
            "Arkitekt is installed, but the MikroService could not be imported. This may indicate a version mismatch or missing dependencies."
        ) from e


try:
    from .rekuest import structure_reg
except ImportError as e:
    # The rekuest integration is optional as well.  Log at debug level
    # instead of print()-ing so importing this library does not write to
    # stdout (leftover debug prints removed).
    logger.debug("Could not import structure_reg: %s", e)


__all__ = [
    "MikroNext",
    "rechunk",
    "structure_reg",
    "MikroService",
]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|