nmcp-precomputed 3.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. nmcp_precomputed-3.0.4/.github/workflows/publish-docker.yml +33 -0
  2. nmcp_precomputed-3.0.4/.github/workflows/publish-pypi.yml +26 -0
  3. nmcp_precomputed-3.0.4/.gitignore +167 -0
  4. nmcp_precomputed-3.0.4/Dockerfile +7 -0
  5. nmcp_precomputed-3.0.4/PKG-INFO +17 -0
  6. nmcp_precomputed-3.0.4/README.md +70 -0
  7. nmcp_precomputed-3.0.4/Taskfile.dist.yaml +33 -0
  8. nmcp_precomputed-3.0.4/dev.sh +9 -0
  9. nmcp_precomputed-3.0.4/docker-compose.yml +23 -0
  10. nmcp_precomputed-3.0.4/docker-entry.sh +9 -0
  11. nmcp_precomputed-3.0.4/pyproject.toml +38 -0
  12. nmcp_precomputed-3.0.4/src/nmcp/__init__.py +4 -0
  13. nmcp_precomputed-3.0.4/src/nmcp/__main__.py +13 -0
  14. nmcp_precomputed-3.0.4/src/nmcp/data/__init__.py +2 -0
  15. nmcp_precomputed-3.0.4/src/nmcp/data/precomputed_entry.py +10 -0
  16. nmcp_precomputed-3.0.4/src/nmcp/data/remote_data_client.py +466 -0
  17. nmcp_precomputed-3.0.4/src/nmcp/from_json.py +34 -0
  18. nmcp_precomputed-3.0.4/src/nmcp/from_service.py +32 -0
  19. nmcp_precomputed-3.0.4/src/nmcp/list_skeletons.py +24 -0
  20. nmcp_precomputed-3.0.4/src/nmcp/precomputed/__init__.py +5 -0
  21. nmcp_precomputed-3.0.4/src/nmcp/precomputed/nmcp_precomputed.py +252 -0
  22. nmcp_precomputed-3.0.4/src/nmcp/precomputed/nmcp_skeleton.py +142 -0
  23. nmcp_precomputed-3.0.4/src/nmcp/precomputed/segment_info.py +61 -0
  24. nmcp_precomputed-3.0.4/src/nmcp/precomputed/segment_property.py +33 -0
  25. nmcp_precomputed-3.0.4/src/nmcp/precomputed/segment_tag_property.py +85 -0
  26. nmcp_precomputed-3.0.4/src/nmcp/precomputed_worker.py +273 -0
  27. nmcp_precomputed-3.0.4/src/nmcp/remove_skeleton.py +21 -0
  28. nmcp_precomputed-3.0.4/version.json +4 -0
@@ -0,0 +1,33 @@
# GitHub Actions workflow: build and publish the worker Docker images on every
# push to main. The actual build/tag/push logic lives in Taskfile.dist.yaml.
name: Publish Docker Images
on:
  push:
    branches:
      - main

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # Buildx is required for the --platform linux/amd64 build in the Taskfile.
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
      # Task (taskfile.dev) runs the `release` target defined in Taskfile.dist.yaml.
      - name: Install Task
        uses: arduino/setup-task@v2
        with:
          version: 3.x
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      # NOTE(review): no step below appears to invoke Node.js — confirm whether
      # this setup step can be removed.
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.x"
      # Images are pushed to GitHub Container Registry (ghcr.io).
      - name: Login to Github Packages
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and Push Docker Images
        run: |
          chmod +x docker-entry.sh
          task release
@@ -0,0 +1,26 @@
# GitHub Actions workflow: build the sdist/wheel and publish to PyPI on every
# push to main.
name: Publish to PyPi
on:
  push:
    branches:
      - main

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # NOTE(review): checkout@v3 already checks out the pushed commit; this
      # extra pull looks redundant — confirm it is needed.
      - name: Pull latest changes
        run: git pull origin main
      # NOTE(review): actions/setup-python@v2 is deprecated; consider upgrading
      # to a current major version.
      - name: Set up Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      # Build the distribution artifacts and validate them before publishing.
      - name: Install dependencies
        run: |
          pip install --upgrade setuptools wheel twine build
          python -m build
          twine check dist/*
      # Publishes dist/* using the project token stored in repository secrets.
      - name: Publish on PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.AIND_PYPI_TOKEN }}
@@ -0,0 +1,167 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
110
+ .pdm.toml
111
+ .pdm-python
112
+ .pdm-build/
113
+
114
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115
+ __pypackages__/
116
+
117
+ # Celery stuff
118
+ celerybeat-schedule
119
+ celerybeat.pid
120
+
121
+ # SageMath parsed files
122
+ *.sage.py
123
+
124
+ # Environments
125
+ .env
126
+ .venv
127
+ env/
128
+ venv/
129
+ ENV/
130
+ env.bak/
131
+ venv.bak/
132
+
133
+ # Spyder project settings
134
+ .spyderproject
135
+ .spyproject
136
+
137
+ # Rope project settings
138
+ .ropeproject
139
+
140
+ # mkdocs documentation
141
+ /site
142
+
143
+ # mypy
144
+ .mypy_cache/
145
+ .dmypy.json
146
+ dmypy.json
147
+
148
+ # Pyre type checker
149
+ .pyre/
150
+
151
+ # pytype static type analyzer
152
+ .pytype/
153
+
154
+ # Cython debug symbols
155
+ cython_debug/
156
+
157
+ .idea/
158
+
159
+ mouse_connectivity/
160
+
161
+ # Taskfile.dist.yaml is used by the GitHub workflow. Allow a local task file to push images to other hubs, etc.
162
+ Taskfile.yaml
163
+
164
+ GEMINI.md
165
+ CLAUDE.md
166
+ AGENT.md
167
+ .claude/
@@ -0,0 +1,7 @@
# Runtime image for the NMCP precomputed worker.
FROM python:3.10

# Entry script that launches the worker and redirects logs (see docker-entry.sh).
COPY docker-entry.sh ./

# Install the published package. Test PyPI is the primary index, with the real
# PyPI as a fallback index for dependencies.
# NOTE(review): the pinned version (3.0.7) does not match this release's package
# version (3.0.4 in version metadata) — confirm the intended pin.
RUN pip install --no-cache-dir --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ nmcp-precomputed==3.0.7

CMD ["./docker-entry.sh"]
@@ -0,0 +1,17 @@
1
+ Metadata-Version: 2.4
2
+ Name: nmcp-precomputed
3
+ Version: 3.0.4
4
+ Summary: Neuron Morphology Community Portal Precomputed Toolbox
5
+ License: MIT
6
+ Requires-Python: <3.11,>=3.10
7
+ Requires-Dist: allensdk
8
+ Requires-Dist: cloud-files
9
+ Requires-Dist: cloud-volume
10
+ Requires-Dist: gql[requests]
11
+ Requires-Dist: numpy<1.24
12
+ Requires-Dist: pandas
13
+ Provides-Extra: publish
14
+ Requires-Dist: build; extra == 'publish'
15
+ Requires-Dist: twine; extra == 'publish'
16
+ Provides-Extra: test
17
+ Requires-Dist: pytest==8.2.0; extra == 'test'
@@ -0,0 +1,70 @@
1
+ # NMCP Precomputed
2
+ The NMCP Precomputed service generates reconstruction skeletons in Neuroglancer's precomputed format. It periodically
3
+ polls the API service for reconstructions that are ready for new or updated skeletons.
4
+
5
+ Given the current behavior of Neuroglancer layers and the NMCP viewer feature requirements, the service currently
6
+ generates separate data sources for the full reconstruction, as well as axon-only and dendrite-only versions.
7
+
8
+ The service uses the chunked API for acquiring reconstruction data and should be compatible with dense reconstruction
9
+ sources.
10
+
11
+ ## Local Development
12
+ A local S3-compatible service may be used during local development to avoid AWS or other cloud service storage charges.
13
+
14
+ _Note that at this time, cloud-files only appears to support this option from Linux, WSL, or MacOS._
15
+
16
+ One functional option is to run a local [MinIO](https://www.min.io/) Docker container. The following steps describe the
17
+ process (and assume the default configuration of port 9000 for MinIO). For additional details see either the MinIO
18
+ documentation, [cloud-files](https://github.com/seung-lab/cloud-files), and
19
+ [cloud-volume](https://github.com/seung-lab/cloud-volume).
20
+
21
+ A preconfigured instance is defined in `docker-compose.yml` that can be started with the following script
22
+
23
+ ```bash
24
+ docker compose -p nmcp up -d
25
+ ```
26
+
27
+ Attach to the running container to configure the precomputed bucket.
28
+
29
+ Find the container id
30
+ ```bash
31
+ docker ps
32
+ ```
33
+
34
+ Attach to the instance
35
+ ```bash
36
+ docker exec -it <container-id> /bin/bash
37
+ ```
38
+
39
+ Define an `mc` alias for the server, create the bucket, and allow public access for the Neuroglancer viewer. The
40
+ following assumes the username/password defined in `docker-compose.yml`. This is performed in the container after
41
+ attaching above, not on the host (unless you choose to install the `mc` tools on your host machine).
42
+ ```bash
43
+ mc alias set myminio http://localhost:9000 minio_root_user minio_root_password
44
+ mc mb myminio/aind-neuron-morphology-community-portal-local/ngv01/
45
+ mc anonymous set public myminio/aind-neuron-morphology-community-portal-local/ngv01
46
+ ```
47
+
48
+ Exit the container instance. On the host (in the Python environment created for this project w/cloud-files installed),
49
+ create an alias for the server.
50
+
51
+ ```bash
52
+ cloudfiles alias add minio s3://http://127.0.0.1:9000/
53
+ ```
54
+
55
+ Add the username and password as a secrets file in `~/.cloudvolume/secrets/minio-secret.json` (assumes defaults used
56
+ in the compose file).
57
+ ```json
58
+ {
59
+ "AWS_ACCESS_KEY_ID": "minio_root_user",
60
+ "AWS_SECRET_ACCESS_KEY": "minio_root_password"
61
+ }
62
+ ```
63
+
64
+ Although the bucket is set for public access in this development example, adding the authentication when generating
65
+ the precomputed data mimics the typical behavior in actual deployments.
66
+
67
+ When running the precomputed worker, pass the alias as the precomputed output argument, _e.g._, `-o minio://aind-neuron-morphology-community-portal-local/ngv01`
68
+
69
+ Here the current standard path is used as the base location (`ngv01`), however anything can be used so long as it is also
70
+ used in the `NMCP_PRECOMPUTED` environment variable for `nmcp-client`.
@@ -0,0 +1,33 @@
# Task (taskfile.dev) definitions for building and publishing the worker image.
version: '3'

vars:
  REPOSITORY: ghcr.io
  IMAGE: allenneuraldynamics/nmcp-precomputed-worker
  REPOIMAGE: "{{.REPOSITORY}}/{{.IMAGE}}"
  # The version string is read from version.json; MAJOR/MINOR/REVISION are split
  # out of it with bash array expansion (requires a bash-compatible shell).
  VERSION:
    sh: jq -r .version version.json
  MAJOR:
    sh: string={{.VERSION}} && arr=(${string//"."/ }) && echo ${arr[0]}
  MINOR:
    sh: string={{.VERSION}} && arr=(${string//"."/ }) && echo ${arr[1]}
  REVISION:
    sh: string={{.VERSION}} && arr=(${string//"."/ }) && echo ${arr[2]}
  # Tag hierarchy: :MAJOR, :MAJOR.MINOR, :MAJOR.MINOR.REVISION, and :latest.
  IMAGEWITHVERSION: "{{.REPOIMAGE}}:{{.MAJOR}}"
  IMAGEWITHMINORVERSION: "{{.IMAGEWITHVERSION}}.{{.MINOR}}"
  IMAGEWITHREVERSION: "{{.IMAGEWITHMINORVERSION}}.{{.REVISION}}"
  IMAGELATEST: "{{.REPOIMAGE}}:latest"

tasks:
  # Build the linux/amd64 image and apply every version tag.
  build:
    cmds:
      - docker build --platform linux/amd64 --tag {{.IMAGEWITHVERSION}} .
      - docker tag {{.IMAGEWITHVERSION}} {{.IMAGEWITHMINORVERSION}}
      - docker tag {{.IMAGEWITHMINORVERSION}} {{.IMAGEWITHREVERSION}}
      - docker tag {{.IMAGEWITHREVERSION}} {{.IMAGELATEST}}
  # Build, then push every tag to the registry.
  release:
    cmds:
      - task: build
      - docker push {{.IMAGEWITHVERSION}}
      - docker push {{.IMAGEWITHMINORVERSION}}
      - docker push {{.IMAGEWITHREVERSION}}
      - docker push {{.IMAGELATEST}}
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

# Start service(s) that are not typically under local development (e.g., databases), but are required to run and test services that may be under development.

# Default the compose project name when the caller has not set one.
# (The previous test was `[ -z "NMCP_COMPOSE_PROJECT" ]`, which checks a literal,
# never-empty string and is therefore always false, so the default was never
# applied; `${NMCP_COMPOSE_PROJECT}` tests the variable's value.)
if [ -z "${NMCP_COMPOSE_PROJECT}" ]; then
    export NMCP_COMPOSE_PROJECT="nmcp"
fi

# Quoted so a project name containing spaces cannot split into extra arguments.
docker compose -p "${NMCP_COMPOSE_PROJECT}" up
@@ -0,0 +1,23 @@
# Services that will be run regardless of environment and will not be built locally as part of development.
services:
  # Local S3-compatible object store (MinIO) used in place of cloud storage
  # during development (see README for bucket setup).
  s3:
    image: minio/minio:latest
    hostname: nmcp-s3
    ports:
      - "9000:9000"   # S3 API endpoint
      - "9001:9001"   # MinIO web console
    environment:
      MINIO_ROOT_USER: minio_root_user
      MINIO_ROOT_PASSWORD: minio_root_password
    volumes:
      - s3:/data
    command: [ "server", "--console-address", ":9001", "/data" ]
    networks:
      - back_tier
    restart: unless-stopped

volumes:
  s3:

networks:
  back_tier:
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

# Container entry point: run the precomputed worker module and append all of its
# output to a timestamped log file under /var/log/nmcp.

# Timestamp used to name this run's log file.
logName=$(date '+%Y-%m-%d_%H-%M-%S');

mkdir -p /var/log/nmcp

export PYTHONPATH=$PWD

# Quote the environment-supplied arguments so values containing spaces or shell
# metacharacters (e.g. the authentication key) are passed through intact instead
# of being word-split or glob-expanded.
python -m nmcp -u "$GRAPHQL_URL" -a "$SERVER_AUTHENTICATION_KEY" -o "$PRECOMPUTED_OUTPUT" >> /var/log/nmcp/nmcp-precomputed-${logName}.log 2>&1
@@ -0,0 +1,38 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/nmcp"]

[tool.hatch.build.targets.sdist]
exclude = [
    "/tests"
]

[project]
name = "nmcp-precomputed"
version = "3.0.4"
description = "Neuron Morphology Community Portal Precomputed Toolbox"
license = { text = "MIT" }
# NOTE(review): presumably the allensdk dependency drives both the Python <3.11
# and numpy <1.24 upper bounds — confirm before relaxing either.
requires-python = ">=3.10,<3.11"
dependencies = [
    "numpy<1.24",
    "pandas",
    "gql[requests]",
    "allensdk",
    "cloud-volume",
    "cloud-files"
]

[tool.pytest.ini_options]
# Suppress DeprecationWarnings emitted by third-party dependencies during tests.
addopts = ["-W ignore::DeprecationWarning"]

[project.optional-dependencies]
test = [
    "pytest==8.2.0"
]
publish = [
    "build",
    "twine"
]
@@ -0,0 +1,4 @@
1
+ from .precomputed import SegmentInfo, SegmentProperty, SegmentTagProperty, SomaSegmentTagProperty, NmcpPropertyValues
2
+ from .precomputed import (ensure_bucket_folders, create_from_json_files, create_from_dict, create_from_data,
3
+ remove_skeleton, list_skeletons, extract_neuron_properties, SkeletonComponents)
4
+ from .data import RemoteDataClient, PrecomputedEntry
@@ -0,0 +1,13 @@
"""Command-line entry point for the precomputed worker (``python -m nmcp``)."""
import argparse

from .precomputed_worker import main


def _parse_args() -> argparse.Namespace:
    """Build the worker's argument parser and parse the command line."""
    parser = argparse.ArgumentParser(
        prog="nmcp",
        description="Generate Neuroglancer precomputed skeletons from the NMCP API service.",
    )
    parser.add_argument("-u", "--url", help="URL of the GraphQL service")
    parser.add_argument("-a", "--authkey", help="authorization header for GraphQL service")
    parser.add_argument("-o", "--output", help="the output cloud volume location")
    return parser.parse_args()


if __name__ == "__main__":
    # ``python -m nmcp`` runs this module with __name__ == "__main__", so the
    # guard preserves CLI behavior while preventing argument parsing (and the
    # worker launch) from running as an import side effect.
    args = _parse_args()
    main(args.url, args.authkey, args.output)
@@ -0,0 +1,2 @@
1
+ from .remote_data_client import RemoteDataClient
2
+ from .precomputed_entry import PrecomputedEntry
@@ -0,0 +1,10 @@
from dataclasses import dataclass


@dataclass
class PrecomputedEntry:
    """A single precomputed-skeleton record tracked by the worker.

    Field names are camelCase, presumably mirroring the GraphQL API payload
    they are populated from — confirm against RemoteDataClient usage.
    """

    # Unique identifier of this precomputed entry.
    id: str
    # Integer skeleton identifier.
    skeletonId: int
    # Entry version; None when no version has been assigned yet.
    version: int | None
    # Identifier of the source reconstruction this entry belongs to.
    reconstructionId: str
    # Generation time (presumably an epoch timestamp — confirm); None when the
    # skeleton has not yet been generated.
    generatedAt: float | None
+ generatedAt: float | None