unidas 0.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unidas-0.0.0/.github/ISSUE_TEMPLATE/bug_report.md +27 -0
- unidas-0.0.0/.github/ISSUE_TEMPLATE/config.yml +8 -0
- unidas-0.0.0/.github/pull_request_template.md +21 -0
- unidas-0.0.0/.github/workflows/lint.yml +26 -0
- unidas-0.0.0/.github/workflows/runtests.yml +65 -0
- unidas-0.0.0/.github/workflows/upload_pypi.yml +25 -0
- unidas-0.0.0/.gitignore +161 -0
- unidas-0.0.0/.pre-commit-config.yaml +26 -0
- unidas-0.0.0/LICENSE +21 -0
- unidas-0.0.0/PKG-INFO +158 -0
- unidas-0.0.0/README.md +119 -0
- unidas-0.0.0/pyproject.toml +136 -0
- unidas-0.0.0/src/unidas.py +764 -0
- unidas-0.0.0/test/conftest.py +50 -0
- unidas-0.0.0/test/test_unidas.py +297 -0
unidas-0.0.0/.github/ISSUE_TEMPLATE/bug_report.md
ADDED
@@ -0,0 +1,27 @@
---
name: Bug report
about: Report a bug or unexpected behavior
title: ''
labels: bug
assignees: ''
---

## Description
<!--
Provide a clear description of the issue.
-->

## Example
<!--
Include a short example that reproduces the issue, if applicable. If you can share the data file, that will be very helpful.
-->

## Expected behavior
<!--
Describe the expected behavior, if applicable.
-->

## Versions
- OS [e.g. Ubuntu 20.04]:
- Unidas Version [e.g. 0.0.5]:
- Python Version [e.g. 3.10]:
unidas-0.0.0/.github/ISSUE_TEMPLATE/config.yml
ADDED
@@ -0,0 +1,8 @@
blank_issues_enabled: true
contact_links:
  - name: Ask a question
    url: https://github.com/DASDAE/unidas/discussions/categories/q-a
    about: Please ask and answer questions in the discussion board
  - name: Share an idea, a missing feature or anything else
    url: https://github.com/DASDAE/unidas/discussions/
    about: Please give us any feedback in the discussion board
unidas-0.0.0/.github/pull_request_template.md
ADDED
@@ -0,0 +1,21 @@
<!--
Thanks for contributing to Unidas; community contributions are most welcome!
-->

## Description

<!--
Please describe your PR here. What problem are you trying to solve, or what feature are you adding?

Also link any relevant issues/discussions (this can be done using the issue/discussion number preceded by a
pound sign, e.g. `#12` without the backticks)
-->

## Checklist

I have (if applicable):

- [ ] referenced the GitHub issue this PR closes.
- [ ] documented the new feature with docstrings or an appropriate doc page.
- [ ] included a test.
- [ ] added the "ready_for_review" tag once the PR is ready to be reviewed.
unidas-0.0.0/.github/workflows/lint.yml
ADDED
@@ -0,0 +1,26 @@
# Lint the code using the defined pre-commits
name: LintCode
on: [push]

jobs:
  lint_code:
    runs-on: ubuntu-latest

    # only run if CI isn't turned off
    if: github.event_name == 'push' || !contains(github.event.pull_request.labels.*.name, 'no_ci')

    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: install linting packages
        run: uv tool install pre-commit

      - name: run all precommits
        run: uv tool run pre-commit run --all
unidas-0.0.0/.github/workflows/runtests.yml
ADDED
@@ -0,0 +1,65 @@
# Run full test suite using conda env and all optional deps.
name: TestCode
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

env:
  # used to manually trigger cache reset. Just increment if needed.
  CACHE_NUMBER: 1

# Cancel previous runs when this one starts.
concurrency:
  group: TestCode-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

jobs:
  # Runs the tests on combinations of the supported python/os matrix.
  test_code:

    timeout-minutes: 25
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: ['3.10', '3.11', "3.12"]

    # only run if CI isn't turned off
    if: github.event_name == 'push' || !contains(github.event.pull_request.labels.*.name, 'no_ci')

    env:
      # set conda environment file with dependencies
      env_file: "environment.yml"

    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - name: run pytest
        run: uv run --all-extras --python ${{ matrix.python-version }} pytest -s --cov src --cov-append --cov-report=xml

      # Runs examples in docstrings
      - name: test docstrings
        run: uv run --all-extras --python ${{ matrix.python-version }} pytest src --doctest-modules

      # Upload coverage files
      - uses: codecov/codecov-action@v5
        with:
          fail_ci_if_error: false
          files: ./coverage.xml
          flags: unittests
          name: PR_tests
          token: ${{ secrets.CODECOV_TOKEN }}


      # This is a very useful step for debugging, it allows you to ssh into the CI
      # machine (https://github.com/marketplace/actions/debugging-with-tmate).
      #
      #- name: Setup tmate session
      #  uses: mxschmitt/action-tmate@v3
unidas-0.0.0/.github/workflows/upload_pypi.yml
ADDED
@@ -0,0 +1,25 @@
# Upload to PyPI when new code lands in main.
name: PublishPackage
on:
  push:
    branches:
      - main

jobs:
  upload:
    runs-on: ubuntu-latest
    environment: pypi
    permissions:
      # This must be enabled for trusted publishing.
      id-token: write
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - name: build and publish
        shell: bash -l {0}
        run: |
          uv build
          uv publish --trusted-publishing always
unidas-0.0.0/.gitignore
ADDED
@@ -0,0 +1,161 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
.idea/

# uv stuff
uv.lock
unidas-0.0.0/.pre-commit-config.yaml
ADDED
@@ -0,0 +1,26 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: check-merge-conflict
      - id: mixed-line-ending
        args: ['--fix=lf']

  # Ruff is a replacement for flake8 and many other linters (much faster too)
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.4.8
    hooks:
      - id: ruff
        args: ["--fix"]
      # Run the formatter.
      - id: ruff-format

  # ensures __future__ import annotations at top of files which require it
  # for the typing features they are using.
  - repo: https://github.com/frostming/fix-future-annotations
    rev: 0.5.0
    hooks:
      - id: fix-future-annotations
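The hooks above can also be run locally with the standard pre-commit CLI; a minimal sketch (the LintCode workflow wraps the same tool with `uv tool`):

```bash
# Install the git hooks defined in .pre-commit-config.yaml into this clone.
pre-commit install

# Run every configured hook against the whole repository.
pre-commit run --all-files
```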
unidas-0.0.0/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 unidas developers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
unidas-0.0.0/PKG-INFO
ADDED
@@ -0,0 +1,158 @@
Metadata-Version: 2.4
Name: unidas
Version: 0.0.0
Summary: A DAS compatibility library
Project-URL: Bug Tracker, https://github.com/unidas-dev/unidas
Project-URL: Documentation, https://github.com/unidas-dev/unidas
Project-URL: Homepage, https://github.com/unidas-dev/unidas
Author-email: Derrick Chambers <chambers.ja.derrick@gmail.com>
License-File: LICENSE
Keywords: distributed-acoustic-sensing,geophysics
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Scientific/Engineering
Requires-Python: >=3.10
Requires-Dist: numpy
Provides-Extra: dev
Requires-Dist: dascore; extra == 'dev'
Requires-Dist: daspy-toolbox; extra == 'dev'
Requires-Dist: lightguide; extra == 'dev'
Requires-Dist: numpy<2; extra == 'dev'
Requires-Dist: pooch; extra == 'dev'
Requires-Dist: pre-commit; extra == 'dev'
Requires-Dist: pytest; extra == 'dev'
Requires-Dist: pytest-cov; extra == 'dev'
Requires-Dist: ruff; extra == 'dev'
Requires-Dist: xdas; extra == 'dev'
Provides-Extra: extras
Requires-Dist: dascore; extra == 'extras'
Requires-Dist: daspy-toolbox; extra == 'extras'
Requires-Dist: lightguide; extra == 'extras'
Requires-Dist: numpy<2; extra == 'extras'
Requires-Dist: xdas; extra == 'extras'
Provides-Extra: test
Requires-Dist: pooch; extra == 'test'
Requires-Dist: pre-commit; extra == 'test'
Requires-Dist: pytest; extra == 'test'
Requires-Dist: pytest-cov; extra == 'test'
Requires-Dist: ruff; extra == 'test'
Description-Content-Type: text/markdown

# unidas

[](https://codecov.io/gh/dasdae/unidas)
[](https://pypi.python.org/pypi/unidas)
[](https://pypi.python.org/pypi/unidas)
[](https://opensource.org/license/mit)

A DAS compatibility package.

There is an increasing number of open-source libraries for working with distributed acoustic sensing (DAS) data. Each of these has its own strengths and weaknesses, and often it is desirable to use features from multiple libraries in research workflows. Moreover, creators of DAS packages that perform specific operations (e.g., machine learning for phase picking) currently have to choose a single DAS library to support, or write conversion code on their own.

Unidas solves these problems by providing simple ways to interoperate between DAS libraries.

## Usage

There are two ways to use unidas. First, the `adapter` decorator allows a function to declare which library's data structure it expects.

```python
import unidas


@unidas.adapter("daspy.Section")
def daspy_function(sec, **kwargs):
    """A useful daspy function"""
    # Regardless of the actual input type, adapter will convert it to a daspy section
    # then convert it back after the return.
    return sec


import dascore as dc

patch = dc.get_example_patch()
# even though we call a daspy function, the input/output is a dascore patch.
out = daspy_function(patch)
assert isinstance(out, dc.Patch)
```

You can also use `adapter` to wrap existing, un-wrapped functions.

```python
import dascore as dc
import unidas
from xdas.signal import hilbert

dascore_hilbert = unidas.adapter("xdas.DataArray")(hilbert)
patch = dc.get_example_patch()

patch_hilberto = dascore_hilbert(patch)
```

The `convert` function converts one library's data structure to another's.

```python
import daspy
import unidas

# Use lightguide's afk filter with a daspy section.
sec = daspy.read()
blast = unidas.convert(sec, to="lightguide.Blast")
blast.afk_filter(exponent=0.8)
sec_out = unidas.convert(blast, to='daspy.Section')
```

## Installation
Simply install unidas with pip or mamba:

```bash
pip install unidas
```

```bash
mamba install unidas
```

By design, unidas has no hard dependencies other than numpy, but an `ImportError` will be raised if the libraries needed to perform a requested conversion are not installed.

Unidas is a single file (src/unidas.py), so it can also be vendored (copied directly into your project). If you do this, please consider sharing any improvements so the entire community can benefit.

## Guidance for package developers
If you are creating/maintaining a library for doing some kind of specialized DAS processing in Python, we recommend you do two things:

1. Pick the DAS library you prefer and use it internally.
2. Apply the `adapter` decorator to your project's API.

Doing so will make your project easily accessible by users of all the libraries supported by unidas.

For example:

```python
import unidas

@unidas.adapter("daspy.Section")
def fancy_machine_learning_function(sec):
    """Cutting edge machine learning DAS research function."""
    # Here we will use daspy internally, but the function accepts
    # data structures from other libraries with no additional effort
    # because of the adapter decorator.

    ... # Fancy stuff goes here.

    return sec
```

## Adding support for new libraries to unidas

To add support for a new data structure/library, you need to do three things:

1. Create a subclass of `Converter` which has (at least) a conversion method to unidas' BaseDAS.
2. Add a conversion method to UnidasBasDASConverter to convert from unidas' BaseDAS back to your data structure.
3. Write a test in test/test_unidas.py (this is important for maintainability).

Feel free to open a discussion if you need help.

## Supported libraries (in alphabetical order)

- [DASCore](https://github.com/DASDAE/dascore)
- [DASPy](https://github.com/HMZ-03/DASPy)
- [lightguide](https://github.com/pyrocko/lightguide)
- [Xdas](https://github.com/xdas-dev/xdas)
unidas-0.0.0/README.md
ADDED
@@ -0,0 +1,119 @@
# unidas

[](https://codecov.io/gh/dasdae/unidas)
[](https://pypi.python.org/pypi/unidas)
[](https://pypi.python.org/pypi/unidas)
[](https://opensource.org/license/mit)

A DAS compatibility package.

There is an increasing number of open-source libraries for working with distributed acoustic sensing (DAS) data. Each of these has its own strengths and weaknesses, and often it is desirable to use features from multiple libraries in research workflows. Moreover, creators of DAS packages that perform specific operations (e.g., machine learning for phase picking) currently have to choose a single DAS library to support, or write conversion code on their own.

Unidas solves these problems by providing simple ways to interoperate between DAS libraries.

## Usage

There are two ways to use unidas. First, the `adapter` decorator allows a function to declare which library's data structure it expects.

```python
import unidas


@unidas.adapter("daspy.Section")
def daspy_function(sec, **kwargs):
    """A useful daspy function"""
    # Regardless of the actual input type, adapter will convert it to a daspy section
    # then convert it back after the return.
    return sec


import dascore as dc

patch = dc.get_example_patch()
# even though we call a daspy function, the input/output is a dascore patch.
out = daspy_function(patch)
assert isinstance(out, dc.Patch)
```

You can also use `adapter` to wrap existing, un-wrapped functions.

```python
import dascore as dc
import unidas
from xdas.signal import hilbert

dascore_hilbert = unidas.adapter("xdas.DataArray")(hilbert)
patch = dc.get_example_patch()

patch_hilberto = dascore_hilbert(patch)
```

The `convert` function converts one library's data structure to another's.

```python
import daspy
import unidas

# Use lightguide's afk filter with a daspy section.
sec = daspy.read()
blast = unidas.convert(sec, to="lightguide.Blast")
blast.afk_filter(exponent=0.8)
sec_out = unidas.convert(blast, to='daspy.Section')
```

## Installation
Simply install unidas with pip or mamba:

```bash
pip install unidas
```

```bash
mamba install unidas
```

By design, unidas has no hard dependencies other than numpy, but an `ImportError` will be raised if the libraries needed to perform a requested conversion are not installed.
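Those optional conversion libraries are grouped under the `extras` extra declared in the package metadata (see PKG-INFO above), so one way to install them all at once is:

```bash
pip install "unidas[extras]"
```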

Unidas is a single file (src/unidas.py), so it can also be vendored (copied directly into your project). If you do this, please consider sharing any improvements so the entire community can benefit.
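As a sketch, vendoring can be as simple as copying that one module into your own source tree; the destination path and the raw URL below are placeholders (substitute the repository and revision you actually want to pin):

```bash
# Copy the single-module implementation into your project (placeholder paths).
curl -L -o your_package/_vendor/unidas.py \
  "https://raw.githubusercontent.com/<org>/unidas/main/src/unidas.py"
```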

## Guidance for package developers
If you are creating/maintaining a library for doing some kind of specialized DAS processing in Python, we recommend you do two things:

1. Pick the DAS library you prefer and use it internally.
2. Apply the `adapter` decorator to your project's API.

Doing so will make your project easily accessible by users of all the libraries supported by unidas.

For example:

```python
import unidas

@unidas.adapter("daspy.Section")
def fancy_machine_learning_function(sec):
    """Cutting edge machine learning DAS research function."""
    # Here we will use daspy internally, but the function accepts
    # data structures from other libraries with no additional effort
    # because of the adapter decorator.

    ... # Fancy stuff goes here.

    return sec
```

## Adding support for new libraries to unidas

To add support for a new data structure/library, you need to do three things (a sketch follows the list):

1. Create a subclass of `Converter` which has (at least) a conversion method to unidas' BaseDAS.
2. Add a conversion method to UnidasBasDASConverter to convert from unidas' BaseDAS back to your data structure.
3. Write a test in test/test_unidas.py (this is important for maintainability).
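Below is a rough sketch of step 1. Every name in it (`MyDASConverter`, the `to_base_das` method, the arguments passed to `BaseDAS`, and the import locations) is a hypothetical placeholder; the real base class, method names, and registration mechanism live in src/unidas.py and take precedence.

```python
# Hypothetical sketch only: the method name, BaseDAS signature, and import
# locations below are placeholders, not the actual unidas API.
import numpy as np

from unidas import BaseDAS, Converter  # assumed import locations


class MyDASConverter(Converter):
    """Convert mylibrary.MyDAS objects to unidas' intermediate BaseDAS."""

    name = "mylibrary.MyDAS"  # assumed: the "<library>.<Class>" string convert() matches on

    def to_base_das(self, my_das):
        # Pull the raw array and metadata out of the source object; the
        # attribute names on my_das are made up for illustration.
        return BaseDAS(data=np.asarray(my_das.data), attrs=dict(my_das.metadata))
```

A matching method on UnidasBasDASConverter (step 2) would do the reverse: rebuild a `mylibrary.MyDAS` from the data and metadata carried by the BaseDAS instance.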

Feel free to open a discussion if you need help.

## Supported libraries (in alphabetical order)

- [DASCore](https://github.com/DASDAE/dascore)
- [DASPy](https://github.com/HMZ-03/DASPy)
- [lightguide](https://github.com/pyrocko/lightguide)
- [Xdas](https://github.com/xdas-dev/xdas)