water-column-sonar-processing 0.0.9__tar.gz → 0.0.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of water-column-sonar-processing might be problematic.
- water_column_sonar_processing-0.0.11/.env-test +18 -0
- water_column_sonar_processing-0.0.11/.github/workflows/test_action.yaml +24 -0
- water_column_sonar_processing-0.0.11/.gitignore +195 -0
- water_column_sonar_processing-0.0.11/.pre-commit-config.yaml +36 -0
- water_column_sonar_processing-0.0.11/.python-version +2 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/PKG-INFO +8 -2
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/README.md +7 -1
- water_column_sonar_processing-0.0.11/open-science-data-federation/ml/autoencoder_example.py +94 -0
- water_column_sonar_processing-0.0.11/open-science-data-federation/osdf_examples/foo.ipynb +65 -0
- water_column_sonar_processing-0.0.11/open-science-data-federation/osdf_examples/sonar_ai.ipynb +1241 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/pyproject.toml +7 -23
- water_column_sonar_processing-0.0.11/pytest.ini +13 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/requirements.txt +1 -1
- water_column_sonar_processing-0.0.11/requirements_dev.txt +10 -0
- water_column_sonar_processing-0.0.11/tests/conftest.py +23 -0
- water_column_sonar_processing-0.0.11/tests/example_input_bucket/example_directory/foo.txt +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_raw_to_zarr.py +28 -12
- water_column_sonar_processing-0.0.11/tests/test_resources/index/calibrated_cruises.csv +277 -0
- water_column_sonar_processing-0.0.11/tests/test_resources/raw_to_zarr/D20070724-T042400.bot +0 -0
- water_column_sonar_processing-0.0.11/tests/test_resources/raw_to_zarr/D20070724-T042400.idx +0 -0
- water_column_sonar_processing-0.0.11/tests/test_resources/raw_to_zarr/D20070724-T042400.raw +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/dynamodb_manager.py +15 -11
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/s3_manager.py +63 -42
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/cruise/create_empty_zarr_store.py +1 -1
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/geometry/geometry_manager.py +5 -7
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/model/zarr_manager.py +14 -10
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/processing/raw_to_zarr.py +49 -42
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing.egg-info/PKG-INFO +8 -2
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing.egg-info/SOURCES.txt +16 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing.egg-info/requires.txt +1 -1
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/LICENSE +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/setup.cfg +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_create_empty_zarr_store.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_dynamodb_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_geometry_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_geometry_simplification.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_index.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_pmtile_generation.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_process.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_resample_regrid.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_s3_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_s3fs_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_s3fs_with_moto.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_sns_sqs_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/tests/test_zarr_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/s3fs_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/sns_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/aws/sqs_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/cruise/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/cruise/resample_regrid.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/geometry/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/geometry/geometry_simplification.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/geometry/pmtile_generation.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/index/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/index/index_manager.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/model/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/process.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/processing/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/processing/cruise_sampler.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/utility/__init__.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/utility/cleaner.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/utility/constants.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/utility/pipeline_status.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing/utility/timestamp.py +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing.egg-info/dependency_links.txt +0 -0
- {water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/water_column_sonar_processing.egg-info/top_level.txt +0 -0

water_column_sonar_processing-0.0.11/.env-test
@@ -0,0 +1,18 @@
+# DOTENV Values used for Testing
+# See here for more information: <https://github.com/theskumar/python-dotenv>
+
+AWS_REGION="us-east-1"
+
+ACCESS_KEY_ID="ACCESS_KEY_ID_123"
+SECRET_ACCESS_KEY="SECRET_ACCESS_KEY_456"
+
+OUTPUT_BUCKET_ACCESS_KEY="OUTPUT_BUCKET_ACCESS_KEY_ABC"
+OUTPUT_BUCKET_SECRET_ACCESS_KEY="OUTPUT_BUCKET_SECRET_ACCESS_KEY_123"
+
+INPUT_BUCKET_NAME="test-input-bucket"
+
+OUTPUT_BUCKET_NAME="test-output-bucket"
+
+TABLE_NAME="test-table"
+
+TOPIC_ARN="arn:aws:sns:${AWS_REGION}:123456789012:test-topic"
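
The .env-test file above holds only placeholder values for the test suite; python-dotenv (linked in its header comment) is what reads them into the environment. Below is a minimal, illustrative sketch of how a test helper might load this file — the function name and return structure are assumptions for illustration, not code from the package:

```
# Illustrative only: load the placeholder test credentials from .env-test.
import os

from dotenv import load_dotenv  # python-dotenv


def load_test_environment(env_path: str = ".env-test") -> dict:
    """Read the dotenv file and return the values the tests care about."""
    load_dotenv(dotenv_path=env_path)  # populates os.environ
    return {
        "region": os.environ["AWS_REGION"],
        "input_bucket": os.environ["INPUT_BUCKET_NAME"],
        "output_bucket": os.environ["OUTPUT_BUCKET_NAME"],
        "table": os.environ["TABLE_NAME"],
        "topic_arn": os.environ["TOPIC_ARN"],
    }


if __name__ == "__main__":
    print(load_test_environment())
```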

water_column_sonar_processing-0.0.11/.github/workflows/test_action.yaml
@@ -0,0 +1,24 @@
+name: Python package
+
+on: [push]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          # Semantic version range syntax or exact version of a Python version
+          python-version: '3.10'
+          # Optional - x64 or x86 architecture, defaults to x64
+          # architecture: 'x64'
+          cache: 'pip'
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements_dev.txt
+      - name: Run the tests
+        run: python -m pytest

water_column_sonar_processing-0.0.11/.gitignore
@@ -0,0 +1,195 @@
+#tests/*.zarr
+#tests/*.bot
+#tests/*.raw
+#tests/*.json
+*.geojson
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+dist*
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+__pycache__
+.DS_Store
+
+*.env-prod*
+*.zarr
+*.csv
+
+*.raw
+*.bot
+*.idx
+
+.idea
+.pytest_cache
+.coverage
+*.iml
+#.python-version
+target
+coverage.xml
+htmlcov
+dist
+build
+*.egg-info
+/setup.py
+*.spec
+

water_column_sonar_processing-0.0.11/.pre-commit-config.yaml
@@ -0,0 +1,36 @@
+repos:
+  - repo: local
+    hooks:
+      - id: trufflehog
+        name: TruffleHog
+        description: Detect secrets in your data.
+        entry: bash -c 'trufflehog git file://. --since-commit HEAD --no-verification --fail --no-update'
+        language: system
+        stages: [ "pre-commit", "pre-push" ]
+
+#  - repo: https://github.com/psf/black
+#    rev: 24.10.0
+#    hooks:
+#      - id: black
+
+#  - repo: https://github.com/PyCQA/flake8
+#    rev: 7.1.1
+#    hooks:
+#      - id: flake8
+
+#  - repo: https://github.com/astral-sh/ruff-pre-commit
+#    # Ruff version.
+#    rev: v0.7.2
+#    hooks:
+#      # Run the linter.
+#      - id: ruff
+#        args: [ --fix ]
+#      # Run the formatter.
+#      - id: ruff-format
+
+#  - repo: https://github.com/pycqa/isort
+#    rev: 5.13.2
+#    hooks:
+#      - id: isort
+#        name: isort (python)
+#        args: ["--profile", "black", "--filter-files"]

{water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: water_column_sonar_processing
-Version: 0.0.9
+Version: 0.0.11
 Summary: A processing tool for water column sonar data.
 Author-email: Rudy Klucik <rudy.klucik@noaa.gov>
 Project-URL: Homepage, https://github.com/CI-CMG/water-column-sonar-processing
@@ -24,7 +24,7 @@ Requires-Dist: numcodecs==0.13.1
 Requires-Dist: numpy==1.26.4
 Requires-Dist: pandas==2.2.3
 Requires-Dist: pyarrow==18.1.0
-Requires-Dist: python-dotenv==1.0.0
+Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: requests==2.32.3
 Requires-Dist: s3fs==2023.12.1
 Requires-Dist: scipy==1.14.1
@@ -114,6 +114,7 @@ python -m twine upload --repository pypi dist/*
 ```
 
 # Pre Commit Hook
+see here for installation: https://pre-commit.com/
 https://dev.to/rafaelherik/using-trufflehog-and-pre-commit-hook-to-prevent-secret-exposure-edo
 ```
 pre-commit install --allow-missing-config
@@ -132,3 +133,8 @@ https://colab.research.google.com/drive/1KiLMueXiz9WVB9o4RuzYeGjNZ6PsZU7a#scroll
 20241125
 5 failed, 35 passed, 3 skipped, 1 warning in 9.71s
 3 failed, 38 passed, 3 skipped, 1 warning in 7.24s
+
+
+# TODO:
+add https://pypi.org/project/setuptools-scm/
+for extracting the version

{water_column_sonar_processing-0.0.9 → water_column_sonar_processing-0.0.11}/README.md
@@ -78,6 +78,7 @@ python -m twine upload --repository pypi dist/*
 ```
 
 # Pre Commit Hook
+see here for installation: https://pre-commit.com/
 https://dev.to/rafaelherik/using-trufflehog-and-pre-commit-hook-to-prevent-secret-exposure-edo
 ```
 pre-commit install --allow-missing-config
@@ -95,4 +96,9 @@ https://colab.research.google.com/drive/1KiLMueXiz9WVB9o4RuzYeGjNZ6PsZU7a#scroll
 8 failed, 32 passed, 3 skipped, 1 warning in 6.92s
 20241125
 5 failed, 35 passed, 3 skipped, 1 warning in 9.71s
-3 failed, 38 passed, 3 skipped, 1 warning in 7.24s
+3 failed, 38 passed, 3 skipped, 1 warning in 7.24s
+
+
+# TODO:
+add https://pypi.org/project/setuptools-scm/
+for extracting the version
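
Both README.md and PKG-INFO pick up the same TODO about setuptools-scm for deriving the version from source control instead of hard-coding it. As a rough, hedged illustration (not the project's current setup), a version managed by the build backend would typically be read back at runtime from the installed package metadata:

```
# Illustrative only: read the installed package version from its metadata,
# which is where a setuptools-scm-derived version would end up.
from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version("water_column_sonar_processing")
except PackageNotFoundError:
    # Not installed (e.g. running from a plain source checkout).
    __version__ = "0.0.0+unknown"

print(__version__)
```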

water_column_sonar_processing-0.0.11/open-science-data-federation/ml/autoencoder_example.py
@@ -0,0 +1,94 @@
+# https://keras.io/examples/vision/autoencoder/
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+from keras import layers
+from keras.datasets import mnist
+from keras.models import Model
+
+
+def preprocess(array):
+    """Normalizes the supplied array and reshapes it."""
+    array = array.astype("float32") / 255.0
+    array = np.reshape(array, (len(array), 28, 28, 1))
+    return array
+
+
+def noise(array):
+    """Adds random noise to each image in the supplied array."""
+    noise_factor = 0.4
+    noisy_array = array + noise_factor * np.random.normal(
+        loc=0.0, scale=1.0, size=array.shape
+    )
+
+    return np.clip(noisy_array, 0.0, 1.0)
+
+
+def display(array1, array2):
+    """Displays ten random images from each array."""
+    n = 10
+    indices = np.random.randint(len(array1), size=n)
+    images1 = array1[indices, :]
+    images2 = array2[indices, :]
+
+    plt.figure(figsize=(20, 4))
+    for i, (image1, image2) in enumerate(zip(images1, images2)):
+        ax = plt.subplot(2, n, i + 1)
+        plt.imshow(image1.reshape(28, 28))
+        plt.gray()
+        ax.get_xaxis().set_visible(False)
+        ax.get_yaxis().set_visible(False)
+
+        ax = plt.subplot(2, n, i + 1 + n)
+        plt.imshow(image2.reshape(28, 28))
+        plt.gray()
+        ax.get_xaxis().set_visible(False)
+        ax.get_yaxis().set_visible(False)
+
+    plt.show()
+
+
+# Since we only need images from the dataset to encode and decode, we
+# won't use the labels.
+(train_data, _), (test_data, _) = mnist.load_data()
+
+# Normalize and reshape the data
+train_data = preprocess(train_data)
+test_data = preprocess(test_data)
+
+# Create a copy of the data with added noise
+noisy_train_data = noise(train_data)
+noisy_test_data = noise(test_data)
+
+# Display the train data and a version of it with added noise
+display(train_data, noisy_train_data)
+
+
+input = layers.Input(shape=(28, 28, 1))
+
+# Encoder
+x = layers.Conv2D(32, (3, 3), activation="relu", padding="same")(input)
+x = layers.MaxPooling2D((2, 2), padding="same")(x)
+x = layers.Conv2D(32, (3, 3), activation="relu", padding="same")(x)
+x = layers.MaxPooling2D((2, 2), padding="same")(x)
+
+# Decoder
+x = layers.Conv2DTranspose(32, (3, 3), strides=2, activation="relu", padding="same")(x)
+x = layers.Conv2DTranspose(32, (3, 3), strides=2, activation="relu", padding="same")(x)
+x = layers.Conv2D(1, (3, 3), activation="sigmoid", padding="same")(x)
+
+# Autoencoder
+autoencoder = Model(input, x)
+autoencoder.compile(optimizer="adam", loss="binary_crossentropy")
+autoencoder.summary()
+
+
+autoencoder.fit(
+    x=train_data,
+    y=train_data,
+    epochs=50,
+    batch_size=128,
+    shuffle=True,
+    validation_data=(test_data, test_data),
+)
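
The new autoencoder_example.py stops after fitting the model on the clean MNIST images. A short, assumed continuation (following the linked Keras tutorial, not part of the packaged file) shows how the helpers defined above could be reused to inspect reconstructions and train the denoising variant:

```
# Assumed continuation of autoencoder_example.py; relies on the autoencoder,
# display(), and the clean/noisy arrays defined in the file above.

# Compare clean test images with their reconstructions.
predictions = autoencoder.predict(test_data)
display(test_data, predictions)

# Denoising variant: fit noisy inputs against clean targets, then denoise.
autoencoder.fit(
    x=noisy_train_data,
    y=train_data,
    epochs=50,
    batch_size=128,
    shuffle=True,
    validation_data=(noisy_test_data, test_data),
)
display(noisy_test_data, autoencoder.predict(noisy_test_data))
```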

water_column_sonar_processing-0.0.11/open-science-data-federation/osdf_examples/foo.ipynb
@@ -0,0 +1,65 @@
+{
+ "cells": [
+  {
+   "metadata": {},
+   "cell_type": "raw",
+   "source": "",
+   "id": "bd6b0dd9ea15dfe2"
+  },
+  {
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-11-14T16:07:00.035002Z",
+     "start_time": "2024-11-14T16:07:00.033231Z"
+    }
+   },
+   "cell_type": "code",
+   "source": "",
+   "id": "1ac76554b86e9eb0",
+   "outputs": [],
+   "execution_count": null
+  },
+  {
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2024-11-14T16:07:00.100948Z",
+     "start_time": "2024-11-14T16:07:00.098301Z"
+    }
+   },
+   "cell_type": "code",
+   "source": "",
+   "id": "dbbb1a940f83f73c",
+   "outputs": [],
+   "execution_count": null
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": "",
+   "id": "7042e6cf142a6f09"
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}