paddle 1.1.3.tar.gz → 1.1.5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {paddle-1.1.3 → paddle-1.1.5}/.bumpversion.cfg +1 -1
- {paddle-1.1.3 → paddle-1.1.5}/.github/workflows/ci.yml +1 -1
- paddle-1.1.5/.gitignore +207 -0
- paddle-1.1.5/LICENSE +21 -0
- paddle-1.1.5/Makefile +26 -0
- {paddle-1.1.3 → paddle-1.1.5}/PKG-INFO +51 -7
- paddle-1.1.5/README.md +51 -0
- paddle-1.1.5/docker/.dockerignore +8 -0
- paddle-1.1.5/docker/Dockerfile +131 -0
- paddle-1.1.5/docker/entrypoint.sh +57 -0
- paddle-1.1.5/docker/nvim/init.lua +32 -0
- paddle-1.1.5/docker/nvim/lua/plugins.lua +89 -0
- paddle-1.1.5/docker/scripts/git-done +96 -0
- paddle-1.1.5/docker/scripts/git-send +45 -0
- paddle-1.1.5/docker-compose.override.yaml +0 -0
- paddle-1.1.5/docker-compose.yaml +29 -0
- {paddle-1.1.3 → paddle-1.1.5}/pyproject.toml +5 -5
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/__init__.py +1 -1
- paddle-1.1.5/src/paddle/nc2pt.py +39 -0
- paddle-1.1.5/src/paddle/pt2nc.py +124 -0
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/setup_profile.py +6 -3
- paddle-1.1.5/tests/.gitignore +2 -0
- {paddle-1.1.3 → paddle-1.1.5}/tests/test_saturn_adiabat.py +3 -2
- paddle-1.1.3/.gitignore +0 -13
- paddle-1.1.3/README.md +0 -8
- {paddle-1.1.3 → paddle-1.1.5}/.github/workflows/bump-and-tag.yaml +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/.github/workflows/cd.yml +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/.pre-commit-config.yaml +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/crm.py +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/evolve_kinetics.py +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/find_init_params.py +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/src/paddle/write_profile.py +0 -0
- {paddle-1.1.3 → paddle-1.1.5}/tests/data/saturn1d.yaml +0 -0
paddle-1.1.5/.gitignore
ADDED
@@ -0,0 +1,207 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+#uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+#poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+#pdm.lock
+#pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+#pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Cursor
+# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
+# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
+# refer to https://docs.cursor.com/context/ignore-files
+.cursorignore
+.cursorindexingignore
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
paddle-1.1.5/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Elijah Mullens
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
paddle-1.1.5/Makefile
ADDED
@@ -0,0 +1,26 @@
+ENV_FILE := .env
+UID := $$(id -u)
+GID := $$(id -g)
+
+.PHONY: env up down build
+
+env:
+	@echo "USER=$$(id -un)" > $(ENV_FILE)
+	@echo "USER_UID=$$(id -u)" >> $(ENV_FILE)
+	@echo "USER_GID=$$(id -g)" >> $(ENV_FILE)
+	@echo "Wrote $(ENV_FILE):"; cat $(ENV_FILE)
+
+up: env
+	@docker compose up -d
+
+down:
+	@docker compose down
+
+ps:
+	@docker compose ps
+
+start:
+	@docker compose exec --user $(UID):$(GID) dev bash
+
+build: env
+	@docker compose up -d --build dev
{paddle-1.1.3 → paddle-1.1.5}/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 2.4
 Name: paddle
-Version: 1.1.3
-Summary:
+Version: 1.1.5
+Summary: Python Atmospheric Dynamics: Discovery and Learning about Exoplanets. An open-source, user-friendly python frontend of canoe
 Project-URL: Homepage, https://github.com/elijah-mullens/paddle
 Project-URL: Repository, https://github.com/elijah-mullens/paddle
 Project-URL: Issues, https://github.com/elijah-mullens/paddle/issues
-Author-email:
+Author-email: Elijah Mullens <eem85@cornell.edu>, Cheng Li <chengcli@umich.edu>
+License-File: LICENSE
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
@@ -21,18 +22,61 @@ Classifier: Topic :: Scientific/Engineering :: Astronomy
 Classifier: Topic :: Scientific/Engineering :: Atmospheric Science
 Classifier: Topic :: Scientific/Engineering :: Physics
 Requires-Python: >=3.9
-Requires-Dist: kintera>=1.
-Requires-Dist: snapy>=0.
+Requires-Dist: kintera>=1.1.5
+Requires-Dist: snapy>=0.8.2
 Requires-Dist: torch<=2.7.1,>=2.7.0
 Provides-Extra: dev
 Requires-Dist: pytest>=7; extra == 'dev'
 Description-Content-Type: text/markdown
 
 # paddle
+Python Atmospheric Dynamics: Discovering and Learning about Exoplanets. An open-source, user-friendly python version of canoe.
 
-
+## Install docker and docker-compose plugin
 
-## Install
 
+## Create a python virtual environment
+```bash
+python -m venv pyenv
+```
+
+# Create a docker container
+```bash
+make up
+```
+
+# Terminate a docker container
+```bash
+make down
+```
+
+# Start a docker container
+```bash
+make start
+```
+
+# Build a new docker image (rarely used)
+```bash
+make build
+```
+
+## For Development
+### Cache your github credential
+```bash
+git config credential.helper 'cache --timeout=86400'
+```
+
+### Install paddle package
 ```bash
 pip install paddle
+```
+
+### Install pre-commit hook
+```bash
+pip install pre-commit
+```
+
+### Install pre-commit hook
+```bash
+pre-commit install
+```
paddle-1.1.5/README.md
ADDED
@@ -0,0 +1,51 @@
+# paddle
+Python Atmospheric Dynamics: Discovering and Learning about Exoplanets. An open-source, user-friendly python version of canoe.
+
+## Install docker and docker-compose plugin
+
+
+## Create a python virtual environment
+```bash
+python -m venv pyenv
+```
+
+# Create a docker container
+```bash
+make up
+```
+
+# Terminate a docker container
+```bash
+make down
+```
+
+# Start a docker container
+```bash
+make start
+```
+
+# Build a new docker image (rarely used)
+```bash
+make build
+```
+
+## For Development
+### Cache your github credential
+```bash
+git config credential.helper 'cache --timeout=86400'
+```
+
+### Install paddle package
+```bash
+pip install paddle
+```
+
+### Install pre-commit hook
+```bash
+pip install pre-commit
+```
+
+### Install pre-commit hook
+```bash
+pre-commit install
+```
paddle-1.1.5/docker/Dockerfile
ADDED
@@ -0,0 +1,131 @@
+# syntax=docker/dockerfile:1.7
+ARG CUDA_VER=12.9.1
+ARG UBUNTU_VER=22.04
+
+############################
+# Base CUDA toolchain
+############################
+FROM nvidia/cuda:${CUDA_VER}-devel-ubuntu${UBUNTU_VER} AS base
+
+ENV DEBIAN_FRONTEND=noninteractive \
+    TZ=UTC
+
+# System deps (C++ toolchain, Python, venv, git, neovim, etc.)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    gosu tini \
+    build-essential \
+    gcc g++ gfortran \
+    ninja-build \
+    cmake \
+    git curl ca-certificates \
+    python3 python3-pip python3-venv python3-dev \
+    pkg-config \
+    libssl-dev \
+    ripgrep fd-find unzip neovim \
+    clangd \
+    nodejs npm \
+    && rm -rf /var/lib/apt/lists/*
+
+# Make fd available as `fd` (Ubuntu calls it fdfind)
+RUN update-alternatives --install /usr/bin/fd fd /usr/bin/fdfind 50
+
+# Default editor env
+ENV EDITOR=nvim VISUAL=nvim
+
+# System-wide git editor default (users can override)
+RUN git config --system core.editor "nvim"
+
+# Pre-warm plugins (optional, speeds first start)
+# This runs Neovim headless to install plugins via lazy.nvim.
+RUN nvim --headless "+Lazy! sync" +qa || true
+
+############################
+# Convenient command-line tools
+############################
+
+# ---- after base tooling is installed, before switching USER ----
+# Create a tools dir and put it on PATH
+RUN mkdir -p /opt/tools
+ENV PATH="/opt/tools:${PATH}"
+
+# Copy all helper scripts
+COPY docker/scripts/ /opt/tools/
+# Ensure executables
+RUN find /opt/tools -type f -name "*.sh" -exec chmod +x {} \; \
+    && for f in /opt/tools/*.sh; do ln -sf "$f" "/usr/local/bin/$(basename "${f%.sh}")"; done
+# The symlink makes command-line tools available (no .sh needed)
+
+############################
+# Configure non-root user and Python venv
+############################
+
+# Python venv in /opt/venv (global, fast, easy)
+RUN python3 -m venv /opt/venv
+ENV VIRTUAL_ENV=/opt/venv
+ENV PATH="/opt/venv/bin:${PATH}"
+
+# Upgrade pip/setuptools/wheel early
+RUN pip install --upgrade pip setuptools wheel build
+
+# Optional: pin pip resolver behavior and set default index/extra-index if needed
+# COPY docker/pip.conf /etc/pip.conf
+
+# Put a base Neovim config in /etc/skel so future users get it,
+# and also install for the existing dev user.
+#RUN mkdir -p /etc/skel/.config/nvim
+#COPY docker/nvim/ /etc/skel/.config/nvim/
+#RUN mkdir -p /home/${USERNAME}/.config/nvim && \
+#    cp -r /etc/skel/.config/nvim/* /home/${USERNAME}/.config/nvim/ && \
+#    chown -R ${USERNAME}:${USERNAME} /home/${USERNAME}/.config/nvim
+
+############################
+# Cache Python wheels separately (optional but recommended)
+############################
+FROM base AS wheels
+WORKDIR /tmp/wheels
+# If you use requirements.txt:
+COPY docker/requirements.txt .
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip wheel -r requirements.txt -w /tmp/wheelhouse
+
+# If you use Poetry/pyproject, replace the above with:
+# COPY pyproject.toml poetry.lock ./
+# RUN pip install "poetry>=1.8.0"
+# RUN --mount=type=cache,target=/root/.cache/pip poetry export -f requirements.txt --without-hashes | tee req.txt
+# RUN --mount=type=cache,target=/root/.cache/pip pip wheel -r req.txt -w /tmp/wheelhouse
+
+############################
+# Dev image
+############################
+FROM base AS dev
+WORKDIR /workspace
+
+# Load for login shells
+#RUN printf 'alias vi=nvim\nalias vim=nvim\nexport TERM=xterm-256color\n' >> /home/${USERNAME}/.bashrc
+
+# Bring in prebuilt wheels (fast installs)
+COPY --from=wheels /tmp/wheelhouse /tmp/wheelhouse
+COPY docker/requirements.txt /workspace/docker/requirements.txt
+
+# Install Python deps from wheels first, then fall back to index
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install --no-index --find-links=/tmp/wheelhouse -r /workspace/docker/requirements.txt \
+    || pip install -r /workspace/docker/requirements.txt
+
+# (Optional) CUDA env defaults for many toolchains
+ENV TORCH_CUDA_ARCH_LIST="8.6;8.9;9.0+PTX" \
+    CUDA_CACHE_MAXSIZE=2147483647
+
+# Put your entrypoint in place
+COPY docker/entrypoint.sh /usr/local/bin/entrypoint.sh
+RUN chmod +x /usr/local/bin/entrypoint.sh
+
+# Keep the workspace owned by the non-root user
+#RUN chown -R ${USERNAME}:${USERNAME} /workspace
+#USER ${USERNAME}
+
+#ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
+#CMD ["/bin/bash"]
+
+ENTRYPOINT ["/usr/bin/tini","--","/usr/local/bin/entrypoint.sh"]
+CMD ["/bin/bash"]
paddle-1.1.5/docker/entrypoint.sh
ADDED
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+: "${USER_UID:=1000}"
+: "${USER_GID:=1000}"
+: "${USERNAME:=cli}"
+
+# Activate venv for login shells and non-interactive shells
+export VIRTUAL_ENV=/opt/venv
+export PATH="$VIRTUAL_ENV/bin:$PATH"
+
+# If user mounts code to /workspace, ensure ownership doesn’t break builds
+
+# Create group if missing
+if ! getent group "${USER_GID}" >/dev/null 2>&1; then
+  groupadd -g "${USER_GID}" "${USERNAME}" || groupadd -g "${USER_GID}" "grp${USER_GID}" || true
+fi
+
+# Create user if missing
+if ! id -u "${USER_UID}" >/dev/null 2>&1; then
+  useradd -m -u "${USER_UID}" -g "${USER_GID}" -s /bin/bash "${USERNAME}" || true
+fi
+
+# Ensure home exists
+HOME_DIR="$(getent passwd "${USER_UID}" | cut -d: -f6)"
+mkdir -p "${HOME_DIR}"
+
+# Make sure our common writable paths are owned (skip bind mounts like /workspace)
+for d in /opt/venv; do
+  if [ -d "$d" ]; then chown -R "${USER_UID}:${USER_GID}" "$d" || true; fi
+done
+
+# Export editor defaults for the user
+echo 'export EDITOR=nvim; export VISUAL=nvim' >> "${HOME_DIR}/.bashrc" || true
+chown "${USER_UID}:${USER_GID}" "${HOME_DIR}/.bashrc" || true
+
+# Configure git
+if [ -f /host/.gitconfig ] && [ ! -e "${HOME_DIR}/.gitconfig" ]; then
+  ln -s /host/.gitconfig "${HOME_DIR}/.gitconfig"
+  chown -h "${USER_UID}:${USER_GID}" "${HOME_DIR}/.gitconfig" || true
+
+  #cp /host/.gitconfig "${HOME_DIR}/.gitconfig"
+  #chown "${USER_UID}:${USER_GID}" "${HOME_DIR}/.gitconfig"
+  #chmod 600 "${HOME_DIR}/.gitconfig"
+fi
+
+# Drop privileges (use tini/gosu if installed; otherwise su-exec/ runuser)
+if command -v gosu >/dev/null 2>&1; then
+  exec gosu "${USER_UID}:${USER_GID}" "$@"
+else
+  exec runuser -u "$(id -nu "${USER_UID}")" -- "$@"
+fi
+
+# Print helpful banner
+echo "Dev container ready. Python: $(python --version)"
+echo "CUDA version: $(nvcc --version | sed -n 's/^.*release \(.*\),.*/\1/p')"
+exec "$@"
paddle-1.1.5/docker/nvim/init.lua
ADDED
@@ -0,0 +1,32 @@
+-- Basic settings
+vim.g.mapleader = " "
+vim.o.termguicolors = true
+vim.o.number = true
+vim.o.relativenumber = true
+vim.o.signcolumn = "yes"
+vim.o.updatetime = 300
+vim.o.clipboard = "unnamedplus"
+vim.o.expandtab = true
+vim.o.shiftwidth = 2
+vim.o.tabstop = 2
+
+-- Lazy.nvim bootstrap
+local lazypath = vim.fn.stdpath("data") .. "/lazy/lazy.nvim"
+if not vim.loop.fs_stat(lazypath) then
+  vim.fn.system({
+    "git","clone","--filter=blob:none",
+    "https://github.com/folke/lazy.nvim.git",
+    "--branch=stable", lazypath
+  })
+end
+vim.opt.rtp:prepend(lazypath)
+
+require("lazy").setup(require("plugins"), {
+  ui = { border = "rounded" }
+})
+
+-- Keymaps (Telescope)
+vim.keymap.set("n", "<leader>ff", "<cmd>Telescope find_files<cr>")
+vim.keymap.set("n", "<leader>fg", "<cmd>Telescope live_grep<cr>")
+vim.keymap.set("n", "<leader>fb", "<cmd>Telescope buffers<cr>")
+vim.keymap.set("n", "<leader>fh", "<cmd>Telescope help_tags<cr>")
paddle-1.1.5/docker/nvim/lua/plugins.lua
ADDED
@@ -0,0 +1,89 @@
+return {
+  -- Core UX
+  { "nvim-lua/plenary.nvim" },
+  { "nvim-telescope/telescope.nvim", cmd = "Telescope" },
+
+  -- Syntax & TS
+  {
+    "nvim-treesitter/nvim-treesitter",
+    build = ":TSUpdate",
+    config = function()
+      require("nvim-treesitter.configs").setup {
+        ensure_installed = { "lua", "vim", "python", "cpp", "cuda", "cmake", "markdown" },
+        highlight = { enable = true },
+        indent = { enable = true },
+      }
+    end
+  },
+
+  -- Statusline
+  {
+    "nvim-lualine/lualine.nvim",
+    config = function()
+      require("lualine").setup { options = { theme = "auto" } }
+    end
+  },
+
+  -- Git goodies
+  {
+    "lewis6991/gitsigns.nvim",
+    config = function() require("gitsigns").setup() end
+  },
+
+  -- LSP, Mason, completion
+  { "williamboman/mason.nvim",
+    config = function() require("mason").setup() end
+  },
+  { "williamboman/mason-lspconfig.nvim",
+    dependencies = { "neovim/nvim-lspconfig" },
+    config = function()
+      require("mason-lspconfig").setup {
+        ensure_installed = { "clangd", "pyright" }
+      }
+      local lspconfig = require("lspconfig")
+      lspconfig.clangd.setup {}
+      lspconfig.pyright.setup {}
+    end
+  },
+  {
+    "hrsh7th/nvim-cmp",
+    dependencies = {
+      "hrsh7th/cmp-nvim-lsp",
+      "hrsh7th/cmp-buffer",
+      "hrsh7th/cmp-path",
+      "L3MON4D3/LuaSnip",
+    },
+    config = function()
+      local cmp = require("cmp")
+      cmp.setup({
+        snippet = { expand = function(args) require("luasnip").lsp_expand(args.body) end },
+        mapping = cmp.mapping.preset.insert({
+          ["<C-Space>"] = cmp.mapping.complete(),
+          ["<CR>"] = cmp.mapping.confirm({ select = true }),
+          ["<C-e>"] = cmp.mapping.abort(),
+        }),
+        sources = cmp.config.sources({
+          { name = "nvim_lsp" }, { name = "path" }, { name = "buffer" }
+        }),
+      })
+      -- LSP capabilities for completion
+      local caps = require("cmp_nvim_lsp").default_capabilities()
+      require("lspconfig").clangd.setup { capabilities = caps }
+      require("lspconfig").pyright.setup { capabilities = caps }
+    end
+  },
+
+  -- Formatting/Linting (optional)
+  {
+    "nvimtools/none-ls.nvim",
+    config = function()
+      local null_ls = require("null-ls")
+      null_ls.setup({
+        sources = {
+          null_ls.builtins.formatting.clang_format,
+          null_ls.builtins.formatting.black,
+        },
+      })
+    end
+  },
+}
paddle-1.1.5/docker/scripts/git-done
ADDED
@@ -0,0 +1,96 @@
+#!/bin/sh
+# sync-main-and-delete-current.sh
+# Usage: sync-main-and-delete-current.sh [-f] [-r] [-s]
+#   -f  force delete even if branch not merged (uses -D)
+#   -r  also delete remote branch on origin
+#   -s  stash uncommitted changes before switching branches
+
+set -eu
+
+FORCE=0
+DEL_REMOTE=0
+DO_STASH=0
+
+while getopts "frs" opt; do
+  case "$opt" in
+    f) FORCE=1 ;;
+    r) DEL_REMOTE=1 ;;
+    s) DO_STASH=1 ;;
+    *) echo "Usage: $0 [-f] [-r] [-s]"; exit 2 ;;
+  esac
+done
+
+# Ensure we're in a git repo
+if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
+  echo "Not inside a git repository." >&2
+  exit 1
+fi
+
+# Determine current branch (avoid empty on detached HEAD)
+CURRENT_BRANCH="$(git symbolic-ref --short -q HEAD || true)"
+if [ -z "${CURRENT_BRANCH}" ] || [ "${CURRENT_BRANCH}" = "HEAD" ]; then
+  echo "Detached HEAD; please checkout a branch first." >&2
+  exit 1
+fi
+
+if [ "${CURRENT_BRANCH}" = "main" ]; then
+  echo "Already on 'main'; nothing to delete." >&2
+  exit 1
+fi
+
+# Optionally stash changes to avoid switch failures
+if [ "${DO_STASH}" -eq 1 ]; then
+  # Only stash if there are changes
+  if ! git diff --quiet || ! git diff --cached --quiet; then
+    echo "Stashing local changes..."
+    git stash push -u -m "auto-stash by sync-main-and-delete-current"
+  fi
+fi
+
+echo "Fetching from origin..."
+git fetch origin
+
+# Ensure local main exists; create it tracking origin/main if needed
+if ! git show-ref --verify --quiet refs/heads/main; then
+  echo "Local 'main' missing; creating tracking branch..."
+  git branch --track main origin/main >/dev/null 2>&1 || true
+fi
+
+echo "Switching to main..."
+# Use checkout for POSIX sh compatibility
+git checkout main
+
+echo "Fast-forwarding main to origin/main..."
+# Strictly fast-forward to avoid accidental merge commits
+git merge --ff-only origin/main
+
+# Decide delete mode
+DELETE_FLAG="-d"
+if [ "${FORCE}" -eq 1 ]; then
+  DELETE_FLAG="-D"
+fi
+
+# Verify merged status when not forcing
+if [ "${FORCE}" -ne 1 ]; then
+  if git merge-base --is-ancestor "${CURRENT_BRANCH}" main; then
+    :
+  else
+    echo "Branch '${CURRENT_BRANCH}' is not merged into 'main'. Use -f to force delete." >&2
+    exit 1
+  fi
+fi
+
+echo "Deleting local branch '${CURRENT_BRANCH}' (${DELETE_FLAG})..."
+git branch "${DELETE_FLAG}" "${CURRENT_BRANCH}"
+
+if [ "${DEL_REMOTE}" -eq 1 ]; then
+  # Delete remote branch only if it exists
+  if git ls-remote --exit-code --heads origin "refs/heads/${CURRENT_BRANCH}" >/dev/null 2>&1; then
+    echo "Deleting remote branch 'origin/${CURRENT_BRANCH}'..."
+    git push origin --delete "${CURRENT_BRANCH}"
+  else
+    echo "Remote branch 'origin/${CURRENT_BRANCH}' not found; skipping."
+  fi
+fi
+
+echo "Done."
paddle-1.1.5/docker/scripts/git-send
ADDED
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+MSG="${1:-wip}"
+
+# Ensure we're inside a git repo
+if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
+  echo "Not inside a git repository." >&2
+  exit 1
+fi
+
+# Resolve repo root and branch
+REPO_ROOT="$(git rev-parse --show-toplevel)"
+cd "$REPO_ROOT"
+
+# Try the normal way; fall back if detached HEAD
+BRANCH="$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "")"
+if [[ "$BRANCH" == "HEAD" || -z "$BRANCH" ]]; then
+  # Fallback: guess from upstream or default to 'main'
+  BRANCH="$(git symbolic-ref --short -q HEAD 2>/dev/null || echo main)"
+fi
+
+# Stage changes
+git add -A
+
+# Commit only if there’s something staged
+if ! git diff --cached --quiet; then
+  git commit -m "$MSG"
+else
+  echo "No staged changes; nothing to commit."
+fi
+
+# Ensure we have an upstream
+if ! git rev-parse --abbrev-ref --symbolic-full-name "@{u}" >/dev/null 2>&1; then
+  # If origin doesn't exist, error clearly
+  if ! git remote get-url origin >/dev/null 2>&1; then
+    echo "Remote 'origin' not configured. Set it first: git remote add origin <url>" >&2
+    exit 1
+  fi
+  git push -u origin "$BRANCH"
+else
+  # Rebase pull to avoid merge commits; allow autostash for local changes
+  git pull --rebase --autostash || true
+  git push
+fi
paddle-1.1.5/docker-compose.override.yaml
ADDED (empty file)
paddle-1.1.5/docker-compose.yaml
ADDED
@@ -0,0 +1,29 @@
+services:
+  dev:
+    image: paddle
+    build:
+      context: .
+      dockerfile: docker/Dockerfile
+      args:
+        CUDA_VER: "12.9.1"
+        UBUNTU_VER: "22.04"
+    environment:
+      - USERNAME=${USER}
+      - USER_UID=${USER_UID}
+      - USER_GID=${USER_GID}
+    deploy:
+      resources:
+        reservations:
+          devices:
+            # If your system does not have gpu, remove it from the []
+            - capabilities: [gpu]
+    volumes:
+      # This is your main working directory
+      - ${HOME}/projects:/projects
+      # This is where you put your large simulation data
+      - ${HOME}/data/:/data
+    working_dir: /projects
+    tty: true
+    stdin_open: true
+    ports:
+      - "8888:8888"
{paddle-1.1.3 → paddle-1.1.5}/pyproject.toml
@@ -4,13 +4,13 @@ build-backend = "hatchling.build"
 
 [project]
 name = "paddle"
-version = "1.1.3"
-description = ""
+version = "1.1.5"
+description = "Python Atmospheric Dynamics: Discovery and Learning about Exoplanets. An open-source, user-friendly python frontend of canoe"
 readme = "README.md"
 requires-python = ">=3.9"
 authors = [
-    { name = "Cheng Li", email = "chengcli@umich.edu" },
     { name = "Elijah Mullens", email = "eem85@cornell.edu" },
+    { name = "Cheng Li", email = "chengcli@umich.edu" },
 ]
 keywords = []
 classifiers = [
@@ -32,8 +32,8 @@ classifiers = [
 
 dependencies = [
     "torch>=2.7.0,<=2.7.1",
-    "kintera>=1.
-    "snapy>=0.
+    "kintera>=1.1.5",
+    "snapy>=0.8.2",
 ]
 
 [project.optional-dependencies]
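The release raises the dependency floors to kintera>=1.1.5 and snapy>=0.8.2 while torch stays pinned to the 2.7.0–2.7.1 window. A minimal sketch for checking an existing environment against these pins (the distribution names are assumed to match the pyproject declarations; this script is not part of the package):

```python
from importlib.metadata import PackageNotFoundError, version

# Lower bounds taken from the pyproject.toml diff above.
pins = {"kintera": "1.1.5", "snapy": "0.8.2", "torch": "2.7.0"}

for pkg, minimum in pins.items():
    try:
        print(f"{pkg}: installed {version(pkg)}, requires >= {minimum}")
    except PackageNotFoundError:
        print(f"{pkg}: not installed (requires >= {minimum})")
```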
paddle-1.1.5/src/paddle/nc2pt.py
ADDED
@@ -0,0 +1,39 @@
+#! /usr/bin/env python3
+
+"""
+Read variables in a NetCDF file and write them to jit saved torch tensors.
+Usage: python nc2pt.py input.nc output.pt
+"""
+
+from netCDF4 import Dataset
+import torch
+
+
+def save_tensors(tensor_map: dict[str, torch.Tensor], filename: str):
+    class TensorModule(torch.nn.Module):
+        def __init__(self, tensors):
+            super().__init__()
+            for name, tensor in tensors.items():
+                self.register_buffer(name, tensor)
+
+    module = TensorModule(tensor_map)
+    scripted = torch.jit.script(module)  # Needed for LibTorch compatibility
+    scripted.save(filename)
+
+
+fname = "sod.out0.00019.nc"
+
+nc = Dataset(fname, "r")
+out_fname = "sod.out0.00019.pt"
+
+data = {}
+for varname in nc.variables:
+    var = nc.variables[varname][:]
+    if var.ndim == 4:  # (time, x1, x2, x3) -> (time, x3, x2, x1)
+        data[varname] = torch.tensor(var).permute(0, 3, 2, 1).squeeze()
+    elif var.ndim == 3:  # (x1, x2, x3) -> (x3, x2, x1)
+        data[varname] = torch.tensor(var).permute(2, 1, 0).squeeze()
+    else:
+        data[varname] = torch.tensor(var).squeeze()
+
+save_tensors(data, out_fname)
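nc2pt.py stores each NetCDF variable as a named buffer on a scripted torch.nn.Module, so the archive can be reopened from Python (or LibTorch) without the original NetCDF file. A minimal read-back sketch (the filename and variable names depend on the input file; "temp" here is purely illustrative):

```python
import torch

# Load the TorchScript archive written by save_tensors() above.
module = torch.jit.load("sod.out0.00019.pt")

# Registered buffers are exposed via named_buffers() and as attributes.
for name, tensor in module.named_buffers():
    print(name, tuple(tensor.shape), tensor.dtype)

# e.g. temp = module.temp  # works if the source NetCDF had a "temp" variable
```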
paddle-1.1.5/src/paddle/pt2nc.py
ADDED
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+"""
+Convert a sequence of PyTorch .pt dumps into a CF‐compliant NetCDF4 file
+with dimensions (time, x, y, z) plus a 'species' axis for mole fractions.
+"""
+
+import os
+import tarfile
+import re
+import torch
+import numpy as np
+from datetime import datetime
+from netCDF4 import Dataset
+
+# ──────────────────────────────── CONFIG ────────────────────────────────
+INPUT_DIR = "."
+OUTPUT_FILE = "thermo_x_xfrac_to_conc.nc"
+# ────────────────────────────────────────────────────────────────────────
+
+# find all .pt files, skip size==0, sort by timestamp
+pt_files = []
+for fn in os.listdir(INPUT_DIR):
+    if not fn.endswith(".pt"):
+        continue
+    full = os.path.join(INPUT_DIR, fn)
+    if os.path.getsize(full) == 0:
+        continue
+    # expect names like thermo_x_xfrac_to_conc_<epoch>.pt
+    m = re.search(r"(\d+)\.pt$", fn)
+    if not m:
+        continue
+    pt_files.append((int(m.group(1)), full))
+
+pt_files.sort(key=lambda x: x[0])
+times_epoch = [ts for ts, _ in pt_files]
+
+# load the first file to infer shapes
+module = torch.jit.load(pt_files[0][1])
+data = {name: param for name, param in module.named_parameters()}
+
+temp0 = data["temp"].numpy()
+pres0 = data["pres"].numpy()
+xfrac0 = data["xfrac"].numpy()
+nx3, nx2, nx1 = temp0.shape
+nspecies = xfrac0.shape[3]
+nt = len(pt_files)
+
+# pre‐allocate arrays in (time, x1, x2, x3) order
+temp_arr = np.empty((nt, nx1, nx2, nx3), dtype=temp0.dtype)
+pres_arr = np.empty((nt, nx1, nx2, nx3), dtype=pres0.dtype)
+xfrac_arr = np.empty((nspecies, nt, nx1, nx2, nx3), dtype=xfrac0.dtype)
+
+# load all timesteps
+for i, (_, path) in enumerate(pt_files):
+    module = torch.jit.load(path)
+    data = {name: param for name, param in module.named_parameters()}
+    t_np = data["temp"].numpy()  # (z, y, x)
+    p_np = data["pres"].numpy()  # (z, y, x)
+    x_np = data["xfrac"].numpy()  # (species, z, y, x)
+
+    # reorder to (x, y, z)
+    temp_arr[i] = t_np.transpose(2, 1, 0)
+    pres_arr[i] = p_np.transpose(2, 1, 0)
+    for j in range(nspecies):
+        xfrac_arr[j, i] = x_np[:, :, :, j].transpose(2, 1, 0)
+
+# create NetCDF4 file
+ds = Dataset(OUTPUT_FILE, "w", format="NETCDF4")
+
+# dimensions
+ds.createDimension("time", nt)
+ds.createDimension("x3", nx3)
+ds.createDimension("x2", nx2)
+ds.createDimension("x1", nx1)
+
+# coordinate variables
+tvar = ds.createVariable("time", "f4", ("time",))
+tvar.units = "seconds since 1970-01-01 00:00:00 UTC"
+tvar.calendar = "gregorian"
+tvar[:] = np.array(times_epoch, dtype="f4")
+
+zvar = ds.createVariable("x1", "f4", ("x1",))
+yvar = ds.createVariable("x2", "f4", ("x2",))
+xvar = ds.createVariable("x3", "f4", ("x3",))
+
+xvar.axis = "X"
+yvar.axis = "Y"
+zvar.axis = "Z"
+
+xvar[:] = np.arange(nx3)
+yvar[:] = np.arange(nx2)
+zvar[:] = np.arange(nx1)
+
+# data variables
+temp_v = ds.createVariable("temp", "f4", ("time", "x1", "x2", "x3"), zlib=True)
+temp_v.units = "K"
+temp_v.long_name = "temperature"
+
+pres_v = ds.createVariable("pres", "f4", ("time", "x1", "x2", "x3"), zlib=True)
+pres_v.units = "Pa"
+pres_v.long_name = "pressure"
+
+xfrac_v = []
+for i in range(nspecies):
+    xfrac_v.append(
+        ds.createVariable(f"xfrac{i}", "f4", ("time", "x1", "x2", "x3"), zlib=True)
+    )
+    xfrac_v[i].units = "1"
+    xfrac_v[i].long_name = "mole fraction of each species"
+
+# write the data
+temp_v[:] = temp_arr
+pres_v[:] = pres_arr
+for i in range(nspecies):
+    xfrac_v[i][:] = xfrac_arr[i]
+
+# global metadata
+ds.title = "Debug fields for thermo_x.xfrac_to_conc"
+ds.institution = "University of Michigan"
+ds.source = "converted from .pt files"
+ds.history = f"Created {datetime.utcnow().isoformat()}Z"
+
+ds.close()
+print(f"Converted file: {OUTPUT_FILE}")
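A quick way to confirm the layout pt2nc.py produces is to reopen the result with the same netCDF4 library. A sketch (assuming the script above has already run in the current directory):

```python
from netCDF4 import Dataset

# Inspect the file written by pt2nc.py: dimensions, variables, and units.
with Dataset("thermo_x_xfrac_to_conc.nc", "r") as ds:
    for dim in ds.dimensions.values():
        print(dim.name, len(dim))           # time, x1, x2, x3 sizes
    for name, var in ds.variables.items():
        print(name, var.dimensions, getattr(var, "units", "-"))
```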
{paddle-1.1.3 → paddle-1.1.5}/src/paddle/setup_profile.py
@@ -140,7 +140,10 @@ def integrate_dry_adiabat(
 
 
 def setup_profile(
-    block: snapy.MeshBlock,
+    block: snapy.MeshBlock,
+    param: dict[str, float] = {},
+    method: str = "moist-adiabat",
+    verbose: bool = False,
 ) -> torch.Tensor:
     """
     Set up an adiabatic initial condition for the mesh block.
@@ -227,7 +230,7 @@ def setup_profile(
     dz = coord.buffer("dx1f")[ifirst]
 
     # half a grid to cell center
-    thermo_x.extrapolate_ad(temp, pres, xfrac, grav, dz / 2.0)
+    thermo_x.extrapolate_ad(temp, pres, xfrac, grav, dz / 2.0, verbose=verbose)
 
     # adiabatic extrapolation
     if method == "isothermal":
@@ -257,7 +260,7 @@
     elif method.split("-")[0] == "neutral":
         temp, pres, xfrac = integrate_neutral(thermo_x, temp, pres, xfrac, grav, dz)
     else:
-        thermo_x.extrapolate_ad(temp, pres, xfrac, grav, dz)
+        thermo_x.extrapolate_ad(temp, pres, xfrac, grav, dz, verbose=verbose)
 
     if torch.any(temp < Tmin):
         i_isothermal = i + 1
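This change makes `param`, `method`, and `verbose` explicit keyword arguments of `setup_profile`, with `verbose` threaded through to `thermo_x.extrapolate_ad()`. A usage sketch mirroring the call in tests/test_saturn_adiabat.py below (the import path, `MeshBlock` construction, and `param` contents are assumptions, not taken from this diff):

```python
from snapy import MeshBlockOptions, MeshBlock
from paddle.setup_profile import setup_profile  # hypothetical import path

# Build a mesh block from a YAML config (construction assumed).
op_block = MeshBlockOptions.from_yaml("data/saturn1d.yaml")
block = MeshBlock(op_block)

# `method` selects the branch in the diff above ("isothermal", "neutral-*",
# or the default adiabatic extrapolation).
w = setup_profile(block, param={}, method="moist-adiabat", verbose=True)
print(w.shape)
```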
{paddle-1.1.3 → paddle-1.1.5}/tests/test_saturn_adiabat.py
@@ -9,11 +9,12 @@ from snapy import (
     MeshBlock,
 )
 from kintera import ThermoX
+from pathlib import Path
 
 
 def setup_saturn_profile():
     # path = resources.files("paddle") / "data" / "saturn1d.yaml"
-    path = "data" / "saturn1d.yaml"
+    path = Path("data") / "saturn1d.yaml"
     print(f"Reading input file: {path}")
 
     op_block = MeshBlockOptions.from_yaml(str(path))
@@ -41,7 +42,7 @@ def setup_saturn_profile():
         method=method,
         max_iter=50,
         ftol=1.0e-2,
-        verbose=
+        verbose=False,
     )
 
     w = setup_profile(block, param, method=method)
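The test fix replaces a plain-string `/` join, which raises `TypeError: unsupported operand type(s) for /: 'str' and 'str'`, with a `pathlib.Path`; the `/` join operator only works once at least the left operand is a Path. A two-line illustration:

```python
from pathlib import Path

# "data" / "saturn1d.yaml" would raise TypeError; Path("data") makes `/` a join.
path = Path("data") / "saturn1d.yaml"
print(path, str(path))  # str(path) is what MeshBlockOptions.from_yaml() receives
```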
paddle-1.1.3/.gitignore
DELETED
paddle-1.1.3/README.md
DELETED
{paddle-1.1.3 → paddle-1.1.5}/.github/workflows/bump-and-tag.yaml
File without changes
{paddle-1.1.3 → paddle-1.1.5}/.github/workflows/cd.yml
File without changes
{paddle-1.1.3 → paddle-1.1.5}/.pre-commit-config.yaml
File without changes
{paddle-1.1.3 → paddle-1.1.5}/src/paddle/crm.py
File without changes
{paddle-1.1.3 → paddle-1.1.5}/src/paddle/evolve_kinetics.py
File without changes
{paddle-1.1.3 → paddle-1.1.5}/src/paddle/find_init_params.py
File without changes
{paddle-1.1.3 → paddle-1.1.5}/src/paddle/write_profile.py
File without changes
{paddle-1.1.3 → paddle-1.1.5}/tests/data/saturn1d.yaml
File without changes