FlowerPower 0.3.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of FlowerPower might be problematic. Click here for more details.
- flowerpower-0.3.1/.gitignore +13 -0
- flowerpower-0.3.1/.python-version +1 -0
- flowerpower-0.3.1/PKG-INFO +112 -0
- flowerpower-0.3.1/README.md +77 -0
- flowerpower-0.3.1/docker/Caddyfile.bak +45 -0
- flowerpower-0.3.1/docker/Dockerfile +69 -0
- flowerpower-0.3.1/docker/assets/GitHub.copilot-1.199.0.vsix +0 -0
- flowerpower-0.3.1/docker/assets/GitHub.copilot-chat-0.15.2024043005.vsix +0 -0
- flowerpower-0.3.1/docker/conf/Caddyfile +37 -0
- flowerpower-0.3.1/docker/conf/mosquitto.conf +2 -0
- flowerpower-0.3.1/docker/conf/nginx.conf +62 -0
- flowerpower-0.3.1/docker/docker-compose.yml +126 -0
- flowerpower-0.3.1/examples/hello-world3/README.md +6 -0
- flowerpower-0.3.1/examples/hello-world3/conf/pipeline.yml +35 -0
- flowerpower-0.3.1/examples/hello-world3/conf/scheduler.yml +83 -0
- flowerpower-0.3.1/examples/hello-world3/conf/tracker.yml +25 -0
- flowerpower-0.3.1/examples/hello-world3/pipelines/flow1.py +9 -0
- flowerpower-0.3.1/examples/hello_world/conf/pipeline.yml +57 -0
- flowerpower-0.3.1/examples/hello_world/conf/scheduler.yml +82 -0
- flowerpower-0.3.1/examples/hello_world/conf/tracker.yml +30 -0
- flowerpower-0.3.1/examples/hello_world/pipelines/my_flow.py +50 -0
- flowerpower-0.3.1/examples/hello_world2/README.md +6 -0
- flowerpower-0.3.1/examples/hello_world2/conf/pipeline.yml +45 -0
- flowerpower-0.3.1/examples/hello_world2/conf/scheduler.yml +85 -0
- flowerpower-0.3.1/examples/hello_world2/conf/tracker.yml +29 -0
- flowerpower-0.3.1/examples/hello_world2/pipelines/my_flow.py +11 -0
- flowerpower-0.3.1/image.png +0 -0
- flowerpower-0.3.1/pyproject.toml +61 -0
- flowerpower-0.3.1/requirements-dev.lock +309 -0
- flowerpower-0.3.1/requirements.lock +277 -0
- flowerpower-0.3.1/src/flowerpower/__init__.py +2 -0
- flowerpower-0.3.1/src/flowerpower/catalog.py +27 -0
- flowerpower-0.3.1/src/flowerpower/cfg.py +276 -0
- flowerpower-0.3.1/src/flowerpower/cli.py +223 -0
- flowerpower-0.3.1/src/flowerpower/constants.py +147 -0
- flowerpower-0.3.1/src/flowerpower/event_handler.py +23 -0
- flowerpower-0.3.1/src/flowerpower/helpers.py +57 -0
- flowerpower-0.3.1/src/flowerpower/main.py +46 -0
- flowerpower-0.3.1/src/flowerpower/pipeline.py +622 -0
- flowerpower-0.3.1/src/flowerpower/scheduler.py +302 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
3.12.2
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: FlowerPower
|
|
3
|
+
Version: 0.3.1
|
|
4
|
+
Summary: A simple workflow framework. Hamilton + APScheduler = FlowerPower
|
|
5
|
+
Author-email: "Volker L." <ligno.blades@gmail.com>
|
|
6
|
+
Keywords: apscheduler,dask,hamilton,pipeline,ray,scheduler,workflow
|
|
7
|
+
Requires-Python: >=3.11
|
|
8
|
+
Requires-Dist: fsspec>=2024.5.0
|
|
9
|
+
Requires-Dist: munch>=4.0.0
|
|
10
|
+
Requires-Dist: python-dotenv>=1.0.1
|
|
11
|
+
Requires-Dist: pyyaml>=6.0.1
|
|
12
|
+
Requires-Dist: rich>=13.7.1
|
|
13
|
+
Requires-Dist: sf-hamilton-sdk>=0.5.2
|
|
14
|
+
Requires-Dist: sf-hamilton[visualization]>=1.69.0
|
|
15
|
+
Requires-Dist: typer>=0.12.3
|
|
16
|
+
Provides-Extra: dask
|
|
17
|
+
Requires-Dist: dask[complete]>=2024.7.1; extra == 'dask'
|
|
18
|
+
Provides-Extra: mongodb
|
|
19
|
+
Requires-Dist: pymongo>=4.7.2; extra == 'mongodb'
|
|
20
|
+
Provides-Extra: mqtt
|
|
21
|
+
Requires-Dist: paho-mqtt>=2.1.0; extra == 'mqtt'
|
|
22
|
+
Provides-Extra: ray
|
|
23
|
+
Requires-Dist: ray>=2.34.0; extra == 'ray'
|
|
24
|
+
Provides-Extra: redis
|
|
25
|
+
Requires-Dist: redis>=5.0.4; extra == 'redis'
|
|
26
|
+
Provides-Extra: scheduler
|
|
27
|
+
Requires-Dist: aiosqlite>=0.20.0; extra == 'scheduler'
|
|
28
|
+
Requires-Dist: apscheduler>=4.0.0a5; extra == 'scheduler'
|
|
29
|
+
Requires-Dist: asyncpg>=0.29.0; extra == 'scheduler'
|
|
30
|
+
Requires-Dist: greenlet>=3.0.3; extra == 'scheduler'
|
|
31
|
+
Requires-Dist: sqlalchemy>=2.0.30; extra == 'scheduler'
|
|
32
|
+
Provides-Extra: ui
|
|
33
|
+
Requires-Dist: sf-hamilton-ui>=0.0.11; extra == 'ui'
|
|
34
|
+
Description-Content-Type: text/markdown
|
|
35
|
+
|
|
36
|
+
# FlowerPower
|
|
37
|
+
|
|
38
|
+

|
|
39
|
+
|
|
40
|
+
FlowerPower is a simple workflow framework based on the fantastic [Hamilton](https://github.com/DAGWorks-Inc/hamilton) and [Advanced Python Scheduler - APScheduler](https://github.com/agronholm/apscheduler)
|
|
41
|
+
|
|
42
|
+
## Installation
|
|
43
|
+
|
|
44
|
+
```shell
|
|
45
|
+
pip install "flowerpower" git+https://github.com/legout/flowerpower
|
|
46
|
+
# with scheduler
|
|
47
|
+
pip install "flowerpower[scheduler]" git+https://github.com/legout/flowerpower
|
|
48
|
+
# with mqtt event broker
|
|
49
|
+
pip install "flowerpower[scheduler,mqtt]" git+https://github.com/legout/flowerpower
|
|
50
|
+
# with redis event broker
|
|
51
|
+
pip install "flowerpower[scheduler,redis]" git+https://github.com/legout/flowerpower
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
## Usage
|
|
55
|
+
|
|
56
|
+
### 0) Optional: Dev Services
|
|
57
|
+
```shell
|
|
58
|
+
curl -O https://raw.githubusercontent.com/legout/flowerpower/main/docker/Dockerfile
|
|
59
|
+
curl -O https://raw.githubusercontent.com/legout/flowerpower/main/docker/docker-compose.yml
|
|
60
|
+
|
|
61
|
+
# Hamilton UI, which allows you to track and visualize your pipelines
|
|
62
|
+
docker-compose up hamilton_ui -d
|
|
63
|
+
# jupyterlab and code-server
|
|
64
|
+
docker-compose up jupytercode -d
|
|
65
|
+
# s3 compatible object storage
|
|
66
|
+
docker-compose up minio -d
|
|
67
|
+
# mosquitto mqtt broker if you want to use mqtt as the event broker
|
|
68
|
+
docker-compose up mqtt -d
|
|
69
|
+
# valkey (OSS redis) if you want to use redis as the event broker
|
|
70
|
+
docker-compose up redis -d
|
|
71
|
+
# mongodb if you want to use mongodb as the data store
|
|
72
|
+
docker-compose up mongodb -d
|
|
73
|
+
# postgres db if you want to use postgres as data store and/or event broker. This db is also used for hamilton ui
|
|
74
|
+
docker-compose up postgres -d
|
|
75
|
+
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
### a) Initialize a new flowerpower project
|
|
80
|
+
```shell
|
|
81
|
+
mkdir new-project
|
|
82
|
+
cd new-project
|
|
83
|
+
python -m flowerpower.cli init
|
|
84
|
+
```
|
|
85
|
+
This adds basic config files `conf/pipelines.yml`, `conf/scheduler.yml` and `conf/tracker.yml`
|
|
86
|
+
|
|
87
|
+
### b) Add a new pipeline
|
|
88
|
+
```shell
|
|
89
|
+
python -m flowerpower.cli add-pipeline my_flow
|
|
90
|
+
```
|
|
91
|
+
A new file `pipelines/my_flow.py` is created and the relevant entries are added to the config files.
|
|
92
|
+
|
|
93
|
+
### c) Setup the new pipeline
|
|
94
|
+
Edit `pipelines/my_flow.py` and add the pipeline functions.
|
|
95
|
+
|
|
96
|
+
FlowerPower uses [Hamilton](https://github.com/DAGWorks-Inc/hamilton) that converts your pipeline functions into nodes and then creates a [Directed Acyclic Graph (DAG)](https://en.wikipedia.org/wiki/Directed_acyclic_graph).
|
|
97
|
+
|
|
98
|
+
It is therefore mandatory to write your pipeline files according to the Hamilton paradigm. You can read more about this in the Hamilton documentation chapter [Function, Nodes and DataFlow](https://hamilton.dagworks.io/en/latest/concepts/node/)
|
|
99
|
+
|
|
100
|
+
Optionally edit the config files `conf/pipelines.yml`, `conf/scheduler.yml` and `conf/tracker.yml`
|
|
101
|
+
|
|
102
|
+
### d) Run or Schedule the new pipeline
|
|
103
|
+
```shell
|
|
104
|
+
python -m flowerpower.cli run-pipeline my_flow
|
|
105
|
+
# or schedule with a 30 seconds interval
|
|
106
|
+
python -m flowerpower.cli schedule-pipeline my_flow interval --interval-params seconds=30 --auto-start
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
# FlowerPower
|
|
2
|
+
|
|
3
|
+

|
|
4
|
+
|
|
5
|
+
FlowerPower is a simple workflow framework based on the fantastic [Hamilton](https://github.com/DAGWorks-Inc/hamilton) and [Advanced Python Scheduler - APScheduler](https://github.com/agronholm/apscheduler)
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```shell
|
|
10
|
+
pip install "flowerpower" git+https://github.com/legout/flowerpower
|
|
11
|
+
# with scheduler
|
|
12
|
+
pip install "flowerpower[scheduler]" git+https://github.com/legout/flowerpower
|
|
13
|
+
# with mqtt event broker
|
|
14
|
+
pip install "flowerpower[scheduler,mqtt]" git+https://github.com/legout/flowerpower
|
|
15
|
+
# with redis event broker
|
|
16
|
+
pip install "flowerpower[scheduler,redis]" git+https://github.com/legout/flowerpower
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## Usage
|
|
20
|
+
|
|
21
|
+
### 0) Optional: Dev Services
|
|
22
|
+
```shell
|
|
23
|
+
curl -O https://raw.githubusercontent.com/legout/flowerpower/main/docker/Dockerfile
|
|
24
|
+
curl -O https://raw.githubusercontent.com/legout/flowerpower/main/docker/docker-compose.yml
|
|
25
|
+
|
|
26
|
+
# Hamilton UI, which allows you to track and visualize your pipelines
|
|
27
|
+
docker-compose up hamilton_ui -d
|
|
28
|
+
# jupyterlab and code-server
|
|
29
|
+
docker-compose up jupytercode -d
|
|
30
|
+
# s3 compatible object storage
|
|
31
|
+
docker-compose up minio -d
|
|
32
|
+
# mosquitto mqtt broker if you want to use mqtt as the event broker
|
|
33
|
+
docker-compose up mqtt -d
|
|
34
|
+
# valkey (OSS redis) if you want to use redis as the event broker
|
|
35
|
+
docker-compose up redis -d
|
|
36
|
+
# mongodb if you want to use mongodb as the data store
|
|
37
|
+
docker-compose up mongodb -d
|
|
38
|
+
# postgres db if you want to use postgres as data store and/or event broker. This db is also used for hamilton ui
|
|
39
|
+
docker-compose up postgres -d
|
|
40
|
+
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
### a) Initialize a new flowerpower project
|
|
45
|
+
```shell
|
|
46
|
+
mkdir new-project
|
|
47
|
+
cd new-project
|
|
48
|
+
python -m flowerpower.cli init
|
|
49
|
+
```
|
|
50
|
+
This adds basic config files `conf/pipelines.yml`, `conf/scheduler.yml` and `conf/tracker.yml`
|
|
51
|
+
|
|
52
|
+
### b) Add a new pipeline
|
|
53
|
+
```shell
|
|
54
|
+
python -m flowerpower.cli add-pipeline my_flow
|
|
55
|
+
```
|
|
56
|
+
A new file `pipelines/my_flow.py` is created and the relevant entries are added to the config files.
|
|
57
|
+
|
|
58
|
+
### c) Setup the new pipeline
|
|
59
|
+
Edit `pipelines/my_flow.py` and add the pipeline functions.
|
|
60
|
+
|
|
61
|
+
FlowerPower uses [Hamilton](https://github.com/DAGWorks-Inc/hamilton) that converts your pipeline functions into nodes and then creates a [Directed Acyclic Graph (DAG)](https://en.wikipedia.org/wiki/Directed_acyclic_graph).
|
|
62
|
+
|
|
63
|
+
It is therefore mandatory to write your pipeline files according to the Hamilton paradigm. You can read more about this in the Hamilton documentation chapter [Function, Nodes and DataFlow](https://hamilton.dagworks.io/en/latest/concepts/node/)
|
|
64
|
+
|
|
65
|
+
Optionally edit the config files `conf/pipelines.yml`, `conf/scheduler.yml` and `conf/tracker.yml`
|
|
66
|
+
|
|
67
|
+
### d) Run or Schedule the new pipeline
|
|
68
|
+
```shell
|
|
69
|
+
python -m flowerpower.cli run-pipeline my_flow
|
|
70
|
+
# or schedule with a 30 seconds interval
|
|
71
|
+
python -m flowerpower.cli schedule-pipeline my_flow interval --interval-params seconds=30 --auto-start
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
{
|
|
2
|
+
http_port 80
|
|
3
|
+
}
|
|
4
|
+
|
|
5
|
+
:80 {
|
|
6
|
+
route /jupytercode/* {
|
|
7
|
+
uri strip_prefix /jupytercode
|
|
8
|
+
reverse_proxy jupytercode:8888
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
route /minio/* {
|
|
12
|
+
uri strip_prefix /minio
|
|
13
|
+
reverse_proxy minio:9001
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
route /mqtt/* {
|
|
17
|
+
uri strip_prefix /mqtt
|
|
18
|
+
reverse_proxy mqtt:1883
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
route /redis/* {
|
|
22
|
+
uri strip_prefix /redis
|
|
23
|
+
reverse_proxy redis:6379
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
route /mongodb/* {
|
|
27
|
+
uri strip_prefix /mongodb
|
|
28
|
+
reverse_proxy mongodb:27017
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
route /nodered/* {
|
|
32
|
+
uri strip_prefix /nodered
|
|
33
|
+
reverse_proxy nodered:1880
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
route /postgres/* {
|
|
37
|
+
uri strip_prefix /postgres
|
|
38
|
+
reverse_proxy postgres:5432
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
route /hamilton_ui/* {
|
|
42
|
+
uri strip_prefix /hamilton_ui
|
|
43
|
+
reverse_proxy hamilton_ui:8242
|
|
44
|
+
}
|
|
45
|
+
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
FROM quay.io/jupyter/base-notebook
|
|
2
|
+
|
|
3
|
+
ARG CODE_VERSION=4.89.1
|
|
4
|
+
ARG PLATFORM=arm64
|
|
5
|
+
|
|
6
|
+
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
|
7
|
+
|
|
8
|
+
USER root
|
|
9
|
+
|
|
10
|
+
# Install all OS dependencies for a fully functional Server
|
|
11
|
+
RUN apt-get update --yes && \
|
|
12
|
+
apt-get install --yes --no-install-recommends \
|
|
13
|
+
# Common useful utilities
|
|
14
|
+
curl \
|
|
15
|
+
git \
|
|
16
|
+
nano-tiny \
|
|
17
|
+
tzdata \
|
|
18
|
+
unzip \
|
|
19
|
+
# git-over-ssh
|
|
20
|
+
openssh-client \
|
|
21
|
+
# Enable clipboard on Linux host systems
|
|
22
|
+
xclip && \
|
|
23
|
+
apt-get clean && rm -rf /var/lib/apt/lists/*
|
|
24
|
+
|
|
25
|
+
# Create alternative for nano -> nano-tiny
|
|
26
|
+
RUN update-alternatives --install /usr/bin/nano nano /bin/nano-tiny 10
|
|
27
|
+
|
|
28
|
+
# Install code-server
|
|
29
|
+
|
|
30
|
+
RUN curl -fOL https://github.com/coder/code-server/releases/download/v$CODE_VERSION/code-server_${CODE_VERSION}_${PLATFORM}.deb \
|
|
31
|
+
&& dpkg -i code-server_${CODE_VERSION}_${PLATFORM}.deb \
|
|
32
|
+
&& rm -f code-server_${CODE_VERSION}_${PLATFORM}.deb
|
|
33
|
+
|
|
34
|
+
# Switch back to jovyan to avoid accidental container runs as root
|
|
35
|
+
USER ${NB_UID}
|
|
36
|
+
|
|
37
|
+
#conda install jupyterlab -y && \
|
|
38
|
+
RUN conda install jupyter-server-proxy jupyter-vscode-proxy -y && \
|
|
39
|
+
conda clean --tarballs --index-cache --packages --yes && \
|
|
40
|
+
find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
|
|
41
|
+
find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
|
|
42
|
+
conda clean --force-pkgs-dirs --all --yes
|
|
43
|
+
# && \
|
|
44
|
+
#sudo echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> /etc/skel/.bashrc && \
|
|
45
|
+
#echo ". ${CONDA_DIR}/etc/profile.d/conda.sh && conda activate base" >> ~/.bashrc
|
|
46
|
+
|
|
47
|
+
COPY assets/GitHub.copilot-1.199.0.vsix GitHub.copilot-1.199.0.vsix
|
|
48
|
+
COPY assets/GitHub.copilot-chat-0.15.2024043005.vsix GitHub.copilot-chat-0.15.2024043005.vsix
|
|
49
|
+
|
|
50
|
+
RUN code-server --install-extension ms-toolsai.jupyter && \
|
|
51
|
+
code-server --install-extension ms-python.python && \
|
|
52
|
+
code-server --install-extension mtxr.sqltools && \
|
|
53
|
+
code-server --install-extension bungcip.better-toml && \
|
|
54
|
+
code-server --install-extension ms-python.black-formatter && \
|
|
55
|
+
code-server --install-extension ms-python.isort && \
|
|
56
|
+
code-server --install-extension redhat.vscode-yaml && \
|
|
57
|
+
code-server --install-extension charliermarsh.ruff && \
|
|
58
|
+
code-server --install-extension esbenp.prettier-vscode && \
|
|
59
|
+
code-server --install-extension mtxr.sqltools-driver-sqlite && \
|
|
60
|
+
code-server --install-extension mtxr.sqltools-driver-pg && \
|
|
61
|
+
code-server --install-extension GitHub.copilot-1.199.0.vsix && \
|
|
62
|
+
code-server --install-extension GitHub.copilot-chat-0.15.2024043005.vsix && \
|
|
63
|
+
rm -r GitHub.copilot-*.vsix && \
|
|
64
|
+
rm -r /home/jovyan/.local/share/code-server/CachedExtensionVSIXs/
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
#EXPOSE 8888
|
|
68
|
+
#EXPOSE 3000
|
|
69
|
+
#ENTRYPOINT ["jupyter", "lab", "--ip=0.0.0.0", "--allow-root", "--no-browser", "--NotebookApp.token=''"]
|
|
Binary file
|
|
Binary file
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
jupytercode.localhost {
|
|
4
|
+
reverse_proxy jupytercode:8888
|
|
5
|
+
}
|
|
6
|
+
|
|
7
|
+
minio.localhost {
|
|
8
|
+
reverse_proxy minio:9001
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
minio_api.localhost {
|
|
12
|
+
reverse_proxy minio:9000
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
mqtt.localhost {
|
|
16
|
+
reverse_proxy mqtt:1883
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
redis.localhost {
|
|
20
|
+
reverse_proxy redis:6379
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
mongodb.localhost {
|
|
24
|
+
reverse_proxy mongodb:27017
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
nodered.localhost {
|
|
28
|
+
reverse_proxy nodered:1880
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
hamilton_ui.localhost {
|
|
32
|
+
reverse_proxy hamilton_ui:8242
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
postgres.localhost {
|
|
36
|
+
reverse_proxy postgres:5432
|
|
37
|
+
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
events { worker_connections 1024; }
|
|
2
|
+
|
|
3
|
+
http {
|
|
4
|
+
server {
|
|
5
|
+
listen 80;
|
|
6
|
+
|
|
7
|
+
location /jupytercode/ {
|
|
8
|
+
proxy_pass http://jupytercode:8888/;
|
|
9
|
+
proxy_set_header Host $host;
|
|
10
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
location /minio/ {
|
|
14
|
+
proxy_pass http://minio:9001/;
|
|
15
|
+
proxy_set_header Host $host;
|
|
16
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
location /mqtt/ {
|
|
20
|
+
proxy_pass http://mqtt:1883/;
|
|
21
|
+
proxy_set_header Host $host;
|
|
22
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
location /redis/ {
|
|
27
|
+
proxy_pass http://redis:6379/;
|
|
28
|
+
proxy_set_header Host $host;
|
|
29
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
location /mongodb/ {
|
|
33
|
+
proxy_pass http://mongodb:27017/;
|
|
34
|
+
proxy_set_header Host $host;
|
|
35
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
location /nodered/ {
|
|
39
|
+
proxy_pass http://nodered:1880/;
|
|
40
|
+
proxy_set_header Host $host;
|
|
41
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
location /postgres/ {
|
|
45
|
+
proxy_pass http://postgres:5432/;
|
|
46
|
+
proxy_set_header Host $host;
|
|
47
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
location /backend/ {
|
|
51
|
+
proxy_pass http://backend:8241/;
|
|
52
|
+
proxy_set_header Host $host;
|
|
53
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
location /hamilton_ui/ {
|
|
57
|
+
proxy_pass http://hamilton_ui:8242/;
|
|
58
|
+
proxy_set_header Host $host;
|
|
59
|
+
proxy_set_header X-Real-IP $remote_addr;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
version: "2"
|
|
2
|
+
services:
|
|
3
|
+
# jupytercode:
|
|
4
|
+
# build:
|
|
5
|
+
# context: .
|
|
6
|
+
# args:
|
|
7
|
+
# - CODE_VERSION=4.89.1
|
|
8
|
+
# - PLATFORM=arm64
|
|
9
|
+
# image: jupytercode:latest
|
|
10
|
+
# user: root
|
|
11
|
+
# environment:
|
|
12
|
+
# - GRANT_SUDO=yes
|
|
13
|
+
# ports:
|
|
14
|
+
# - 8888:8888
|
|
15
|
+
# volumes:
|
|
16
|
+
# - jupytercode:/home/jovyan
|
|
17
|
+
#
|
|
18
|
+
minio:
|
|
19
|
+
image: minio/minio
|
|
20
|
+
ports:
|
|
21
|
+
- 9000:9000
|
|
22
|
+
- 9001:9001
|
|
23
|
+
environment:
|
|
24
|
+
MINIO_ACCESS_KEY: minio
|
|
25
|
+
MINIO_SECRET_KEY: minio1234
|
|
26
|
+
volumes:
|
|
27
|
+
- minio_data:/data
|
|
28
|
+
command: server /data --console-address ":9001"
|
|
29
|
+
|
|
30
|
+
mqtt:
|
|
31
|
+
image: eclipse-mosquitto
|
|
32
|
+
volumes:
|
|
33
|
+
- ./conf/mosquitto.conf:/mosquitto/config/mosquitto.conf
|
|
34
|
+
ports:
|
|
35
|
+
- 1883:1883
|
|
36
|
+
command: mosquitto -c /mosquitto/config/mosquitto.conf
|
|
37
|
+
|
|
38
|
+
redis:
|
|
39
|
+
image: valkey/valkey
|
|
40
|
+
ports:
|
|
41
|
+
- 6379:6379
|
|
42
|
+
|
|
43
|
+
mongodb:
|
|
44
|
+
image: mongo
|
|
45
|
+
ports:
|
|
46
|
+
- 27017:27017
|
|
47
|
+
|
|
48
|
+
# nodered:
|
|
49
|
+
# image: nodered/node-red
|
|
50
|
+
# volumes:
|
|
51
|
+
# - nodered:/data
|
|
52
|
+
# ports:
|
|
53
|
+
# - 1880:1880
|
|
54
|
+
#
|
|
55
|
+
postgres:
|
|
56
|
+
image: postgres:12
|
|
57
|
+
volumes:
|
|
58
|
+
- postgres_data:/var/lib/postgresql/data
|
|
59
|
+
environment:
|
|
60
|
+
- POSTGRES_DB=flowerpower
|
|
61
|
+
- POSTGRES_USER=edge
|
|
62
|
+
- POSTGRES_PASSWORD=edge
|
|
63
|
+
ports:
|
|
64
|
+
- 5432:5432
|
|
65
|
+
restart: always
|
|
66
|
+
|
|
67
|
+
backend:
|
|
68
|
+
image: dagworks/ui-backend:latest
|
|
69
|
+
entrypoint: ["/bin/bash", "-c", "cd /code/server && ls && ./entrypoint.sh"]
|
|
70
|
+
ports:
|
|
71
|
+
- 8241:8241
|
|
72
|
+
environment:
|
|
73
|
+
- DB_HOST=postgres
|
|
74
|
+
- DB_PORT=5432
|
|
75
|
+
- DB_NAME=flowerpower
|
|
76
|
+
- DB_USER=edge
|
|
77
|
+
- DB_PASSWORD=edge # TODO: Change this to a secret
|
|
78
|
+
- HAMILTON_BLOB_STORE=local
|
|
79
|
+
- HAMILTON_ENV=local # local env
|
|
80
|
+
- HAMILTON_LOCAL_BLOB_DIR=/data/blobs # TODO -- set this up to be a better one
|
|
81
|
+
- DJANGO_SECRET_KEY=do_not_use_in_production
|
|
82
|
+
- HAMILTON_TELEMETRY_ENABLED=false #${HAMILTON_TELEMETRY_ENABLED-true}
|
|
83
|
+
- HAMILTON_AUTH_MODE=permissive
|
|
84
|
+
depends_on:
|
|
85
|
+
- postgres
|
|
86
|
+
volumes:
|
|
87
|
+
- backend_data:/data/
|
|
88
|
+
|
|
89
|
+
hamilton_ui:
|
|
90
|
+
image: dagworks/ui-frontend:latest
|
|
91
|
+
ports:
|
|
92
|
+
- 8242:8242
|
|
93
|
+
environment:
|
|
94
|
+
- NODE_ENV=development
|
|
95
|
+
- REACT_APP_AUTH_MODE=local
|
|
96
|
+
- REACT_APP_USE_POSTHOG=false
|
|
97
|
+
depends_on:
|
|
98
|
+
- backend
|
|
99
|
+
|
|
100
|
+
# nginx:
|
|
101
|
+
# image: nginx:latest
|
|
102
|
+
# volumes:
|
|
103
|
+
# - ./nginx.conf:/etc/nginx/nginx.conf
|
|
104
|
+
# ports:
|
|
105
|
+
# - 80:80
|
|
106
|
+
# - 443:443
|
|
107
|
+
# depends_on:
|
|
108
|
+
# - hamilton_ui
|
|
109
|
+
|
|
110
|
+
# caddy:
|
|
111
|
+
# image: caddy:latest
|
|
112
|
+
# volumes:
|
|
113
|
+
# - .conf/Caddyfile:/etc/caddy/Caddyfile
|
|
114
|
+
# ports:
|
|
115
|
+
# - 80:80
|
|
116
|
+
# - 443:443
|
|
117
|
+
# depends_on:
|
|
118
|
+
# - hamilton_ui
|
|
119
|
+
|
|
120
|
+
volumes:
|
|
121
|
+
jupytercode:
|
|
122
|
+
minio_data:
|
|
123
|
+
minio_config:
|
|
124
|
+
nodered:
|
|
125
|
+
postgres_data:
|
|
126
|
+
backend_data:
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# ---------------- Pipelines Configuration ----------------- #
|
|
2
|
+
|
|
3
|
+
# ------------------------ Example ------------------------- #
|
|
4
|
+
#
|
|
5
|
+
# path: pipelines
|
|
6
|
+
#
|
|
7
|
+
# ## pipeline parameter
|
|
8
|
+
#
|
|
9
|
+
# params:
|
|
10
|
+
# flow1: ## pipeline name
|
|
11
|
+
# step1: ## step name
|
|
12
|
+
# param1_1: 123 ## step parameters
|
|
13
|
+
# param1_2: abc
|
|
14
|
+
# step2:
|
|
15
|
+
# param2_1: true
|
|
16
|
+
#
|
|
17
|
+
# ## run configuration
|
|
18
|
+
#
|
|
19
|
+
# run:
|
|
20
|
+
# prod: # environment name
|
|
21
|
+
# flow1:
|
|
22
|
+
# inputs: ## input parameters
|
|
23
|
+
# final_vars: [step2] ## final output vars
|
|
24
|
+
# with_tracker: true ## whether to track the run
|
|
25
|
+
#
|
|
26
|
+
# dev:
|
|
27
|
+
# flow1:
|
|
28
|
+
# inputs:
|
|
29
|
+
# final_vars: [step2]
|
|
30
|
+
# with_tracker: false
|
|
31
|
+
#
|
|
32
|
+
# ---------------------------------------------------------- #
|
|
33
|
+
|
|
34
|
+
run:
|
|
35
|
+
params:
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
# ---------------- Scheduler Configuration ----------------- #
|
|
2
|
+
|
|
3
|
+
# ------------------------ Example ------------------------- #
|
|
4
|
+
#
|
|
5
|
+
# ## data store configuration
|
|
6
|
+
#
|
|
7
|
+
# ### postgres
|
|
8
|
+
# data_store:
|
|
9
|
+
# type: sqlalchemy
|
|
10
|
+
# url: postgresql+asyncpg://edge:edge@postgres/flowerpower
|
|
11
|
+
#
|
|
12
|
+
# ### sqlite
|
|
13
|
+
# data_store:
|
|
14
|
+
# type: sqlalchemy
|
|
15
|
+
# url: sqlite+aiosqlite:///flowerpower.db
|
|
16
|
+
#
|
|
17
|
+
# ### memory
|
|
18
|
+
# data_store:
|
|
19
|
+
# type: memory
|
|
20
|
+
#
|
|
21
|
+
# ### mongodb
|
|
22
|
+
# data_store:
|
|
23
|
+
# type: mongodb
|
|
24
|
+
# url: mongodb://localhost:27017/scheduler
|
|
25
|
+
#
|
|
26
|
+
# ## event broker configuration
|
|
27
|
+
#
|
|
28
|
+
# ### postgres
|
|
29
|
+
# event_broker:
|
|
30
|
+
# type: asyncpg
|
|
31
|
+
# url: postgresql+asyncpg://edge:edge@postgres/flowerpower
|
|
32
|
+
#
|
|
33
|
+
# ### mqtt
|
|
34
|
+
# event_broker:
|
|
35
|
+
# type: mqtt
|
|
36
|
+
# host: localhost
|
|
37
|
+
# port: 1883
|
|
38
|
+
# username: edge
|
|
39
|
+
# password: edge
|
|
40
|
+
|
|
41
|
+
# ### redis
|
|
42
|
+
# event_broker:
|
|
43
|
+
# type: redis
|
|
44
|
+
# host: localhost
|
|
45
|
+
# port: 6379
|
|
46
|
+
|
|
47
|
+
# ### local
|
|
48
|
+
# event_broker:
|
|
49
|
+
# type: local # or memory
|
|
50
|
+
#
|
|
51
|
+
# ## pipeline schedule configuration
|
|
52
|
+
#
|
|
53
|
+
# pipeline:
|
|
54
|
+
# my_flow:
|
|
55
|
+
# type: cron ## options: interval, calendarinterval, date
|
|
56
|
+
# start_time:
|
|
57
|
+
# end_time:
|
|
58
|
+
# ## optional cron arguments
|
|
59
|
+
# crontab: * * * * *
|
|
60
|
+
# year:
|
|
61
|
+
# month:
|
|
62
|
+
# week:
|
|
63
|
+
# day:
|
|
64
|
+
# days_of_week:
|
|
65
|
+
# hour:
|
|
66
|
+
# minute:
|
|
67
|
+
# second:
|
|
68
|
+
# timezone:
|
|
69
|
+
# ## optional interval arguments
|
|
70
|
+
# weeks:
|
|
71
|
+
# days:
|
|
72
|
+
# hours:
|
|
73
|
+
# minutes:
|
|
74
|
+
# seconds:
|
|
75
|
+
# microseconds:
|
|
76
|
+
#
|
|
77
|
+
# ---------------------------------------------------------- #
|
|
78
|
+
|
|
79
|
+
data_path:
|
|
80
|
+
type: memory
|
|
81
|
+
event_broker:
|
|
82
|
+
type: local
|
|
83
|
+
pipeline:
|