FlowerPower 0.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of FlowerPower might be problematic. Click here for more details.

Files changed (74) hide show
  1. flowerpower-0.9.0/.github/workflows/publish.yml +32 -0
  2. flowerpower-0.9.0/.gitignore +21 -0
  3. flowerpower-0.9.0/.python-version +1 -0
  4. flowerpower-0.9.0/PKG-INFO +549 -0
  5. flowerpower-0.9.0/README.md +497 -0
  6. flowerpower-0.9.0/image.png +0 -0
  7. flowerpower-0.9.0/pyproject.toml +102 -0
  8. flowerpower-0.9.0/requirements-dev.lock +517 -0
  9. flowerpower-0.9.0/requirements.lock +307 -0
  10. flowerpower-0.9.0/src/flowerpower/__init__.py +3 -0
  11. flowerpower-0.9.0/src/flowerpower/_catalog.py +27 -0
  12. flowerpower-0.9.0/src/flowerpower/_cfg.py +0 -0
  13. flowerpower-0.9.0/src/flowerpower/_cli.py +618 -0
  14. flowerpower-0.9.0/src/flowerpower/cfg/__init__.py +204 -0
  15. flowerpower-0.9.0/src/flowerpower/cfg/base.py +39 -0
  16. flowerpower-0.9.0/src/flowerpower/cfg/pipeline/params.py +0 -0
  17. flowerpower-0.9.0/src/flowerpower/cfg/pipeline/run.py +17 -0
  18. flowerpower-0.9.0/src/flowerpower/cfg/pipeline/schedule.py +84 -0
  19. flowerpower-0.9.0/src/flowerpower/cfg/pipeline/tracker.py +14 -0
  20. flowerpower-0.9.0/src/flowerpower/cfg/project/open_telemetry.py +8 -0
  21. flowerpower-0.9.0/src/flowerpower/cfg/project/tracker.py +10 -0
  22. flowerpower-0.9.0/src/flowerpower/cfg/project/worker.py +19 -0
  23. flowerpower-0.9.0/src/flowerpower/cli/__init__.py +89 -0
  24. flowerpower-0.9.0/src/flowerpower/cli/cfg.py +44 -0
  25. flowerpower-0.9.0/src/flowerpower/cli/mqtt.py +18 -0
  26. flowerpower-0.9.0/src/flowerpower/cli/pipeline.py +529 -0
  27. flowerpower-0.9.0/src/flowerpower/cli/scheduler.py +308 -0
  28. flowerpower-0.9.0/src/flowerpower/cli/utils.py +94 -0
  29. flowerpower-0.9.0/src/flowerpower/event_handler.py +23 -0
  30. flowerpower-0.9.0/src/flowerpower/flowerpower.py +102 -0
  31. flowerpower-0.9.0/src/flowerpower/http/api/cfg.py +48 -0
  32. flowerpower-0.9.0/src/flowerpower/http/api/pipeline.py +407 -0
  33. flowerpower-0.9.0/src/flowerpower/http/api/scheduler.py +77 -0
  34. flowerpower-0.9.0/src/flowerpower/http/main.py +70 -0
  35. flowerpower-0.9.0/src/flowerpower/http/models/pipeline.py +50 -0
  36. flowerpower-0.9.0/src/flowerpower/http/models/scheduler.py +1 -0
  37. flowerpower-0.9.0/src/flowerpower/http/setup.py +39 -0
  38. flowerpower-0.9.0/src/flowerpower/http/ui.py +0 -0
  39. flowerpower-0.9.0/src/flowerpower/http/utils.py +18 -0
  40. flowerpower-0.9.0/src/flowerpower/io/base.py +746 -0
  41. flowerpower-0.9.0/src/flowerpower/io/loader/csv.py +37 -0
  42. flowerpower-0.9.0/src/flowerpower/io/loader/deltatable.py +78 -0
  43. flowerpower-0.9.0/src/flowerpower/io/loader/duckdb.py +333 -0
  44. flowerpower-0.9.0/src/flowerpower/io/loader/json.py +37 -0
  45. flowerpower-0.9.0/src/flowerpower/io/loader/mqtt.py +98 -0
  46. flowerpower-0.9.0/src/flowerpower/io/loader/parquet.py +13 -0
  47. flowerpower-0.9.0/src/flowerpower/io/metadata.py +221 -0
  48. flowerpower-0.9.0/src/flowerpower/io/saver/csv.py +36 -0
  49. flowerpower-0.9.0/src/flowerpower/io/saver/deltatable.py +127 -0
  50. flowerpower-0.9.0/src/flowerpower/io/saver/duckdb.py +261 -0
  51. flowerpower-0.9.0/src/flowerpower/io/saver/json.py +36 -0
  52. flowerpower-0.9.0/src/flowerpower/io/saver/mqtt.py +0 -0
  53. flowerpower-0.9.0/src/flowerpower/io/saver/parquet.py +36 -0
  54. flowerpower-0.9.0/src/flowerpower/mqtt.py +431 -0
  55. flowerpower-0.9.0/src/flowerpower/pipeline.py +2302 -0
  56. flowerpower-0.9.0/src/flowerpower/scheduler.py +676 -0
  57. flowerpower-0.9.0/src/flowerpower/tui.py +79 -0
  58. flowerpower-0.9.0/src/flowerpower/utils/_filesystem.py +1366 -0
  59. flowerpower-0.9.0/src/flowerpower/utils/datastore.py +62 -0
  60. flowerpower-0.9.0/src/flowerpower/utils/eventbroker.py +125 -0
  61. flowerpower-0.9.0/src/flowerpower/utils/executor.py +58 -0
  62. flowerpower-0.9.0/src/flowerpower/utils/filesystem/__init__.py +10 -0
  63. flowerpower-0.9.0/src/flowerpower/utils/filesystem/base.py +309 -0
  64. flowerpower-0.9.0/src/flowerpower/utils/filesystem/ext.py +1199 -0
  65. flowerpower-0.9.0/src/flowerpower/utils/misc.py +243 -0
  66. flowerpower-0.9.0/src/flowerpower/utils/monkey.py +85 -0
  67. flowerpower-0.9.0/src/flowerpower/utils/open_telemetry.py +29 -0
  68. flowerpower-0.9.0/src/flowerpower/utils/polars.py +567 -0
  69. flowerpower-0.9.0/src/flowerpower/utils/scheduler.py +311 -0
  70. flowerpower-0.9.0/src/flowerpower/utils/sql.py +241 -0
  71. flowerpower-0.9.0/src/flowerpower/utils/storage_options.py +259 -0
  72. flowerpower-0.9.0/src/flowerpower/utils/templates.py +155 -0
  73. flowerpower-0.9.0/src/flowerpower/utils/trigger.py +139 -0
  74. flowerpower-0.9.0/uv.lock +3526 -0
@@ -0,0 +1,32 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ push:
5
+ paths:
6
+ - 'pyproject.toml'
7
+
8
+ jobs:
9
+ publish:
10
+ runs-on: ubuntu-latest
11
+
12
+ steps:
13
+ - name: Checkout code
14
+ uses: actions/checkout@v2
15
+
16
+ - name: Install UV
17
+ run: |
18
+ pip install uv
19
+
20
+ - name: Install dependencies
21
+ run: |
22
+ uv sync
23
+
24
+ - name: Build package
25
+ run: |
26
+ uv build --sdist --wheel
27
+
28
+ - name: Publish to PyPI
29
+ env:
30
+ PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
31
+ run: |
32
+ uv publish --token $PYPI_TOKEN
@@ -0,0 +1,21 @@
1
+ # python generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # venv
10
+ .venv
11
+ .env
12
+ .vscode
13
+ docker/config/.bash_history
14
+ docker/assets
15
+ dev/simple
16
+ dev/
17
+ flowerpower.db
18
+ **/.ipynb_checkpoints/
19
+ Digraph.gv
20
+ Digraph.gv.pdf
21
+ test/test2.json
@@ -0,0 +1 @@
1
+ 3.12.5
@@ -0,0 +1,549 @@
1
+ Metadata-Version: 2.4
2
+ Name: FlowerPower
3
+ Version: 0.9.0
4
+ Summary: A simple workflow framework. Hamilton + APScheduler = FlowerPower
5
+ Author-email: "Volker L." <ligno.blades@gmail.com>
6
+ Keywords: apscheduler,dask,hamilton,pipeline,ray,scheduler,workflow
7
+ Requires-Python: >=3.11
8
+ Requires-Dist: dill>=0.3.8
9
+ Requires-Dist: fsspec>=2024.5.0
10
+ Requires-Dist: munch>=4.0.0
11
+ Requires-Dist: pendulum>=3.0.0
12
+ Requires-Dist: pydantic>=2.10.2
13
+ Requires-Dist: python-dotenv>=1.0.1
14
+ Requires-Dist: pyyaml>=6.0.1
15
+ Requires-Dist: rich>=13.9.3
16
+ Requires-Dist: sf-hamilton-sdk>=0.5.2
17
+ Requires-Dist: sf-hamilton[visualization]>=1.69.0
18
+ Requires-Dist: tqdm>=4.67.1
19
+ Requires-Dist: typer>=0.12.3
20
+ Provides-Extra: filesystem-ext
21
+ Requires-Dist: orjson>=3.10.12; extra == 'filesystem-ext'
22
+ Requires-Dist: polars>=1.15.0; extra == 'filesystem-ext'
23
+ Requires-Dist: pyarrow>=18.1.0; extra == 'filesystem-ext'
24
+ Provides-Extra: mongodb
25
+ Requires-Dist: pymongo>=4.7.2; extra == 'mongodb'
26
+ Provides-Extra: mqtt
27
+ Requires-Dist: orjson>=3.10.11; extra == 'mqtt'
28
+ Requires-Dist: paho-mqtt>=2.1.0; extra == 'mqtt'
29
+ Provides-Extra: opentelemetry
30
+ Requires-Dist: opentelemetry-api>=1.5.0; extra == 'opentelemetry'
31
+ Requires-Dist: opentelemetry-exporter-jaeger>=1.21.0; extra == 'opentelemetry'
32
+ Requires-Dist: opentelemetry-sdk>=1.5.0; extra == 'opentelemetry'
33
+ Provides-Extra: ray
34
+ Requires-Dist: ray>=2.34.0; extra == 'ray'
35
+ Provides-Extra: redis
36
+ Requires-Dist: redis>=5.0.4; extra == 'redis'
37
+ Provides-Extra: scheduler
38
+ Requires-Dist: aiosqlite>=0.20.0; extra == 'scheduler'
39
+ Requires-Dist: apscheduler>=4.0.0a5; extra == 'scheduler'
40
+ Requires-Dist: asyncpg>=0.29.0; extra == 'scheduler'
41
+ Requires-Dist: greenlet>=3.0.3; extra == 'scheduler'
42
+ Requires-Dist: sqlalchemy>=2.0.30; extra == 'scheduler'
43
+ Provides-Extra: tui
44
+ Requires-Dist: textual>=0.85.2; extra == 'tui'
45
+ Provides-Extra: ui
46
+ Requires-Dist: sf-hamilton-ui>=0.0.11; extra == 'ui'
47
+ Provides-Extra: webserver
48
+ Requires-Dist: orjson>=3.10.11; extra == 'webserver'
49
+ Requires-Dist: sanic-ext>=23.12.0; extra == 'webserver'
50
+ Requires-Dist: sanic>=24.6.0; extra == 'webserver'
51
+ Description-Content-Type: text/markdown
52
+
53
+ <div align="center">
54
+ <h1>FlowerPower</h1>
55
+ <h3>Simple Workflow Framework - Hamilton + APScheduler = FlowerPower</h3>
56
+ <img src="./image.png" alt="FlowerPower Logo" width="600" height="400">
57
+ </div>
58
+
59
+ ---
60
+
61
+ ## 📚 Table of Contents
62
+ 1. [Overview](#overview)
63
+ 2. [Installation](#installation)
64
+ 3. [Getting Started](#getting-started)
65
+ - [Initialize Project](#initialize-project)
66
+ - [Add Pipeline](#add-pipeline)
67
+ - [Setup Pipeline](#setup-pipeline)
68
+ - [Run Pipeline](#run-pipeline)
69
+ - [Schedule Pipeline](#schedule-pipeline)
70
+ - [Start Worker](#start-worker)
71
+ - [Track Pipeline](#track-pipeline)
72
+ 4. [Development](#development)
73
+ - [Dev Services](#dev-services)
74
+
75
+ ---
76
+
77
+ ## 🔍 Overview
78
+
79
+ FlowerPower is a simple workflow framework based on two fantastic Python libraries:
80
+
81
+ - **[Hamilton](https://github.com/DAGWorks-Inc/hamilton)**: Creates DAGs from your pipeline functions
82
+ - **[APScheduler](https://github.com/agronholm/apscheduler)**: Handles pipeline scheduling
83
+
84
+ ### Key Features
85
+
86
+ - 🔄 **Pipeline Workflows**: Create and execute complex DAG-based workflows
87
+ - ⏰ **Scheduling**: Run pipelines at specific times or intervals
88
+ - ⚙️ **Parameterization**: Easily configure pipeline parameters
89
+ - 📊 **Tracking**: Monitor executions with Hamilton UI
90
+ - 🛠️ **Flexible Configuration**: Simple YAML-based setup
91
+ - 📡 **Distributed Execution**: Support for distributed environments
92
+
93
+ [More details in Hamilton docs](https://hamilton.dagworks.io/en/latest/)
94
+
95
+ ---
96
+
97
+ ## 📦 Installation
98
+
99
+ ```bash
100
+ # Basic installation
101
+ pip install flowerpower
102
+
103
+ # With scheduling support
104
+ pip install "flowerpower[scheduler]"
105
+
106
+ # Additional components
107
+ pip install "flowerpower[mqtt]" # MQTT broker
108
+ pip install "flowerpower[redis]" # Redis broker
109
+ pip install "flowerpower[mongodb]" # MongoDB store
110
+ pip install "flowerpower[ray]" # Ray computing
111
+ pip install "flowerpower[dask]" # Dask computing
112
+ pip install "flowerpower[ui]" # Hamilton UI
113
+ pip install "flowerpower[websever]" # Web server
114
+ ```
115
+
116
+ ---
117
+
118
+ ## 🚀 Getting Started
119
+
120
+ ### Initialize Project
121
+
122
+ **Option 1: Command Line**
123
+ ```bash
124
+ flowerpower init new-project
125
+ cd new-project
126
+ ```
127
+
128
+ **Option 2: Python**
129
+ ```python
130
+ from flowerpower import init
131
+ init("new-project")
132
+ ```
133
+
134
+ This creates basic config files:
135
+ - `conf/project.yml`
136
+
137
+
138
+ ### 📦 Optional: Project Management with UV (Recommended)
139
+
140
+ It is recommended to use the project manager `uv` to manage your project dependencies.
141
+
142
+ **Installation**
143
+ ```bash
144
+ pip install uv
145
+ ```
146
+ > For more installation options, visit: https://docs.astral.sh/uv/getting-started/installation/
147
+
148
+ **Project Initialization**
149
+ ```bash
150
+ uv init --app --no-readme --vcs git
151
+ ```
152
+ ---
153
+
154
+ ### Pipeline Management
155
+
156
+ #### Creating a New Pipeline
157
+
158
+ **Option 1: Command Line**
159
+ ```bash
160
+ flowerpower new my_flow
161
+ ```
162
+
163
+ **Option 2: Python**
164
+ ```python
165
+ # Using PipelineManager
166
+ from flowerpower.pipeline import PipelineManager
167
+ pm = PipelineManager()
168
+ pm.new("my_flow")
169
+
170
+ # Or using the new function directly
171
+ from flowerpower.pipeline import new
172
+ new("my_flow")
173
+ ```
174
+
175
+ This creates the new pipeline and configuration file:
176
+ - `pipelines/my_flow.py`
177
+ - `conf/pipelines/my_flow.yml`
178
+
179
+ #### Setting Up a Pipeline
180
+
181
+ 1. **Add Pipeline Functions**
182
+ Build your pipeline by adding the functions (nodes) to `pipelines/my_flow.py` that build the DAG, following the Hamilton paradigm.
183
+
184
+ 2. **Parameterize Functions**
185
+
186
+ You can parameterize functions in two ways:
187
+
188
+ **Method 1: Default Values**
189
+ ```python
190
+ def add_int_col(
191
+ df: pd.DataFrame,
192
+ col_name: str = "foo",
193
+ values: str = "bar"
194
+ ) -> pd.DataFrame:
195
+ return df.assign(**{col_name: values})
196
+ ```
197
+
198
+ **Method 2: Configuration File**
199
+
200
+ In `conf/pipelines/my_flow.yml`:
201
+ ```yaml
202
+ ...
203
+ func:
204
+ add_int_col:
205
+ col_name: foo
206
+ values: bar
207
+ ...
208
+ ```
209
+
210
+ Add the `@parameterize` decorator to the function in your pipeline file:
211
+ ```python
212
+ @parameterize(**PARAMS.add_int_col)
213
+ def add_int_col(
214
+ df: pd.DataFrame,
215
+ col_name: str,
216
+ values: int
217
+ ) -> pd.DataFrame:
218
+ return df.assign(**{col_name: values})
219
+ ```
220
+
221
+ ---
222
+
223
+ ### Running Pipelines
224
+
225
+ #### Configuration
226
+
227
+ You can configure the pipeline parameters `inputs` and `final_vars`, as well as other parameters, in the pipeline
228
+ configuration file `conf/pipelines/my_flow.yml` or directly in the pipeline execution function.
229
+
230
+ #### Using the Pipeline Configuration
231
+ ```yaml
232
+ ...
233
+ run:
234
+ inputs:
235
+ data_path: path/to/data.csv
236
+ fs_protocol: local
237
+ final_vars: [add_int_col, final_df]
238
+ # optional parameters
239
+ with_tracker: false
240
+ executor: threadpool # or processpool, ray, dask
241
+ ...
242
+ ```
243
+
244
+ #### Execution Methods
245
+ There are three ways to execute a pipeline:
246
+
247
+ 1. **Direct Execution**
248
+ - Runs in current process
249
+ - No data store required
250
+
251
+ 2. **Job Execution**
252
+ - Runs as APScheduler job
253
+ - Returns job results
254
+ - Requires data store and event broker
255
+
256
+ 3. **Async Job Addition**
257
+ - Adds to APScheduler
258
+ - Returns job ID
259
+ - Results retrievable from data store
260
+
261
+
262
+ #### Command Line Usage
263
+ ```bash
264
+ # Note: add --inputs and --final-vars and other optional parameters if not specified in the config file
265
+ # Direct execution
266
+ flowerpower run my_flow
267
+ # Job execution
268
+ flowerpower run-job my_flow
269
+
270
+ # Add as scheduled job
271
+ flowerpower add-job my_flow
272
+ ```
273
+
274
+ You can also use the `--inputs` and `--final-vars` flags to override the configuration file parameters, or to supply them if they are not specified in the configuration file.
275
+
276
+ ```bash
277
+ flowerpower run my_flow \
278
+ --inputs data_path=path/to/data.csv,fs_protocol=local \
279
+ --final-vars final_df \
280
+ --executor threadpool
281
+ --without-tracker
282
+ ```
283
+
284
+ #### Python Usage
285
+ ```python
286
+ from flowerpower.pipeline import Pipeline, run, run_job, add_job
287
+
288
+ # Using Pipeline class
289
+ p = Pipeline("my_flow")
290
+ # Note: add inputs, final_vars, and other optional arguments if not specified in the config file
291
+ result = p.run()
292
+ result = p.run_job()
293
+ job_id = p.add_job()
294
+
295
+ # Using functions
296
+ result = run("my_flow")
297
+ result = run_job("my_flow")
298
+ job_id = add_job("my_flow")
299
+ ```
300
+
301
+ You can also use the `inputs` and `final_vars` arguments to override the configuration file parameters, or to supply them if they are not specified in the configuration file.
302
+
303
+ ```python
304
+ result = run(
305
+ "my_flow",
306
+ inputs={
307
+ "data_path": "path/to/data.csv",
308
+ "fs_protocol": "local"
309
+ },
310
+ final_vars=["final_df"],
311
+ executor="threadpool",
312
+ with_tracker=False
313
+ )
314
+ ```
315
+
316
+ ---
317
+ ## ⏰ Scheduling Pipelines
318
+
319
+ ### Setting Up Schedules
320
+
321
+ #### Command Line Options
322
+
323
+ ```bash
324
+ # Run every 30 seconds
325
+ flowerpower schedule my_flow \
326
+ --type interval \
327
+ --interval-params seconds=30
328
+
329
+ # Run at specific date/time
330
+ flowerpower schedule my_flow \
331
+ --type date \
332
+ --date-params year=2022,month=1,day=1,hour=0,minute=0,second=0
333
+
334
+ # Run with cron parameters
335
+ flowerpower schedule my_flow \
336
+ --type cron \
337
+ --cron-params second=0,minute=0,hour=0,day=1,month=1,day_of_week=0
338
+
339
+ # Run with crontab expression
340
+ flowerpower schedule my_flow \
341
+ --type cron \
342
+ --crontab "0 0 1 1 0"
343
+ ```
344
+
345
+ #### Python Usage
346
+ ```python
347
+ from flowerpower.scheduler import schedule, Pipeline
348
+
349
+ # Using Pipeline class
350
+ p = Pipeline("my_flow")
351
+ p.schedule("interval", seconds=30)
352
+
353
+ # Using schedule function
354
+ schedule("my_flow", "interval", seconds=30)
355
+ ```
356
+
357
+ ---
358
+
359
+ ## 👷 Worker Management
360
+
361
+ ### Starting a Worker
362
+
363
+ **Command Line**
364
+ ```bash
365
+ flowerpower start-worker
366
+ ```
367
+
368
+ **Python**
369
+ ```python
370
+ # Using the SchedulerManager class
371
+ from flowerpower.scheduler import SchedulerManager
372
+ sm = SchedulerManager()
373
+ sm.start_worker()
374
+
375
+ # Using the start_worker function
376
+ from flowerpower.scheduler import start_worker
377
+ start_worker()
378
+ ```
379
+
380
+ ### Worker Configuration
381
+
382
+ Configure your worker in `conf/project.yml`:
383
+
384
+ ```yaml
385
+ # PostgreSQL Configuration
386
+ data_store:
387
+ type: postgres
388
+ uri: postgresql+asyncpq://user:password@localhost:5432/flowerpower
389
+
390
+ # Redis Event Broker
391
+ event_broker:
392
+ type: redis
393
+ uri: redis://localhost:6379
394
+ # Alternative configuration:
395
+ # host: localhost
396
+ # port: 6379
397
+ ```
398
+
399
+ #### Alternative Data Store Options
400
+
401
+ **SQLite**
402
+ ```yaml
403
+ data_store:
404
+ type: sqlite
405
+ uri: sqlite+aiosqlite:///flowerpower.db
406
+ ```
407
+
408
+ **MySQL**
409
+ ```yaml
410
+ data_store:
411
+ type: mysql
412
+ uri: mysql+aiomysql://user:password@localhost:3306/flowerpower
413
+ ```
414
+
415
+ **MongoDB**
416
+ ```yaml
417
+ data_store:
418
+ type: mongodb
419
+ uri: mongodb://localhost:27017/flowerpower
420
+ ```
421
+
422
+ **In-Memory**
423
+ ```yaml
424
+ data_store:
425
+ type: memory
426
+ ```
427
+
428
+ #### Alternative Event Broker Options
429
+
430
+ **MQTT**
431
+ ```yaml
432
+ event_broker:
433
+ type: mqtt
434
+ host: localhost
435
+ port: 1883
436
+ username: user # optional if required
437
+ password: supersecret # optional if required
438
+ ```
439
+ **Redis**
440
+ ```yaml
441
+ event_broker:
442
+ type: redis
443
+ uri: redis://localhost:6379
444
+ # Alternative configuration:
445
+ # host: localhost
446
+ # port: 6379
447
+ ```
448
+
449
+ **In-Memory**
450
+ ```yaml
451
+ event_broker:
452
+ type: memory
453
+ ```
454
+
455
+ ---
456
+
457
+ ## 📊 Pipeline Tracking
458
+
459
+ ### Hamilton UI Setup
460
+
461
+ #### Local Installation
462
+ ```bash
463
+ # Install UI package
464
+ pip install "flowerpower[ui]"
465
+
466
+ # Start UI server
467
+ flowerpower hamilton-ui
468
+ ```
469
+ > Access the UI at: http://localhost:8241
470
+
471
+ #### Docker Installation
472
+ ```bash
473
+ # Clone Hamilton repository
474
+ git clone https://github.com/dagworks-inc/hamilton
475
+ cd hamilton/ui
476
+
477
+ # Start UI server
478
+ ./run.sh
479
+ ```
480
+ > Access the UI at: http://localhost:8242
481
+
482
+ ### Tracker Configuration
483
+
484
+ Configure tracking in `conf/project.yml`:
485
+
486
+ ```yaml
487
+ username: my_email@example.com
488
+ api_url: http://localhost:8241
489
+ ui_url: http://localhost:8242
490
+ api_key: optional_key
491
+ ```
492
+
493
+ And specify the `tracker` parameter in the pipeline configuration `conf/pipelines/my_flow.yml:
494
+
495
+ ```yaml
496
+ ...
497
+ tracker:
498
+ project_id: 1
499
+ tags:
500
+ environment: dev
501
+ version: 1.0
502
+ dag_name: my_flow_123
503
+ ...
504
+ ```
505
+
506
+ ---
507
+
508
+ ## 🛠️ Development Services
509
+
510
+ ### Local Development Setup
511
+
512
+ Download the docker-compose configuration:
513
+ ```bash
514
+ curl -O https://raw.githubusercontent.com/legout/flowerpower/main/docker/docker-compose.yml
515
+ ```
516
+
517
+ ### Starting Services
518
+
519
+ ```bash
520
+ # MQTT Broker (EMQX)
521
+ docker-compose up mqtt -d
522
+
523
+ # Redis
524
+ docker-compose up redis -d
525
+
526
+ # MongoDB
527
+ docker-compose up mongodb -d
528
+
529
+ # PostgreSQL
530
+ docker-compose up postgres -d
531
+ ```
532
+
533
+ ---
534
+
535
+ ## 📝 License
536
+
537
+ [MIT License](LICENSE)
538
+
539
+ ---
540
+
541
+ ## 🤝 Contributing
542
+
543
+ Contributions are welcome! Please feel free to submit a Pull Request.
544
+
545
+ ---
546
+
547
+ ## 📫 Support
548
+
549
+ For support, please open an issue in the GitHub repository.