ml-dash 0.0.11__py3-none-any.whl → 0.5.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. ml_dash/__init__.py +59 -1
  2. ml_dash/auto_start.py +42 -0
  3. ml_dash/cli.py +67 -0
  4. ml_dash/cli_commands/__init__.py +1 -0
  5. ml_dash/cli_commands/download.py +797 -0
  6. ml_dash/cli_commands/list.py +343 -0
  7. ml_dash/cli_commands/upload.py +1298 -0
  8. ml_dash/client.py +955 -0
  9. ml_dash/config.py +114 -11
  10. ml_dash/experiment.py +1020 -0
  11. ml_dash/files.py +688 -0
  12. ml_dash/log.py +181 -0
  13. ml_dash/metric.py +292 -0
  14. ml_dash/params.py +188 -0
  15. ml_dash/storage.py +1115 -0
  16. ml_dash-0.5.9.dist-info/METADATA +244 -0
  17. ml_dash-0.5.9.dist-info/RECORD +20 -0
  18. ml_dash-0.5.9.dist-info/WHEEL +4 -0
  19. ml_dash-0.5.9.dist-info/entry_points.txt +3 -0
  20. ml_dash/app.py +0 -33
  21. ml_dash/file_events.py +0 -71
  22. ml_dash/file_handlers.py +0 -141
  23. ml_dash/file_utils.py +0 -5
  24. ml_dash/file_watcher.py +0 -30
  25. ml_dash/main.py +0 -60
  26. ml_dash/mime_types.py +0 -20
  27. ml_dash/schema/__init__.py +0 -110
  28. ml_dash/schema/archive.py +0 -165
  29. ml_dash/schema/directories.py +0 -59
  30. ml_dash/schema/experiments.py +0 -65
  31. ml_dash/schema/files/__init__.py +0 -204
  32. ml_dash/schema/files/file_helpers.py +0 -79
  33. ml_dash/schema/files/images.py +0 -27
  34. ml_dash/schema/files/metrics.py +0 -64
  35. ml_dash/schema/files/parameters.py +0 -50
  36. ml_dash/schema/files/series.py +0 -235
  37. ml_dash/schema/files/videos.py +0 -27
  38. ml_dash/schema/helpers.py +0 -66
  39. ml_dash/schema/projects.py +0 -65
  40. ml_dash/schema/schema_helpers.py +0 -19
  41. ml_dash/schema/users.py +0 -33
  42. ml_dash/sse.py +0 -18
  43. ml_dash-0.0.11.dist-info/METADATA +0 -67
  44. ml_dash-0.0.11.dist-info/RECORD +0 -30
  45. ml_dash-0.0.11.dist-info/WHEEL +0 -5
  46. ml_dash-0.0.11.dist-info/top_level.txt +0 -1
  47. /ml_dash/{example.py → py.typed} +0 -0
@@ -0,0 +1,244 @@
1
+ Metadata-Version: 2.3
2
+ Name: ml-dash
3
+ Version: 0.5.9
4
+ Summary: ML experiment tracking and data storage
5
+ Keywords: machine-learning,experiment-tracking,mlops,data-storage
6
+ Author: Ge Yang, Tom Tao
7
+ License: MIT License
8
+
9
+ Copyright (c) 2025 Ge Yang, Tom Tao
10
+
11
+ Permission is hereby granted, free of charge, to any person obtaining a copy
12
+ of this software and associated documentation files (the "Software"), to deal
13
+ in the Software without restriction, including without limitation the rights
14
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
15
+ copies of the Software, and to permit persons to whom the Software is
16
+ furnished to do so, subject to the following conditions:
17
+
18
+ The above copyright notice and this permission notice shall be included in all
19
+ copies or substantial portions of the Software.
20
+
21
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
22
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
23
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
24
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
25
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
26
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
27
+ SOFTWARE.
28
+ Classifier: Development Status :: 4 - Beta
29
+ Classifier: Intended Audience :: Developers
30
+ Classifier: Intended Audience :: Science/Research
31
+ Classifier: License :: OSI Approved :: MIT License
32
+ Classifier: Programming Language :: Python :: 3
33
+ Classifier: Programming Language :: Python :: 3.9
34
+ Classifier: Programming Language :: Python :: 3.10
35
+ Classifier: Programming Language :: Python :: 3.11
36
+ Classifier: Programming Language :: Python :: 3.12
37
+ Classifier: Programming Language :: Python :: 3.13
38
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
39
+ Requires-Dist: httpx>=0.27.0
40
+ Requires-Dist: pyjwt>=2.8.0
41
+ Requires-Dist: imageio>=2.31.0
42
+ Requires-Dist: imageio-ffmpeg>=0.4.9
43
+ Requires-Dist: scikit-image>=0.21.0
44
+ Requires-Dist: rich>=13.0.0
45
+ Requires-Dist: pytest>=8.0.0 ; extra == 'dev'
46
+ Requires-Dist: pytest-asyncio>=0.23.0 ; extra == 'dev'
47
+ Requires-Dist: sphinx>=7.2.0 ; extra == 'dev'
48
+ Requires-Dist: furo>=2024.0.0 ; extra == 'dev'
49
+ Requires-Dist: sphinx-autodoc-typehints>=2.0.0 ; extra == 'dev'
50
+ Requires-Dist: sphinx-autobuild>=2024.0.0 ; extra == 'dev'
51
+ Requires-Dist: sphinx-copybutton>=0.5.0 ; extra == 'dev'
52
+ Requires-Dist: sphinx-design>=0.5.0 ; extra == 'dev'
53
+ Requires-Dist: sphinx-tabs>=3.4.0 ; extra == 'dev'
54
+ Requires-Dist: sphinxcontrib-mermaid>=0.9.0 ; extra == 'dev'
55
+ Requires-Dist: sphinxext-opengraph>=0.9.0 ; extra == 'dev'
56
+ Requires-Dist: myst-parser>=2.0.0 ; extra == 'dev'
57
+ Requires-Dist: linkify-it-py>=2.0.0 ; extra == 'dev'
58
+ Requires-Dist: ruff>=0.3.0 ; extra == 'dev'
59
+ Requires-Dist: mypy>=1.9.0 ; extra == 'dev'
60
+ Requires-Python: >=3.9
61
+ Provides-Extra: dev
62
+ Description-Content-Type: text/markdown
63
+
64
+ # ML-Dash
65
+
66
+ A simple and flexible SDK for ML experiment tracking and data storage.
67
+
68
+ ## Features
69
+
70
+ - **Three Usage Styles**: Decorator, context manager, or direct instantiation
71
+ - **Dual Operation Modes**: Remote (API server) or local (filesystem)
72
+ - **Auto-creation**: Automatically creates namespace, project, and folder hierarchy
73
+ - **Upsert Behavior**: Updates existing experiments or creates new ones
74
+ - **Experiment Lifecycle**: Automatic status tracking (RUNNING, COMPLETED, FAILED, CANCELLED)
75
+ - **Organized File Storage**: Prefix-based file organization with unique snowflake IDs
76
+ - **Rich Metadata**: Tags, binders, descriptions, and custom metadata support
77
+ - **Simple API**: Minimal configuration, maximum flexibility
78
+
79
+ ## Installation
80
+
81
+ <table>
82
+ <tr>
83
+ <td>Using uv (recommended)</td>
84
+ <td>Using pip</td>
85
+ </tr>
86
+ <tr>
87
+ <td>
88
+
89
+ ```bash
90
+ uv add ml-dash
91
+ ```
92
+
93
+ </td>
94
+ <td>
95
+
96
+ ```bash
97
+ pip install ml-dash
98
+ ```
99
+
100
+ </td>
101
+ </tr>
102
+ </table>
103
+
104
+ ## Getting Started
105
+
106
+ ### Remote Mode (with API Server)
107
+
108
+ ```python
109
+ from ml_dash import Experiment
110
+
111
+ with Experiment(
112
+ name="my-experiment",
113
+ project="my-project",
114
+ remote="https://api.dash.ml",
115
+ api_key="your-jwt-token"
116
+ ) as experiment:
117
+ print(f"Experiment ID: {experiment.id}")
118
+ ```
119
+
120
+ ### Local Mode (Filesystem)
121
+
122
+ ```python
123
+ from ml_dash import Experiment
124
+
125
+ with Experiment(
126
+ name="my-experiment",
127
+ project="my-project",
128
+ local_path=".ml-dash"
129
+ ) as experiment:
130
+ pass # Your code here
131
+ ```
132
+
133
+ See [examples/](examples/) for more complete examples.
134
+
135
+ ## Development Setup
136
+
137
+ ### Installing Dev Dependencies
138
+
139
+ To contribute to ML-Dash or run tests, install the development dependencies:
140
+
141
+ <table>
142
+ <tr>
143
+ <td>Using uv (recommended)</td>
144
+ <td>Using pip</td>
145
+ </tr>
146
+ <tr>
147
+ <td>
148
+
149
+ ```bash
150
+ uv sync --extra dev
151
+ ```
152
+
153
+ </td>
154
+ <td>
155
+
156
+ ```bash
157
+ pip install -e ".[dev]"
158
+ ```
159
+
160
+ </td>
161
+ </tr>
162
+ </table>
163
+
164
+ This installs:
165
+ - `pytest>=8.0.0` - Testing framework
166
+ - `pytest-asyncio>=0.23.0` - Async test support
167
+ - `sphinx>=7.2.0` - Documentation builder
168
+ - `furo>=2024.0.0` - Furo theme for Sphinx documentation
169
+ - `sphinx-autobuild>=2024.0.0` - Live preview for documentation
170
+ - `myst-parser>=2.0.0` - Markdown support for Sphinx
171
+ - `ruff>=0.3.0` - Linter and formatter
172
+ - `mypy>=1.9.0` - Type checker
173
+
174
+ ### Running Tests
175
+
176
+ <table>
177
+ <tr>
178
+ <td>Using uv</td>
179
+ <td>Using pytest directly</td>
180
+ </tr>
181
+ <tr>
182
+ <td>
183
+
184
+ ```bash
185
+ uv run pytest
186
+ ```
187
+
188
+ </td>
189
+ <td>
190
+
191
+ ```bash
192
+ pytest
193
+ ```
194
+
195
+ </td>
196
+ </tr>
197
+ </table>
198
+
199
+ ### Building Documentation
200
+
201
+ Documentation is built using Sphinx with the Furo theme.
202
+
203
+ <table>
204
+ <tr>
205
+ <td>Build docs</td>
206
+ <td>Live preview</td>
207
+ <td>Clean build</td>
208
+ </tr>
209
+ <tr>
210
+ <td>
211
+
212
+ ```bash
213
+ uv run python -m sphinx -b html docs docs/_build/html
214
+ ```
215
+
216
+ </td>
217
+ <td>
218
+
219
+ ```bash
220
+ uv run sphinx-autobuild docs docs/_build/html
221
+ ```
222
+
223
+ </td>
224
+ <td>
225
+
226
+ ```bash
227
+ rm -rf docs/_build
228
+ ```
229
+
230
+ </td>
231
+ </tr>
232
+ </table>
233
+
234
+ The live preview command starts a local server and automatically rebuilds when files change.
235
+
236
+ Alternatively, you can use the Makefile from within the docs directory:
237
+
238
+ ```bash
239
+ cd docs
240
+ make html # Build HTML documentation
241
+ make clean # Clean build files
242
+ ```
243
+
244
+ For maintainers, to build and publish a new release: `uv build && uv publish`
@@ -0,0 +1,20 @@
1
+ ml_dash/__init__.py,sha256=o_LrWVJBY_VkUGhSBs5wdb_NqEsHD1AK9HGsjZGxHxQ,1414
2
+ ml_dash/auto_start.py,sha256=c3XcXFpZdvjtWauEoK5043Gw9k0L_5IDq4fdiB2ha88,959
3
+ ml_dash/cli.py,sha256=lyWVVhmsflSXQt2UCDb8IqC-mSRQwwlB2l1qEIYBUb8,1743
4
+ ml_dash/cli_commands/__init__.py,sha256=bjAmV7MsW-bhtW_4SnLJ0Cfkt9h82vMDC8ebW1Ke8KE,38
5
+ ml_dash/cli_commands/download.py,sha256=TomyUFwelqfQHfh60K7rCyCwEZVp1CkMToogprgC64Q,29614
6
+ ml_dash/cli_commands/list.py,sha256=Cx9yWsTV5HPaevYpQ6BugCEr5z_4bhxQ0T51OXExuTU,10900
7
+ ml_dash/cli_commands/upload.py,sha256=jo6FVdbuokTz64rjvOEWWhLBzlh2gM0Ru4TRNv9hX60,47943
8
+ ml_dash/client.py,sha256=31C2Kb3KULwhrb3UlpCFY7HDA3-kvj3XVmWUvXEvQHY,27993
9
+ ml_dash/config.py,sha256=iQbHCu4lM_Sg8YadyEXSJ6Ht9yKIJHN26L7L-rMH4gE,3112
10
+ ml_dash/experiment.py,sha256=K36HkHJb_O2-vdaPPOCq74_2nZtfiLaS0o7qhTntD8Q,30646
11
+ ml_dash/files.py,sha256=JptjoxGJiXJ-nkj6C7vDhw-cgJRCB0cHt_SIUJG665o,23024
12
+ ml_dash/log.py,sha256=0yXaNnFwYeBI3tRLHX3kkqWRpg0MbSGwmgjnOfsElCk,5350
13
+ ml_dash/metric.py,sha256=c0Zl0wEufmQuVfwIMvrORLwqe92Iaf0PfKRgmlgQWzQ,10343
14
+ ml_dash/params.py,sha256=xaByDSVar4D1pZqxTANkMPeZTL5-V7ewJe5TXfPLhMQ,5980
15
+ ml_dash/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
+ ml_dash/storage.py,sha256=1NLkIOZeFv0e97zieEX0tJT3qHTuM8UN6hvsFRQ6TTo,38941
17
+ ml_dash-0.5.9.dist-info/WHEEL,sha256=z-mOpxbJHqy3cq6SvUThBZdaLGFZzdZPtgWLcP2NKjQ,79
18
+ ml_dash-0.5.9.dist-info/entry_points.txt,sha256=dYs2EHX1uRNO7AQGNnVaJJpgiy0Z9q7tiy4fHSyaf3Q,46
19
+ ml_dash-0.5.9.dist-info/METADATA,sha256=Acfle-Q8_jExDsli8yuZ8EMP1knWSkPtONo1FYB1sM4,6175
20
+ ml_dash-0.5.9.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: uv 0.9.15
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ ml-dash = ml_dash.cli:main
3
+
ml_dash/app.py DELETED
@@ -1,33 +0,0 @@
1
- import os
2
- from sanic import Sanic
3
- from sanic.exceptions import FileNotFound
4
- from sanic.response import file
5
-
6
- # gets current directory
7
- BASE = os.path.realpath(__file__)
8
- print(BASE)
9
- print(os.path.dirname(BASE))
10
-
11
- build_path = os.path.join(os.path.dirname(BASE), "../app-build")
12
- print(build_path)
13
-
14
- app = Sanic()
15
- # serve js file for webpack
16
- app.static('/', build_path)
17
-
18
-
19
- # app.static('/main.js', './app-build/main.js', name='main.js')
20
-
21
- @app.route('/')
22
- @app.exception(FileNotFound)
23
- async def index(request, exception=None):
24
- print('hey ====', [exception])
25
- return await file(build_path + '/index.html')
26
-
27
-
28
- if __name__ == '__main__':
29
- app.run(
30
- host='0.0.0.0',
31
- port=int(os.environ.get('PORT', 3002)),
32
- workers=int(os.environ.get('WEB_CONCURRENCY', 1)),
33
- debug=bool(os.environ.get('DEBUG', '')))
ml_dash/file_events.py DELETED
@@ -1,71 +0,0 @@
1
- from hachiko.hachiko import AIOEventHandler, AIOWatchdog
2
- from asyncio import coroutine, Queue, sleep
3
- from sanic import response
4
- from sanic.exceptions import RequestTimeout
5
-
6
- from ml_dash.file_utils import path_match
7
- from termcolor import cprint
8
-
9
- from . import config
10
- import json
11
-
12
- subscriptions = []
13
- watcher = None
14
-
15
-
16
- class Handler(AIOEventHandler):
17
- def __init__(self, *args, **kwargs):
18
- super().__init__(*args, **kwargs)
19
-
20
- @coroutine
21
- async def on_any_event(self, event):
22
- _event = dict(src_path=event.src_path, event_type=event.event_type, is_directory=event.is_directory)
23
- for que in subscriptions:
24
- await que.put(_event)
25
- # self._loop.create_task(que.put(event))
26
-
27
-
28
- def setup_watch_queue(app, loop):
29
- print('setting up watch queue')
30
- start_watcher()
31
- cprint('watcher setup complete!', "green")
32
-
33
-
34
- def start_watcher():
35
- global watcher
36
-
37
- handler = Handler()
38
- print('starting file watcher...')
39
- watcher = AIOWatchdog(config.Args.logdir, event_handler=handler)
40
- watcher.start()
41
- print('watcher start is complete')
42
-
43
-
44
- import os
45
-
46
-
47
- # server does not have access to a disconnect event.
48
- # currently subscriptions only grows.
49
- # Will add timeout based cleanup after.
50
- async def file_events(request, file_path="", query="*"):
51
- q = Queue()
52
- subscriptions.append(q)
53
-
54
- async def streaming_fn(response):
55
- try:
56
- while True:
57
- print('subscription que started')
58
- file_event = await q.get()
59
- src_path = file_event['src_path']
60
- if src_path.startswith(os.path.join(config.Args.logdir, file_path)) and path_match(file_path, query):
61
- file_event['src_path'] = src_path[len(config.Args.logdir):]
62
- print("=>>", file_event)
63
- response.write(f"data: {json.dumps(file_event)}\r\n\r\n".encode())
64
- sleep(0.1)
65
- # todo: this timeout doesn't really work.
66
- # todo: also add handling of stream is terminated logic (separate from above).
67
- except RequestTimeout:
68
- subscriptions.remove(q)
69
-
70
- return response.stream(streaming_fn, content_type="text/event-stream")
71
- # subscriptions.remove(q)
ml_dash/file_handlers.py DELETED
@@ -1,141 +0,0 @@
1
- import os
2
- import stat
3
- from glob import iglob
4
- from shutil import rmtree
5
- from sanic import response
6
-
7
- from . import config
8
-
9
-
10
- def get_type(mode):
11
- if stat.S_ISDIR(mode) or stat.S_ISLNK(mode):
12
- type = 'dir'
13
- else:
14
- type = 'file'
15
- return type
16
-
17
-
18
- async def remove_path(request, file_path=""):
19
- print(file_path)
20
- path = os.path.join(config.Args.logdir, file_path)
21
- if os.path.isdir(path):
22
- rmtree(path)
23
- res = response.text("ok", status=204)
24
- elif os.path.isfile(path):
25
- os.remove(path)
26
- res = response.text("ok", status=204)
27
- else:
28
- res = response.text('Not found', status=404)
29
- return res
30
-
31
-
32
- from contextlib import contextmanager
33
-
34
-
35
- @contextmanager
36
- def cwdContext(path):
37
- owd = os.getcwd()
38
- os.chdir(path)
39
- try:
40
- yield
41
- finally:
42
- os.chdir(owd)
43
-
44
-
45
- async def batch_get_path(request):
46
- try:
47
- data = request.json
48
-
49
- file_paths = data['paths']
50
- options = data['options']
51
-
52
- batch_res_data = dict()
53
-
54
- if options.get('json', False):
55
- for path in file_paths:
56
- from ml_logger.helpers import load_from_pickle
57
- batch_res_data[path] = [_ for _ in load_from_pickle(path)]
58
-
59
- res = response.json(batch_res_data, status=200, content_type='application/json')
60
- return res
61
-
62
- except Exception as e:
63
- print('Exception: ', e)
64
- res = response.text('Internal Error' + str(e), status=502)
65
- return res
66
-
67
-
68
- async def get_path(request, file_path=""):
69
- print(file_path)
70
-
71
- as_records = request.args.get('records')
72
- as_json = request.args.get('json')
73
- as_log = request.args.get('log')
74
- as_attachment = int(request.args.get('download', '0'))
75
- is_recursive = request.args.get('recursive')
76
- show_hidden = request.args.get('hidden')
77
- query = request.args.get('query', "*").strip()
78
-
79
- _start = request.args.get('start', None)
80
- _stop = request.args.get('stop', None)
81
- start = None if _start is None else int(_start)
82
- stop = None if _stop is None else int(_stop)
83
-
84
- reservoir_k = int(request.args.get('reservoir', '200'))
85
-
86
- # limit for the search itself.
87
- search_limit = 500
88
-
89
- path = os.path.join(config.Args.logdir, file_path)
90
- print("=============>", [query], [path], os.path.isdir(path))
91
-
92
- if os.path.isdir(path):
93
- from itertools import islice
94
- with cwdContext(path):
95
- print(os.getcwd(), query, is_recursive)
96
- file_paths = list(islice(iglob(query, recursive=is_recursive), start or 0, stop or 200))
97
- files = map(file_stat, file_paths)
98
- res = response.json(files, status=200)
99
- elif os.path.isfile(path):
100
- if as_records:
101
- from ml_logger.helpers import load_pickle_as_dataframe
102
- df = load_pickle_as_dataframe(path, reservoir_k)
103
- res = response.text(df.to_json(orient="records"), status=200, content_type='application/json')
104
- elif as_log:
105
- from ml_logger.helpers import load_pickle_as_dataframe
106
- df = load_pickle_as_dataframe(path, reservoir_k)
107
- res = response.text(df.to_json(orient="records"), status=200, content_type='application/json')
108
- elif as_json:
109
- from ml_logger.helpers import load_from_pickle
110
- data = [_ for _ in load_from_pickle(path)]
111
- res = response.json(data, status=200, content_type='application/json')
112
- elif type(start) is int or type(stop) is int:
113
- from itertools import islice
114
- with open(path, 'r') as f:
115
- text = ''.join([l for l in islice(f, start, stop)])
116
- res = response.text(text, status=200)
117
- else:
118
- # todo: check the file handling here. Does this use correct
119
- # mimeType for text files?
120
- res = await response.file(path)
121
- if as_attachment:
122
- res.headers['Content-Disposition'] = 'attachment'
123
- else:
124
- res = response.text('Not found', status=404)
125
- return res
126
-
127
-
128
- # use glob! LOL
129
- def file_stat(file_path):
130
- # this looped over is very slow. Fine for a small list of files though.
131
- stat_res = os.stat(file_path)
132
- ft = get_type(stat_res.st_mode)
133
- sz = stat_res.st_size
134
- return dict(
135
- name=os.path.basename(file_path),
136
- path=file_path,
137
- mtime=stat_res.st_mtime,
138
- ctime=stat_res.st_ctime,
139
- type=ft,
140
- size=sz,
141
- )
ml_dash/file_utils.py DELETED
@@ -1,5 +0,0 @@
1
- def path_match(query, pattern):
2
- import glob, re
3
- regex = fnmatch.translate(pattern)
4
- reobj = re.compile(regex)
5
- return reobj.match(query)
ml_dash/file_watcher.py DELETED
@@ -1,30 +0,0 @@
1
- # from . import config
2
- import asyncio
3
- from hachiko.hachiko import AIOWatchdog
4
-
5
-
6
- class Handler:
7
- def dispatch(self, *args, **kwargs):
8
- print(args, kwargs)
9
-
10
- @asyncio.coroutine
11
- def watch_fs(path):
12
- watch = AIOWatchdog(path, event_handler=Handler())
13
- watch.start()
14
- while True:
15
- yield from asyncio.sleep(10)
16
- watch.stop()
17
-
18
-
19
-
20
- if __name__ == "__main__":
21
- # asyncio.get_event_loop().run_until_complete(watch_fs("/Users/ge/machine_learning/berkeley-playground/ins-runs"))
22
- # asyncio.get_event_loop().run_until_complete(watch_fs("."))
23
- path = "."
24
- watch = AIOWatchdog(path, event_handler=Handler())
25
- watch.start()
26
- import time
27
- print('watch is setup')
28
- while True:
29
- time.sleep(10)
30
-
ml_dash/main.py DELETED
@@ -1,60 +0,0 @@
1
- from ml_dash.schema import schema
2
- from sanic_graphql import GraphQLView
3
-
4
- from .file_events import file_events, setup_watch_queue
5
- from .file_handlers import get_path, remove_path, batch_get_path
6
-
7
- from sanic import Sanic
8
- from sanic_cors import CORS
9
-
10
- app = Sanic(__name__)
11
- # CORS(app)
12
- CORS(app, resources={r"/*": {"origins": "*"}}, automatic_options=True)
13
-
14
- # @app.listener('before_server_start')
15
- # def init_graphql(app, loop):
16
- # app.add_route(GraphQLView.as_view(schema=schema, executor=AsyncioExecutor(loop=loop)), '/graphql')
17
-
18
- # new graphQL endpoints
19
- app.add_route(GraphQLView.as_view(schema=schema, graphiql=True), '/graphql',
20
- methods=['GET', 'POST', 'FETCH', 'OPTIONS'])
21
- app.add_route(GraphQLView.as_view(schema=schema, batch=True), '/graphql/batch',
22
- methods=['GET', 'POST', 'FETCH', 'OPTIONS'])
23
-
24
- # # Serving static app
25
- # app.add_route(get_path, '/*', methods=['GET', 'OPTIONS'])
26
-
27
- # old RPC endpoints
28
- app.add_route(get_path, '/files/', methods=['GET', 'OPTIONS'])
29
- app.add_route(get_path, '/files/<file_path:path>', methods=['GET', 'OPTIONS'])
30
- app.add_route(batch_get_path, '/batch-files', methods=['GET', 'OPTIONS'])
31
- app.add_route(remove_path, '/files/<file_path:path>', methods=['DELETE'])
32
- app.add_route(file_events, '/file-events', methods=['GET', 'OPTIONS'])
33
- app.add_route(file_events, '/file-events/<file_path:path>', methods=['GET', 'OPTIONS'])
34
- app.listener('before_server_start')(setup_watch_queue)
35
-
36
-
37
- # app.add_task(start_watcher)
38
-
39
-
40
- def run(logdir=None, **kwargs):
41
- from . import config
42
- from termcolor import cprint
43
-
44
- if logdir:
45
- config.Args.logdir = logdir
46
-
47
- cprint("launched server with config:", "green")
48
- cprint("Args:", 'yellow')
49
- print(vars(config.Args))
50
- cprint("Sanic Server Args:", 'yellow')
51
- print(vars(config.ServerArgs))
52
-
53
- config.ServerArgs.update(**kwargs)
54
- app.run(**vars(config.ServerArgs))
55
-
56
-
57
- if __name__ == "__main__":
58
- # see: https://sanic.readthedocs.io/en/latest/sanic/deploying.html
59
- # call this as `python -m ml_logger.main`
60
- run()
ml_dash/mime_types.py DELETED
@@ -1,20 +0,0 @@
1
- ignored = ['.bzr', '$RECYCLE.BIN', '.DAV', '.DS_Store', '.git', '.hg', '.htaccess', '.htpasswd', '.Spotlight-V100',
2
- '.svn', '__MACOSX', 'ehthumbs.db', 'robots.txt', 'Thumbs.db', 'thumbs.tps']
3
- datatypes = {'audio': 'm4a,mp3,oga,ogg,webma,wav',
4
- 'archive': '7z,zip,rar,gz,tar',
5
- 'image': 'gif,ico,jpe,jpeg,jpg,png,svg,webp',
6
- 'pdf': 'pdf',
7
- 'quicktime': '3g2,3gp,3gp2,3gpp,mov,qt',
8
- 'source': 'atom,bat,bash,c,cmd,coffee,css,hml,js,json,java,less,markdown,md,php,pl,py,rb,rss,sass,scpt,swift,scss,sh,xml,yml,plist',
9
- 'text': 'txt',
10
- 'video': 'mp4,m4v,ogv,webm',
11
- 'website': 'htm,html,mhtm,mhtml,xhtm,xhtml'}
12
- icontypes = {'fa-music': 'm4a,mp3,oga,ogg,webma,wav',
13
- 'fa-archive': '7z,zip,rar,gz,tar',
14
- 'fa-picture-o': 'gif,ico,jpe,jpeg,jpg,png,svg,webp',
15
- 'fa-file-text': 'pdf',
16
- 'fa-film': '3g2,3gp,3gp2,3gpp,mov,qt',
17
- 'fa-code': 'atom,plist,bat,bash,c,cmd,coffee,css,hml,js,json,java,less,markdown,md,php,pl,py,rb,rss,sass,scpt,swift,scss,sh,xml,yml',
18
- 'fa-file-text-o': 'txt',
19
- 'fa-film': 'mp4,m4v,ogv,webm',
20
- 'fa-globe': 'htm,html,mhtm,mhtml,xhtm,xhtml'}