ml-dash 0.0.17__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ml_dash/ARCHITECTURE.md +382 -0
- ml_dash/__init__.py +14 -1
- ml_dash/autolog.py +32 -0
- ml_dash/backends/__init__.py +11 -0
- ml_dash/backends/base.py +124 -0
- ml_dash/backends/dash_backend.py +571 -0
- ml_dash/backends/local_backend.py +90 -0
- ml_dash/components/__init__.py +13 -0
- ml_dash/components/files.py +246 -0
- ml_dash/components/logs.py +104 -0
- ml_dash/components/metrics.py +169 -0
- ml_dash/components/parameters.py +144 -0
- ml_dash/job_logger.py +42 -0
- ml_dash/ml_logger.py +234 -0
- ml_dash/run.py +331 -0
- ml_dash-0.4.0.dist-info/METADATA +1424 -0
- ml_dash-0.4.0.dist-info/RECORD +19 -0
- ml_dash-0.4.0.dist-info/WHEEL +4 -0
- ml_dash-0.4.0.dist-info/entry_points.txt +3 -0
- app-build/asset-manifest.json +0 -15
- app-build/favicon.ico +0 -0
- app-build/github-markdown.css +0 -957
- app-build/index.html +0 -1
- app-build/manifest.json +0 -15
- app-build/monaco-editor-worker-loader-proxy.js +0 -6
- app-build/precache-manifest.ffc09f8a591c529a1bd5c6f21f49815f.js +0 -26
- app-build/service-worker.js +0 -34
- ml_dash/app.py +0 -60
- ml_dash/config.py +0 -16
- ml_dash/example.py +0 -0
- ml_dash/file_events.py +0 -71
- ml_dash/file_handlers.py +0 -141
- ml_dash/file_utils.py +0 -5
- ml_dash/file_watcher.py +0 -30
- ml_dash/main.py +0 -60
- ml_dash/mime_types.py +0 -20
- ml_dash/schema/__init__.py +0 -110
- ml_dash/schema/archive.py +0 -165
- ml_dash/schema/directories.py +0 -59
- ml_dash/schema/experiments.py +0 -65
- ml_dash/schema/files/__init__.py +0 -204
- ml_dash/schema/files/file_helpers.py +0 -79
- ml_dash/schema/files/images.py +0 -27
- ml_dash/schema/files/metrics.py +0 -64
- ml_dash/schema/files/parameters.py +0 -50
- ml_dash/schema/files/series.py +0 -235
- ml_dash/schema/files/videos.py +0 -27
- ml_dash/schema/helpers.py +0 -66
- ml_dash/schema/projects.py +0 -65
- ml_dash/schema/schema_helpers.py +0 -19
- ml_dash/schema/users.py +0 -33
- ml_dash/sse.py +0 -18
- ml_dash-0.0.17.dist-info/METADATA +0 -67
- ml_dash-0.0.17.dist-info/RECORD +0 -38
- ml_dash-0.0.17.dist-info/WHEEL +0 -5
- ml_dash-0.0.17.dist-info/top_level.txt +0 -2
app-build/index.html
DELETED
@@ -1 +0,0 @@
-<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500"><link rel="stylesheet" href="/github-markdown.css"><style>body,body>div{margin:0;position:absolute;top:0;bottom:0;left:0;right:0;height:100%;width:100%}</style><script src="https://cdnjs.cloudflare.com/ajax/libs/monaco-editor/0.13.1/min/vs/loader.js" type="text/javascript"/><script>require.config({paths:{vs:"https://cdnjs.cloudflare.com/ajax/libs/monaco-editor/0.15.6/min/vs","monaco-vim":"https://unpkg.com/monaco-vim/dist/monaco-vim"}}),window.MonacoEnvironment={getWorkerUrl:function(o,n){return"/monaco-editor-worker-loader-proxy.js"}}</script><link rel="manifest" href="/manifest.json"/><title>React App</title><link href="/static/css/1.bf45e866.chunk.css" rel="stylesheet"><link href="/static/css/main.c46f1339.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(l){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],f=0,i=[];f<n.length;f++)t=n[f],p[t]&&i.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(l[r]=o[r]);for(s&&s(e);i.length;)i.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=f(f.s=t[0]))}return e}var t={},p={2:0},c=[];function f(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return l[e].call(r.exports,r,r.exports,f),r.l=!0,r.exports}f.m=l,f.c=t,f.d=function(e,r,t){f.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},f.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},f.t=function(r,e){if(1&e&&(r=f(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(f.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)f.d(t,n,function(e){return r[e]}.bind(null,n));return t},f.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return f.d(r,"a",r),r},f.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},f.p="/";var r=window.webpackJsonp=window.webpackJsonp||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/1.43af8409.chunk.js"></script><script src="/static/js/main.114ca498.chunk.js"></script></body></html>
app-build/manifest.json
DELETED
@@ -1,15 +0,0 @@
-{
-  "short_name": "React App",
-  "name": "Create React App Sample",
-  "icons": [
-    {
-      "src": "favicon.ico",
-      "sizes": "64x64 32x32 24x24 16x16",
-      "type": "image/x-icon"
-    }
-  ],
-  "start_url": ".",
-  "display": "standalone",
-  "theme_color": "#000000",
-  "background_color": "#ffffff"
-}
app-build/precache-manifest.ffc09f8a591c529a1bd5c6f21f49815f.js
DELETED
@@ -1,26 +0,0 @@
-self.__precacheManifest = [
-  {
-    "revision": "229c360febb4351a89df",
-    "url": "/static/js/runtime~main.229c360f.js"
-  },
-  {
-    "revision": "114ca498d1bcac44edb2",
-    "url": "/static/js/main.114ca498.chunk.js"
-  },
-  {
-    "revision": "43af84097b0b91a97f62",
-    "url": "/static/js/1.43af8409.chunk.js"
-  },
-  {
-    "revision": "114ca498d1bcac44edb2",
-    "url": "/static/css/main.c46f1339.chunk.css"
-  },
-  {
-    "revision": "43af84097b0b91a97f62",
-    "url": "/static/css/1.bf45e866.chunk.css"
-  },
-  {
-    "revision": "c8c21208dec68a5f5f8f7d4f4c258eb0",
-    "url": "/index.html"
-  }
-];
app-build/service-worker.js
DELETED
@@ -1,34 +0,0 @@
-/**
- * Welcome to your Workbox-powered service worker!
- *
- * You'll need to register this file in your web app and you should
- * disable HTTP caching for this file too.
- * See https://goo.gl/nhQhGp
- *
- * The rest of the code is auto-generated. Please don't update this file
- * directly; instead, make changes to your Workbox build configuration
- * and re-run your build process.
- * See https://goo.gl/2aRDsh
- */
-
-importScripts("https://storage.googleapis.com/workbox-cdn/releases/3.6.3/workbox-sw.js");
-
-importScripts(
-  "/precache-manifest.ffc09f8a591c529a1bd5c6f21f49815f.js"
-);
-
-workbox.clientsClaim();
-
-/**
- * The workboxSW.precacheAndRoute() method efficiently caches and responds to
- * requests for URLs in the manifest.
- * See https://goo.gl/S9QRab
- */
-self.__precacheManifest = [].concat(self.__precacheManifest || []);
-workbox.precaching.suppressWarnings();
-workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
-
-workbox.routing.registerNavigationRoute("/index.html", {
-
-  blacklist: [/^\/_/,/\/[^\/]+\.[^\/]+$/],
-});
ml_dash/app.py
DELETED
@@ -1,60 +0,0 @@
-import os
-from sanic import Sanic
-from sanic.exceptions import FileNotFound
-from sanic.response import file
-from params_proto import cli_parse, Proto
-
-# gets current directory
-BASE = os.path.realpath(__file__)
-build_path = os.path.join(os.path.dirname(BASE), "../ml-dash-client-build")
-print(build_path)
-
-app = Sanic()
-# serve js file for webpack
-app.static('/', build_path)
-
-
-@app.route('/')
-@app.exception(FileNotFound)
-async def index(request, exception=None):
-    print('hey ====', [exception])
-    return await file(build_path + '/index.html')
-
-
-@cli_parse
-class AppServerArgs:
-    """
-    Configuration Arguments for the Sanic App that serves
-    the static web-application.
-
-    [Usage]
-
-    To launch the web-app client, do
-
-        python -m ml_dash.app port=3001 host=0.0.0.0 workers=4 debug=True
-    """
-    host = Proto("", help="use 0.0.0.0 if you want external clients to be able to access this.")
-    port = Proto(3001, help="the port")
-    workers = Proto(1, help="the number of worker processes")
-    debug = False
-    access_log = True
-
-
-if __name__ == '__main__':
-    import socket
-    from termcolor import cprint, colored as c
-
-    hostname = socket.gethostname()
-    host_ip = socket.gethostbyname(hostname)
-
-    print(f"""
-You can now view {c('ml-dash client', 'white')} in the browser.
-
-    Local:            {c(f'http://localhost:{AppServerArgs.port}/', 'green')}
-    On Your Network:  {c(f'http://{host_ip}:{AppServerArgs.port}/', 'green')}
-
-To update to the newer version, do
-{c('~>', 'blue')} {c('pip install --upgrade ml-dash', 'red')}
-
-""")
-    app.run(**vars(AppServerArgs))
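The `@cli_parse` namespace drives the server entirely through class attributes; with the defaults above, the final line expands to the call below (a sketch, assuming `vars` on the parsed class yields exactly the declared attributes):

    # Equivalent expansion of app.run(**vars(AppServerArgs)) at default values:
    app.run(host="", port=3001, workers=1, debug=False, access_log=True)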
ml_dash/config.py
DELETED
@@ -1,16 +0,0 @@
-import os
-
-from params_proto import cli_parse, Proto
-
-
-@cli_parse
-class Args:
-    logdir = Proto(os.path.realpath("."), help="the root directory for all of the logs")
-
-
-@cli_parse
-class ServerArgs:
-    host = Proto("", help="use 0.0.0.0 if you want external clients to be able to access this.")
-    port = Proto(8081, help="the port")
-    workers = Proto(1, help="the number of worker processes")
-    debug = False
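Because `@cli_parse` leaves these as plain class attributes, other modules can override them in place before the server starts; `ml_dash/main.py` below does exactly this via `config.Args.logdir = logdir`. For example:

    from ml_dash import config

    config.Args.logdir = "/tmp/runs"  # illustrative path; set before app.run()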
ml_dash/example.py
DELETED
File without changes
ml_dash/file_events.py
DELETED
@@ -1,71 +0,0 @@
-from hachiko.hachiko import AIOEventHandler, AIOWatchdog
-from asyncio import coroutine, Queue, sleep
-from sanic import response
-from sanic.exceptions import RequestTimeout
-
-from ml_dash.file_utils import path_match
-from termcolor import cprint
-
-from . import config
-import json
-
-subscriptions = []
-watcher = None
-
-
-class Handler(AIOEventHandler):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-
-    @coroutine
-    async def on_any_event(self, event):
-        _event = dict(src_path=event.src_path, event_type=event.event_type, is_directory=event.is_directory)
-        for que in subscriptions:
-            await que.put(_event)
-            # self._loop.create_task(que.put(event))
-
-
-def setup_watch_queue(app, loop):
-    print('setting up watch queue')
-    start_watcher()
-    cprint('watcher setup complete!', "green")
-
-
-def start_watcher():
-    global watcher
-
-    handler = Handler()
-    print('starting file watcher...')
-    watcher = AIOWatchdog(config.Args.logdir, event_handler=handler)
-    watcher.start()
-    print('watcher start is complete')
-
-
-import os
-
-
-# server does not have access to a disconnect event.
-# currently subscriptions only grows.
-# Will add timeout based cleanup after.
-async def file_events(request, file_path="", query="*"):
-    q = Queue()
-    subscriptions.append(q)
-
-    async def streaming_fn(response):
-        try:
-            while True:
-                print('subscription que started')
-                file_event = await q.get()
-                src_path = file_event['src_path']
-                if src_path.startswith(os.path.join(config.Args.logdir, file_path)) and path_match(file_path, query):
-                    file_event['src_path'] = src_path[len(config.Args.logdir):]
-                    print("=>>", file_event)
-                    response.write(f"data: {json.dumps(file_event)}\r\n\r\n".encode())
-                sleep(0.1)
-                # todo: this timeout doesn't really work.
-                # todo: also add handling of stream is terminated logic (separate from above).
-        except RequestTimeout:
-            subscriptions.remove(q)
-
-    return response.stream(streaming_fn, content_type="text/event-stream")
-    # subscriptions.remove(q)
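The handler above streams standard Server-Sent Events (`data: {...}` frames separated by blank lines). A minimal client sketch, assuming the server is reachable on the default port 8081 from `config.ServerArgs` and using the third-party `requests` library:

    import json
    import requests

    # Subscribe to file events under the log root; the endpoint never closes
    # on its own, so stream and parse frames incrementally.
    with requests.get("http://localhost:8081/file-events", stream=True) as resp:
        for line in resp.iter_lines():
            if line.startswith(b"data: "):
                event = json.loads(line[len(b"data: "):])
                print(event["event_type"], event["src_path"])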
ml_dash/file_handlers.py
DELETED
@@ -1,141 +0,0 @@
-import os
-import stat
-from glob import iglob
-from shutil import rmtree
-from sanic import response
-
-from . import config
-
-
-def get_type(mode):
-    if stat.S_ISDIR(mode) or stat.S_ISLNK(mode):
-        type = 'dir'
-    else:
-        type = 'file'
-    return type
-
-
-async def remove_path(request, file_path=""):
-    print(file_path)
-    path = os.path.join(config.Args.logdir, file_path)
-    if os.path.isdir(path):
-        rmtree(path)
-        res = response.text("ok", status=204)
-    elif os.path.isfile(path):
-        os.remove(path)
-        res = response.text("ok", status=204)
-    else:
-        res = response.text('Not found', status=404)
-    return res
-
-
-from contextlib import contextmanager
-
-
-@contextmanager
-def cwdContext(path):
-    owd = os.getcwd()
-    os.chdir(path)
-    try:
-        yield
-    finally:
-        os.chdir(owd)
-
-
-async def batch_get_path(request):
-    try:
-        data = request.json
-
-        file_paths = data['paths']
-        options = data['options']
-
-        batch_res_data = dict()
-
-        if options.get('json', False):
-            for path in file_paths:
-                from ml_logger.helpers import load_from_pickle
-                batch_res_data[path] = [_ for _ in load_from_pickle(path)]
-
-        res = response.json(batch_res_data, status=200, content_type='application/json')
-        return res
-
-    except Exception as e:
-        print('Exception: ', e)
-        res = response.text('Internal Error' + str(e), status=502)
-        return res
-
-
-async def get_path(request, file_path=""):
-    print(file_path)
-
-    as_records = request.args.get('records')
-    as_json = request.args.get('json')
-    as_log = request.args.get('log')
-    as_attachment = int(request.args.get('download', '0'))
-    is_recursive = request.args.get('recursive')
-    show_hidden = request.args.get('hidden')
-    query = request.args.get('query', "*").strip()
-
-    _start = request.args.get('start', None)
-    _stop = request.args.get('stop', None)
-    start = None if _start is None else int(_start)
-    stop = None if _stop is None else int(_stop)
-
-    reservoir_k = int(request.args.get('reservoir', '200'))
-
-    # limit for the search itself.
-    search_limit = 500
-
-    path = os.path.join(config.Args.logdir, file_path)
-    print("=============>", [query], [path], os.path.isdir(path))
-
-    if os.path.isdir(path):
-        from itertools import islice
-        with cwdContext(path):
-            print(os.getcwd(), query, is_recursive)
-            file_paths = list(islice(iglob(query, recursive=is_recursive), start or 0, stop or 200))
-            files = map(file_stat, file_paths)
-            res = response.json(files, status=200)
-    elif os.path.isfile(path):
-        if as_records:
-            from ml_logger.helpers import load_pickle_as_dataframe
-            df = load_pickle_as_dataframe(path, reservoir_k)
-            res = response.text(df.to_json(orient="records"), status=200, content_type='application/json')
-        elif as_log:
-            from ml_logger.helpers import load_pickle_as_dataframe
-            df = load_pickle_as_dataframe(path, reservoir_k)
-            res = response.text(df.to_json(orient="records"), status=200, content_type='application/json')
-        elif as_json:
-            from ml_logger.helpers import load_from_pickle
-            data = [_ for _ in load_from_pickle(path)]
-            res = response.json(data, status=200, content_type='application/json')
-        elif type(start) is int or type(stop) is int:
-            from itertools import islice
-            with open(path, 'r') as f:
-                text = ''.join([l for l in islice(f, start, stop)])
-            res = response.text(text, status=200)
-        else:
-            # todo: check the file handling here. Does this use correct
-            # mimeType for text files?
-            res = await response.file(path)
-            if as_attachment:
-                res.headers['Content-Disposition'] = 'attachment'
-    else:
-        res = response.text('Not found', status=404)
-    return res
-
-
-# use glob! LOL
-def file_stat(file_path):
-    # this looped over is very slow. Fine for a small list of files though.
-    stat_res = os.stat(file_path)
-    ft = get_type(stat_res.st_mode)
-    sz = stat_res.st_size
-    return dict(
-        name=os.path.basename(file_path),
-        path=file_path,
-        mtime=stat_res.st_mtime,
-        ctime=stat_res.st_ctime,
-        type=ft,
-        size=sz,
-    )
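`get_path` above is driven entirely by query parameters (`records`, `json`, `log`, `download`, `start`/`stop`, `recursive`, `query`, `reservoir`). A client-side sketch of the two main modes, with an assumed host/port and an illustrative run directory:

    import requests

    BASE = "http://localhost:8081"  # default port from config.ServerArgs

    # Directory mode: glob for metrics files under "my-run" (illustrative path).
    listing = requests.get(f"{BASE}/files/my-run", params={"query": "metrics*"}).json()

    # File mode: decode a pickled metrics file server-side, reservoir-sampled to ~100 rows.
    rows = requests.get(f"{BASE}/files/my-run/metrics.pkl",
                        params={"records": 1, "reservoir": 100}).json()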
ml_dash/file_utils.py
DELETED
ml_dash/file_watcher.py
DELETED
@@ -1,30 +0,0 @@
-# from . import config
-import asyncio
-from hachiko.hachiko import AIOWatchdog
-
-
-class Handler:
-    def dispatch(self, *args, **kwargs):
-        print(args, kwargs)
-
-@asyncio.coroutine
-def watch_fs(path):
-    watch = AIOWatchdog(path, event_handler=Handler())
-    watch.start()
-    while True:
-        yield from asyncio.sleep(10)
-    watch.stop()
-
-
-
-if __name__ == "__main__":
-    # asyncio.get_event_loop().run_until_complete(watch_fs("/Users/ge/machine_learning/berkeley-playground/ins-runs"))
-    # asyncio.get_event_loop().run_until_complete(watch_fs("."))
-    path = "."
-    watch = AIOWatchdog(path, event_handler=Handler())
-    watch.start()
-    import time
-    print('watch is setup')
-    while True:
-        time.sleep(10)
-
ml_dash/main.py
DELETED
@@ -1,60 +0,0 @@
-from ml_dash.schema import schema
-from sanic_graphql import GraphQLView
-
-from .file_events import file_events, setup_watch_queue
-from .file_handlers import get_path, remove_path, batch_get_path
-
-from sanic import Sanic
-from sanic_cors import CORS
-
-app = Sanic(__name__)
-# CORS(app)
-CORS(app, resources={r"/*": {"origins": "*"}}, automatic_options=True)
-
-# @app.listener('before_server_start')
-# def init_graphql(app, loop):
-#     app.add_route(GraphQLView.as_view(schema=schema, executor=AsyncioExecutor(loop=loop)), '/graphql')
-
-# new graphQL endpoints
-app.add_route(GraphQLView.as_view(schema=schema, graphiql=True), '/graphql',
-              methods=['GET', 'POST', 'FETCH', 'OPTIONS'])
-app.add_route(GraphQLView.as_view(schema=schema, batch=True), '/graphql/batch',
-              methods=['GET', 'POST', 'FETCH', 'OPTIONS'])
-
-# # Serving static app
-# app.add_route(get_path, '/*', methods=['GET', 'OPTIONS'])
-
-# old RPC endpoints
-app.add_route(get_path, '/files/', methods=['GET', 'OPTIONS'])
-app.add_route(get_path, '/files/<file_path:path>', methods=['GET', 'OPTIONS'])
-app.add_route(batch_get_path, '/batch-files', methods=['GET', 'OPTIONS'])
-app.add_route(remove_path, '/files/<file_path:path>', methods=['DELETE'])
-app.add_route(file_events, '/file-events', methods=['GET', 'OPTIONS'])
-app.add_route(file_events, '/file-events/<file_path:path>', methods=['GET', 'OPTIONS'])
-app.listener('before_server_start')(setup_watch_queue)
-
-
-# app.add_task(start_watcher)
-
-
-def run(logdir=None, **kwargs):
-    from . import config
-    from termcolor import cprint
-
-    if logdir:
-        config.Args.logdir = logdir
-
-    cprint("launched server with config:", "green")
-    cprint("Args:", 'yellow')
-    print(vars(config.Args))
-    cprint("Sanic Server Args:", 'yellow')
-    print(vars(config.ServerArgs))
-
-    config.ServerArgs.update(**kwargs)
-    app.run(**vars(config.ServerArgs))
-
-
-if __name__ == "__main__":
-    # see: https://sanic.readthedocs.io/en/latest/sanic/deploying.html
-    # call this as `python -m ml_logger.main`
-    run()
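Once running, the `/graphql` route accepts ordinary POSTed queries. A sketch against the root fields defined in `schema/__init__.py` below (the `username` selection on `User` is an assumption, since `users.py` is not shown in this hunk):

    import requests

    query = "{ users { username } }"
    resp = requests.post("http://localhost:8081/graphql", json={"query": query})
    print(resp.json())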
ml_dash/mime_types.py
DELETED
@@ -1,20 +0,0 @@
-ignored = ['.bzr', '$RECYCLE.BIN', '.DAV', '.DS_Store', '.git', '.hg', '.htaccess', '.htpasswd', '.Spotlight-V100',
-           '.svn', '__MACOSX', 'ehthumbs.db', 'robots.txt', 'Thumbs.db', 'thumbs.tps']
-datatypes = {'audio': 'm4a,mp3,oga,ogg,webma,wav',
-             'archive': '7z,zip,rar,gz,tar',
-             'image': 'gif,ico,jpe,jpeg,jpg,png,svg,webp',
-             'pdf': 'pdf',
-             'quicktime': '3g2,3gp,3gp2,3gpp,mov,qt',
-             'source': 'atom,bat,bash,c,cmd,coffee,css,hml,js,json,java,less,markdown,md,php,pl,py,rb,rss,sass,scpt,swift,scss,sh,xml,yml,plist',
-             'text': 'txt',
-             'video': 'mp4,m4v,ogv,webm',
-             'website': 'htm,html,mhtm,mhtml,xhtm,xhtml'}
-icontypes = {'fa-music': 'm4a,mp3,oga,ogg,webma,wav',
-             'fa-archive': '7z,zip,rar,gz,tar',
-             'fa-picture-o': 'gif,ico,jpe,jpeg,jpg,png,svg,webp',
-             'fa-file-text': 'pdf',
-             'fa-film': '3g2,3gp,3gp2,3gpp,mov,qt',
-             'fa-code': 'atom,plist,bat,bash,c,cmd,coffee,css,hml,js,json,java,less,markdown,md,php,pl,py,rb,rss,sass,scpt,swift,scss,sh,xml,yml',
-             'fa-file-text-o': 'txt',
-             'fa-film': 'mp4,m4v,ogv,webm',
-             'fa-globe': 'htm,html,mhtm,mhtml,xhtm,xhtml'}
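Note that `icontypes` defines the `'fa-film'` key twice, so the quicktime entry is silently overwritten by the video entry. These comma-joined tables are easiest to consume inverted; an illustrative helper:

    # Invert the extension table for O(1) lookups.
    ext_to_type = {ext: kind
                   for kind, exts in datatypes.items()
                   for ext in exts.split(',')}

    assert ext_to_type['png'] == 'image'
    assert ext_to_type['mp4'] == 'video'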
ml_dash/schema/__init__.py
DELETED
@@ -1,110 +0,0 @@
-from graphene import relay, ObjectType, Float, Schema, List, String, Field
-from ml_dash.schema.files.series import Series, get_series, SeriesArguments
-from ml_dash.schema.files.metrics import Metrics, get_metrics
-from ml_dash.schema.schema_helpers import bind, bind_args
-from ml_dash.schema.users import User, get_users, get_user
-from ml_dash.schema.projects import Project
-from ml_dash.schema.directories import Directory, get_directory
-from ml_dash.schema.files import File, FileConnection, MutateTextFile, MutateJSONFile, MutateYamlFile, \
-    DeleteFile, DeleteDirectory, glob_files
-# MutateJSONFile, MutateYamlFile
-from ml_dash.schema.experiments import Experiment
-
-
-# class Experiment(graphene.ObjectType):
-#     class Meta:
-#         interfaces = relay.Node,
-#
-#     parameter_keys = graphene.List(description="keys in the parameter file")
-#     metric_keys = graphene.List(description="the x data")
-#     video_keys = graphene.List(description="the x data")
-#     img_keys = graphene.List(description="the x data")
-#     diff_keys = graphene.List(description="the x data")
-#     log_keys = graphene.List(description="the x data")
-#     view_config = ""
-#
-# class TimeSeries(graphene.ObjectType):
-#     class Meta:
-#         interfaces = relay.Node,
-#
-#     x_data = graphene.List(description="the x data")
-#     y_data = graphene.List(description="the y data")
-#     serialized = graphene.String(description='string serialized data')
-#
-#
-# class TimeSeriesWithStd(graphene.ObjectType):
-#     class Meta:
-#         interfaces = relay.Node,
-#
-#     x_data = graphene.List(description="the x data")
-#     y_data = graphene.List(description="the y data")
-#     std_data = graphene.List(description="the standard deviation data")
-#     quantile_25_data = graphene.List(description="the standard deviation data")
-#     quantile_50_data = graphene.List(description="the standard deviation data")
-#     quantile_75_data = graphene.List(description="the standard deviation data")
-#     quantile_100_data = graphene.List(description="the standard deviation data")
-#     mode_data = graphene.List(description="the standard deviation data")
-#     mean_data = graphene.List(description="the standard deviation data")
-#     serialized = graphene.String(description='string serialized data')
-#
-#
-# class LineChart(graphene.ObjectType):
-#     class Meta:
-#         interfaces = relay.Node,
-#
-#     key = graphene.String(description="The path to the metrics file (including metrics.pkl)")
-#     x_key = graphene.String(description="key for the x axis")
-#     x_label = graphene.String(description="label for the x axis")
-#     y_key = graphene.String(description="key for the y axis")
-#     y_label = graphene.String(description="label for the x axis")


-class EditText(relay.ClientIDMutation):
-    class Input:
-        text = String(required=True, description='updated content for the text file')
-
-    text = String(description="the updated content for the text file")
-
-    @classmethod
-    def mutate_and_get_payload(cls, root, info, text, ):
-        return dict(text=text)
-
-
-class Query(ObjectType):
-    node = relay.Node.Field()
-    # context?
-    # todo: files
-    # todo: series
-
-    users = Field(List(User), resolver=bind_args(get_users))
-    user = Field(User, username=String(), resolver=bind_args(get_user))
-    series = Field(Series, resolver=bind_args(get_series), **SeriesArguments)
-
-    project = relay.Node.Field(Project)
-    metrics = relay.Node.Field(Metrics)
-    directory = relay.Node.Field(Directory)
-
-    glob = Field(List(File),
-                 cwd=String(required=True),
-                 query=String(),
-                 resolver=bind_args(glob_files))
-
-
-class Mutation(ObjectType):
-    # todo: create_file
-    # done: edit_file
-    # done: remove_file
-    # todo: move_file
-    # todo: copy_file
-
-    # do we need to have separate deleteDirectory? (look up relay client-side macros)
-
-    delete_file = DeleteFile.Field()
-    delete_directory = DeleteDirectory.Field()
-    # update_text = EditText.Field()
-    update_text = MutateTextFile.Field()
-    update_json = MutateJSONFile.Field()
-    update_yaml = MutateYamlFile.Field()
-
-schema = Schema(query=Query, mutation=Mutation)