ml-dash 0.0.11__py3-none-any.whl → 0.5.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. ml_dash/__init__.py +59 -1
  2. ml_dash/auto_start.py +42 -0
  3. ml_dash/cli.py +67 -0
  4. ml_dash/cli_commands/__init__.py +1 -0
  5. ml_dash/cli_commands/download.py +797 -0
  6. ml_dash/cli_commands/list.py +343 -0
  7. ml_dash/cli_commands/upload.py +1298 -0
  8. ml_dash/client.py +955 -0
  9. ml_dash/config.py +114 -11
  10. ml_dash/experiment.py +1020 -0
  11. ml_dash/files.py +688 -0
  12. ml_dash/log.py +181 -0
  13. ml_dash/metric.py +292 -0
  14. ml_dash/params.py +188 -0
  15. ml_dash/storage.py +1115 -0
  16. ml_dash-0.5.9.dist-info/METADATA +244 -0
  17. ml_dash-0.5.9.dist-info/RECORD +20 -0
  18. ml_dash-0.5.9.dist-info/WHEEL +4 -0
  19. ml_dash-0.5.9.dist-info/entry_points.txt +3 -0
  20. ml_dash/app.py +0 -33
  21. ml_dash/file_events.py +0 -71
  22. ml_dash/file_handlers.py +0 -141
  23. ml_dash/file_utils.py +0 -5
  24. ml_dash/file_watcher.py +0 -30
  25. ml_dash/main.py +0 -60
  26. ml_dash/mime_types.py +0 -20
  27. ml_dash/schema/__init__.py +0 -110
  28. ml_dash/schema/archive.py +0 -165
  29. ml_dash/schema/directories.py +0 -59
  30. ml_dash/schema/experiments.py +0 -65
  31. ml_dash/schema/files/__init__.py +0 -204
  32. ml_dash/schema/files/file_helpers.py +0 -79
  33. ml_dash/schema/files/images.py +0 -27
  34. ml_dash/schema/files/metrics.py +0 -64
  35. ml_dash/schema/files/parameters.py +0 -50
  36. ml_dash/schema/files/series.py +0 -235
  37. ml_dash/schema/files/videos.py +0 -27
  38. ml_dash/schema/helpers.py +0 -66
  39. ml_dash/schema/projects.py +0 -65
  40. ml_dash/schema/schema_helpers.py +0 -19
  41. ml_dash/schema/users.py +0 -33
  42. ml_dash/sse.py +0 -18
  43. ml_dash-0.0.11.dist-info/METADATA +0 -67
  44. ml_dash-0.0.11.dist-info/RECORD +0 -30
  45. ml_dash-0.0.11.dist-info/WHEEL +0 -5
  46. ml_dash-0.0.11.dist-info/top_level.txt +0 -1
  47. /ml_dash/{example.py → py.typed} +0 -0
@@ -1,110 +0,0 @@
1
- from graphene import relay, ObjectType, Float, Schema, List, String, Field
2
- from ml_dash.schema.files.series import Series, get_series, SeriesArguments
3
- from ml_dash.schema.files.metrics import Metrics, get_metrics
4
- from ml_dash.schema.schema_helpers import bind, bind_args
5
- from ml_dash.schema.users import User, get_users, get_user
6
- from ml_dash.schema.projects import Project
7
- from ml_dash.schema.directories import Directory, get_directory
8
- from ml_dash.schema.files import File, FileConnection, MutateTextFile, MutateJSONFile, MutateYamlFile, \
9
- DeleteFile, DeleteDirectory, glob_files
10
- # MutateJSONFile, MutateYamlFile
11
- from ml_dash.schema.experiments import Experiment
12
-
13
-
14
- # class Experiment(graphene.ObjectType):
15
- # class Meta:
16
- # interfaces = relay.Node,
17
- #
18
- # parameter_keys = graphene.List(description="keys in the parameter file")
19
- # metric_keys = graphene.List(description="the x data")
20
- # video_keys = graphene.List(description="the x data")
21
- # img_keys = graphene.List(description="the x data")
22
- # diff_keys = graphene.List(description="the x data")
23
- # log_keys = graphene.List(description="the x data")
24
- # view_config = ""
25
- #
26
- # class TimeSeries(graphene.ObjectType):
27
- # class Meta:
28
- # interfaces = relay.Node,
29
- #
30
- # x_data = graphene.List(description="the x data")
31
- # y_data = graphene.List(description="the y data")
32
- # serialized = graphene.String(description='string serialized data')
33
- #
34
- #
35
- # class TimeSeriesWithStd(graphene.ObjectType):
36
- # class Meta:
37
- # interfaces = relay.Node,
38
- #
39
- # x_data = graphene.List(description="the x data")
40
- # y_data = graphene.List(description="the y data")
41
- # std_data = graphene.List(description="the standard deviation data")
42
- # quantile_25_data = graphene.List(description="the standard deviation data")
43
- # quantile_50_data = graphene.List(description="the standard deviation data")
44
- # quantile_75_data = graphene.List(description="the standard deviation data")
45
- # quantile_100_data = graphene.List(description="the standard deviation data")
46
- # mode_data = graphene.List(description="the standard deviation data")
47
- # mean_data = graphene.List(description="the standard deviation data")
48
- # serialized = graphene.String(description='string serialized data')
49
- #
50
- #
51
- # class LineChart(graphene.ObjectType):
52
- # class Meta:
53
- # interfaces = relay.Node,
54
- #
55
- # key = graphene.String(description="The path to the metrics file (including metrics.pkl)")
56
- # x_key = graphene.String(description="key for the x axis")
57
- # x_label = graphene.String(description="label for the x axis")
58
- # y_key = graphene.String(description="key for the y axis")
59
- # y_label = graphene.String(description="label for the x axis")
60
-
61
-
62
- class EditText(relay.ClientIDMutation):
63
- class Input:
64
- text = String(required=True, description='updated content for the text file')
65
-
66
- text = String(description="the updated content for the text file")
67
-
68
- @classmethod
69
- def mutate_and_get_payload(cls, root, info, text, ):
70
- return dict(text=text)
71
-
72
-
73
- class Query(ObjectType):
74
- node = relay.Node.Field()
75
- # context?
76
- # todo: files
77
- # todo: series
78
-
79
- users = Field(List(User), resolver=bind_args(get_users))
80
- user = Field(User, username=String(), resolver=bind_args(get_user))
81
- series = Field(Series, resolver=bind_args(get_series), **SeriesArguments)
82
-
83
- project = relay.Node.Field(Project)
84
- metrics = relay.Node.Field(Metrics)
85
- directory = relay.Node.Field(Directory)
86
-
87
- glob = Field(List(File),
88
- cwd=String(required=True),
89
- query=String(),
90
- resolver=bind_args(glob_files))
91
-
92
-
93
- class Mutation(ObjectType):
94
- # todo: create_file
95
- # done: edit_file
96
- # done: remove_file
97
- # todo: move_file
98
- # todo: copy_file
99
-
100
- # do we need to have separate deleteDirectory? (look up relay client-side macros)
101
-
102
- delete_file = DeleteFile.Field()
103
- delete_directory = DeleteDirectory.Field()
104
- # update_text = EditText.Field()
105
- update_text = MutateTextFile.Field()
106
- update_json = MutateJSONFile.Field()
107
- update_yaml = MutateYamlFile.Field()
108
-
109
-
110
- schema = Schema(query=Query, mutation=Mutation)
ml_dash/schema/archive.py DELETED
@@ -1,165 +0,0 @@
1
- from graphene import relay, ObjectType, Float, Schema, AbstractType, List, String, Union, Field
2
-
3
-
4
- # class Team(ObjectType):
5
- # class Meta:
6
- # interfaces = relay.Node,
7
- #
8
- # name = String(description='string serialized data')
9
- # description = String(description='string serialized data')
10
- # users = List(lambda: User)
11
- # projects = List(lambda: Project)
12
- #
13
- #
14
- # class Binder(ObjectType):
15
- # class Meta:
16
- # interfaces = relay.Node,
17
- #
18
- # name = String(description="keys in the parameter file")
19
- # children = List("Binder", description="child binders")
20
- # files = List(lambda: FileAndDirectory, description="keys in the parameter file")
21
- # # date_created
22
- # # date_modified
23
-
24
-
25
- class Directory(ObjectType):
26
- class Meta:
27
- interfaces = relay.Node,
28
-
29
- name = String(description='string serialized data')
30
- description = String(description='string serialized data')
31
- children = List(lambda: FileAndDirectory)
32
-
33
-
34
- # File Types
35
- class File(AbstractType):
36
- class Meta:
37
- interfaces = relay.Node,
38
-
39
- name = String(description='string serialized data')
40
- description = String(description='string serialized data')
41
-
42
-
43
- class Parameters(ObjectType, File):
44
- pass
45
-
46
-
47
- class Metrics(ObjectType, File):
48
- """this is just the file type."""
49
- pass
50
-
51
-
52
- class Image(ObjectType, File):
53
- pass
54
-
55
-
56
- class Video(ObjectType, File):
57
- pass
58
-
59
-
60
- class TextFile(ObjectType, File):
61
- pass
62
-
63
-
64
- class BinaryFile(ObjectType, File):
65
- pass
66
-
67
-
68
- class JsonFile(ObjectType, File):
69
- pass
70
-
71
-
72
- class YamlFile(ObjectType, File):
73
- pass
74
-
75
-
76
- class FileAndDirectory(Union):
77
- class Meta:
78
- types = (Directory, File, Parameters, Metrics, TextFile, BinaryFile, JsonFile)
79
-
80
-
81
- class ExperimentView(ObjectType):
82
- class Meta:
83
- interfaces = relay.Node,
84
-
85
- charts = List(lambda: Chart)
86
-
87
-
88
- class Point(ObjectType):
89
- class Meta:
90
- interfaces = relay.Node,
91
-
92
- x = Float()
93
- y = Float()
94
-
95
-
96
- class LineSeries(ObjectType):
97
- class Meta:
98
- interfaces = relay.Node,
99
- description = "simple line series"
100
-
101
- data = List(lambda: Point)
102
- label = String(description="label for the time series")
103
-
104
-
105
- class StdSeries(ObjectType):
106
- class Meta:
107
- interfaces = relay.Node,
108
- description = "line series with standard deviation"
109
-
110
- data = List(lambda: Point)
111
- label = String(description="label for the series")
112
-
113
-
114
- class QuantileSeries(ObjectType):
115
- class Meta:
116
- interfaces = relay.Node,
117
- description = "line series with quantile ticks"
118
-
119
- data = List(lambda: Point)
120
- label = String(description="label for the time series")
121
-
122
-
123
- class Series(Union):
124
- class Meta:
125
- types = (LineSeries, StdSeries, QuantileSeries)
126
-
127
-
128
- class LineChart(ObjectType):
129
- class Meta:
130
- interfaces = relay.Node,
131
-
132
- time_series = List(lambda: Series)
133
-
134
- title = String(description="title for the chart")
135
- x_label = String(description="label for the x axis")
136
- y_label = String(description="label for the y axis")
137
- x_low = String(description="lower range for the x axis")
138
- x_high = String(description="higher range for x axis")
139
- y_low = String(description="lower range for y axis")
140
- y_high = String(description="higher range for y axis")
141
-
142
-
143
- class ParameterDomain(ObjectType):
144
- class Meta:
145
- interfaces = relay.Node,
146
-
147
- name = String()
148
- domain_low = Float()
149
- domain_high = Float()
150
-
151
-
152
- class ParallelCoordinates(ObjectType):
153
- class Meta:
154
- interfaces = relay.Node,
155
-
156
- # todo: {key, value}
157
- data = List(ObjectType)
158
- domains = List(ParameterDomain)
159
-
160
-
161
- class Chart(Union):
162
- """this is also a file type."""
163
-
164
- class Meta:
165
- types = LineChart, ParallelCoordinates
@@ -1,59 +0,0 @@
1
- from os import listdir
2
- from os.path import isfile, join, split
3
- from graphene import ObjectType, relay, String, Field
4
- from ml_dash import schema
5
-
6
-
7
- class Directory(ObjectType):
8
- class Meta:
9
- interfaces = relay.Node,
10
-
11
- name = String(description='name of the directory')
12
- path = String(description='absolute path of the directory')
13
-
14
- readme = Field(lambda: schema.files.File)
15
-
16
- def resolve_readme(self, info, *args, **kwargs):
17
- # note: keep it simple, just use README for now.
18
- readmes = schema.files.find_files_by_query(cwd=self.id, query="README.md")
19
- return readmes[0] if readmes else None
20
-
21
- dash_configs = relay.ConnectionField(lambda: schema.files.FileConnection)
22
-
23
- def resolve_dash_configs(self, info, *args, **kwargs):
24
- return schema.files.find_files_by_query(cwd=self.id, query="*.dashcfg")
25
-
26
- experiments = relay.ConnectionField(lambda: schema.experiments.ExperimentConnection)
27
-
28
- def resolve_experiments(self, info, **kwargs):
29
- return schema.experiments.find_experiments(cwd=self.id)
30
-
31
- directories = relay.ConnectionField(lambda: schema.directories.DirectoryConnection)
32
-
33
- def resolve_directories(self, info, **kwargs):
34
- from ml_dash.config import Args
35
- root_dir = join(Args.logdir, self.id[1:])
36
- return [get_directory(join(self.id, _))
37
- for _ in listdir(root_dir) if not isfile(join(root_dir, _))]
38
-
39
- files = relay.ConnectionField(lambda: schema.files.FileConnection)
40
-
41
- def resolve_files(self, info, **kwargs):
42
- from ml_dash.config import Args
43
- root_dir = join(Args.logdir, self.id[1:])
44
- return [schema.Directory(id=join(self.id, _), name=_)
45
- for _ in listdir(root_dir) if isfile(join(root_dir, _))]
46
-
47
- @classmethod
48
- def get_node(cls, info, id):
49
- return get_directory(id)
50
-
51
-
52
- class DirectoryConnection(relay.Connection):
53
- class Meta:
54
- node = Directory
55
-
56
-
57
- def get_directory(id):
58
- _id = id.rstrip('/')
59
- return Directory(id=_id, name=split(_id[1:])[-1], path=_id)
@@ -1,65 +0,0 @@
1
- from os import listdir
2
- from os.path import isfile, join, basename, realpath, isabs
3
-
4
- from graphene import ObjectType, relay, String, Field
5
- from ml_dash import schema
6
- from ml_dash.schema import files
7
- from ml_dash.schema.files.file_helpers import find_files
8
- from ml_dash.schema.files.metrics import find_metrics
9
-
10
-
11
- class Experiment(ObjectType):
12
- class Meta:
13
- interfaces = relay.Node,
14
-
15
- name = String(description='name of the directory')
16
- path = String(description="path to the experiment")
17
- parameters = Field(lambda: files.parameters.Parameters, )
18
- metrics = Field(lambda: files.metrics.Metrics)
19
-
20
- def resolve_parameters(self, info):
21
- return files.parameters.get_parameters(self.parameters)
22
-
23
- def resolve_metrics(self, info):
24
- for m in find_metrics(self.id):
25
- return m
26
- return None
27
-
28
- directories = relay.ConnectionField(lambda: schema.directories.DirectoryConnection)
29
- files = relay.ConnectionField(lambda: schema.files.FileConnection)
30
-
31
- def resolve_directories(self, info, **kwargs):
32
- from ml_dash.config import Args
33
- root_dir = join(Args.logdir, self.id[1:])
34
- return [schema.directories.get_directory(join(self.id, _))
35
- for _ in listdir(root_dir) if not isfile(join(root_dir, _))]
36
-
37
- def resolve_files(self, info, **kwargs):
38
- from ml_dash.config import Args
39
- root_dir = join(Args.logdir, self.id[1:])
40
- return [schema.files.File(id=join(self.id, _), name=_)
41
- for _ in listdir(root_dir) if isfile(join(root_dir, _))]
42
-
43
- @classmethod
44
- def get_node(cls, info, id):
45
- return Experiment(id=id)
46
-
47
-
48
- class ExperimentConnection(relay.Connection):
49
- class Meta:
50
- node = Experiment
51
-
52
-
53
- def find_experiments(cwd, **kwargs):
54
- from ml_dash.config import Args
55
- assert isabs(cwd), "the current work directory need to be an absolute path."
56
- _cwd = realpath(join(Args.logdir, cwd[1:])).rstrip('/')
57
- parameter_files = find_files(_cwd, "**/parameters.pkl", **kwargs)
58
- return [
59
- # note: not sure about the name.
60
- Experiment(id=join(cwd.rstrip('/'), p['dir']),
61
- name=basename(p['dir']) or ".",
62
- path=join(cwd.rstrip('/'), p['dir']),
63
- parameters=join(cwd.rstrip('/'), p['path']), )
64
- for p in parameter_files
65
- ]
@@ -1,204 +0,0 @@
1
- import os
2
- from os.path import split, isabs, realpath, join, basename, dirname
3
- from graphene import ObjectType, relay, String, Int, Mutation, ID, Field, Node, Boolean
4
- from graphene.types.generic import GenericScalar
5
- from graphql_relay import from_global_id
6
- from ml_dash.schema.files.file_helpers import find_files
7
-
8
- from . import parameters, metrics
9
-
10
-
11
- class File(ObjectType):
12
- class Meta:
13
- interfaces = relay.Node,
14
-
15
- name = String(description='name of the directory')
16
- stem = String(description="stem of the file name")
17
-
18
- def resolve_stem(self, info, ):
19
- return self.name.split("/")[-1].split('.')[0]
20
-
21
- path = String(description='path to the file')
22
- rel_path = String(description='relative path to the file')
23
- text = String(description='text content of the file', start=Int(required=False, default_value=0),
24
- stop=Int(required=False, default_value=None))
25
-
26
- def resolve_text(self, info, start=0, stop=None):
27
- from ml_dash.config import Args
28
- with open(join(Args.logdir, self.id[1:]), "r") as f:
29
- lines = list(f)[start: stop]
30
- return "".join(lines)
31
-
32
- json = GenericScalar(description="the json content of the file")
33
-
34
- def resolve_json(self, info):
35
- import json
36
- try:
37
- with open(self.id, 'r') as f:
38
- return json.load(f)
39
- except FileNotFoundError:
40
- return None
41
-
42
- yaml = GenericScalar(description="the content of the file using yaml")
43
-
44
- def resolve_yaml(self, info):
45
- import ruamel.yaml
46
- if ruamel.yaml.version_info < (0, 15):
47
- yaml = ruamel.yaml
48
- load_fn = yaml.safe_load
49
- else:
50
- from ruamel.yaml import YAML
51
- yaml = YAML()
52
- yaml.explict_start = True
53
- load_fn = yaml.load
54
-
55
- from ml_dash.config import Args
56
- with open(join(Args.logdir, self.id[1:]), "r") as f:
57
- return load_fn('\n'.join(f))
58
-
59
- @classmethod
60
- def get_node(cls, info, id):
61
- return get_file(id)
62
-
63
-
64
- class FileConnection(relay.Connection):
65
- class Meta:
66
- node = File
67
-
68
-
69
- def get_file(id):
70
- # path = os.path.join(Args.logdir, id[1:])
71
- return File(id=id, name=split(id[1:])[1])
72
-
73
-
74
- def find_files_by_query(cwd, query="**/*.*", **kwargs):
75
- from ml_dash.config import Args
76
- assert isabs(cwd), "the current work directory need to be an absolute path."
77
- _cwd = realpath(join(Args.logdir, cwd[1:])).rstrip('/')
78
- parameter_files = find_files(_cwd, query)
79
- return [
80
- # note: not sure about the name.
81
- File(id=join(cwd.rstrip('/'), p['path']),
82
- name=basename(p['path']),
83
- path=join(cwd.rstrip('/'), p['path']),
84
- rel_path=p['path'], )
85
- for p in parameter_files
86
- ]
87
-
88
-
89
- def glob_files(cwd, query="*.*"):
90
- return find_files_by_query(cwd=cwd, query=query)
91
-
92
-
93
- def save_text_to_file(path, text):
94
- from ml_dash.config import Args
95
- assert isabs(path), "the path has to be absolute path."
96
- _path = join(Args.logdir, path[1:])
97
- with open(_path, "w") as f:
98
- f.write(text)
99
- return get_file(path)
100
-
101
-
102
- def save_json_to_file(path, data):
103
- from ml_dash.config import Args
104
- assert isabs(path), "the path has to be absolute path."
105
- _path = join(Args.logdir, path[1:])
106
- # note: assume all text format
107
- with open(_path, "w+") as f:
108
- import json
109
- _ = json.dumps(data, sortKeys=True, indent=2)
110
- f.write(_)
111
- return get_file(path)
112
-
113
-
114
- def remove_file(path):
115
- """remove does not work with directories"""
116
- from ml_dash.config import Args
117
- assert isabs(path), "the path has to be absolute path."
118
- _path = join(Args.logdir, path[1:])
119
- os.remove(_path)
120
-
121
-
122
- def remove_directory(path):
123
- """rmtree does not work with files"""
124
- import shutil
125
- from ml_dash.config import Args
126
- assert isabs(path), "the path has to be absolute path."
127
- _path = join(Args.logdir, path[1:])
128
- shutil.rmtree(_path)
129
-
130
-
131
- def save_yaml_to_file(path, data):
132
- raise NotImplementedError
133
-
134
-
135
- class MutateTextFile(relay.ClientIDMutation):
136
- class Input:
137
- id = ID()
138
- text = String(required=True)
139
-
140
- file = Field(File)
141
-
142
- @classmethod
143
- def mutate_and_get_payload(cls, root, info, id, text, client_mutation_id):
144
- _type, id = from_global_id(id)
145
- return MutateTextFile(file=save_text_to_file(id, text))
146
-
147
-
148
- class MutateYamlFile(relay.ClientIDMutation):
149
- class Arguments:
150
- data = String()
151
-
152
- file = Field(File)
153
-
154
- @classmethod
155
- def mutate_and_get_payload(self, root, info, data, client_mutation_id):
156
- _type, id = from_global_id(client_mutation_id)
157
- return MutateYamlFile(file=save_yaml_to_file(id, data))
158
-
159
-
160
- class MutateJSONFile(relay.ClientIDMutation):
161
- class Arguments:
162
- id = ID()
163
- data = String()
164
-
165
- file = Field(File)
166
-
167
- @classmethod
168
- def mutate_and_get_payload(self, root, info, id, data, client_mutation_id):
169
- _type, id = from_global_id(client_mutation_id)
170
- return MutateJSONFile(file=save_json_to_file(id, data))
171
-
172
-
173
- class DeleteFile(relay.ClientIDMutation):
174
- class Input:
175
- id = ID()
176
-
177
- ok = Boolean()
178
- id = ID()
179
-
180
- @classmethod
181
- def mutate_and_get_payload(cls, root, info, id, client_mutation_id):
182
- _type, path = from_global_id(id)
183
- try:
184
- remove_file(path)
185
- return DeleteFile(ok=True, id=id)
186
- except FileNotFoundError:
187
- return DeleteFile(ok=False)
188
-
189
-
190
- class DeleteDirectory(relay.ClientIDMutation):
191
- class Input:
192
- id = ID()
193
-
194
- ok = Boolean()
195
- id = ID()
196
-
197
- @classmethod
198
- def mutate_and_get_payload(cls, root, info, id, client_mutation_id):
199
- _type, path = from_global_id(id)
200
- try:
201
- remove_directory(path)
202
- return DeleteDirectory(ok=True, id=id)
203
- except FileNotFoundError:
204
- return DeleteDirectory(ok=False)
@@ -1,79 +0,0 @@
1
- from glob import iglob
2
- from os import stat
3
- from os.path import basename, join, realpath, dirname
4
-
5
- from ml_dash.file_handlers import cwdContext
6
-
7
-
8
- def file_stat(file_path):
9
- # note: this when looped over is very slow. Fine for a small list of files though.
10
- stat_res = stat(file_path)
11
- sz = stat_res.st_size
12
- return dict(
13
- name=basename(file_path),
14
- path=file_path,
15
- dir=dirname(file_path),
16
- time_modified=stat_res.st_mtime,
17
- time_created=stat_res.st_ctime,
18
- # type=ft,
19
- size=sz,
20
- )
21
-
22
-
23
- def find_files(cwd, query, start=None, stop=None):
24
- """
25
- find files by iGlob.
26
-
27
- :param cwd: the context folder for the glob, excluded from returned path list.
28
- :param query: glob query
29
- :param start: starting index for iGlob.
30
- :param stop: ending index for iGlob
31
- :return:
32
- """
33
- from itertools import islice
34
- with cwdContext(cwd):
35
- file_paths = list(islice(iglob(query, recursive=True), start or 0, stop or 200))
36
- files = [file_stat(_) for _ in file_paths]
37
- return files
38
-
39
-
40
- def read_dataframe(path, k=200):
41
- from ml_logger.helpers import load_pickle_as_dataframe
42
- try:
43
- return load_pickle_as_dataframe(path, k)
44
- except FileNotFoundError:
45
- return None
46
-
47
-
48
- def read_records(path, k=200):
49
- from ml_logger.helpers import load_pickle_as_dataframe
50
- df = load_pickle_as_dataframe(path, k)
51
- return df.to_json(orient="records")
52
-
53
-
54
- def read_log(path, k=200):
55
- from ml_logger.helpers import load_pickle_as_dataframe
56
- df = load_pickle_as_dataframe(path, k)
57
- return df.to_json(orient="records")
58
-
59
-
60
- def read_json(path):
61
- from ml_logger.helpers import load_from_pickle
62
- data = [_ for _ in load_from_pickle(path)]
63
- return data
64
-
65
-
66
- def read_text(path, start, stop):
67
- from itertools import islice
68
- with open(path, 'r') as f:
69
- text = ''.join([l for l in islice(f, start, stop)])
70
- return text
71
-
72
-
73
- def read_binary():
74
- raise NotImplementedError()
75
- # todo: check the file handling here. Does this use correct
76
- # mimeType for text files?
77
- # res = await response.file(path)
78
- # if as_attachment:
79
- # res.headers['Content-Disposition'] = 'attachment'