sciveo 0.1.25.tar.gz → 0.1.26.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sciveo-0.1.25 → sciveo-0.1.26}/PKG-INFO +1 -1
- sciveo-0.1.26/sciveo/media/pipelines/processors/sci/time_series/predictor.py +108 -0
- sciveo-0.1.26/sciveo/media/pipelines/processors/sci/time_series/trainer.py +185 -0
- sciveo-0.1.26/sciveo/tools/aws/__init__.py +56 -0
- sciveo-0.1.26/sciveo/version.py +2 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/PKG-INFO +1 -1
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/SOURCES.txt +3 -0
- sciveo-0.1.25/sciveo/version.py +0 -2
- {sciveo-0.1.25 → sciveo-0.1.26}/README.md +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/api/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/api/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/api/upload.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/cli.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/common/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/common/configuration.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/common/model.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/common/optimizers.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/common/sampling.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/content/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/content/dataset.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/content/experiment.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/content/project.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/content/runner.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/encoders/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/encoders/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/encoders/normalizer.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/nlp/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/nlp/search.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/time_series/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/time_series/dataset.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/time_series/predictor.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/time_series/trainer.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/ml/time_series/window_generator.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/job_daemon.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/layouts/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/layouts/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/pipeline.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/postprocessors/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/postprocessors/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/postprocessors/default.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/audio/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/audio/audio.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/audio/audio_extractor_process.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/aws.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/file/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/file/archive.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/album.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/album_in_image.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/depth_esimation.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/embeddings.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/filters.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/generators.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/histogram.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/mask.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/resize.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/segmentation.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/image/watermark.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/media_info.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/nlp/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/nlp/address.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/qr.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/sci/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/sci/base.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/sci/dataset.py +0 -0
- {sciveo-0.1.25/sciveo/media/pipelines/processors/video → sciveo-0.1.26/sciveo/media/pipelines/processors/sci/time_series}/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/tpu_base.py +0 -0
- {sciveo-0.1.25/sciveo/media/pipelines/web → sciveo-0.1.26/sciveo/media/pipelines/processors/video}/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/generators.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/motion_detection.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/resize.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/video_album.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/video_frames.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/processors/video/video_resample.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/queues.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/server.py +0 -0
- {sciveo-0.1.25/sciveo/tools/aws → sciveo-0.1.26/sciveo/media/pipelines/web}/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/media/pipelines/web/server.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/monitoring/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/monitoring/monitor.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/monitoring/start.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/network/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/network/camera.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/network/sniffer.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/network/tools.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/__init__.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/array.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/aws/priority_queue.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/aws/s3.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/common.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/compress.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/configuration.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/crypto.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/daemon.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/formating.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/hardware.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/http.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/logger.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/os.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/random.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/remote.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/simple_counter.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/synchronized.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo/tools/timers.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/dependency_links.txt +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/entry_points.txt +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/requires.txt +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/top_level.txt +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/setup.cfg +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/setup.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_compress.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_configuration.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_crypto.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_monitoring.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_runner.py +0 -0
- {sciveo-0.1.25 → sciveo-0.1.26}/test/test_sampling.py +0 -0
sciveo-0.1.26/sciveo/media/pipelines/processors/sci/time_series/predictor.py (new file, 108 lines):

@@ -0,0 +1,108 @@
+#
+# Pavlin Georgiev, Softel Labs
+#
+# This is a proprietary file and may not be copied,
+# distributed, or modified without express permission
+# from the owner. For licensing inquiries, please
+# contact pavlin@softel.bg.
+#
+# 2024
+#
+
+import os
+import boto3
+
+from sciveo.tools.logger import *
+from sciveo.media.pipelines.processors.sci.base import *
+from sciveo.media.ml.time_series.predictor import *
+
+
+class TimeSeriesPredictorProcessor(SciBaseProcessor):
+  def __init__(self, processor_config, max_progress) -> None:
+    super().__init__(processor_config, max_progress)
+
+    self.default.update({
+      "time_column": "Date Time",
+      "time_format": '%d.%m.%Y %H:%M:%S'
+    })
+
+  def run(self, job, input):
+    progress_per_media = self.max_progress / max(1, len(input))
+    debug("run", job["id"], progress_per_media, "input", input)
+    next_media = []
+    try:
+      if job['content_type'] == "project":
+        project_path = os.path.join(self.base_tmp_path, "projects", job['content_id'])
+        mkdirs(project_path)
+
+        self.download_content(job, project_path)
+        MediaJobState.queue().inc_progress(job["id"], 10)
+
+        list_content = {}
+
+        dataset_df = []
+        for content in job['content']:
+          list_content.setdefault(content["content_type"], []).append(content)
+
+          if content["content_type"] == "file":
+            if content['key'].split(".")[-1] == "timeseries":
+              list_content.setdefault("timeseries", []).append(content)
+
+          if content["content_type"] == "dataset":
+            # Datasets with S3 located files
+            if "bucket" in content and "key" in content:
+              remote_path, local_path, local_file_name = self.content_path(content, project_path)
+              file_extension = local_file_name.split(".")[-1]
+
+              if file_extension == "csv":
+                df = pd.read_csv(local_path)
+                x_col = content['data'].get("dataset", {}).get("x_col", self["time_column"])
+                df.set_index(x_col, inplace=True)
+                dataset_df.append(df)
+
+        dataset_df = pd.concat(dataset_df).drop_duplicates(keep='first').sort_index().reset_index()
+
+        content_project = list_content["project"][0]
+
+        MediaJobState.queue().inc_progress(job["id"], 5)
+
+        if "timeseries" in list_content:
+          content_predictor = self.new_content(content_project, "prediction", name=f"Prediction on {dataset_df.shape}")
+          next_media.append(content_predictor)
+
+          text_content = self.content_text(content_predictor, "timeseries predictions plots", f"""Time series predictions.\n\n{self.describe_df(dataset_df)}""")
+          next_media.append(text_content)
+
+          content = list_content["timeseries"][0]
+          remote_path, local_path, local_file_name = self.content_path(content, project_path)
+          predictor = TimeSeriesPredictor(local_path)
+          ds = TimeSeriesDataSet(dataset_df, self["time_column"], format=self["time_format"])
+          predictions, X, x_plot = predictor.predict(ds.data)
+
+          MediaJobState.queue().inc_progress(job["id"], 5)
+
+          columns = predictor.model_data["columns"][:11] # TODO: Should be configurable in the Predictor, so plot only few columns
+          plot_progress_inc = 20 / len(columns)
+          for i, y_col in enumerate(columns):
+            image_content = self.content_image(text_content, y_col, project_path)
+            predictor.plot(predictions, X, x_plot, i, image_local_path=image_content["local_path"])
+            image_content["data"] = {
+              "info": {
+                "size": os.path.getsize(image_content["local_path"]),
+                "plot": y_col
+              }
+            }
+            next_media.append(image_content)
+            MediaJobState.queue().inc_progress(job["id"], plot_progress_inc)
+        MediaJobState.queue().inc_progress(job["id"], 10)
+
+      if self["output"]:
+        job["output"] += next_media
+      if self["append-content"]:
+        job["append-content"] += next_media
+    except Exception as e:
+      exception(e)
+    return next_media
+
+  def name(self):
+    return "sci-timeseries-predictor"
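The hunk above adds a pipeline processor that takes a project job carrying CSV datasets and a `.timeseries` model file, merges the CSVs into one frame, runs `TimeSeriesPredictor` over it, and emits prediction plots as new media content. A minimal driver sketch, assuming sciveo 0.1.26 is installed; the job id, project id, bucket and keys below are placeholders rather than values from the package, and the config keys only mirror the `self[...]` lookups visible in the hunk:

```python
# Hypothetical driver; all ids, buckets and keys are placeholders.
from sciveo.media.pipelines.processors.sci.time_series.predictor import TimeSeriesPredictorProcessor

job = {
  "id": "job-001",                 # used for progress reporting
  "content_type": "project",       # the processor only handles project jobs
  "content_id": "PRJ-DEMO",
  "output": [], "append-content": [],
  "content": [
    {"content_type": "project", "name": "PRJ-DEMO"},
    # CSV dataset stored on S3; "x_col" selects the time index column
    {"content_type": "dataset", "bucket": "demo-bucket", "key": "data/sensors.csv",
     "data": {"dataset": {"x_col": "Date Time"}}},
    # previously trained model saved with a ".timeseries" extension
    {"content_type": "file", "bucket": "demo-bucket", "key": "models/sensors.timeseries"},
  ],
}

# processor_config keys are guesses based on the self["output"] / self["append-content"] checks
processor = TimeSeriesPredictorProcessor({"output": True, "append-content": False}, max_progress=100)
new_media = processor.run(job, job["content"])  # returns the generated prediction/plot content entries
```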
sciveo-0.1.26/sciveo/media/pipelines/processors/sci/time_series/trainer.py (new file, 185 lines):

@@ -0,0 +1,185 @@
+#
+# Pavlin Georgiev, Softel Labs
+#
+# This is a proprietary file and may not be copied,
+# distributed, or modified without express permission
+# from the owner. For licensing inquiries, please
+# contact pavlin@softel.bg.
+#
+# 2024
+#
+
+import os
+import pandas as pd
+import tensorflow as tf
+from tensorflow.keras.callbacks import Callback
+
+from sciveo.tools.logger import *
+from sciveo.media.pipelines.processors.sci.base import *
+from sciveo.media.ml.time_series.predictor import *
+from sciveo.media.ml.time_series.trainer import *
+
+
+class TrainerProgressCallback(Callback):
+  def __init__(self, job, epochs, max_progress):
+    self.job = job
+    self.progress_per_epoch = 2 * max_progress / epochs
+
+  def on_epoch_end(self, epoch, logs=None):
+    debug("epoch", epoch, "finished")
+    MediaJobState.queue().inc_progress(self.job["id"], self.progress_per_epoch)
+
+
+class TimeSeriesTrainerProcessor(SciBaseProcessor):
+  def __init__(self, processor_config, max_progress) -> None:
+    super().__init__(processor_config, max_progress)
+
+    self.default.update({
+      "model_name": "none",
+      "time_column": "Date Time",
+      "time_format": '%d.%m.%Y %H:%M:%S',
+      "ratios": [["train", 0.70], ["val", 0.20], ["test", 0.10]],
+      "max_epochs": 20, "patience": 2,
+      "window_size": 24,
+      "test_plot_from": 200, "test_plots": 20, "test_plot_column_id": 1
+    })
+
+  def run(self, job, input):
+    progress_per_media = self.max_progress / max(1, len(input))
+    debug("run", job["id"], progress_per_media, "input", input)
+    next_media = []
+    try:
+      if job['content_type'] == "project":
+        project_path = os.path.join(self.base_tmp_path, "projects", job['content_id'])
+        mkdirs(project_path)
+
+        self.download_content(job, project_path)
+        MediaJobState.queue().inc_progress(job["id"], 10)
+
+        list_content = {}
+
+        dataset_df = []
+        for content in job['content']:
+          list_content.setdefault(content["content_type"], []).append(content)
+
+          if content["content_type"] == "file":
+            if content['key'].split(".")[-1] == "timeseries":
+              list_content.setdefault("timeseries", []).append(content)
+
+          if content["content_type"] == "dataset":
+            # Datasets with S3 located files
+            if "bucket" in content and "key" in content:
+              remote_path, local_path, local_file_name = self.content_path(content, project_path)
+              file_extension = local_file_name.split(".")[-1]
+
+              if file_extension == "csv":
+                df = pd.read_csv(local_path)
+                x_col = content['data'].get("dataset", {}).get("x_col", df.keys()[0])
+                df.set_index(x_col, inplace=True)
+                dataset_df.append(df)
+
+        dataset_df = pd.concat(dataset_df).drop_duplicates(keep='first').sort_index().reset_index()
+
+        content_project = list_content["project"][0]
+        MediaJobState.queue().inc_progress(job["id"], 5)
+
+        if len(dataset_df) > 0:
+          content_trainer = self.new_content(content_project, "training", name=f"Training on {dataset_df.shape}")
+          next_media.append(content_trainer)
+
+          ds = TimeSeriesDataSet(dataset_df, self["time_column"], format=self["time_format"])
+          ds.normalize()
+          ds.split(ratios=self["ratios"])
+
+          trainer = TimeSeriesTrainer(ds, self["window_size"], self["window_size"], self["window_size"])
+          trainer.create()
+          progress_callback = TrainerProgressCallback(job, self["max_epochs"], 50)
+          history = trainer.train(self["max_epochs"], self["patience"], progress_callback)
+          trainer_eval = trainer.evaluate()
+
+          model_name, model_path = trainer.save(project_path, self["model_name"])
+
+          model_data = {
+            "eval": trainer_eval,
+            "history": {
+              "loss": history.history["loss"],
+              "val_loss": history.history["val_loss"]
+            }
+          }
+
+          model_content = self.content_file(content_trainer, model_name, model_path, data=model_data)
+          next_media.append(model_content)
+
+          text = f"""
+          Time series model.
+          ....
+          ....
+          model name {model_name}
+          model size {os.path.getsize(model_path)}
+
+          eval
+          {self.df_to_html(pd.DataFrame(trainer_eval))}
+
+          train loss
+          {self.df_to_html(pd.DataFrame({"loss": history.history["loss"], "val_loss": history.history["val_loss"]}))}
+          """
+          text_content = self.content_text(content_trainer, "Train timeseries model", text)
+          next_media.append(text_content)
+
+          # TODO: Create numeric from history.history["loss"], history.history["val_loss"]
+
+          ds.denormalize("test")
+          predictor = TimeSeriesPredictor(model_path)
+
+          # Plot some test predictions
+          columns = predictor.model_data["columns"]
+          plot_progress_inc = 10 / self["test_plots"]
+          y_col = columns[self["test_plot_column_id"]]
+          for i in range(self["test_plot_from"], self["test_plot_from"] + self["test_plots"]):
+            image_content = self.content_image(text_content, y_col, project_path)
+            self.plot_chunk(ds.dataset["test"], predictor, i, self["test_plot_column_id"], image_content["local_path"])
+            image_content["data"] = {
+              "info": {
+                "size": os.path.getsize(image_content["local_path"]),
+                "plot": y_col
+              }
+            }
+            next_media.append(image_content)
+            MediaJobState.queue().inc_progress(job["id"], plot_progress_inc)
+
+        MediaJobState.queue().inc_progress(job["id"], 10)
+
+      if self["output"]:
+        job["output"] += next_media
+      if self["append-content"]:
+        job["append-content"] += next_media
+    except Exception as e:
+      exception(e)
+    return next_media
+
+  def plot_chunk(self, df, predictor, k, i, image_local_path):
+    k += 1
+    L = predictor.model_data["window"]["input_width"]
+    X = df[-(k + 1)*L:-k*L]
+
+    predictions, X, x_plot = predictor.predict(X)
+
+    idx_from = -k * L
+    idx_to = -(k - 1) * L
+    if idx_to != 0:
+      labels = df[idx_from:idx_to]
+    else:
+      labels = df[idx_from:]
+    labels = labels.values.reshape(X.shape).astype('float32')
+    labels = tf.convert_to_tensor(labels)
+
+    predictor.plot(
+      predictions, X, x_plot, i, labels,
+      image_local_path
+    )
+
+  def content_type(self):
+    return None
+
+  def name(self):
+    return "sci-timeseries-trainer"
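The `plot_chunk` helper slices the test frame into back-to-back windows of the model's `input_width`: counting back from the end of the frame, each length-L input window is paired with the L rows that immediately follow it as labels. A self-contained sketch of that indexing, with a made-up window length and frame:

```python
import pandas as pd

L = 24                                 # stand-in for model_data["window"]["input_width"]
df = pd.DataFrame({"T": range(240)})   # stand-in test frame (10 whole windows)

def chunk_bounds(df, k, L):
  # Mirrors plot_chunk's indexing: k is incremented first, so caller k=0 -> k=1.
  k += 1
  x = df[-(k + 1) * L:-k * L]          # input window fed to the model
  idx_from, idx_to = -k * L, -(k - 1) * L
  labels = df[idx_from:] if idx_to == 0 else df[idx_from:idx_to]  # the next L rows
  return x, labels

x, labels = chunk_bounds(df, 0, L)
print(x["T"].iloc[0], x["T"].iloc[-1])            # 192 215
print(labels["T"].iloc[0], labels["T"].iloc[-1])  # 216 239
```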
sciveo-0.1.26/sciveo/tools/aws/__init__.py (new file, 56 lines):

@@ -0,0 +1,56 @@
+#
+# Pavlin Georgiev, Softel Labs
+#
+# This is a proprietary file and may not be copied,
+# distributed, or modified without express permission
+# from the owner. For licensing inquiries, please
+# contact pavlin@softel.bg.
+#
+# 2023
+#
+
+from builtins import ImportError
+
+try:
+
+  import os
+  import time
+
+  from sciveo.tools.logger import *
+  from sciveo.tools.daemon import TasksDaemon, __upload_content__
+  from sciveo.content.runner import ProjectRunner
+  from sciveo.content.dataset import Dataset
+  from sciveo.monitoring.start import MonitorStart
+  from sciveo.network.tools import NetworkTools
+  from sciveo.version import __version__
+
+
+  TasksDaemon.current = TasksDaemon(num_threads=int(os.environ.get("SCIVEO_TASKS_NUM_THREADS", 1)))
+
+
+  # New Experiment
+  def open():
+    if ProjectRunner.current is not None:
+      return ProjectRunner.current.project
+    else:
+      error("there is no started project")
+
+  def start(project, function, configuration={}, **kwargs):
+    TasksDaemon.current.start()
+    ProjectRunner.current = ProjectRunner(project=project, function=function, configuration=configuration, **kwargs)
+    ProjectRunner.current.run()
+
+  # Dataset info
+  def dataset(info={}):
+    return Dataset.get(info)
+
+  # Monitoring start
+  def monitor(**kwargs):
+    MonitorStart(**kwargs)()
+
+  # Network tools
+  def network(**kwargs):
+    return NetworkTools(**kwargs)
+
+except ImportError as e:
+  pass
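The new module re-exposes the experiment-runner entry points (`open`, `start`, `dataset`, `monitor`, `network`) behind an ImportError guard, so it imports cleanly even when optional dependencies are unavailable. A hedged sketch of how `start` might be invoked, assuming sciveo 0.1.26 is installed and its optional dependencies import successfully; the project name, configuration values, and the callback signature are illustrative guesses, not taken from the release:

```python
import sciveo.tools.aws as sciveo_api  # module added in this release

# Illustrative experiment body; the exact argument ProjectRunner passes to the
# callback is an assumption, not documented by this diff.
def my_experiment(experiment):
  print("running experiment", experiment)

# Placeholder project name and configuration.
sciveo_api.start("demo project", my_experiment, configuration={"learning_rate": 0.01})
print(sciveo_api.open())  # returns the currently started project, or logs an error
```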
{sciveo-0.1.25 → sciveo-0.1.26}/sciveo.egg-info/SOURCES.txt (+3 lines):

@@ -74,6 +74,9 @@ sciveo/media/pipelines/processors/nlp/address.py
 sciveo/media/pipelines/processors/sci/__init__.py
 sciveo/media/pipelines/processors/sci/base.py
 sciveo/media/pipelines/processors/sci/dataset.py
+sciveo/media/pipelines/processors/sci/time_series/__init__.py
+sciveo/media/pipelines/processors/sci/time_series/predictor.py
+sciveo/media/pipelines/processors/sci/time_series/trainer.py
 sciveo/media/pipelines/processors/video/__init__.py
 sciveo/media/pipelines/processors/video/generators.py
 sciveo/media/pipelines/processors/video/motion_detection.py
sciveo-0.1.25/sciveo/version.py DELETED