@epfml/discojs 2.0.0 → 2.1.2-p20240506085037.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/aggregator/base.d.ts +180 -0
- package/dist/aggregator/base.js +236 -0
- package/dist/aggregator/get.d.ts +16 -0
- package/dist/aggregator/get.js +31 -0
- package/dist/aggregator/index.d.ts +7 -0
- package/dist/aggregator/index.js +4 -0
- package/dist/aggregator/mean.d.ts +23 -0
- package/dist/aggregator/mean.js +69 -0
- package/dist/aggregator/secure.d.ts +27 -0
- package/dist/aggregator/secure.js +91 -0
- package/dist/async_informant.d.ts +15 -0
- package/dist/async_informant.js +42 -0
- package/dist/client/base.d.ts +76 -0
- package/dist/client/base.js +88 -0
- package/dist/client/decentralized/base.d.ts +32 -0
- package/dist/client/decentralized/base.js +192 -0
- package/dist/client/decentralized/index.d.ts +2 -0
- package/dist/client/decentralized/index.js +2 -0
- package/dist/client/decentralized/messages.d.ts +28 -0
- package/dist/client/decentralized/messages.js +44 -0
- package/dist/client/decentralized/peer.d.ts +40 -0
- package/dist/client/decentralized/peer.js +189 -0
- package/dist/client/decentralized/peer_pool.d.ts +12 -0
- package/dist/client/decentralized/peer_pool.js +44 -0
- package/dist/client/event_connection.d.ts +34 -0
- package/dist/client/event_connection.js +105 -0
- package/dist/client/federated/base.d.ts +54 -0
- package/dist/client/federated/base.js +151 -0
- package/dist/client/federated/index.d.ts +2 -0
- package/dist/client/federated/index.js +2 -0
- package/dist/client/federated/messages.d.ts +30 -0
- package/dist/client/federated/messages.js +24 -0
- package/dist/client/index.d.ts +8 -0
- package/dist/client/index.js +8 -0
- package/dist/client/local.d.ts +3 -0
- package/dist/client/local.js +3 -0
- package/dist/client/messages.d.ts +30 -0
- package/dist/client/messages.js +26 -0
- package/dist/client/types.d.ts +2 -0
- package/dist/client/types.js +4 -0
- package/dist/client/utils.d.ts +2 -0
- package/dist/client/utils.js +7 -0
- package/dist/dataset/data/data.d.ts +48 -0
- package/dist/dataset/data/data.js +72 -0
- package/dist/dataset/data/data_split.d.ts +8 -0
- package/dist/dataset/data/data_split.js +1 -0
- package/dist/dataset/data/image_data.d.ts +11 -0
- package/dist/dataset/data/image_data.js +38 -0
- package/dist/dataset/data/index.d.ts +6 -0
- package/dist/dataset/data/index.js +5 -0
- package/dist/dataset/data/preprocessing/base.d.ts +16 -0
- package/dist/dataset/data/preprocessing/base.js +1 -0
- package/dist/dataset/data/preprocessing/image_preprocessing.d.ts +13 -0
- package/dist/dataset/data/preprocessing/image_preprocessing.js +40 -0
- package/dist/dataset/data/preprocessing/index.d.ts +4 -0
- package/dist/dataset/data/preprocessing/index.js +3 -0
- package/dist/dataset/data/preprocessing/tabular_preprocessing.d.ts +13 -0
- package/dist/dataset/data/preprocessing/tabular_preprocessing.js +45 -0
- package/dist/dataset/data/preprocessing/text_preprocessing.d.ts +13 -0
- package/dist/dataset/data/preprocessing/text_preprocessing.js +85 -0
- package/dist/dataset/data/tabular_data.d.ts +11 -0
- package/dist/dataset/data/tabular_data.js +25 -0
- package/dist/dataset/data/text_data.d.ts +11 -0
- package/dist/dataset/data/text_data.js +14 -0
- package/dist/{core/dataset → dataset}/data_loader/data_loader.d.ts +3 -5
- package/dist/dataset/data_loader/data_loader.js +2 -0
- package/dist/dataset/data_loader/image_loader.d.ts +20 -3
- package/dist/dataset/data_loader/image_loader.js +98 -23
- package/dist/dataset/data_loader/index.d.ts +5 -2
- package/dist/dataset/data_loader/index.js +4 -7
- package/dist/dataset/data_loader/tabular_loader.d.ts +34 -3
- package/dist/dataset/data_loader/tabular_loader.js +75 -15
- package/dist/dataset/data_loader/text_loader.d.ts +14 -0
- package/dist/dataset/data_loader/text_loader.js +25 -0
- package/dist/dataset/dataset.d.ts +5 -0
- package/dist/dataset/dataset.js +1 -0
- package/dist/dataset/dataset_builder.d.ts +60 -0
- package/dist/dataset/dataset_builder.js +142 -0
- package/dist/dataset/index.d.ts +5 -0
- package/dist/dataset/index.js +3 -0
- package/dist/default_tasks/cifar10/index.d.ts +2 -0
- package/dist/default_tasks/cifar10/index.js +60 -0
- package/dist/default_tasks/cifar10/model.d.ts +434 -0
- package/dist/default_tasks/cifar10/model.js +2385 -0
- package/dist/default_tasks/geotags/index.d.ts +2 -0
- package/dist/default_tasks/geotags/index.js +65 -0
- package/dist/default_tasks/geotags/model.d.ts +593 -0
- package/dist/default_tasks/geotags/model.js +4715 -0
- package/dist/default_tasks/index.d.ts +8 -0
- package/dist/default_tasks/index.js +8 -0
- package/dist/default_tasks/lus_covid.d.ts +2 -0
- package/dist/default_tasks/lus_covid.js +89 -0
- package/dist/default_tasks/mnist.d.ts +2 -0
- package/dist/default_tasks/mnist.js +61 -0
- package/dist/default_tasks/simple_face/index.d.ts +2 -0
- package/dist/default_tasks/simple_face/index.js +48 -0
- package/dist/default_tasks/simple_face/model.d.ts +513 -0
- package/dist/default_tasks/simple_face/model.js +4301 -0
- package/dist/default_tasks/skin_mnist.d.ts +2 -0
- package/dist/default_tasks/skin_mnist.js +80 -0
- package/dist/default_tasks/titanic.d.ts +2 -0
- package/dist/default_tasks/titanic.js +88 -0
- package/dist/default_tasks/wikitext.d.ts +2 -0
- package/dist/default_tasks/wikitext.js +38 -0
- package/dist/index.d.ts +18 -2
- package/dist/index.js +18 -6
- package/dist/{core/informant → informant}/graph_informant.d.ts +1 -1
- package/dist/informant/graph_informant.js +20 -0
- package/dist/informant/index.d.ts +1 -0
- package/dist/informant/index.js +1 -0
- package/dist/{core/logging → logging}/console_logger.d.ts +2 -2
- package/dist/logging/console_logger.js +22 -0
- package/dist/logging/index.d.ts +2 -0
- package/dist/logging/index.js +1 -0
- package/dist/{core/logging → logging}/logger.d.ts +3 -3
- package/dist/logging/logger.js +1 -0
- package/dist/memory/base.d.ts +119 -0
- package/dist/memory/base.js +9 -0
- package/dist/memory/empty.d.ts +20 -0
- package/dist/memory/empty.js +43 -0
- package/dist/memory/index.d.ts +3 -1
- package/dist/memory/index.js +3 -5
- package/dist/memory/model_type.d.ts +9 -0
- package/dist/memory/model_type.js +10 -0
- package/dist/{core/privacy.d.ts → privacy.d.ts} +1 -1
- package/dist/{core/privacy.js → privacy.js} +11 -16
- package/dist/serialization/index.d.ts +2 -0
- package/dist/serialization/index.js +2 -0
- package/dist/serialization/model.d.ts +5 -0
- package/dist/serialization/model.js +67 -0
- package/dist/{core/serialization → serialization}/weights.d.ts +2 -2
- package/dist/serialization/weights.js +37 -0
- package/dist/task/data_example.js +14 -0
- package/dist/task/digest.d.ts +5 -0
- package/dist/task/digest.js +14 -0
- package/dist/{core/task → task}/display_information.d.ts +5 -3
- package/dist/task/display_information.js +46 -0
- package/dist/task/index.d.ts +7 -0
- package/dist/task/index.js +5 -0
- package/dist/task/label_type.d.ts +9 -0
- package/dist/task/label_type.js +28 -0
- package/dist/task/summary.js +13 -0
- package/dist/task/task.d.ts +12 -0
- package/dist/task/task.js +22 -0
- package/dist/task/task_handler.d.ts +5 -0
- package/dist/task/task_handler.js +20 -0
- package/dist/task/task_provider.d.ts +5 -0
- package/dist/task/task_provider.js +1 -0
- package/dist/{core/task → task}/training_information.d.ts +9 -10
- package/dist/task/training_information.js +88 -0
- package/dist/training/disco.d.ts +40 -0
- package/dist/training/disco.js +107 -0
- package/dist/training/index.d.ts +2 -0
- package/dist/training/index.js +1 -0
- package/dist/training/trainer/distributed_trainer.d.ts +20 -0
- package/dist/training/trainer/distributed_trainer.js +36 -0
- package/dist/training/trainer/local_trainer.d.ts +12 -0
- package/dist/training/trainer/local_trainer.js +19 -0
- package/dist/training/trainer/trainer.d.ts +33 -0
- package/dist/training/trainer/trainer.js +52 -0
- package/dist/{core/training → training}/trainer/trainer_builder.d.ts +5 -7
- package/dist/training/trainer/trainer_builder.js +43 -0
- package/dist/types.d.ts +8 -0
- package/dist/types.js +1 -0
- package/dist/utils/event_emitter.d.ts +40 -0
- package/dist/utils/event_emitter.js +57 -0
- package/dist/validation/index.d.ts +1 -0
- package/dist/validation/index.js +1 -0
- package/dist/validation/validator.d.ts +28 -0
- package/dist/validation/validator.js +132 -0
- package/dist/weights/aggregation.d.ts +21 -0
- package/dist/weights/aggregation.js +44 -0
- package/dist/weights/index.d.ts +2 -0
- package/dist/weights/index.js +2 -0
- package/dist/weights/weights_container.d.ts +68 -0
- package/dist/weights/weights_container.js +96 -0
- package/package.json +25 -16
- package/README.md +0 -53
- package/dist/core/async_buffer.d.ts +0 -41
- package/dist/core/async_buffer.js +0 -97
- package/dist/core/async_informant.d.ts +0 -20
- package/dist/core/async_informant.js +0 -69
- package/dist/core/client/base.d.ts +0 -33
- package/dist/core/client/base.js +0 -35
- package/dist/core/client/decentralized/base.d.ts +0 -32
- package/dist/core/client/decentralized/base.js +0 -212
- package/dist/core/client/decentralized/clear_text.d.ts +0 -14
- package/dist/core/client/decentralized/clear_text.js +0 -96
- package/dist/core/client/decentralized/index.d.ts +0 -4
- package/dist/core/client/decentralized/index.js +0 -9
- package/dist/core/client/decentralized/messages.d.ts +0 -41
- package/dist/core/client/decentralized/messages.js +0 -54
- package/dist/core/client/decentralized/peer.d.ts +0 -26
- package/dist/core/client/decentralized/peer.js +0 -210
- package/dist/core/client/decentralized/peer_pool.d.ts +0 -14
- package/dist/core/client/decentralized/peer_pool.js +0 -92
- package/dist/core/client/decentralized/sec_agg.d.ts +0 -22
- package/dist/core/client/decentralized/sec_agg.js +0 -190
- package/dist/core/client/decentralized/secret_shares.d.ts +0 -3
- package/dist/core/client/decentralized/secret_shares.js +0 -39
- package/dist/core/client/decentralized/types.d.ts +0 -2
- package/dist/core/client/decentralized/types.js +0 -7
- package/dist/core/client/event_connection.d.ts +0 -37
- package/dist/core/client/event_connection.js +0 -158
- package/dist/core/client/federated/client.d.ts +0 -37
- package/dist/core/client/federated/client.js +0 -273
- package/dist/core/client/federated/index.d.ts +0 -2
- package/dist/core/client/federated/index.js +0 -7
- package/dist/core/client/federated/messages.d.ts +0 -38
- package/dist/core/client/federated/messages.js +0 -25
- package/dist/core/client/index.d.ts +0 -5
- package/dist/core/client/index.js +0 -11
- package/dist/core/client/local.d.ts +0 -8
- package/dist/core/client/local.js +0 -36
- package/dist/core/client/messages.d.ts +0 -28
- package/dist/core/client/messages.js +0 -33
- package/dist/core/client/utils.d.ts +0 -2
- package/dist/core/client/utils.js +0 -19
- package/dist/core/dataset/data/data.d.ts +0 -11
- package/dist/core/dataset/data/data.js +0 -20
- package/dist/core/dataset/data/data_split.d.ts +0 -5
- package/dist/core/dataset/data/data_split.js +0 -2
- package/dist/core/dataset/data/image_data.d.ts +0 -8
- package/dist/core/dataset/data/image_data.js +0 -64
- package/dist/core/dataset/data/index.d.ts +0 -5
- package/dist/core/dataset/data/index.js +0 -11
- package/dist/core/dataset/data/preprocessing.d.ts +0 -13
- package/dist/core/dataset/data/preprocessing.js +0 -33
- package/dist/core/dataset/data/tabular_data.d.ts +0 -8
- package/dist/core/dataset/data/tabular_data.js +0 -40
- package/dist/core/dataset/data_loader/data_loader.js +0 -10
- package/dist/core/dataset/data_loader/image_loader.d.ts +0 -17
- package/dist/core/dataset/data_loader/image_loader.js +0 -141
- package/dist/core/dataset/data_loader/index.d.ts +0 -3
- package/dist/core/dataset/data_loader/index.js +0 -9
- package/dist/core/dataset/data_loader/tabular_loader.d.ts +0 -29
- package/dist/core/dataset/data_loader/tabular_loader.js +0 -101
- package/dist/core/dataset/dataset.d.ts +0 -2
- package/dist/core/dataset/dataset.js +0 -2
- package/dist/core/dataset/dataset_builder.d.ts +0 -18
- package/dist/core/dataset/dataset_builder.js +0 -96
- package/dist/core/dataset/index.d.ts +0 -4
- package/dist/core/dataset/index.js +0 -14
- package/dist/core/index.d.ts +0 -18
- package/dist/core/index.js +0 -41
- package/dist/core/informant/graph_informant.js +0 -23
- package/dist/core/informant/index.d.ts +0 -3
- package/dist/core/informant/index.js +0 -9
- package/dist/core/informant/training_informant/base.d.ts +0 -31
- package/dist/core/informant/training_informant/base.js +0 -83
- package/dist/core/informant/training_informant/decentralized.d.ts +0 -5
- package/dist/core/informant/training_informant/decentralized.js +0 -22
- package/dist/core/informant/training_informant/federated.d.ts +0 -14
- package/dist/core/informant/training_informant/federated.js +0 -32
- package/dist/core/informant/training_informant/index.d.ts +0 -4
- package/dist/core/informant/training_informant/index.js +0 -11
- package/dist/core/informant/training_informant/local.d.ts +0 -6
- package/dist/core/informant/training_informant/local.js +0 -20
- package/dist/core/logging/console_logger.js +0 -33
- package/dist/core/logging/index.d.ts +0 -3
- package/dist/core/logging/index.js +0 -9
- package/dist/core/logging/logger.js +0 -9
- package/dist/core/logging/trainer_logger.d.ts +0 -24
- package/dist/core/logging/trainer_logger.js +0 -59
- package/dist/core/memory/base.d.ts +0 -22
- package/dist/core/memory/base.js +0 -9
- package/dist/core/memory/empty.d.ts +0 -14
- package/dist/core/memory/empty.js +0 -75
- package/dist/core/memory/index.d.ts +0 -3
- package/dist/core/memory/index.js +0 -9
- package/dist/core/memory/model_type.d.ts +0 -4
- package/dist/core/memory/model_type.js +0 -9
- package/dist/core/serialization/index.d.ts +0 -2
- package/dist/core/serialization/index.js +0 -6
- package/dist/core/serialization/model.d.ts +0 -5
- package/dist/core/serialization/model.js +0 -55
- package/dist/core/serialization/weights.js +0 -64
- package/dist/core/task/data_example.js +0 -24
- package/dist/core/task/display_information.js +0 -49
- package/dist/core/task/index.d.ts +0 -3
- package/dist/core/task/index.js +0 -8
- package/dist/core/task/model_compile_data.d.ts +0 -6
- package/dist/core/task/model_compile_data.js +0 -22
- package/dist/core/task/summary.js +0 -19
- package/dist/core/task/task.d.ts +0 -10
- package/dist/core/task/task.js +0 -31
- package/dist/core/task/training_information.js +0 -66
- package/dist/core/tasks/cifar10.d.ts +0 -3
- package/dist/core/tasks/cifar10.js +0 -65
- package/dist/core/tasks/geotags.d.ts +0 -3
- package/dist/core/tasks/geotags.js +0 -67
- package/dist/core/tasks/index.d.ts +0 -6
- package/dist/core/tasks/index.js +0 -10
- package/dist/core/tasks/lus_covid.d.ts +0 -3
- package/dist/core/tasks/lus_covid.js +0 -87
- package/dist/core/tasks/mnist.d.ts +0 -3
- package/dist/core/tasks/mnist.js +0 -60
- package/dist/core/tasks/simple_face.d.ts +0 -2
- package/dist/core/tasks/simple_face.js +0 -41
- package/dist/core/tasks/titanic.d.ts +0 -3
- package/dist/core/tasks/titanic.js +0 -88
- package/dist/core/training/disco.d.ts +0 -23
- package/dist/core/training/disco.js +0 -130
- package/dist/core/training/index.d.ts +0 -2
- package/dist/core/training/index.js +0 -7
- package/dist/core/training/trainer/distributed_trainer.d.ts +0 -20
- package/dist/core/training/trainer/distributed_trainer.js +0 -65
- package/dist/core/training/trainer/local_trainer.d.ts +0 -11
- package/dist/core/training/trainer/local_trainer.js +0 -34
- package/dist/core/training/trainer/round_tracker.d.ts +0 -30
- package/dist/core/training/trainer/round_tracker.js +0 -47
- package/dist/core/training/trainer/trainer.d.ts +0 -65
- package/dist/core/training/trainer/trainer.js +0 -160
- package/dist/core/training/trainer/trainer_builder.js +0 -95
- package/dist/core/training/training_schemes.d.ts +0 -5
- package/dist/core/training/training_schemes.js +0 -10
- package/dist/core/types.d.ts +0 -4
- package/dist/core/types.js +0 -2
- package/dist/core/validation/index.d.ts +0 -1
- package/dist/core/validation/index.js +0 -5
- package/dist/core/validation/validator.d.ts +0 -17
- package/dist/core/validation/validator.js +0 -104
- package/dist/core/weights/aggregation.d.ts +0 -8
- package/dist/core/weights/aggregation.js +0 -96
- package/dist/core/weights/index.d.ts +0 -2
- package/dist/core/weights/index.js +0 -7
- package/dist/core/weights/weights_container.d.ts +0 -19
- package/dist/core/weights/weights_container.js +0 -64
- package/dist/imports.d.ts +0 -2
- package/dist/imports.js +0 -7
- package/dist/memory/memory.d.ts +0 -26
- package/dist/memory/memory.js +0 -160
- package/dist/{core/task → task}/data_example.d.ts +1 -1
- package/dist/{core/task → task}/summary.d.ts +1 -1
package/dist/core/dataset/data/preprocessing.js
DELETED
@@ -1,33 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPreprocessImage = exports.ImagePreprocessing = void 0;
-var __1 = require("../..");
-var ImagePreprocessing;
-(function (ImagePreprocessing) {
-    ImagePreprocessing["Normalize"] = "normalize";
-    ImagePreprocessing["Resize"] = "resize";
-})(ImagePreprocessing = exports.ImagePreprocessing || (exports.ImagePreprocessing = {}));
-function getPreprocessImage(task) {
-    var preprocessImage = function (tensorContainer) {
-        var _a, _b;
-        // TODO unsafe cast, tfjs does not provide the right interface
-        var info = task.trainingInformation;
-        var _c = tensorContainer, xs = _c.xs, ys = _c.ys;
-        if ((_a = info.preprocessingFunctions) === null || _a === void 0 ? void 0 : _a.includes(ImagePreprocessing.Normalize)) {
-            xs = xs.div(__1.tf.scalar(255));
-        }
-        if (((_b = info.preprocessingFunctions) === null || _b === void 0 ? void 0 : _b.includes(ImagePreprocessing.Resize)) &&
-            info.IMAGE_H !== undefined &&
-            info.IMAGE_W !== undefined) {
-            xs = __1.tf.image.resizeBilinear(xs, [
-                info.IMAGE_H, info.IMAGE_W
-            ]);
-        }
-        return {
-            xs: xs,
-            ys: ys
-        };
-    };
-    return preprocessImage;
-}
-exports.getPreprocessImage = getPreprocessImage;
package/dist/core/dataset/data/tabular_data.d.ts
DELETED
@@ -1,8 +0,0 @@
-import { Task } from '../..';
-import { Dataset } from '../dataset';
-import { Data } from './data';
-export declare class TabularData extends Data {
-    static init(dataset: Dataset, task: Task, size?: number): Promise<Data>;
-    batch(): Data;
-    preprocess(): Data;
-}
package/dist/core/dataset/data/tabular_data.js
DELETED
@@ -1,40 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TabularData = void 0;
-var tslib_1 = require("tslib");
-var data_1 = require("./data");
-var TabularData = /** @class */ (function (_super) {
-    (0, tslib_1.__extends)(TabularData, _super);
-    function TabularData() {
-        return _super !== null && _super.apply(this, arguments) || this;
-    }
-    TabularData.init = function (dataset, task, size) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var e_1;
-            return (0, tslib_1.__generator)(this, function (_a) {
-                switch (_a.label) {
-                    case 0:
-                        _a.trys.push([0, 2, , 3]);
-                        return [4 /*yield*/, dataset.iterator()];
-                    case 1:
-                        _a.sent();
-                        return [3 /*break*/, 3];
-                    case 2:
-                        e_1 = _a.sent();
-                        throw new Error('Data input format is not compatible with the chosen task');
-                    case 3: return [2 /*return*/, new TabularData(dataset, task, size)];
-                }
-            });
-        });
-    };
-    TabularData.prototype.batch = function () {
-        var batchSize = this.task.trainingInformation.batchSize;
-        var newDataset = batchSize === undefined ? this.dataset : this.dataset.batch(batchSize);
-        return new TabularData(newDataset, this.task, this.size);
-    };
-    TabularData.prototype.preprocess = function () {
-        return this;
-    };
-    return TabularData;
-}(data_1.Data));
-exports.TabularData = TabularData;
package/dist/core/dataset/data_loader/data_loader.js
DELETED
@@ -1,10 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DataLoader = void 0;
-var DataLoader = /** @class */ (function () {
-    function DataLoader(task) {
-        this.task = task;
-    }
-    return DataLoader;
-}());
-exports.DataLoader = DataLoader;
package/dist/core/dataset/data_loader/image_loader.d.ts
DELETED
@@ -1,17 +0,0 @@
-import { tf } from '../..';
-import { Dataset } from '../dataset';
-import { DataSplit } from '../data';
-import { DataLoader, DataConfig } from '../data_loader';
-/**
- * TODO @s314cy:
- * Load labels and correctly match them with their respective images, with the following constraints:
- * 1. Images are given as 1 image/1 file
- * 2. Labels are given as multiple labels/1 file, each label file can contain a different amount of labels
- */
-export declare abstract class ImageLoader<Source> extends DataLoader<Source> {
-    abstract readImageFrom(source: Source): Promise<tf.Tensor3D>;
-    load(image: Source, config?: DataConfig): Promise<Dataset>;
-    private buildDataset;
-    loadAll(images: Source[], config?: DataConfig): Promise<DataSplit>;
-    shuffle(array: number[]): void;
-}
package/dist/core/dataset/data_loader/image_loader.js
DELETED
@@ -1,141 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ImageLoader = void 0;
-var tslib_1 = require("tslib");
-var immutable_1 = require("immutable");
-var __1 = require("../..");
-var data_1 = require("../data");
-var data_loader_1 = require("../data_loader");
-/**
- * TODO @s314cy:
- * Load labels and correctly match them with their respective images, with the following constraints:
- * 1. Images are given as 1 image/1 file
- * 2. Labels are given as multiple labels/1 file, each label file can contain a different amount of labels
- */
-var ImageLoader = /** @class */ (function (_super) {
-    (0, tslib_1.__extends)(ImageLoader, _super);
-    function ImageLoader() {
-        return _super !== null && _super.apply(this, arguments) || this;
-    }
-    ImageLoader.prototype.load = function (image, config) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var tensorContainer;
-            var _a;
-            return (0, tslib_1.__generator)(this, function (_b) {
-                switch (_b.label) {
-                    case 0:
-                        if (!(config === undefined || config.labels === undefined)) return [3 /*break*/, 2];
-                        return [4 /*yield*/, this.readImageFrom(image)];
-                    case 1:
-                        tensorContainer = _b.sent();
-                        return [3 /*break*/, 4];
-                    case 2:
-                        _a = {};
-                        return [4 /*yield*/, this.readImageFrom(image)];
-                    case 3:
-                        tensorContainer = (_a.xs = _b.sent(),
-                            _a.ys = config.labels[0],
-                            _a);
-                        _b.label = 4;
-                    case 4: return [2 /*return*/, __1.tf.data.array([tensorContainer])];
-                }
-            });
-        });
-    };
-    ImageLoader.prototype.buildDataset = function (images, labels, indices, config) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var dataset;
-            var _this = this;
-            return (0, tslib_1.__generator)(this, function (_a) {
-                switch (_a.label) {
-                    case 0:
-                        dataset = __1.tf.data.generator(function () {
-                            var withLabels = (config === null || config === void 0 ? void 0 : config.labels) !== undefined;
-                            var index = 0;
-                            var iterator = {
-                                next: function () { return (0, tslib_1.__awaiter)(_this, void 0, void 0, function () {
-                                    var sample, label, value;
-                                    return (0, tslib_1.__generator)(this, function (_a) {
-                                        switch (_a.label) {
-                                            case 0:
-                                                if (index === indices.length) {
-                                                    return [2 /*return*/, { done: true }];
-                                                }
-                                                return [4 /*yield*/, this.readImageFrom(images[indices[index]])];
-                                            case 1:
-                                                sample = _a.sent();
-                                                label = withLabels ? labels[indices[index]] : undefined;
-                                                value = withLabels ? { xs: sample, ys: label } : sample;
-                                                index++;
-                                                return [2 /*return*/, {
-                                                        value: value,
-                                                        done: false
-                                                    }];
-                                        }
-                                    });
-                                }); }
-                            };
-                            return iterator; // Lazy
-                        });
-                        return [4 /*yield*/, data_1.ImageData.init(dataset, this.task, indices.length)];
-                    case 1: return [2 /*return*/, _a.sent()];
-                }
-            });
-        });
-    };
-    ImageLoader.prototype.loadAll = function (images, config) {
-        var _a, _b;
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var labels, indices, numberOfClasses, dataset, trainSize, trainIndices, valIndices, trainDataset, valDataset;
-            return (0, tslib_1.__generator)(this, function (_c) {
-                switch (_c.label) {
-                    case 0:
-                        labels = [];
-                        indices = (0, immutable_1.Range)(0, images.length).toArray();
-                        if ((config === null || config === void 0 ? void 0 : config.labels) !== undefined) {
-                            numberOfClasses = (_b = (_a = this.task.trainingInformation) === null || _a === void 0 ? void 0 : _a.LABEL_LIST) === null || _b === void 0 ? void 0 : _b.length;
-                            if (numberOfClasses === undefined) {
-                                throw new Error('wanted labels but none found in task');
-                            }
-                            labels = __1.tf.oneHot(__1.tf.tensor1d(config.labels, 'int32'), numberOfClasses).arraySync();
-                        }
-                        if ((config === null || config === void 0 ? void 0 : config.shuffle) === undefined || (config === null || config === void 0 ? void 0 : config.shuffle)) {
-                            this.shuffle(indices);
-                        }
-                        if (!((config === null || config === void 0 ? void 0 : config.validationSplit) === undefined || (config === null || config === void 0 ? void 0 : config.validationSplit) === 0)) return [3 /*break*/, 2];
-                        return [4 /*yield*/, this.buildDataset(images, labels, indices, config)];
-                    case 1:
-                        dataset = _c.sent();
-                        return [2 /*return*/, {
-                                train: dataset,
-                                validation: undefined
-                            }];
-                    case 2:
-                        trainSize = Math.floor(images.length * (1 - config.validationSplit));
-                        trainIndices = indices.slice(0, trainSize);
-                        valIndices = indices.slice(trainSize);
-                        return [4 /*yield*/, this.buildDataset(images, labels, trainIndices, config)];
-                    case 3:
-                        trainDataset = _c.sent();
-                        return [4 /*yield*/, this.buildDataset(images, labels, valIndices, config)];
-                    case 4:
-                        valDataset = _c.sent();
-                        return [2 /*return*/, {
-                                train: trainDataset,
-                                validation: valDataset
-                            }];
-                }
-            });
-        });
-    };
-    ImageLoader.prototype.shuffle = function (array) {
-        for (var i = 0; i < array.length; i++) {
-            var j = Math.floor(Math.random() * i);
-            var swap = array[i];
-            array[i] = array[j];
-            array[j] = swap;
-        }
-    };
-    return ImageLoader;
-}(data_loader_1.DataLoader));
-exports.ImageLoader = ImageLoader;
package/dist/core/dataset/data_loader/index.js
DELETED
@@ -1,9 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TabularLoader = exports.ImageLoader = exports.DataLoader = void 0;
-var data_loader_1 = require("./data_loader");
-Object.defineProperty(exports, "DataLoader", { enumerable: true, get: function () { return data_loader_1.DataLoader; } });
-var image_loader_1 = require("./image_loader");
-Object.defineProperty(exports, "ImageLoader", { enumerable: true, get: function () { return image_loader_1.ImageLoader; } });
-var tabular_loader_1 = require("./tabular_loader");
-Object.defineProperty(exports, "TabularLoader", { enumerable: true, get: function () { return tabular_loader_1.TabularLoader; } });
package/dist/core/dataset/data_loader/tabular_loader.d.ts
DELETED
@@ -1,29 +0,0 @@
-import { tf, Task } from '../..';
-import { Dataset } from '../dataset';
-import { DataSplit } from '../data';
-import { DataLoader, DataConfig } from '../data_loader';
-export declare abstract class TabularLoader<Source> extends DataLoader<Source> {
-    private readonly delimiter;
-    constructor(task: Task, delimiter: string);
-    /**
-     * Creates a CSV dataset object based off the given source.
-     * @param source File object, URL string or local file system path.
-     * @param csvConfig Object expected by TF.js to create a CSVDataset.
-     * @returns The CSVDataset object built upon the given source.
-     */
-    abstract loadTabularDatasetFrom(source: Source, csvConfig: Record<string, unknown>): tf.data.CSVDataset;
-    /**
-     * Expects delimiter-separated tabular data made of N columns. The data may be
-     * potentially split among several sources. Every source should contain N-1
-     * feature columns and 1 single label column.
-     * @param source List of File objects, URLs or file system paths.
-     * @param config
-     * @returns A TF.js dataset built upon read tabular data stored in the given sources.
-     */
-    load(source: Source, config?: DataConfig): Promise<Dataset>;
-    /**
-     * Creates the CSV datasets based off the given sources, then fuses them into a single CSV
-     * dataset.
-     */
-    loadAll(sources: Source[], config: DataConfig): Promise<DataSplit>;
-}
package/dist/core/dataset/data_loader/tabular_loader.js
DELETED
@@ -1,101 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TabularLoader = void 0;
-var tslib_1 = require("tslib");
-var immutable_1 = require("immutable");
-var data_1 = require("../data");
-var data_loader_1 = require("../data_loader");
-// window size from which the dataset shuffling will sample
-var BUFFER_SIZE = 1000;
-var TabularLoader = /** @class */ (function (_super) {
-    (0, tslib_1.__extends)(TabularLoader, _super);
-    function TabularLoader(task, delimiter) {
-        var _this = _super.call(this, task) || this;
-        _this.delimiter = delimiter;
-        return _this;
-    }
-    /**
-     * Expects delimiter-separated tabular data made of N columns. The data may be
-     * potentially split among several sources. Every source should contain N-1
-     * feature columns and 1 single label column.
-     * @param source List of File objects, URLs or file system paths.
-     * @param config
-     * @returns A TF.js dataset built upon read tabular data stored in the given sources.
-     */
-    TabularLoader.prototype.load = function (source, config) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var columnConfigs, csvConfig, dataset;
-            return (0, tslib_1.__generator)(this, function (_a) {
-                /**
-                 * Prepare the CSV config object based off the given features and labels.
-                 * If labels is empty, then the returned dataset is comprised of samples only.
-                 * Otherwise, each entry is of the form `{ xs, ys }` with `xs` as features and `ys`
-                 * as labels.
-                 */
-                if ((config === null || config === void 0 ? void 0 : config.features) === undefined) {
-                    // TODO @s314cy
-                    throw new Error('Not implemented');
-                }
-                columnConfigs = (0, immutable_1.Map)((0, immutable_1.Set)(config.features).map(function (feature) { return [feature, { required: false, isLabel: false }]; })).merge((0, immutable_1.Set)(config.labels).map(function (label) { return [label, { required: true, isLabel: true }]; }));
-                csvConfig = {
-                    hasHeader: true,
-                    columnConfigs: columnConfigs.toObject(),
-                    configuredColumnsOnly: true,
-                    delimiter: this.delimiter
-                };
-                dataset = this.loadTabularDatasetFrom(source, csvConfig).map(function (t) {
-                    if (typeof t === 'object' && ('xs' in t) && ('ys' in t)) {
-                        return t;
-                    }
-                    throw new TypeError('Expected TensorContainerObject');
-                }).map(function (t) {
-                    // TODO order may not be stable between tensor
-                    var _a = t, xs = _a.xs, ys = _a.ys;
-                    return {
-                        xs: Object.values(xs),
-                        ys: Object.values(ys)
-                    };
-                });
-                return [2 /*return*/, ((config === null || config === void 0 ? void 0 : config.shuffle) === undefined || (config === null || config === void 0 ? void 0 : config.shuffle)) ? dataset.shuffle(BUFFER_SIZE) : dataset];
-            });
-        });
-    };
-    /**
-     * Creates the CSV datasets based off the given sources, then fuses them into a single CSV
-     * dataset.
-     */
-    TabularLoader.prototype.loadAll = function (sources, config) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var datasets, dataset, data;
-            var _this = this;
-            return (0, tslib_1.__generator)(this, function (_a) {
-                switch (_a.label) {
-                    case 0: return [4 /*yield*/, Promise.all(sources.map(function (source) { return (0, tslib_1.__awaiter)(_this, void 0, void 0, function () { return (0, tslib_1.__generator)(this, function (_a) {
-                            switch (_a.label) {
-                                case 0: return [4 /*yield*/, this.load(source, (0, tslib_1.__assign)((0, tslib_1.__assign)({}, config), { shuffle: false }))];
-                                case 1: return [2 /*return*/, _a.sent()];
-                            }
-                        }); }); }))];
-                    case 1:
-                        datasets = _a.sent();
-                        dataset = (0, immutable_1.List)(datasets).reduce(function (acc, dataset) { return acc.concatenate(dataset); });
-                        dataset = (config === null || config === void 0 ? void 0 : config.shuffle) ? dataset.shuffle(BUFFER_SIZE) : dataset;
-                        return [4 /*yield*/, data_1.TabularData.init(dataset, this.task,
-                            // dataset.size does not work for csv datasets
-                            // https://github.com/tensorflow/tfjs/issues/5845
-                            undefined)
-                            // TODO: Implement validation split for tabular data (tricky due to streaming)
-                        ];
-                    case 2:
-                        data = _a.sent();
-                        // TODO: Implement validation split for tabular data (tricky due to streaming)
-                        return [2 /*return*/, {
-                                train: data
-                            }];
-                }
-            });
-        });
-    };
-    return TabularLoader;
-}(data_loader_1.DataLoader));
-exports.TabularLoader = TabularLoader;
package/dist/core/dataset/dataset_builder.d.ts
DELETED
@@ -1,18 +0,0 @@
-import { Task } from '..';
-import { DataSplit } from './data';
-import { DataConfig, DataLoader } from './data_loader/data_loader';
-export declare class DatasetBuilder<Source> {
-    private readonly task;
-    private readonly dataLoader;
-    private sources;
-    private readonly labelledSources;
-    private built;
-    constructor(dataLoader: DataLoader<Source>, task: Task);
-    addFiles(sources: Source[], label?: string): void;
-    clearFiles(label?: string): void;
-    private resetBuiltState;
-    private getLabels;
-    build(config?: DataConfig): Promise<DataSplit>;
-    isBuilt(): boolean;
-    size(): number;
-}
package/dist/core/dataset/dataset_builder.js
DELETED
@@ -1,96 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DatasetBuilder = void 0;
-var tslib_1 = require("tslib");
-var DatasetBuilder = /** @class */ (function () {
-    function DatasetBuilder(dataLoader, task) {
-        this.dataLoader = dataLoader;
-        this.task = task;
-        this.sources = [];
-        this.labelledSources = new Map();
-        this.built = false;
-    }
-    DatasetBuilder.prototype.addFiles = function (sources, label) {
-        if (this.built) {
-            this.resetBuiltState();
-        }
-        if (label === undefined) {
-            this.sources = this.sources.concat(sources);
-        }
-        else {
-            var currentSources = this.labelledSources.get(label);
-            if (currentSources === undefined) {
-                this.labelledSources.set(label, sources);
-            }
-            else {
-                this.labelledSources.set(label, currentSources.concat(sources));
-            }
-        }
-    };
-    DatasetBuilder.prototype.clearFiles = function (label) {
-        if (this.built) {
-            this.resetBuiltState();
-        }
-        if (label === undefined) {
-            this.sources = [];
-        }
-        else {
-            this.labelledSources.delete(label);
-        }
-    };
-    // If files are added or removed, then this should be called since the latest
-    // version of the dataset_builder has not yet been built.
-    DatasetBuilder.prototype.resetBuiltState = function () {
-        this.built = false;
-    };
-    DatasetBuilder.prototype.getLabels = function () {
-        // We need to duplicate the labels as we need one for each soure.
-        // Say for label A we have sources [img1, img2, img3], then we
-        // need labels [A, A, A].
-        var labels = [];
-        Array.from(this.labelledSources.values()).forEach(function (sources, index) {
-            var sourcesLabels = Array.from({ length: sources.length }, function (_) { return index.toString(); });
-            labels = labels.concat(sourcesLabels);
-        });
-        return labels.flat();
-    };
-    DatasetBuilder.prototype.build = function (config) {
-        return (0, tslib_1.__awaiter)(this, void 0, void 0, function () {
-            var dataTuple, defaultConfig, defaultConfig, sources;
-            return (0, tslib_1.__generator)(this, function (_a) {
-                switch (_a.label) {
-                    case 0:
-                        // Require that at leat one source collection is non-empty, but not both
-                        if ((this.sources.length > 0) === (this.labelledSources.size > 0)) {
-                            throw new Error('Please provide dataset input files');
-                        }
-                        if (!(this.sources.length > 0)) return [3 /*break*/, 2];
-                        defaultConfig = (0, tslib_1.__assign)({ features: this.task.trainingInformation.inputColumns, labels: this.task.trainingInformation.outputColumns }, config);
-                        return [4 /*yield*/, this.dataLoader.loadAll(this.sources, defaultConfig)];
-                    case 1:
-                        dataTuple = _a.sent();
-                        return [3 /*break*/, 4];
-                    case 2:
-                        defaultConfig = (0, tslib_1.__assign)({ labels: this.getLabels() }, config);
-                        sources = Array.from(this.labelledSources.values()).flat();
-                        return [4 /*yield*/, this.dataLoader.loadAll(sources, defaultConfig)];
-                    case 3:
-                        dataTuple = _a.sent();
-                        _a.label = 4;
-                    case 4:
-                        // TODO @s314cy: Support .csv labels for image datasets (supervised training or testing)
-                        this.built = true;
-                        return [2 /*return*/, dataTuple];
-                }
-            });
-        });
-    };
-    DatasetBuilder.prototype.isBuilt = function () {
-        return this.built;
-    };
-    DatasetBuilder.prototype.size = function () {
-        return Math.max(this.sources.length, this.labelledSources.size);
-    };
-    return DatasetBuilder;
-}());
-exports.DatasetBuilder = DatasetBuilder;
package/dist/core/dataset/index.js
DELETED
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DataLoader = exports.TabularLoader = exports.ImageLoader = exports.ImagePreprocessing = exports.ImageData = exports.TabularData = exports.Data = exports.DatasetBuilder = void 0;
-var dataset_builder_1 = require("./dataset_builder");
-Object.defineProperty(exports, "DatasetBuilder", { enumerable: true, get: function () { return dataset_builder_1.DatasetBuilder; } });
-var data_1 = require("./data");
-Object.defineProperty(exports, "Data", { enumerable: true, get: function () { return data_1.Data; } });
-Object.defineProperty(exports, "TabularData", { enumerable: true, get: function () { return data_1.TabularData; } });
-Object.defineProperty(exports, "ImageData", { enumerable: true, get: function () { return data_1.ImageData; } });
-Object.defineProperty(exports, "ImagePreprocessing", { enumerable: true, get: function () { return data_1.ImagePreprocessing; } });
-var data_loader_1 = require("./data_loader");
-Object.defineProperty(exports, "ImageLoader", { enumerable: true, get: function () { return data_loader_1.ImageLoader; } });
-Object.defineProperty(exports, "TabularLoader", { enumerable: true, get: function () { return data_loader_1.TabularLoader; } });
-Object.defineProperty(exports, "DataLoader", { enumerable: true, get: function () { return data_loader_1.DataLoader; } });
package/dist/core/index.d.ts
DELETED
@@ -1,18 +0,0 @@
-export * as tf from '@tensorflow/tfjs';
-export * as data from './dataset';
-export * as serialization from './serialization';
-export * as training from './training';
-export * as privacy from './privacy';
-export { GraphInformant, TrainingInformant, informant } from './informant';
-export { Base as Client } from './client';
-export * as client from './client';
-export { WeightsContainer, aggregation } from './weights';
-export { AsyncBuffer } from './async_buffer';
-export { AsyncInformant } from './async_informant';
-export { Logger, ConsoleLogger, TrainerLog } from './logging';
-export { Memory, ModelType, ModelInfo, Path, ModelSource, Empty as EmptyMemory } from './memory';
-export { Disco, TrainingSchemes } from './training';
-export { Validator } from './validation';
-export { TrainingInformation, DisplayInformation, isTask, Task, isTaskID, TaskID } from './task';
-export * as tasks from './tasks';
-export * from './types';
package/dist/core/index.js
DELETED
@@ -1,41 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.tasks = exports.isTaskID = exports.isTask = exports.Validator = exports.TrainingSchemes = exports.Disco = exports.EmptyMemory = exports.ModelType = exports.Memory = exports.TrainerLog = exports.ConsoleLogger = exports.Logger = exports.AsyncInformant = exports.AsyncBuffer = exports.aggregation = exports.WeightsContainer = exports.client = exports.Client = exports.informant = exports.TrainingInformant = exports.GraphInformant = exports.privacy = exports.training = exports.serialization = exports.data = exports.tf = void 0;
-var tslib_1 = require("tslib");
-exports.tf = (0, tslib_1.__importStar)(require("@tensorflow/tfjs"));
-exports.data = (0, tslib_1.__importStar)(require("./dataset"));
-exports.serialization = (0, tslib_1.__importStar)(require("./serialization"));
-exports.training = (0, tslib_1.__importStar)(require("./training"));
-exports.privacy = (0, tslib_1.__importStar)(require("./privacy"));
-var informant_1 = require("./informant");
-Object.defineProperty(exports, "GraphInformant", { enumerable: true, get: function () { return informant_1.GraphInformant; } });
-Object.defineProperty(exports, "TrainingInformant", { enumerable: true, get: function () { return informant_1.TrainingInformant; } });
-Object.defineProperty(exports, "informant", { enumerable: true, get: function () { return informant_1.informant; } });
-var client_1 = require("./client");
-Object.defineProperty(exports, "Client", { enumerable: true, get: function () { return client_1.Base; } });
-exports.client = (0, tslib_1.__importStar)(require("./client"));
-var weights_1 = require("./weights");
-Object.defineProperty(exports, "WeightsContainer", { enumerable: true, get: function () { return weights_1.WeightsContainer; } });
-Object.defineProperty(exports, "aggregation", { enumerable: true, get: function () { return weights_1.aggregation; } });
-var async_buffer_1 = require("./async_buffer");
-Object.defineProperty(exports, "AsyncBuffer", { enumerable: true, get: function () { return async_buffer_1.AsyncBuffer; } });
-var async_informant_1 = require("./async_informant");
-Object.defineProperty(exports, "AsyncInformant", { enumerable: true, get: function () { return async_informant_1.AsyncInformant; } });
-var logging_1 = require("./logging");
-Object.defineProperty(exports, "Logger", { enumerable: true, get: function () { return logging_1.Logger; } });
-Object.defineProperty(exports, "ConsoleLogger", { enumerable: true, get: function () { return logging_1.ConsoleLogger; } });
-Object.defineProperty(exports, "TrainerLog", { enumerable: true, get: function () { return logging_1.TrainerLog; } });
-var memory_1 = require("./memory");
-Object.defineProperty(exports, "Memory", { enumerable: true, get: function () { return memory_1.Memory; } });
-Object.defineProperty(exports, "ModelType", { enumerable: true, get: function () { return memory_1.ModelType; } });
-Object.defineProperty(exports, "EmptyMemory", { enumerable: true, get: function () { return memory_1.Empty; } });
-var training_1 = require("./training");
-Object.defineProperty(exports, "Disco", { enumerable: true, get: function () { return training_1.Disco; } });
-Object.defineProperty(exports, "TrainingSchemes", { enumerable: true, get: function () { return training_1.TrainingSchemes; } });
-var validation_1 = require("./validation");
-Object.defineProperty(exports, "Validator", { enumerable: true, get: function () { return validation_1.Validator; } });
-var task_1 = require("./task");
-Object.defineProperty(exports, "isTask", { enumerable: true, get: function () { return task_1.isTask; } });
-Object.defineProperty(exports, "isTaskID", { enumerable: true, get: function () { return task_1.isTaskID; } });
-exports.tasks = (0, tslib_1.__importStar)(require("./tasks"));
-(0, tslib_1.__exportStar)(require("./types"), exports);
package/dist/core/informant/graph_informant.js
DELETED
@@ -1,23 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GraphInformant = void 0;
-var immutable_1 = require("immutable");
-var GraphInformant = /** @class */ (function () {
-    function GraphInformant() {
-        this.currentAccuracy = 0;
-        this.accuracyDataSeries = (0, immutable_1.Repeat)(0, GraphInformant.NB_EPOCHS_ON_GRAPH).toList();
-    }
-    GraphInformant.prototype.updateAccuracy = function (accuracy) {
-        this.accuracyDataSeries = this.accuracyDataSeries.shift().push(accuracy);
-        this.currentAccuracy = accuracy;
-    };
-    GraphInformant.prototype.data = function () {
-        return this.accuracyDataSeries;
-    };
-    GraphInformant.prototype.accuracy = function () {
-        return this.currentAccuracy;
-    };
-    GraphInformant.NB_EPOCHS_ON_GRAPH = 10;
-    return GraphInformant;
-}());
-exports.GraphInformant = GraphInformant;