@webiny/api-page-builder-import-export 0.0.0-mt-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +17 -0
- package/exportPages/combine/index.d.ts +19 -0
- package/exportPages/combine/index.js +88 -0
- package/exportPages/process/index.d.ts +26 -0
- package/exportPages/process/index.js +204 -0
- package/exportPages/s3Stream.d.ts +29 -0
- package/exportPages/s3Stream.js +106 -0
- package/exportPages/utils.d.ts +13 -0
- package/exportPages/utils.js +113 -0
- package/exportPages/zipper.d.ts +35 -0
- package/exportPages/zipper.js +137 -0
- package/graphql/crud/pageImportExportTasks.crud.d.ts +5 -0
- package/graphql/crud/pageImportExportTasks.crud.js +394 -0
- package/graphql/crud/pages.crud.d.ts +4 -0
- package/graphql/crud/pages.crud.js +162 -0
- package/graphql/crud.d.ts +3 -0
- package/graphql/crud.js +16 -0
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/pageImportExportTasks.gql.js +80 -0
- package/graphql/graphql/pages.gql.d.ts +4 -0
- package/graphql/graphql/pages.gql.js +72 -0
- package/graphql/graphql/utils/resolve.d.ts +3 -0
- package/graphql/graphql/utils/resolve.js +18 -0
- package/graphql/graphql.d.ts +3 -0
- package/graphql/graphql.js +15 -0
- package/graphql/index.d.ts +3 -0
- package/graphql/index.js +16 -0
- package/graphql/types.d.ts +63 -0
- package/graphql/types.js +5 -0
- package/importPages/client.d.ts +7 -0
- package/importPages/client.js +40 -0
- package/importPages/create/index.d.ts +27 -0
- package/importPages/create/index.js +109 -0
- package/importPages/process/index.d.ts +25 -0
- package/importPages/process/index.js +183 -0
- package/importPages/utils.d.ts +43 -0
- package/importPages/utils.js +539 -0
- package/mockSecurity.d.ts +2 -0
- package/mockSecurity.js +13 -0
- package/package.json +80 -0
- package/types.d.ts +192 -0
- package/types.js +42 -0
@@ -0,0 +1,137 @@
|
|
1
|
+
"use strict";
|
2
|
+
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
4
|
+
|
5
|
+
Object.defineProperty(exports, "__esModule", {
|
6
|
+
value: true
|
7
|
+
});
|
8
|
+
exports.default = exports.ZipOfZip = void 0;
|
9
|
+
|
10
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
11
|
+
|
12
|
+
var _archiver = _interopRequireDefault(require("archiver"));
|
13
|
+
|
14
|
+
var _stream = require("stream");
|
15
|
+
|
16
|
+
var path = _interopRequireWildcard(require("path"));
|
17
|
+
|
18
|
+
var _kebabCase = _interopRequireDefault(require("lodash/kebabCase"));
|
19
|
+
|
20
|
+
var _uniqid = _interopRequireDefault(require("uniqid"));
|
21
|
+
|
22
|
+
var _s3Stream = require("./s3Stream");
|
23
|
+
|
24
|
+
// Lazily creates the two WeakMap caches used by _interopRequireWildcard and
// memoizes them by replacing this function with a cheap lookup on first call.
// Returns null in environments without WeakMap support.
function _getRequireWildcardCache(nodeInterop) {
  if (typeof WeakMap !== "function") {
    return null;
  }
  const interopCache = new WeakMap();
  const nodeInteropCache = new WeakMap();
  // Self-replace: subsequent calls skip cache construction entirely.
  _getRequireWildcardCache = function (flag) {
    return flag ? nodeInteropCache : interopCache;
  };
  return _getRequireWildcardCache(nodeInterop);
}
|
25
|
+
|
26
|
+
// Babel helper backing `import * as ns from "..."` against CommonJS modules.
// Builds (and caches per-module) a namespace object whose named properties
// mirror the module's enumerable keys and whose `default` is the module itself.
function _interopRequireWildcard(obj, nodeInterop) {
  // Genuine ES-module namespaces (unless node-interop is forced) pass through.
  if (!nodeInterop && obj && obj.__esModule) {
    return obj;
  }
  // null and primitives only get wrapped as a `default` export.
  if (obj === null || (typeof obj !== "object" && typeof obj !== "function")) {
    return { default: obj };
  }
  const cache = _getRequireWildcardCache(nodeInterop);
  if (cache && cache.has(obj)) {
    return cache.get(obj);
  }
  const namespace = {};
  const canCopyDescriptors = Object.defineProperty && Object.getOwnPropertyDescriptor;
  for (const key in obj) {
    if (key === "default" || !Object.prototype.hasOwnProperty.call(obj, key)) {
      continue;
    }
    const descriptor = canCopyDescriptors ? Object.getOwnPropertyDescriptor(obj, key) : null;
    if (descriptor && (descriptor.get || descriptor.set)) {
      // Preserve accessor properties instead of snapshotting their values.
      Object.defineProperty(namespace, key, descriptor);
    } else {
      namespace[key] = obj[key];
    }
  }
  namespace.default = obj;
  if (cache) {
    cache.set(obj, namespace);
  }
  return namespace;
}
|
27
|
+
|
28
|
+
// TODO: Move "archive" in layer

/**
 * Streams a single page's export (its JSON data plus asset files read from
 * S3) into one zip archive and uploads that archive back to S3.
 */
class Zipper {
  constructor(config) {
    (0, _defineProperty2.default)(this, "archiveFormat", "zip");
    (0, _defineProperty2.default)(this, "filesDirName", "assets");
    (0, _defineProperty2.default)(this, "archiveFileName", void 0);
    (0, _defineProperty2.default)(this, "config", void 0);
    this.config = config;
    // Target S3 key of the archive, e.g. "<archiveFileKey>/<uniqid>-<kebab-title>.zip".
    this.archiveFileName = (0, _uniqid.default)(`${this.config.archiveFileKey}/`, `-${(0, _kebabCase.default)(this.config.exportInfo.pageTitle)}.zip`);
  }

  /**
   * Builds the {stream, filename} entries to append to the archive: one S3
   * read stream per exported file, plus the page data buffer as a stream.
   */
  s3DownloadStreams() {
    const exportInfo = this.config.exportInfo;
    const prefix = (0, _uniqid.default)("", `-${(0, _kebabCase.default)(exportInfo.pageTitle)}`);
    const files = exportInfo.files.map(({
      key
    }) => {
      return {
        stream: _s3Stream.s3Stream.readStream(key),
        // FIX: zip entry names must use forward slashes (ZIP APPNOTE 4.4.17.1).
        // The previous literal "\\" separators made POSIX extractors produce a
        // single flat file named "prefix\assets\file" instead of a directory tree.
        filename: `${prefix}/${this.filesDirName}/${path.basename(key)}`
      };
    });
    return [...files, {
      stream: _stream.Readable.from(exportInfo.pageDataBuffer),
      filename: `${prefix}/${exportInfo.pageTitle}.json`
    }];
  }

  /**
   * Zips all download streams and uploads the result to S3.
   * @returns the S3 upload promise (resolves when the upload finishes).
   */
  process() {
    const {
      streamPassThrough,
      streamPassThroughUploadPromise
    } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
    // 1. Read all files from S3 using stream.
    const s3FilesStreams = this.s3DownloadStreams();
    // 2. Prepare zip from the file stream.
    const archive = _archiver.default.create(this.archiveFormat);
    // Handle archive events.
    archive.on("error", error => {
      throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
    });
    // Append all file streams to archive.
    s3FilesStreams.forEach(streamDetails => archive.append(streamDetails.stream, {
      name: streamDetails.filename
    }));
    // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
    archive.pipe(streamPassThrough);
    // Finalize the archive (ie we are done appending files but streams have to finish yet).
    // 'close', 'end' or 'finish' may be fired right after calling this method, so
    // register to them beforehand. finalize() returns a promise; the upload
    // promise below is the one callers await, so it is intentionally not awaited here.
    archive.finalize();
    // 3. Return upload stream promise.
    return streamPassThroughUploadPromise;
  }

}
|
85
|
+
|
86
|
+
exports.default = Zipper;
|
87
|
+
const PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;
|
88
|
+
|
89
|
+
/**
 * Bundles a set of already-created page-export zips (referenced by their S3
 * keys) into a single "zip of zips" and uploads it to S3.
 */
class ZipOfZip {
  constructor(keys) {
    this.archiveFormat = "zip";
    this.archiveFileName = void 0;
    this.keys = void 0;
    this.keys = keys;
    // Target S3 key, e.g. "<uniqid>-WEBINY_PAGE_EXPORT.zip".
    this.archiveFileName = (0, _uniqid.default)("", `-${PAGE_EXPORT_BASENAME}`);
  }

  /**
   * One S3 read stream per page-export zip; each archive entry is named
   * after the key's basename.
   */
  getFileStreams() {
    const toEntry = key => ({
      stream: _s3Stream.s3Stream.readStream(key),
      filename: `${path.basename(key)}`
    });
    return this.keys.map(toEntry);
  }

  /**
   * Zips all page-export archives together and uploads the bundle to S3.
   * @returns the S3 upload promise (resolves when the upload finishes).
   */
  process() {
    // Open the S3 upload target first so the archive can pipe straight into it.
    const upload = _s3Stream.s3Stream.writeStream(this.archiveFileName);
    // 1. Read all files from S3 using streams.
    const entries = this.getFileStreams();
    // 2. Prepare the zip from those streams.
    const archive = _archiver.default.create(this.archiveFormat);
    // Surface archiver failures loudly.
    archive.on("error", error => {
      throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
    });
    // Append every file stream to the archive.
    for (const { stream, filename } of entries) {
      archive.append(stream, {
        name: filename
      });
    }
    // Pipe archive output into the pass-through stream being uploaded to S3.
    archive.pipe(upload.streamPassThrough);
    // Done appending; 'close'/'end'/'finish' may fire right after this call,
    // so any listeners must already be registered.
    archive.finalize();
    // 3. Hand back the upload promise.
    return upload.streamPassThroughUploadPromise;
  }

}
|
136
|
+
|
137
|
+
exports.ZipOfZip = ZipOfZip;
|
@@ -0,0 +1,5 @@
|
|
1
|
+
// Type declarations for the pageImportExportTasks CRUD context plugin factory.
import { ContextPlugin } from "@webiny/handler/plugins/ContextPlugin";
import { PageImportExportPluginsParams } from "../../types";
import { PbPageImportExportContext } from "../types";
/**
 * Factory producing a ContextPlugin that wires page import/export task CRUD
 * operations (backed by the given storage operations) onto the context.
 */
declare const _default: ({ storageOperations }: PageImportExportPluginsParams) => ContextPlugin<PbPageImportExportContext>;
export default _default;
|
@@ -0,0 +1,394 @@
|
|
1
|
+
"use strict";
|
2
|
+
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
4
|
+
|
5
|
+
Object.defineProperty(exports, "__esModule", {
|
6
|
+
value: true
|
7
|
+
});
|
8
|
+
exports.default = void 0;
|
9
|
+
|
10
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
11
|
+
|
12
|
+
var _mdbid = _interopRequireDefault(require("mdbid"));
|
13
|
+
|
14
|
+
var _fields = require("@commodo/fields");
|
15
|
+
|
16
|
+
var _commodoFieldsObject = require("commodo-fields-object");
|
17
|
+
|
18
|
+
var _validation = require("@webiny/validation");
|
19
|
+
|
20
|
+
var _ContextPlugin = require("@webiny/handler/plugins/ContextPlugin");
|
21
|
+
|
22
|
+
var _checkBasePermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkBasePermissions"));
|
23
|
+
|
24
|
+
var _checkOwnPermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkOwnPermissions"));
|
25
|
+
|
26
|
+
var _handlerGraphql = require("@webiny/handler-graphql");
|
27
|
+
|
28
|
+
var _types = require("../../types");
|
29
|
+
|
30
|
+
var _error = _interopRequireDefault(require("@webiny/error"));
|
31
|
+
|
32
|
+
// Babel helper: collects an object's own string keys plus its own symbol
// keys. When `enumerableOnly` is truthy, non-enumerable symbols are dropped
// (string keys from Object.keys are always enumerable already).
function ownKeys(object, enumerableOnly) {
  const names = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(sym => Object.getOwnPropertyDescriptor(object, sym).enumerable);
    }
    names.push(...symbols);
  }
  return names;
}
|
33
|
+
|
34
|
+
// Babel helper backing object-spread syntax ({ ...a, ...b }).
// Babel emits alternating arguments: odd positions (1st, 3rd, ...) are real
// spread sources whose own enumerable keys/symbols are value-assigned via
// _defineProperty; even positions are copied through property descriptors
// when supported (preserving getters/setters), falling back to per-key
// defineProperty otherwise. Nullish sources are treated as empty objects.
function _objectSpread(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      // Odd argument: plain value copy of enumerable own keys.
      ownKeys(Object(source), true).forEach(function (key) {
        (0, _defineProperty2.default)(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      // Even argument, modern engines: copy full descriptors in one call.
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      // Even argument, legacy engines: copy descriptors one key at a time.
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}
|
35
|
+
|
36
|
+
// All task statuses joined with ":" — NOTE(review): this matches how the
// "in:" validator rule below appears to take its allowed values (separated
// by ":"); confirm against @webiny/validation's "in" rule docs.
const validStatus = `${_types.PageImportExportTaskStatus.PENDING}:${_types.PageImportExportTaskStatus.PROCESSING}:${_types.PageImportExportTaskStatus.COMPLETED}:${_types.PageImportExportTaskStatus.FAILED}`;
// Input model for creating a task/sub-task: status is required and must be
// one of the known statuses; the remaining fields are free-form objects.
const CreateDataModel = (0, _fields.withFields)({
  status: (0, _fields.string)({
    validation: _validation.validation.create(`required,in:${validStatus}`)
  }),
  data: (0, _commodoFieldsObject.object)(),
  input: (0, _commodoFieldsObject.object)(),
  stats: (0, _commodoFieldsObject.object)(),
  error: (0, _commodoFieldsObject.object)()
})();
// Input model for updating a task/sub-task: same shape, but status is optional.
const UpdateDataModel = (0, _fields.withFields)({
  status: (0, _fields.string)({
    validation: _validation.validation.create(`in:${validStatus}`)
  }),
  data: (0, _commodoFieldsObject.object)(),
  input: (0, _commodoFieldsObject.object)(),
  stats: (0, _commodoFieldsObject.object)(),
  error: (0, _commodoFieldsObject.object)()
})();
// Import/export tasks are guarded by the same permission as pages.
const PERMISSION_NAME = "pb.page";
|
56
|
+
|
57
|
+
/**
 * Factory producing a ContextPlugin that attaches `pageImportExportTask`
 * CRUD methods to `context.pageBuilder`. Every method checks the base
 * "pb.page" permission (and ownership where applicable) and scopes reads
 * and writes to the current tenant and content locale.
 */
var _default = ({
  storageOperations
}) => new _ContextPlugin.ContextPlugin(async context => {
  /**
   * If pageBuilder is not defined on the context, do not continue, but log it.
   */
  if (!context.pageBuilder) {
    console.log("Missing pageBuilder on context. Skipping Page ImportExportTasks crud.");
    return;
  } // Modify context


  context.pageBuilder.pageImportExportTask = {
    storageOperations,

    /**
     * Fetches a single task by id for the current tenant/locale.
     * Returns null when not found; wraps storage failures in WebinyError.
     */
    async getTask(id) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
      });
      const tenant = context.tenancy.getCurrentTenant();
      const locale = context.i18nContent.getLocale();
      const params = {
        where: {
          tenant: tenant.id,
          locale: locale.code,
          id
        }
      };
      let pageImportExportTask;

      try {
        pageImportExportTask = await storageOperations.getTask(params);

        if (!pageImportExportTask) {
          return null;
        }
      } catch (ex) {
        throw new _error.default(ex.message || "Could not get pageImportExportTask by id.", ex.code || "GET_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          params
        }));
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);
      return pageImportExportTask;
    },

    /**
     * Lists tasks for the current tenant/locale, default-sorted by
     * createdOn ascending. Callers limited to "own" records are further
     * filtered by their identity.
     */
    async listTasks(params) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
      });
      const tenant = context.tenancy.getCurrentTenant();
      const locale = context.i18nContent.getLocale();
      const {
        sort,
        limit
      } = params || {};
      const listParams = {
        where: {
          tenant: tenant.id,
          locale: locale.code
        },
        sort: Array.isArray(sort) && sort.length > 0 ? sort : ["createdOn_ASC"],
        limit: limit
      }; // If user can only manage own records, let's add that to the listing.

      if (permission.own) {
        const identity = context.security.getIdentity();
        listParams.where.createdBy = identity.id;
      }

      try {
        const [items] = await storageOperations.listTasks(listParams);
        return items;
      } catch (ex) {
        // FIX: message previously read "Could not list all page elements."
        // (copy-pasted from the page-elements CRUD). The fallback error code
        // is intentionally left unchanged so callers matching on it keep working.
        throw new _error.default(ex.message || "Could not list all pageImportExportTasks.", ex.code || "LIST_PAGE_ELEMENTS_ERROR", {
          params
        });
      }
    },

    /**
     * Validates input against CreateDataModel (status required), stamps
     * tenant/locale/id/createdOn/createdBy, and persists the new task.
     */
    async createTask(input) {
      await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const createDataModel = new CreateDataModel().populate(input);
      await createDataModel.validate();
      const id = (0, _mdbid.default)();
      const identity = context.security.getIdentity();
      const data = await createDataModel.toJSON();

      const pageImportExportTask = _objectSpread(_objectSpread({}, data), {}, {
        tenant: context.tenancy.getCurrentTenant().id,
        locale: context.i18nContent.getLocale().code,
        id,
        createdOn: new Date().toISOString(),
        createdBy: {
          id: identity.id,
          type: identity.type,
          displayName: identity.displayName
        }
      });

      try {
        return await storageOperations.createTask({
          input: data,
          task: pageImportExportTask
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not create pageImportExportTask.", ex.code || "CREATE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          pageImportExportTask
        }));
      }
    },

    /**
     * Validates partial input against UpdateDataModel, merges only the
     * dirty fields over the existing task, and persists the result.
     * Throws NotFoundError when the task does not exist.
     */
    async updateTask(id, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getTask(id);

      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);
      const updateDataModel = new UpdateDataModel().populate(input);
      await updateDataModel.validate();
      const data = await updateDataModel.toJSON({
        onlyDirty: true
      });

      const pageImportExportTask = _objectSpread(_objectSpread({}, original), data);

      try {
        return await storageOperations.updateTask({
          input: data,
          original,
          task: pageImportExportTask
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not update pageImportExportTask.", ex.code || "UPDATE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          original,
          pageImportExportTask
        }));
      }
    },

    /**
     * Deletes a task after ownership checks.
     * Throws NotFoundError when the task does not exist.
     */
    async deleteTask(id) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "d"
      });
      const pageImportExportTask = await context.pageBuilder.pageImportExportTask.getTask(id);

      if (!pageImportExportTask) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);

      try {
        return await storageOperations.deleteTask({
          task: pageImportExportTask
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not delete pageImportExportTask.", ex.code || "DELETE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          pageImportExportTask
        }));
      }
    },

    /**
     * Applies a stats update to an existing task via the dedicated
     * storage operation (no model validation on the stats payload).
     */
    async updateStats(id, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getTask(id);

      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);

      try {
        return await storageOperations.updateTaskStats({
          input,
          original
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not update pageImportExportTask.", ex.code || "UPDATE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          original
        }));
      }
    },

    /**
     * Creates a sub-task under `parent` with a caller-supplied id,
     * validated against CreateDataModel and stamped like createTask.
     */
    async createSubTask(parent, id, input) {
      await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const createDataModel = new CreateDataModel().populate(input);
      await createDataModel.validate();
      const identity = context.security.getIdentity();
      const data = await createDataModel.toJSON();

      const pageImportExportSubTask = _objectSpread(_objectSpread({}, data), {}, {
        tenant: context.tenancy.getCurrentTenant().id,
        locale: context.i18nContent.getLocale().code,
        id: id,
        parent: parent,
        createdOn: new Date().toISOString(),
        createdBy: {
          id: identity.id,
          type: identity.type,
          displayName: identity.displayName
        }
      });

      try {
        return await storageOperations.createSubTask({
          input: data,
          subTask: pageImportExportSubTask
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not create pageImportExportSubTask.", ex.code || "CREATE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          pageImportExportSubTask
        }));
      }
    },

    /**
     * Updates a sub-task (dirty fields only), mirroring updateTask.
     * Throws NotFoundError when the parent/id pair does not exist.
     */
    async updateSubTask(parent, subTaskId, input) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "w"
      });
      const original = await context.pageBuilder.pageImportExportTask.getSubTask(parent, subTaskId);

      if (!original) {
        throw new _handlerGraphql.NotFoundError(`PageImportExportTask parent: "${parent}" and id: "${subTaskId}" not found.`);
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, original);
      const updateDataModel = new UpdateDataModel().populate(input);
      await updateDataModel.validate();
      const data = await updateDataModel.toJSON({
        onlyDirty: true
      }); // TODO: Merge recursively

      const pageImportExportSubTask = _objectSpread(_objectSpread({}, original), data);

      try {
        return await storageOperations.updateSubTask({
          input: data,
          original,
          subTask: pageImportExportSubTask
        });
      } catch (ex) {
        throw new _error.default(ex.message || "Could not update pageImportExportSubTask.", ex.code || "UPDATE_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          pageImportExportSubTask,
          original
        }));
      }
    },

    /**
     * Fetches a single sub-task by parent id + sub-task id for the current
     * tenant/locale. Returns null when not found.
     */
    async getSubTask(parent, subTaskId) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
      });
      const tenant = context.tenancy.getCurrentTenant();
      const locale = context.i18nContent.getLocale();
      const params = {
        where: {
          tenant: tenant.id,
          locale: locale.code,
          id: subTaskId,
          parent: parent
        }
      };
      let pageImportExportSubTask;

      try {
        pageImportExportSubTask = await storageOperations.getSubTask(params);

        if (!pageImportExportSubTask) {
          return null;
        }
      } catch (ex) {
        throw new _error.default(ex.message || "Could not get pageImportExportSubTask by id.", ex.code || "GET_PAGE_IMPORT_EXPORT_TASK_ERROR", _objectSpread(_objectSpread({}, ex.data || {}), {}, {
          params
        }));
      }

      const identity = context.security.getIdentity();
      (0, _checkOwnPermissions.default)(identity, permission, pageImportExportSubTask);
      return pageImportExportSubTask;
    },

    /**
     * Lists sub-tasks of `parent` filtered by status, up to `limit`.
     * Callers limited to "own" records are filtered by their identity.
     */
    async listSubTasks(parent, status, limit) {
      const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
        rwd: "r"
      });
      const tenant = context.tenancy.getCurrentTenant();
      const locale = context.i18nContent.getLocale();
      const listParams = {
        where: {
          tenant: tenant.id,
          locale: locale.code,
          parent: parent,
          status
        },
        limit
      }; // If user can only manage own records, let's add that to the listing.

      if (permission.own) {
        const identity = context.security.getIdentity();
        listParams.where.createdBy = identity.id;
      }

      try {
        const [items] = await storageOperations.listSubTasks(listParams);
        return items;
      } catch (ex) {
        throw new _error.default(ex.message || "Could not list all pageImportExportSubTask.", ex.code || "LIST_PAGE_IMPORT_EXPORT_TASK_ERROR", {
          params: {
            parent,
            status,
            limit
          }
        });
      }
    }

  };
});
|
393
|
+
|
394
|
+
exports.default = _default;
|