@knocklabs/cli 0.1.17 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ function _export(target, all) {
+     for(var name in all)Object.defineProperty(target, name, {
+         enumerable: true,
+         get: all[name]
+     });
+ }
+ _export(exports, {
+     MESSAGE_TYPE_JSON: function() {
+         return MESSAGE_TYPE_JSON;
+     },
+     buildMessageTypeDirBundle: function() {
+         return buildMessageTypeDirBundle;
+     }
+ });
+ const _lodash = require("lodash");
+ const _objectisomorphic = require("../../helpers/object.isomorphic");
+ const _constisomorphic = require("../shared/const.isomorphic");
+ const _helpersisomorphic = require("../shared/helpers.isomorphic");
+ const MESSAGE_TYPE_JSON = "message_type.json";
+ const compileExtractionSettings = (messageType)=>{
+     const extractableFields = (0, _lodash.get)(messageType, [
+         "__annotation",
+         "extractable_fields"
+     ], {});
+     const map = new Map();
+     for (const [key] of Object.entries(messageType)){
+         // If the field we are on is extractable, then add its extraction settings
+         // to the map with the current object path.
+         if (key in extractableFields) {
+             map.set([
+                 key
+             ], extractableFields[key]);
+         }
+     }
+     return map;
+ };
+ const buildMessageTypeDirBundle = (remoteMessageType, localMessageType = {})=>{
+     const bundle = {};
+     const mutRemoteMessageType = (0, _lodash.cloneDeep)(remoteMessageType);
+     // A map of extraction settings of every field in the message type
+     const compiledExtractionSettings = compileExtractionSettings(mutRemoteMessageType);
+     // Iterate through each extractable field, determine whether we need to
+     // extract the field content, and if so, perform the extraction.
+     for (const [objPathParts, extractionSettings] of compiledExtractionSettings){
+         // If this message type doesn't have this field path, then we don't extract.
+         if (!(0, _lodash.has)(mutRemoteMessageType, objPathParts)) continue;
+         // If the field at this path is extracted in the local message type, then
+         // always extract; otherwise extract based on the field settings default.
+         const objPathStr = _objectisomorphic.ObjPath.stringify(objPathParts);
+         const extractedFilePath = (0, _lodash.get)(localMessageType, `${objPathStr}${_constisomorphic.FILEPATH_MARKER}`);
+         const { default: extractByDefault, file_ext: fileExt } = extractionSettings;
+         if (!extractedFilePath && !extractByDefault) continue;
+         // By this point, we have a field where we need to extract its content.
+         const data = (0, _lodash.get)(mutRemoteMessageType, objPathParts);
+         const fileName = objPathParts.pop();
+         // If we have an extracted file path from the local message type, we use
+         // that. In the other case we use the default path.
+         const relpath = typeof extractedFilePath === "string" ? extractedFilePath : `${fileName}.${fileExt}`;
+         // Perform the extraction by adding the content and its file path to the
+         // bundle for writing to the file system later. Then replace the field
+         // content with the extracted file path and mark the field as extracted
+         // with @ suffix.
+         (0, _lodash.set)(bundle, [
+             relpath
+         ], data);
+         (0, _lodash.set)(mutRemoteMessageType, `${objPathStr}${_constisomorphic.FILEPATH_MARKER}`, relpath);
+         (0, _lodash.unset)(mutRemoteMessageType, objPathStr);
+     }
+     // At this point the bundle contains all extractable files, so we finally add
+     // the message type JSON relative path + the file content.
+     return (0, _lodash.set)(bundle, [
+         MESSAGE_TYPE_JSON
+     ], (0, _helpersisomorphic.prepareResourceJson)(mutRemoteMessageType));
+ };
@@ -0,0 +1,4 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
@@ -0,0 +1,134 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ function _export(target, all) {
+     for(var name in all)Object.defineProperty(target, name, {
+         enumerable: true,
+         get: all[name]
+     });
+ }
+ _export(exports, {
+     ensureValidCommandTarget: function() {
+         return ensureValidCommandTarget;
+     },
+     isPartialDir: function() {
+         return isPartialDir;
+     },
+     lsPartialJson: function() {
+         return lsPartialJson;
+     },
+     partialJsonPath: function() {
+         return partialJsonPath;
+     }
+ });
+ const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
+ const _core = require("@oclif/core");
+ const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+ const _processorisomorphic = require("./processor.isomorphic");
+ function _getRequireWildcardCache(nodeInterop) {
+     if (typeof WeakMap !== "function") return null;
+     var cacheBabelInterop = new WeakMap();
+     var cacheNodeInterop = new WeakMap();
+     return (_getRequireWildcardCache = function(nodeInterop) {
+         return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+     })(nodeInterop);
+ }
+ function _interop_require_wildcard(obj, nodeInterop) {
+     if (!nodeInterop && obj && obj.__esModule) {
+         return obj;
+     }
+     if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+         return {
+             default: obj
+         };
+     }
+     var cache = _getRequireWildcardCache(nodeInterop);
+     if (cache && cache.has(obj)) {
+         return cache.get(obj);
+     }
+     var newObj = {
+         __proto__: null
+     };
+     var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+     for(var key in obj){
+         if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+             var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+             if (desc && (desc.get || desc.set)) {
+                 Object.defineProperty(newObj, key, desc);
+             } else {
+                 newObj[key] = obj[key];
+             }
+         }
+     }
+     newObj.default = obj;
+     if (cache) {
+         cache.set(obj, newObj);
+     }
+     return newObj;
+ }
+ const partialJsonPath = (partialDirCtx)=>_nodepath.resolve(partialDirCtx.abspath, _processorisomorphic.PARTIAL_JSON);
+ const lsPartialJson = async (dirPath)=>{
+     const partialJsonPath = _nodepath.resolve(dirPath, _processorisomorphic.PARTIAL_JSON);
+     const exists = await _fsextra.pathExists(partialJsonPath);
+     return exists ? partialJsonPath : undefined;
+ };
+ const isPartialDir = async (dirPath)=>Boolean(await lsPartialJson(dirPath));
+ const ensureValidCommandTarget = async (props, runContext)=>{
+     const { args, flags } = props;
+     const { commandId, resourceDir: resourceDirCtx, cwd: runCwd } = runContext;
+     // If the target resource is a different type than the current resource dir
+     // type, error out.
+     if (resourceDirCtx && resourceDirCtx.type !== "partial") {
+         return _core.ux.error(`Cannot run ${commandId} inside a ${resourceDirCtx.type} directory`);
+     }
+     // Cannot accept both partial key arg and --all flag.
+     if (flags.all && args.partialKey) {
+         return _core.ux.error(`partialKey arg \`${args.partialKey}\` cannot also be provided when using --all`);
+     }
+     // --all flag is given, which means no partial key arg.
+     if (flags.all) {
+         // If --all flag used inside a partial directory, then require a partials
+         // dir path.
+         if (resourceDirCtx && !flags["partials-dir"]) {
+             return _core.ux.error("Missing required flag partials-dir");
+         }
+         // Targeting all partial dirs in the partials index dir.
+         // TODO: Default to the knock project config first if present before cwd.
+         const defaultToCwd = {
+             abspath: runCwd,
+             exists: true
+         };
+         const indexDirCtx = flags["partials-dir"] || defaultToCwd;
+         return {
+             type: "partialsIndexDir",
+             context: indexDirCtx
+         };
+     }
+     // Partial key arg is given, which means no --all flag.
+     if (args.partialKey) {
+         if (resourceDirCtx && resourceDirCtx.key !== args.partialKey) {
+             return _core.ux.error(`Cannot run ${commandId} \`${args.partialKey}\` inside another partial directory:\n${resourceDirCtx.key}`);
+         }
+         const targetDirPath = resourceDirCtx ? resourceDirCtx.abspath : _nodepath.resolve(runCwd, args.partialKey);
+         const partialDirCtx = {
+             type: "partial",
+             key: args.partialKey,
+             abspath: targetDirPath,
+             exists: await isPartialDir(targetDirPath)
+         };
+         return {
+             type: "partialDir",
+             context: partialDirCtx
+         };
+     }
+     // From this point on, we have neither a partial key arg nor --all flag.
+     // If running inside a partial directory, then use that partial directory.
+     if (resourceDirCtx) {
+         return {
+             type: "partialDir",
+             context: resourceDirCtx
+         };
+     }
+     return _core.ux.error("Missing 1 required arg:\npartialKey");
+ };
@@ -2,8 +2,11 @@
  Object.defineProperty(exports, "__esModule", {
      value: true
  });
+ _export_star(require("./helpers"), exports);
  _export_star(require("./processor.isomorphic"), exports);
+ _export_star(require("./reader"), exports);
  _export_star(require("./types"), exports);
+ _export_star(require("./writer"), exports);
  function _export_star(from, to) {
      Object.keys(from).forEach(function(k) {
          if (k !== "default" && !Object.prototype.hasOwnProperty.call(to, k)) {
@@ -14,34 +14,14 @@ _export(exports, {
      },
      buildPartialDirBundle: function() {
          return buildPartialDirBundle;
-     },
-     toPartialJson: function() {
-         return toPartialJson;
      }
  });
  const _lodash = require("lodash");
  const _objectisomorphic = require("../../helpers/object.isomorphic");
  const _constisomorphic = require("../shared/const.isomorphic");
+ const _helpersisomorphic = require("../shared/helpers.isomorphic");
  const _types = require("./types");
  const PARTIAL_JSON = "partial.json";
- /*
-  * Sanitize the partial content into a format that's appropriate for reading
-  * and writing, by stripping out any annotation fields and handling readonly
-  * fields.
-  */ const toPartialJson = (partial)=>{
-     var _partial___annotation;
-     // Move read only field under the dedicated field "__readonly".
-     const readonlyFields = ((_partial___annotation = partial.__annotation) === null || _partial___annotation === void 0 ? void 0 : _partial___annotation.readonly_fields) || [];
-     const [readonly, remainder] = (0, _objectisomorphic.split)(partial, readonlyFields);
-     const partialjson = {
-         ...remainder,
-         __readonly: readonly
-     };
-     // Strip out all schema annotations, so not to expose them to end users.
-     return (0, _objectisomorphic.omitDeep)(partialjson, [
-         "__annotation"
-     ]);
- };
  // Maps the partial type to the correct file extension. Defaults to 'txt'
  const partialTypeToFileExt = (type)=>{
      switch(type){
@@ -87,8 +67,7 @@ const buildPartialDirBundle = (remotePartial, localPartial = {})=>{
      // A map of extraction settings of every field in the partial
      const compiledExtractionSettings = compileExtractionSettings(mutRemotePartial);
      // Iterate through each extractable field, determine whether we need to
-     // extract the field content, and if so, perform the
-     // extraction.
+     // extract the field content, and if so, perform the extraction.
      for (const [objPathParts, extractionSettings] of compiledExtractionSettings){
          // If this partial doesn't have this field path, then we don't extract.
          if (!(0, _lodash.has)(mutRemotePartial, objPathParts)) continue;
@@ -118,5 +97,5 @@ const buildPartialDirBundle = (remotePartial, localPartial = {})=>{
      // the partial JSON relative path + the file content.
      return (0, _lodash.set)(bundle, [
          PARTIAL_JSON
-     ], toPartialJson(mutRemotePartial));
+     ], (0, _helpersisomorphic.prepareResourceJson)(mutRemotePartial));
  };
@@ -0,0 +1,198 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ function _export(target, all) {
+     for(var name in all)Object.defineProperty(target, name, {
+         enumerable: true,
+         get: all[name]
+     });
+ }
+ _export(exports, {
+     readAllForCommandTarget: function() {
+         return readAllForCommandTarget;
+     },
+     readPartialDir: function() {
+         return readPartialDir;
+     }
+ });
+ const _nodepath = /*#__PURE__*/ _interop_require_default(require("node:path"));
+ const _core = require("@oclif/core");
+ const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+ const _lodash = require("lodash");
+ const _error = require("../../helpers/error");
+ const _json = require("../../helpers/json");
+ const _objectisomorphic = require("../../helpers/object.isomorphic");
+ const _helpers = require("../shared/helpers");
+ const _constisomorphic = require("../shared/const.isomorphic");
+ const _helpers1 = require("./helpers");
+ const _processorisomorphic = require("./processor.isomorphic");
+ function _interop_require_default(obj) {
+     return obj && obj.__esModule ? obj : {
+         default: obj
+     };
+ }
+ function _getRequireWildcardCache(nodeInterop) {
+     if (typeof WeakMap !== "function") return null;
+     var cacheBabelInterop = new WeakMap();
+     var cacheNodeInterop = new WeakMap();
+     return (_getRequireWildcardCache = function(nodeInterop) {
+         return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+     })(nodeInterop);
+ }
+ function _interop_require_wildcard(obj, nodeInterop) {
+     if (!nodeInterop && obj && obj.__esModule) {
+         return obj;
+     }
+     if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+         return {
+             default: obj
+         };
+     }
+     var cache = _getRequireWildcardCache(nodeInterop);
+     if (cache && cache.has(obj)) {
+         return cache.get(obj);
+     }
+     var newObj = {
+         __proto__: null
+     };
+     var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+     for(var key in obj){
+         if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+             var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+             if (desc && (desc.get || desc.set)) {
+                 Object.defineProperty(newObj, key, desc);
+             } else {
+                 newObj[key] = obj[key];
+             }
+         }
+     }
+     newObj.default = obj;
+     if (cache) {
+         cache.set(obj, newObj);
+     }
+     return newObj;
+ }
+ /*
+  * For the given list of partial directory contexts, read each partial dir and
+  * return partial directory data.
+  */ const readPartialsDirs = async (partialDirCtxs, opts = {})=>{
+     const partials = [];
+     const errors = [];
+     for (const partialDirCtx of partialDirCtxs){
+         // eslint-disable-next-line no-await-in-loop
+         const [partial, readErrors] = await readPartialDir(partialDirCtx, opts);
+         if (readErrors.length > 0) {
+             const partialJsonPath = _nodepath.default.resolve(partialDirCtx.abspath, _processorisomorphic.PARTIAL_JSON);
+             const e = new _error.SourceError((0, _error.formatErrors)(readErrors), partialJsonPath);
+             errors.push(e);
+             continue;
+         }
+         partials.push({
+             ...partialDirCtx,
+             content: partial
+         });
+     }
+     return [
+         partials,
+         errors
+     ];
+ };
+ const readPartialDir = async (partialDirCtx, opts = {})=>{
+     const { abspath } = partialDirCtx;
+     const { withExtractedFiles = false, withReadonlyField = false } = opts;
+     const dirExists = await _fsextra.pathExists(abspath);
+     if (!dirExists) throw new Error(`${abspath} does not exist`);
+     const partialJsonPath = await (0, _helpers1.lsPartialJson)(abspath);
+     if (!partialJsonPath) throw new Error(`${abspath} is not a partial directory`);
+     const result = await (0, _json.readJson)(partialJsonPath);
+     if (!result[0]) return result;
+     let [partialJson] = result;
+     partialJson = withReadonlyField ? partialJson : (0, _objectisomorphic.omitDeep)(partialJson, [
+         "__readonly"
+     ]);
+     return withExtractedFiles ? joinExtractedFiles(partialDirCtx, partialJson) : [
+         partialJson,
+         []
+     ];
+ };
+ const joinExtractedFiles = async (partialDirCtx, partialJson)=>{
+     // Tracks any errors encountered during traversal. Mutated in place.
+     const errors = [];
+     // Tracks each new valid extracted file path seen (rebased to be relative to
+     // partial.json) in the partial json node. Mutated in place, and used
+     // to validate the uniqueness of an extracted path encountered.
+     const uniqueFilePaths = {};
+     (0, _objectisomorphic.mapValuesDeep)(partialJson, (relpath, key, parts)=>{
+         // If not marked with the @suffix, there's nothing to do.
+         if (!_constisomorphic.FILEPATH_MARKED_RE.test(key)) return;
+         const objPathToFieldStr = _objectisomorphic.ObjPath.stringify(parts);
+         const inlinObjPathStr = objPathToFieldStr.replace(_constisomorphic.FILEPATH_MARKED_RE, "");
+         // If there is inlined content present already, then nothing more to do.
+         if ((0, _lodash.hasIn)(partialJson, inlinObjPathStr)) return;
+         // Check if the extracted path found at the current field path is valid
+         const invalidFilePathError = (0, _helpers.validateExtractedFilePath)(relpath, _nodepath.default.resolve(partialDirCtx.abspath, _processorisomorphic.PARTIAL_JSON), uniqueFilePaths, objPathToFieldStr);
+         if (invalidFilePathError) {
+             errors.push(invalidFilePathError);
+             // Wipe the invalid file path in the node so the final partial json
+             // object ends up with only valid file paths, this way partial writer
+             // can see only valid file paths and use those when pulling.
+             (0, _lodash.set)(partialJson, inlinObjPathStr, undefined);
+             (0, _lodash.set)(partialJson, objPathToFieldStr, undefined);
+             return;
+         }
+         // By this point we have a valid extracted file path, so attempt to read the file.
+         const [content, readExtractedFileError] = (0, _helpers.readExtractedFileSync)(relpath, partialDirCtx, key);
+         if (readExtractedFileError) {
+             errors.push(readExtractedFileError);
+             // If there's an error, replace the extracted file path with the original one, and set the
+             // inlined field path in partial object with empty content, so we know
+             // we do not need to try inlining again.
+             (0, _lodash.set)(partialJson, objPathToFieldStr, relpath);
+             (0, _lodash.set)(partialJson, inlinObjPathStr, undefined);
+             return;
+         }
+         // Inline the file content and remove the extracted file path.
+         (0, _lodash.set)(partialJson, objPathToFieldStr, relpath);
+         (0, _lodash.set)(partialJson, inlinObjPathStr, content);
+     });
+     return [
+         partialJson,
+         errors
+     ];
+ };
+ const readAllForCommandTarget = async (target, opts = {})=>{
+     const { type: targetType, context: targetCtx } = target;
+     if (!targetCtx.exists) {
+         const subject = targetType === "partialDir" ? "a partial directory at" : "partial directories in";
+         return _core.ux.error(`Cannot locate ${subject} \`${targetCtx.abspath}\``);
+     }
+     switch(targetType){
+         case "partialDir":
+             {
+                 return readPartialsDirs([
+                     targetCtx
+                 ], opts);
+             }
+         case "partialsIndexDir":
+             {
+                 const dirents = await _fsextra.readdir(targetCtx.abspath, {
+                     withFileTypes: true
+                 });
+                 const promises = dirents.map(async (dirent)=>{
+                     const abspath = _nodepath.default.resolve(targetCtx.abspath, dirent.name);
+                     const partialDirCtx = {
+                         type: "partial",
+                         key: dirent.name,
+                         abspath,
+                         exists: await (0, _helpers1.isPartialDir)(abspath)
+                     };
+                     return partialDirCtx;
+                 });
+                 const partialDirCtxs = (await Promise.all(promises)).filter((partialDirCtx)=>partialDirCtx.exists);
+                 return readPartialsDirs(partialDirCtxs, opts);
+             }
+         default:
+             throw new Error(`Invalid partial command target: ${target}`);
+     }
+ };
@@ -0,0 +1,175 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ function _export(target, all) {
+     for(var name in all)Object.defineProperty(target, name, {
+         enumerable: true,
+         get: all[name]
+     });
+ }
+ _export(exports, {
+     prunePartialsIndexDir: function() {
+         return prunePartialsIndexDir;
+     },
+     writePartialDirFromData: function() {
+         return writePartialDirFromData;
+     },
+     writePartialsIndexDir: function() {
+         return writePartialsIndexDir;
+     }
+ });
+ const _nodepath = /*#__PURE__*/ _interop_require_wildcard(require("node:path"));
+ const _fsextra = /*#__PURE__*/ _interop_require_wildcard(require("fs-extra"));
+ const _lodash = require("lodash");
+ const _const = require("../../helpers/const");
+ const _json = require("../../helpers/json");
+ const _helpers = require("./helpers");
+ const _processorisomorphic = require("./processor.isomorphic");
+ const _reader = require("./reader");
+ function _getRequireWildcardCache(nodeInterop) {
+     if (typeof WeakMap !== "function") return null;
+     var cacheBabelInterop = new WeakMap();
+     var cacheNodeInterop = new WeakMap();
+     return (_getRequireWildcardCache = function(nodeInterop) {
+         return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+     })(nodeInterop);
+ }
+ function _interop_require_wildcard(obj, nodeInterop) {
+     if (!nodeInterop && obj && obj.__esModule) {
+         return obj;
+     }
+     if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+         return {
+             default: obj
+         };
+     }
+     var cache = _getRequireWildcardCache(nodeInterop);
+     if (cache && cache.has(obj)) {
+         return cache.get(obj);
+     }
+     var newObj = {
+         __proto__: null
+     };
+     var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+     for(var key in obj){
+         if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+             var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+             if (desc && (desc.get || desc.set)) {
+                 Object.defineProperty(newObj, key, desc);
+             } else {
+                 newObj[key] = obj[key];
+             }
+         }
+     }
+     newObj.default = obj;
+     if (cache) {
+         cache.set(obj, newObj);
+     }
+     return newObj;
+ }
+ const writePartialDirFromData = async (partialDirCtx, remotePartial)=>{
+     // If the partial directory exists on the file system (i.e. previously
+     // pulled before), then read the partial file to use as a reference.
+     const [localPartial] = partialDirCtx.exists ? await (0, _reader.readPartialDir)(partialDirCtx, {
+         withExtractedFiles: true
+     }) : [];
+     const bundle = (0, _processorisomorphic.buildPartialDirBundle)(remotePartial, localPartial);
+     return writePartialDirFromBundle(partialDirCtx, bundle);
+ };
+ /*
+  * A lower level write function that takes a constructed partial dir bundle
+  * and writes it into a partial directory on a local file system.
+  *
+  * It does not make any assumptions about how the partial directory bundle was
+  * built; for example, it can be from parsing the partial data fetched from
+  * the Knock API, or built manually for scaffolding purposes.
+  */ const writePartialDirFromBundle = async (partialDirCtx, partialDirBundle)=>{
+     const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+     try {
+         if (partialDirCtx.exists) {
+             await _fsextra.copy(partialDirCtx.abspath, backupDirPath);
+             await _fsextra.emptyDir(partialDirCtx.abspath);
+         }
+         const promises = Object.entries(partialDirBundle).map(([relpath, fileContent])=>{
+             const filePath = _nodepath.resolve(partialDirCtx.abspath, relpath);
+             return relpath === _processorisomorphic.PARTIAL_JSON ? _fsextra.outputJson(filePath, fileContent, {
+                 spaces: _json.DOUBLE_SPACES
+             }) : _fsextra.outputFile(filePath, fileContent !== null && fileContent !== void 0 ? fileContent : "");
+         });
+         await Promise.all(promises);
+     } catch (error) {
+         // In case of any error, wipe the target directory that is likely in a bad
+         // state then restore the backup if one existed before.
+         if (partialDirCtx.exists) {
+             await _fsextra.emptyDir(partialDirCtx.abspath);
+             await _fsextra.copy(backupDirPath, partialDirCtx.abspath);
+         } else {
+             await _fsextra.remove(partialDirCtx.abspath);
+         }
+         throw error;
+     } finally{
+         // Always clean up the backup directory in the temp sandbox.
+         await _fsextra.remove(backupDirPath);
+     }
+ };
+ /*
+  * Prunes the index directory by removing any files, or directories that aren't
+  * partial dirs found in fetched partials. We want to preserve any partial
+  * dirs that are going to be updated with remote partials, so extracted links
+  * can be respected.
+  */ const prunePartialsIndexDir = async (indexDirCtx, remotePartials)=>{
+     const partialsByKey = Object.fromEntries(remotePartials.map((w)=>[
+         w.key.toLowerCase(),
+         w
+     ]));
+     const dirents = await _fsextra.readdir(indexDirCtx.abspath, {
+         withFileTypes: true
+     });
+     const promises = dirents.map(async (dirent)=>{
+         const direntName = dirent.name.toLowerCase();
+         const direntPath = _nodepath.resolve(indexDirCtx.abspath, direntName);
+         if (await (0, _helpers.isPartialDir)(direntPath) && partialsByKey[direntName]) {
+             return;
+         }
+         await _fsextra.remove(direntPath);
+     });
+     await Promise.all(promises);
+ };
+ const writePartialsIndexDir = async (indexDirCtx, remotePartials)=>{
+     const backupDirPath = _nodepath.resolve(_const.sandboxDir, (0, _lodash.uniqueId)("backup"));
+     try {
+         // If the index directory already exists, back it up in the temp sandbox
+         // before wiping it clean.
+         if (indexDirCtx.exists) {
+             await _fsextra.copy(indexDirCtx.abspath, backupDirPath);
+             await prunePartialsIndexDir(indexDirCtx, remotePartials);
+         }
+         // Write given remote partials into the given partials directory path.
+         const writePartialDirPromises = remotePartials.map(async (partial)=>{
+             const partialDirPath = _nodepath.resolve(indexDirCtx.abspath, partial.key);
+             const partialDirCtx = {
+                 type: "partial",
+                 key: partial.key,
+                 abspath: partialDirPath,
+                 exists: indexDirCtx.exists ? await (0, _helpers.isPartialDir)(partialDirPath) : false
+             };
+             return writePartialDirFromData(partialDirCtx, partial);
+         });
+         await Promise.all(writePartialDirPromises);
+     } catch (error) {
+         console.log(error);
+         // In case of any error, wipe the index directory that is likely in a bad
+         // state then restore the backup if one existed before.
+         if (indexDirCtx.exists) {
+             await _fsextra.emptyDir(indexDirCtx.abspath);
+             await _fsextra.copy(backupDirPath, indexDirCtx.abspath);
+         } else {
+             await _fsextra.remove(indexDirCtx.abspath);
+         }
+         throw error;
+     } finally{
+         // Always clean up the backup directory in the temp sandbox.
+         await _fsextra.remove(backupDirPath);
+     }
+ };
@@ -0,0 +1,25 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+     value: true
+ });
+ Object.defineProperty(exports, "prepareResourceJson", {
+     enumerable: true,
+     get: function() {
+         return prepareResourceJson;
+     }
+ });
+ const _objectisomorphic = require("../../helpers/object.isomorphic");
+ const prepareResourceJson = (resource)=>{
+     var _resource___annotation;
+     // Move read only field under the dedicated field "__readonly".
+     const readonlyFields = ((_resource___annotation = resource.__annotation) === null || _resource___annotation === void 0 ? void 0 : _resource___annotation.readonly_fields) || [];
+     const [readonly, remainder] = (0, _objectisomorphic.split)(resource, readonlyFields);
+     const resourceJson = {
+         ...remainder,
+         __readonly: readonly
+     };
+     // Strip out all schema annotations, so not to expose them to end users.
+     return (0, _objectisomorphic.omitDeep)(resourceJson, [
+         "__annotation"
+     ]);
+ };