apify-schema-tools 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cspell/custom-dictionary.txt +4 -0
- package/.husky/pre-commit +33 -0
- package/.node-version +1 -0
- package/CHANGELOG.md +88 -0
- package/LICENSE +201 -0
- package/README.md +312 -0
- package/biome.json +31 -0
- package/dist/apify-schema-tools.d.ts +3 -0
- package/dist/apify-schema-tools.d.ts.map +1 -0
- package/dist/apify-schema-tools.js +197 -0
- package/dist/apify-schema-tools.js.map +1 -0
- package/dist/apify.d.ts +11 -0
- package/dist/apify.d.ts.map +1 -0
- package/dist/apify.js +107 -0
- package/dist/apify.js.map +1 -0
- package/dist/configuration.d.ts +43 -0
- package/dist/configuration.d.ts.map +1 -0
- package/dist/configuration.js +87 -0
- package/dist/configuration.js.map +1 -0
- package/dist/filesystem.d.ts +8 -0
- package/dist/filesystem.d.ts.map +1 -0
- package/dist/filesystem.js +16 -0
- package/dist/filesystem.js.map +1 -0
- package/dist/json-schemas.d.ts +34 -0
- package/dist/json-schemas.d.ts.map +1 -0
- package/dist/json-schemas.js +185 -0
- package/dist/json-schemas.js.map +1 -0
- package/dist/typescript.d.ts +26 -0
- package/dist/typescript.d.ts.map +1 -0
- package/dist/typescript.js +316 -0
- package/dist/typescript.js.map +1 -0
- package/package.json +60 -0
- package/samples/all-defaults/.actor/actor.json +15 -0
- package/samples/all-defaults/.actor/dataset_schema.json +32 -0
- package/samples/all-defaults/.actor/input_schema.json +53 -0
- package/samples/all-defaults/src/generated/dataset.ts +24 -0
- package/samples/all-defaults/src/generated/input-utils.ts +60 -0
- package/samples/all-defaults/src/generated/input.ts +42 -0
- package/samples/all-defaults/src-schemas/dataset-item.json +28 -0
- package/samples/all-defaults/src-schemas/input.json +73 -0
- package/samples/deep-merged-schemas/.actor/actor.json +15 -0
- package/samples/deep-merged-schemas/.actor/dataset_schema.json +37 -0
- package/samples/deep-merged-schemas/.actor/input_schema.json +61 -0
- package/samples/deep-merged-schemas/add-schemas/dataset-item.json +10 -0
- package/samples/deep-merged-schemas/add-schemas/input.json +33 -0
- package/samples/deep-merged-schemas/src/generated/dataset.ts +28 -0
- package/samples/deep-merged-schemas/src/generated/input-utils.ts +66 -0
- package/samples/deep-merged-schemas/src/generated/input.ts +47 -0
- package/samples/deep-merged-schemas/src-schemas/dataset-item.json +28 -0
- package/samples/deep-merged-schemas/src-schemas/input.json +73 -0
- package/samples/merged-schemas/.actor/actor.json +15 -0
- package/samples/merged-schemas/.actor/dataset_schema.json +37 -0
- package/samples/merged-schemas/.actor/input_schema.json +58 -0
- package/samples/merged-schemas/add-schemas/dataset-item.json +10 -0
- package/samples/merged-schemas/add-schemas/input.json +33 -0
- package/samples/merged-schemas/src/generated/dataset.ts +28 -0
- package/samples/merged-schemas/src/generated/input-utils.ts +57 -0
- package/samples/merged-schemas/src/generated/input.ts +42 -0
- package/samples/merged-schemas/src-schemas/dataset-item.json +28 -0
- package/samples/merged-schemas/src-schemas/input.json +73 -0
- package/samples/package-json-config/.actor/actor.json +15 -0
- package/samples/package-json-config/.actor/dataset_schema.json +32 -0
- package/samples/package-json-config/.actor/input_schema.json +53 -0
- package/samples/package-json-config/custom-src-schemas/dataset-item.json +28 -0
- package/samples/package-json-config/custom-src-schemas/input.json +73 -0
- package/samples/package-json-config/package.json +11 -0
- package/samples/package-json-config/src/custom-generated/dataset.ts +24 -0
- package/samples/package-json-config/src/custom-generated/input-utils.ts +60 -0
- package/samples/package-json-config/src/custom-generated/input.ts +42 -0
- package/src/apify-schema-tools.ts +302 -0
- package/src/apify.ts +124 -0
- package/src/configuration.ts +110 -0
- package/src/filesystem.ts +18 -0
- package/src/json-schemas.ts +252 -0
- package/src/typescript.ts +381 -0
- package/test/apify-schema-tools.test.ts +2064 -0
- package/test/apify.test.ts +28 -0
- package/test/common.ts +19 -0
- package/test/configuration.test.ts +642 -0
- package/test/json-schemas.test.ts +587 -0
- package/test/typescript.test.ts +817 -0
- package/tsconfig.json +18 -0
- package/update-samples.sh +27 -0
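For orientation before the test diff below: the `init` tests assert that the CLI writes an "apify-schema-tools" block into package.json. A minimal sketch of that block as a TypeScript object, with field names taken from the test assertions and example values reused from the tests (an illustration, not authoritative documentation):

const apifySchemaToolsConfig = {
	input: ["input", "dataset"],
	output: ["json-schemas", "ts-types"],
	srcInput: "custom-src/custom-input.json",
	srcDataset: "custom-src/custom-dataset.json",
	outputTSDir: "custom-output",
	addInput: "custom-add/add-input.json",
	addDataset: "custom-add/add-dataset.json",
};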
@@ -0,0 +1,2064 @@
import { execSync } from "node:child_process";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import type { JSONSchema4 } from "json-schema";
import { afterEach, beforeAll, describe, expect, it } from "vitest";

import { cleanupTestDirectory, getTestDir, setupTestDirectory } from "./common.js";

import { ACTOR_CONFIG_PATH, DATASET_SCHEMA_FIELD } from "../src/apify.js";
import { writeFile } from "../src/filesystem.js";
import { type ObjectSchema, readJsonSchema, writeJsonSchema, writeSchemaToField } from "../src/json-schemas.js";

const EXEC_CMD = `node ${import.meta.dirname}/../dist/apify-schema-tools.js`;

const TEST_DIR = getTestDir("apify-schema-tools");
const ACTOR_DIR = join(TEST_DIR, ".actor");
const INPUT_SCHEMA_PATH = join(ACTOR_DIR, "input_schema.json");
const DATASET_SCHEMA_PATH = join(ACTOR_DIR, "dataset_schema.json");
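/**
 * Writes the fixture files a test needs into TEST_DIR: placeholder .actor schemas, the given
 * source input/dataset schemas, the TypeScript output directory, and optional additional schemas.
 */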
function setupTestDirectoryFiles(
	srcDir: string,
	srcInputPath: string,
	srcInput: ObjectSchema,
	srcDatasetPath: string,
	srcDataset: ObjectSchema,
	outputTsDir: string,
	addDir?: string,
	addInputPath?: string,
	addInput?: ObjectSchema,
	addDatasetPath?: string,
	addDataset?: ObjectSchema,
) {
	const baseInputSchema: JSONSchema4 = {
		title: "Input Schema",
		type: "object",
		properties: {},
	};
	writeFile(INPUT_SCHEMA_PATH, JSON.stringify(baseInputSchema, null, 4));

	const datasetSchemaWrapper: JSONSchema4 = {
		title: "Dataset Schema",
		fields: {},
	};
	writeFile(DATASET_SCHEMA_PATH, JSON.stringify(datasetSchemaWrapper, null, 4));

	writeJsonSchema(srcInputPath, srcInput);
	writeJsonSchema(srcDatasetPath, srcDataset);
	mkdirSync(outputTsDir, { recursive: true });

	if (addDir) {
		if (addInputPath && addInput) {
			writeJsonSchema(addInputPath, addInput);
		}
		if (addDatasetPath && addDataset) {
			writeJsonSchema(addDatasetPath, addDataset);
		}
	}
}

describe("The apify-schema-tools cli", () => {
	beforeAll(() => {
		execSync("npm run build", { stdio: "inherit" });
	});
	afterEach(() => {
		cleanupTestDirectory(TEST_DIR);
	});
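	// The suites below invoke the compiled CLI (EXEC_CMD) inside TEST_DIR; the `npm run build`
	// in beforeAll is what produces dist/apify-schema-tools.js for those calls.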
	describe("init command", () => {
		it("should initialize configuration and schemas with default values", () => {
			setupTestDirectory(TEST_DIR);

			// Create package.json for config writing
			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Create existing input schema that will be copied
			const existingInputSchema: ObjectSchema = {
				title: "Existing Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			};
			writeJsonSchema(INPUT_SCHEMA_PATH, existingInputSchema);

			// Create existing actor config for dataset initialization
			const actorConfigPath = join(TEST_DIR, ACTOR_CONFIG_PATH);
			const existingActorConfig = {
				actorSpecification: 1,
				name: "test-actor",
				version: "1.0.0",
			};
			writeFileSync(actorConfigPath, JSON.stringify(existingActorConfig, null, 4));

			// Create the src-schemas directory that the init command will use

			execSync(`${EXEC_CMD} init`, {
				cwd: TEST_DIR,
				stdio: "inherit",
			});

			// Check that configuration was written to package.json
			const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
			expect(packageJson["apify-schema-tools"]).toBeDefined();
			expect(packageJson["apify-schema-tools"].input).toEqual(["input", "dataset"]);
			expect(packageJson["apify-schema-tools"].output).toEqual(["json-schemas", "ts-types"]);

			// Check that input schema was initialized
			const srcInputPath = join(TEST_DIR, "src-schemas", "input.json");
			expect(existsSync(srcInputPath)).toBe(true);
			const initializedInputSchema = readJsonSchema(srcInputPath);
			expect(initializedInputSchema).toEqual(existingInputSchema);

			// Check that dataset schema was initialized
			const srcDatasetPath = join(TEST_DIR, "src-schemas", "dataset-item.json");
			expect(existsSync(srcDatasetPath)).toBe(true);
			const initializedDatasetSchema = readJsonSchema(srcDatasetPath);
			expect(initializedDatasetSchema).toEqual({ type: "object", properties: {} });

			// Check that actor config was updated
			const updatedActorConfig = JSON.parse(readFileSync(actorConfigPath, "utf8"));
			expect(updatedActorConfig.storages?.dataset).toBe(".actor/dataset_schema.json");

			// Check that dataset schema file was created
			expect(existsSync(DATASET_SCHEMA_PATH)).toBe(true);
			const datasetSchemaFile = JSON.parse(readFileSync(DATASET_SCHEMA_PATH, "utf8"));
			expect(datasetSchemaFile).toEqual({
				actorSpecification: 1,
				fields: { type: "object", properties: {} },
			});
		});
		it("should initialize with custom options", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Create existing input schema
			const existingInputSchema: ObjectSchema = {
				title: "Custom Input Schema",
				type: "object",
				properties: {
					customField: { type: "string" },
				},
				required: ["customField"],
			};
			writeJsonSchema(INPUT_SCHEMA_PATH, existingInputSchema);

			// Create existing actor config
			const actorConfigPath = join(TEST_DIR, ACTOR_CONFIG_PATH);
			const existingActorConfig = {
				actorSpecification: 1,
				name: "custom-actor",
				version: "2.0.0",
			};
			writeFileSync(actorConfigPath, JSON.stringify(existingActorConfig, null, 4));

			// Create the necessary directories that the init command will use

			execSync(
				[
					`${EXEC_CMD} init`,
					"-i input dataset",
					"-o json-schemas ts-types",
					"--src-input custom-src/custom-input.json",
					"--src-dataset custom-src/custom-dataset.json",
					"--output-ts-dir custom-output",
					"--add-input custom-add/add-input.json",
					"--add-dataset custom-add/add-dataset.json",
				].join(" "),
				{
					cwd: TEST_DIR,
					stdio: "inherit",
				},
			);

			// Check that configuration was written with custom values
			const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
			expect(packageJson["apify-schema-tools"]).toBeDefined();
			expect(packageJson["apify-schema-tools"].input).toEqual(["input", "dataset"]);
			expect(packageJson["apify-schema-tools"].output).toEqual(["json-schemas", "ts-types"]);
			expect(packageJson["apify-schema-tools"].srcInput).toBe("custom-src/custom-input.json");
			expect(packageJson["apify-schema-tools"].srcDataset).toBe("custom-src/custom-dataset.json");
			expect(packageJson["apify-schema-tools"].outputTSDir).toBe("custom-output");
			expect(packageJson["apify-schema-tools"].addInput).toBe("custom-add/add-input.json");
			expect(packageJson["apify-schema-tools"].addDataset).toBe("custom-add/add-dataset.json");

			// Check that input schema was initialized at custom location
			const customSrcInputPath = join(TEST_DIR, "custom-src", "custom-input.json");
			expect(existsSync(customSrcInputPath)).toBe(true);
			const initializedInputSchema = readJsonSchema(customSrcInputPath);
			expect(initializedInputSchema).toEqual(existingInputSchema);

			// Check that additional input schema was initialized
			const customAddInputPath = join(TEST_DIR, "custom-add", "add-input.json");
			expect(existsSync(customAddInputPath)).toBe(true);
			const initializedAddInputSchema = readJsonSchema(customAddInputPath);
			expect(initializedAddInputSchema).toEqual({ type: "object", properties: {} });
		});
		it("should only create config file when --only-config-file is specified", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			execSync(`${EXEC_CMD} init --only-config-file`, {
				cwd: TEST_DIR,
				stdio: "inherit",
			});

			// Check that configuration was written to package.json
			const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
			expect(packageJson["apify-schema-tools"]).toBeDefined();

			// Check that schemas were NOT initialized
			const srcInputPath = join(TEST_DIR, "src-schemas", "input.json");
			const srcDatasetPath = join(TEST_DIR, "src-schemas", "dataset-item.json");
			expect(existsSync(srcInputPath)).toBe(false);
			expect(existsSync(srcDatasetPath)).toBe(false);
		});

		it("should not create config file when --no-config-file is specified", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Create existing input schema
			const existingInputSchema: ObjectSchema = {
				title: "Test Input Schema",
				type: "object",
				properties: {
					testField: { type: "string" },
				},
				required: ["testField"],
			};
			writeJsonSchema(INPUT_SCHEMA_PATH, existingInputSchema);

			// Create existing actor config
			const actorConfigPath = join(TEST_DIR, ACTOR_CONFIG_PATH);
			const existingActorConfig = {
				actorSpecification: 1,
				name: "test-actor",
				version: "1.0.0",
			};
			writeFileSync(actorConfigPath, JSON.stringify(existingActorConfig, null, 4));

			// Create the src-schemas directory that the init command will use

			execSync(`${EXEC_CMD} init --no-config-file`, {
				cwd: TEST_DIR,
				stdio: "inherit",
			});

			// Check that configuration was NOT written to package.json
			const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
			expect(packageJson["apify-schema-tools"]).toBeUndefined();

			// Check that schemas were initialized
			const srcInputPath = join(TEST_DIR, "src-schemas", "input.json");
			expect(existsSync(srcInputPath)).toBe(true);
			const initializedInputSchema = readJsonSchema(srcInputPath);
			expect(initializedInputSchema).toEqual(existingInputSchema);
		});

		it("should throw error when both --only-config-file and --no-config-file are specified", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			expect(() => {
				execSync(`${EXEC_CMD} init --only-config-file --no-config-file`, {
					cwd: TEST_DIR,
					stdio: "inherit",
				});
			}).toThrow();
		});
		it("should handle partial initialization when some files already exist", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Create existing input schema
			const existingInputSchema: ObjectSchema = {
				title: "Existing Input Schema",
				type: "object",
				properties: {
					existingField: { type: "string" },
				},
				required: ["existingField"],
			};
			writeJsonSchema(INPUT_SCHEMA_PATH, existingInputSchema);

			// Create existing actor config
			const actorConfigPath = join(TEST_DIR, ACTOR_CONFIG_PATH);
			const existingActorConfig = {
				actorSpecification: 1,
				name: "existing-actor",
				version: "1.0.0",
			};
			writeFileSync(actorConfigPath, JSON.stringify(existingActorConfig, null, 4));

			// Pre-create one of the source schema files
			const srcInputPath = join(TEST_DIR, "src-schemas", "input.json");
			const preExistingSchema: ObjectSchema = {
				title: "Pre-existing Schema",
				type: "object",
				properties: {
					preExistingField: { type: "boolean" },
				},
				required: ["preExistingField"],
			};
			writeJsonSchema(srcInputPath, preExistingSchema);

			execSync(`${EXEC_CMD} init`, {
				cwd: TEST_DIR,
				stdio: "inherit",
			});

			// Check that the pre-existing source schema was replaced by the copied input schema
			const preservedSchema = readJsonSchema(srcInputPath);
			expect(preservedSchema).toEqual(existingInputSchema); // The init command overwrites with the input schema

			// Check that the dataset schema was still initialized
			const srcDatasetPath = join(TEST_DIR, "src-schemas", "dataset-item.json");
			expect(existsSync(srcDatasetPath)).toBe(true);
			const initializedDatasetSchema = readJsonSchema(srcDatasetPath);
			expect(initializedDatasetSchema).toEqual({ type: "object", properties: {} });
		});

		it("should throw error when input schema doesn't exist", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Don't create the input schema file

			expect(() => {
				execSync(`${EXEC_CMD} init`, {
					cwd: TEST_DIR,
					stdio: "inherit",
				});
			}).toThrow();
		});

		it("should throw error when actor config doesn't exist", () => {
			setupTestDirectory(TEST_DIR);

			const packageJsonPath = join(TEST_DIR, "package.json");
			writeFileSync(
				packageJsonPath,
				JSON.stringify(
					{
						name: "test-project",
						version: "1.0.0",
					},
					null,
					2,
				),
			);

			// Create existing input schema
			const existingInputSchema: ObjectSchema = {
				title: "Test Input Schema",
				type: "object",
				properties: {
					testField: { type: "string" },
				},
				required: ["testField"],
			};
			writeJsonSchema(INPUT_SCHEMA_PATH, existingInputSchema);

			// Don't create the actor config file

			expect(() => {
				execSync(`${EXEC_CMD} init`, {
					cwd: TEST_DIR,
					stdio: "inherit",
				});
			}).toThrow();
		});
	});
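	// The sync tests below expect `sync` to rewrite .actor/input_schema.json, the `fields`
	// object of .actor/dataset_schema.json, and the generated TypeScript files from the
	// source schemas.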
	describe("sync command", () => {
		it("should sync input schema to JSON Schema and TypeScript types, with default values", async () => {
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			const srcInput: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			};
			const srcDataset: ObjectSchema = {
				title: "Dataset Item Schema",
				type: "object",
				properties: {
					id: { type: "string" },
					value: { type: "number" },
				},
				required: ["id"],
			};
			setupTestDirectoryFiles(srcDir, srcInputPath, srcInput, srcDatasetPath, srcDataset, outputTsDir);
			execSync(
				// Using the default options
				`${EXEC_CMD} sync`,
				{
					cwd: TEST_DIR,
					stdio: "inherit",
				},
			);

			expect(existsSync(join(outputTsDir, "input.ts"))).toBe(true);
			expect(existsSync(join(outputTsDir, "input-utils.ts"))).toBe(true);
			expect(existsSync(INPUT_SCHEMA_PATH)).toBe(true);

			const generatedInputSchema = readJsonSchema(INPUT_SCHEMA_PATH);
			expect(generatedInputSchema).toEqual({
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			});

			const generatedDatasetSchema = readJsonSchema(DATASET_SCHEMA_PATH);
			expect(generatedDatasetSchema).toEqual({
				title: "Dataset Schema",
				fields: {
					title: "Dataset Item Schema",
					type: "object",
					properties: {
						id: { type: "string" },
						value: { type: "number" },
					},
					required: ["id"],
				},
			});
		});
		it("should sync input schema to JSON Schema and TypeScript types, with custom options", async () => {
			setupTestDirectory(TEST_DIR);

			const srcDir = join(TEST_DIR, "custom-src");
			const srcInputPath = join(srcDir, "custom-input.json");
			const srcDatasetPath = join(srcDir, "custom-dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "custom-generated");
			const addDir = join(TEST_DIR, "add-schemas");
			const addInputPath = join(addDir, "input.json");
			const addDatasetPath = join(addDir, "dataset-item.json");
			const srcInput: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			};
			const addInput: ObjectSchema = {
				title: "Extended Input Schema",
				type: "object",
				properties: {
					email: { type: "string", format: "email" },
					isActive: { type: "boolean" },
				},
				required: ["email"],
			};
			const srcDataset: ObjectSchema = {
				title: "Dataset Item Schema",
				type: "object",
				properties: {
					id: { type: "string" },
					value: { type: "number" },
				},
				required: ["id"],
			};
			const addDataset: ObjectSchema = {
				title: "Extended Dataset Item Schema",
				type: "object",
				properties: {
					description: { type: "string" },
					timestamp: { type: "string", format: "date-time" },
				},
				required: ["description"],
			};

			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				srcInput,
				srcDatasetPath,
				srcDataset,
				outputTsDir,
				addDir,
				addInputPath,
				addInput,
				addDatasetPath,
				addDataset,
			);

			execSync(
				[
					`${EXEC_CMD} sync`,
					"-i input dataset",
					"-o json-schemas ts-types",
					`--src-input ${srcInputPath}`,
					`--src-dataset ${srcDatasetPath}`,
					`--output-ts-dir ${outputTsDir}`,
					`--add-input ${addInputPath}`,
					`--add-dataset ${addDatasetPath}`,
					"--deep-merge",
					"--include-input-utils false",
				].join(" "),
				{
					cwd: TEST_DIR,
					stdio: "inherit",
				},
			);

			expect(existsSync(join(outputTsDir, "input.ts"))).toBe(true);
			expect(existsSync(INPUT_SCHEMA_PATH)).toBe(true);

			// Not generated because we set the option to false
			expect(existsSync(join(outputTsDir, "input-utils.ts"))).toBe(false);

			const generatedInputSchema = readJsonSchema(INPUT_SCHEMA_PATH);
			expect(generatedInputSchema).toEqual({
				title: "Extended Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
					email: { type: "string" }, // "format" was removed because it is an invalid property in Apify input schema
					isActive: { type: "boolean" },
				},
				required: ["name", "email"],
			});

			const generatedDatasetSchema = readJsonSchema(DATASET_SCHEMA_PATH);
			expect(generatedDatasetSchema).toEqual({
				title: "Dataset Schema",
				fields: {
					title: "Extended Dataset Item Schema",
					type: "object",
					properties: {
						id: { type: "string" },
						value: { type: "number" },
						description: { type: "string" },
						timestamp: { type: "string", format: "date-time" },
					},
					required: ["id", "description"],
				},
			});
		});
	});
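	// The check tests below re-derive the expected output from the source schemas and treat a
	// non-zero CLI exit (execSync throwing) as a mismatch with the committed files.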
	describe("check command", () => {
		it("should pass if the schemas match the source schemas", () => {
			const inputSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			};
			const datasetSchema: ObjectSchema = {
				title: "Dataset Item Schema",
				type: "object",
				properties: {
					id: { type: "string" },
					value: { type: "number" },
				},
				required: ["id"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(srcDir, srcInputPath, inputSchema, srcDatasetPath, datasetSchema, outputTsDir);
			writeJsonSchema(INPUT_SCHEMA_PATH, inputSchema);
			writeSchemaToField(DATASET_SCHEMA_PATH, datasetSchema, DATASET_SCHEMA_FIELD);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input dataset",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should consider additional schemas", () => {
			const inputSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
				},
				required: ["name"],
			};
			const additionalSchema: ObjectSchema = {
				type: "object",
				properties: {
					age: { type: "integer" },
				},
			};
			const fullSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			const addDir = join(TEST_DIR, "add-schemas");
			const addInputPath = join(addDir, "add-input.json");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				inputSchema,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
				addDir,
				addInputPath,
				additionalSchema,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, fullSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						`--add-input ${addInputPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should pass when source schema has position fields that are removed in output", () => {
			const inputSchemaWithPositions: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string", position: 1 },
					age: { type: "integer", position: 2 },
					email: { type: "string", position: 0 },
				},
				required: ["name"],
			};
			// Expected output schema (positions are removed by the filtering process)
			const expectedOutputSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					email: { type: "string" }, // position 0 comes first
					name: { type: "string" }, // position 1 comes second
					age: { type: "integer" }, // position 2 comes last
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				inputSchemaWithPositions,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, expectedOutputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should pass when source schema has invalid input schema fields that are filtered out", () => {
			const inputSchemaWithInvalidFields: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: {
						type: "string",
						format: "email", // Invalid field for Apify input schemas - will be removed
						custom: "field", // Invalid field - will be removed
					},
					age: {
						type: "integer",
						invalidField: "value", // Invalid field - will be removed
					},
					description: {
						type: "string",
						pattern: "^[a-z]+$", // Valid field - will be kept
					},
				},
				required: ["name"],
				invalidRootField: "should be removed", // Invalid root field - will be removed
			};
			// Expected output schema (invalid fields are removed by the filtering process)
			const expectedOutputSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: { type: "string" },
					age: { type: "integer" },
					description: {
						type: "string",
						pattern: "^[a-z]+$",
					},
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				inputSchemaWithInvalidFields,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, expectedOutputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should pass when source schema has both position and invalid fields", () => {
			const complexSourceSchema: ObjectSchema = {
				title: "Complex Input Schema",
				type: "object",
				properties: {
					priority: {
						type: "string",
						position: 0,
						format: "email", // Invalid - will be removed
					},
					name: {
						type: "string",
						position: 2,
						customField: "invalid", // Invalid - will be removed
					},
					age: {
						type: "integer",
						position: 1,
						maximum: 100, // Valid - will be kept
					},
					settings: {
						type: "object",
						position: 3,
						patternKey: "^[a-z]+$", // Valid for object type - will be kept
						invalidObjectField: "remove", // Invalid - will be removed
					},
				},
				required: ["priority", "name"],
				customRootField: "invalid", // Invalid root field - will be removed
			};
			// Expected output schema (positions removed, invalid fields removed, properties ordered by position)
			const expectedOutputSchema: ObjectSchema = {
				title: "Complex Input Schema",
				type: "object",
				properties: {
					priority: { type: "string" }, // position 0
					age: { type: "integer", maximum: 100 }, // position 1
					name: { type: "string" }, // position 2
					settings: {
						type: "object",
						patternKey: "^[a-z]+$",
					}, // position 3
				},
				required: ["priority", "name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				complexSourceSchema,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, expectedOutputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should pass when additional schemas also have fields that get filtered out", () => {
			const baseInputSchema: ObjectSchema = {
				title: "Base Input Schema",
				type: "object",
				properties: {
					name: { type: "string", position: 1 },
				},
				required: ["name"],
			};
			const additionalSchemaWithInvalidFields: ObjectSchema = {
				type: "object",
				properties: {
					email: {
						type: "string",
						position: 0,
						format: "email", // Invalid field - will be removed
					},
					age: {
						type: "integer",
						position: 2,
						customField: "invalid", // Invalid field - will be removed
					},
				},
				required: ["email"],
				invalidAdditionalField: "remove", // Invalid root field - will be removed
			};
			// Expected merged output schema (positions and invalid fields removed, properties ordered by position)
			const expectedMergedSchema: ObjectSchema = {
				title: "Base Input Schema",
				type: "object",
				properties: {
					email: { type: "string" }, // position 0
					name: { type: "string" }, // position 1
					age: { type: "integer" }, // position 2
				},
				required: ["name", "email"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			const addDir = join(TEST_DIR, "add-schemas");
			const addInputPath = join(addDir, "add-input.json");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				baseInputSchema,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
				addDir,
				addInputPath,
				additionalSchemaWithInvalidFields,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, expectedMergedSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						`--add-input ${addInputPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should fail when output schema doesn't match after filtering", () => {
			const sourceSchemaWithInvalidFields: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: {
						type: "string",
						format: "email", // This will be filtered out
					},
				},
				required: ["name"],
			};
			// Incorrect output schema (includes the field that should have been filtered)
			const incorrectOutputSchema: ObjectSchema = {
				title: "Input Schema",
				type: "object",
				properties: {
					name: {
						type: "string",
						format: "email", // This field should not be in the output
					},
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				sourceSchemaWithInvalidFields,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, incorrectOutputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).toThrow();
		});
		it("should pass when descriptions differ but --ignore-descriptions is set", () => {
			const sourceInputSchema: ObjectSchema = {
				title: "Source Input Schema",
				description: "Original description for input schema",
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "Name of the user",
					},
					age: {
						type: "integer",
						description: "Age in years",
					},
				},
				required: ["name"],
			};
			// Output schema with different descriptions but same structure
			const outputInputSchema: ObjectSchema = {
				title: "Generated Input Schema", // Different title
				description: "Auto-generated description for input schema", // Different description
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "User's full name", // Different description
					},
					age: {
						type: "integer",
						description: "User's age", // Different description
					},
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
			writeJsonSchema(INPUT_SCHEMA_PATH, outputInputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						"--ignore-descriptions",
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should fail when descriptions differ and --ignore-descriptions is not set", () => {
			const sourceInputSchema: ObjectSchema = {
				title: "Source Input Schema",
				description: "Original description",
				type: "object",
				properties: {
					name: { type: "string" },
				},
				required: ["name"],
			};
			// Output schema with different title (same structure otherwise)
			const outputInputSchema: ObjectSchema = {
				title: "Different Title", // Different title should cause failure
				description: "Original description",
				type: "object",
				properties: {
					name: { type: "string" },
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
			writeJsonSchema(INPUT_SCHEMA_PATH, outputInputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						// Note: --ignore-descriptions is NOT set
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).toThrow();
		});
		it("should pass with --ignore-descriptions when additional schema has different descriptions", () => {
			const baseInputSchema: ObjectSchema = {
				title: "Base Input Schema",
				description: "Base description",
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "Base name description",
					},
				},
				required: ["name"],
			};
			const additionalSchema: ObjectSchema = {
				title: "Additional Schema", // Different title
				description: "Additional description", // Different description
				type: "object",
				properties: {
					age: {
						type: "integer",
						description: "Additional age description",
					},
				},
				required: ["age"],
			};
			// Expected merged output with different descriptions
			const expectedOutputSchema: ObjectSchema = {
				title: "Generated Combined Schema", // Different from both source schemas
				description: "Auto-generated combined description", // Different from both
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "Generated name description", // Different description
					},
					age: {
						type: "integer",
						description: "Generated age description", // Different description
					},
				},
				required: ["name", "age"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			const addDir = join(TEST_DIR, "add-schemas");
			const addInputPath = join(addDir, "add-input.json");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				baseInputSchema,
				srcDatasetPath,
				{} as ObjectSchema,
				outputTsDir,
				addDir,
				addInputPath,
				additionalSchema,
			);
			writeJsonSchema(INPUT_SCHEMA_PATH, expectedOutputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						`--add-input ${addInputPath}`,
						"--ignore-descriptions",
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should pass with --ignore-descriptions when dataset schema has different descriptions", () => {
			const sourceDatasetSchema: ObjectSchema = {
				title: "Source Dataset Item Schema",
				description: "Original dataset description",
				type: "object",
				properties: {
					id: {
						type: "string",
						description: "Unique identifier",
					},
					value: {
						type: "number",
						description: "Numeric value",
					},
				},
				required: ["id"],
			};
			// Output dataset schema with different descriptions
			const outputDatasetSchema: ObjectSchema = {
				title: "Generated Dataset Item Schema", // Different title
				description: "Auto-generated dataset description", // Different description
				type: "object",
				properties: {
					id: {
						type: "string",
						description: "Item identifier", // Different description
					},
					value: {
						type: "number",
						description: "Item value", // Different description
					},
				},
				required: ["id"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				{} as ObjectSchema,
				srcDatasetPath,
				sourceDatasetSchema,
				outputTsDir,
			);
			// Write a simple input schema to avoid validation issues
			writeJsonSchema(INPUT_SCHEMA_PATH, {
				title: "Simple Input",
				type: "object",
				properties: {},
			});
			writeSchemaToField(DATASET_SCHEMA_PATH, outputDatasetSchema, DATASET_SCHEMA_FIELD);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i dataset",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						"--ignore-descriptions",
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});
		it("should fail with --ignore-descriptions when there are structural differences beyond descriptions", () => {
			const sourceInputSchema: ObjectSchema = {
				title: "Source Input Schema",
				description: "Original description",
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "Name of the user",
					},
					age: {
						type: "integer",
						description: "Age in years",
					},
				},
				required: ["name"],
			};
			// Output schema with different structure (missing age property)
			const outputInputSchema: ObjectSchema = {
				title: "Different Title", // Different title (should be ignored)
				description: "Different description", // Different description (should be ignored)
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "Different name description", // Different description (should be ignored)
					},
					// Missing age property - this is a structural difference that should cause failure
				},
				required: ["name"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
			writeJsonSchema(INPUT_SCHEMA_PATH, outputInputSchema);
			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input",
						"-o json-schemas",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
						"--ignore-descriptions", // Should still fail despite this flag
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).toThrow();
		});
		it("should pass when TypeScript files match the source schemas", () => {
			const sourceInputSchema: ObjectSchema = {
				title: "Input Schema",
				description: "Schema for actor input",
				type: "object",
				properties: {
					name: {
						type: "string",
						description: "User's name",
					},
					age: {
						type: "integer",
						description: "User's age",
					},
					isActive: {
						type: "boolean",
						description: "Whether user is active",
					},
				},
				required: ["name"],
			};
			const sourceDatasetSchema: ObjectSchema = {
				title: "Dataset Item Schema",
				description: "Schema for dataset items",
				type: "object",
				properties: {
					id: {
						type: "string",
						description: "Unique identifier",
					},
					value: {
						type: "number",
						description: "Numeric value",
					},
				},
				required: ["id"],
			};
			setupTestDirectory(TEST_DIR);
			const srcDir = join(TEST_DIR, "src-schemas");
			const srcInputPath = join(srcDir, "input.json");
			const srcDatasetPath = join(srcDir, "dataset-item.json");
			const outputTsDir = join(TEST_DIR, "src", "generated");
			setupTestDirectoryFiles(
				srcDir,
				srcInputPath,
				sourceInputSchema,
				srcDatasetPath,
				sourceDatasetSchema,
				outputTsDir,
			);

			// Generate the TypeScript files that should match the schemas
			const inputTsContent = `/**
 * This file was automatically generated by apify-schema-tools.
 * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
 * and run apify-schema-tools' "sync" command to regenerate this file.
 */

/**
 * Schema for actor input
 */
export interface Input {
    /**
     * User's name
     */
    name: string;
    /**
     * User's age
     */
    age?: number;
    /**
     * Whether user is active
     */
    isActive?: boolean;
}
`;
			const datasetTsContent = `/**
 * This file was automatically generated by apify-schema-tools.
 * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
 * and run apify-schema-tools' "sync" command to regenerate this file.
 */

/**
 * Schema for dataset items
 */
export interface DatasetItem {
    /**
     * Unique identifier
     */
    id: string;
    /**
     * Numeric value
     */
    value?: number;
}
`;
			writeFileSync(join(outputTsDir, "input.ts"), inputTsContent);
			writeFileSync(join(outputTsDir, "dataset.ts"), datasetTsContent);

			expect(() =>
				execSync(
					[
						`${EXEC_CMD} check`,
						"-i input dataset",
						"-o ts-types",
						`--src-input ${srcInputPath}`,
						`--src-dataset ${srcDatasetPath}`,
					].join(" "),
					{
						cwd: TEST_DIR,
						stdio: "inherit",
					},
				),
			).not.toThrow();
		});

		it("should fail when TypeScript interface has different property types", () => {
|
|
1504
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1505
|
+
title: "Input Schema",
|
|
1506
|
+
type: "object",
|
|
1507
|
+
properties: {
|
|
1508
|
+
name: { type: "string" },
|
|
1509
|
+
age: { type: "integer" },
|
|
1510
|
+
},
|
|
1511
|
+
required: ["name"],
|
|
1512
|
+
};
|
|
1513
|
+
setupTestDirectory(TEST_DIR);
|
|
1514
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1515
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1516
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1517
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1518
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1519
|
+
|
|
1520
|
+
// Generate TypeScript file with wrong type (age should be number, not string)
|
|
1521
|
+
const incorrectInputTsContent = `/**
|
|
1522
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1523
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1524
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1525
|
+
*/
|
|
1526
|
+
|
|
1527
|
+
export interface Input {
|
|
1528
|
+
name: string;
|
|
1529
|
+
age?: string;
|
|
1530
|
+
}
|
|
1531
|
+
`;
|
|
1532
|
+
writeFileSync(join(outputTsDir, "input.ts"), incorrectInputTsContent);
|
|
1533
|
+
|
|
1534
|
+
expect(() =>
|
|
1535
|
+
execSync(
|
|
1536
|
+
[
|
|
1537
|
+
`${EXEC_CMD} check`,
|
|
1538
|
+
"-i input",
|
|
1539
|
+
"-o ts-types",
|
|
1540
|
+
`--src-input ${srcInputPath}`,
|
|
1541
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1542
|
+
].join(" "),
|
|
1543
|
+
{
|
|
1544
|
+
cwd: TEST_DIR,
|
|
1545
|
+
stdio: "inherit",
|
|
1546
|
+
},
|
|
1547
|
+
),
|
|
1548
|
+
).toThrow();
|
|
1549
|
+
});
|
|
1550
|
+
|
|
1551
|
+
it("should fail when TypeScript interface has different required properties", () => {
|
|
1552
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1553
|
+
title: "Input Schema",
|
|
1554
|
+
type: "object",
|
|
1555
|
+
properties: {
|
|
1556
|
+
name: { type: "string" },
|
|
1557
|
+
age: { type: "integer" },
|
|
1558
|
+
},
|
|
1559
|
+
required: ["name", "age"], // Both required
|
|
1560
|
+
};
|
|
1561
|
+
setupTestDirectory(TEST_DIR);
|
|
1562
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1563
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1564
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1565
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1566
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1567
|
+
|
|
1568
|
+
// Generate TypeScript file where age is optional but should be required
|
|
1569
|
+
const incorrectInputTsContent = `/**
|
|
1570
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1571
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1572
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1573
|
+
*/
|
|
1574
|
+
|
|
1575
|
+
export interface Input {
|
|
1576
|
+
name: string;
|
|
1577
|
+
age?: number;
|
|
1578
|
+
}
|
|
1579
|
+
`;
|
|
1580
|
+
writeFileSync(join(outputTsDir, "input.ts"), incorrectInputTsContent);
|
|
1581
|
+
|
|
1582
|
+
expect(() =>
|
|
1583
|
+
execSync(
|
|
1584
|
+
[
|
|
1585
|
+
`${EXEC_CMD} check`,
|
|
1586
|
+
"-i input",
|
|
1587
|
+
"-o ts-types",
|
|
1588
|
+
`--src-input ${srcInputPath}`,
|
|
1589
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1590
|
+
].join(" "),
|
|
1591
|
+
{
|
|
1592
|
+
cwd: TEST_DIR,
|
|
1593
|
+
stdio: "inherit",
|
|
1594
|
+
},
|
|
1595
|
+
),
|
|
1596
|
+
).toThrow();
|
|
1597
|
+
});
|
|
1598
|
+
|
|
1599
|
+
it("should fail when TypeScript interface has missing properties", () => {
|
|
1600
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1601
|
+
title: "Input Schema",
|
|
1602
|
+
type: "object",
|
|
1603
|
+
properties: {
|
|
1604
|
+
name: { type: "string" },
|
|
1605
|
+
age: { type: "integer" },
|
|
1606
|
+
email: { type: "string" },
|
|
1607
|
+
},
|
|
1608
|
+
required: ["name"],
|
|
1609
|
+
};
|
|
1610
|
+
setupTestDirectory(TEST_DIR);
|
|
1611
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1612
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1613
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1614
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1615
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1616
|
+
|
|
1617
|
+
// Generate TypeScript file missing the email property
|
|
1618
|
+
const incompleteInputTsContent = `/**
|
|
1619
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1620
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1621
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1622
|
+
*/
|
|
1623
|
+
|
|
1624
|
+
export interface Input {
|
|
1625
|
+
name: string;
|
|
1626
|
+
age?: number;
|
|
1627
|
+
}
|
|
1628
|
+
`;
|
|
1629
|
+
writeFileSync(join(outputTsDir, "input.ts"), incompleteInputTsContent);
|
|
1630
|
+
|
|
1631
|
+
expect(() =>
|
|
1632
|
+
execSync(
|
|
1633
|
+
[
|
|
1634
|
+
`${EXEC_CMD} check`,
|
|
1635
|
+
"-i input",
|
|
1636
|
+
"-o ts-types",
|
|
1637
|
+
`--src-input ${srcInputPath}`,
|
|
1638
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1639
|
+
].join(" "),
|
|
1640
|
+
{
|
|
1641
|
+
cwd: TEST_DIR,
|
|
1642
|
+
stdio: "inherit",
|
|
1643
|
+
},
|
|
1644
|
+
),
|
|
1645
|
+
).toThrow();
|
|
1646
|
+
});
|
|
1647
|
+
|
|
1648
|
+
it("should pass when TypeScript descriptions differ but --ignore-descriptions is set", () => {
|
|
1649
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1650
|
+
title: "Input Schema",
|
|
1651
|
+
description: "Original input description",
|
|
1652
|
+
type: "object",
|
|
1653
|
+
properties: {
|
|
1654
|
+
name: {
|
|
1655
|
+
type: "string",
|
|
1656
|
+
description: "Original name description",
|
|
1657
|
+
},
|
|
1658
|
+
age: {
|
|
1659
|
+
type: "integer",
|
|
1660
|
+
description: "Original age description",
|
|
1661
|
+
},
|
|
1662
|
+
},
|
|
1663
|
+
required: ["name"],
|
|
1664
|
+
};
|
|
1665
|
+
setupTestDirectory(TEST_DIR);
|
|
1666
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1667
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1668
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1669
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1670
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1671
|
+
|
|
1672
|
+
// Generate TypeScript file with different descriptions
|
|
1673
|
+
const inputTsContentWithDifferentDescriptions = `/**
|
|
1674
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1675
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1676
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1677
|
+
*/
|
|
1678
|
+
|
|
1679
|
+
/**
|
|
1680
|
+
* Different input description
|
|
1681
|
+
*/
|
|
1682
|
+
export interface Input {
|
|
1683
|
+
/**
|
|
1684
|
+
* Different name description
|
|
1685
|
+
*/
|
|
1686
|
+
name: string;
|
|
1687
|
+
/**
|
|
1688
|
+
* Different age description
|
|
1689
|
+
*/
|
|
1690
|
+
age?: number;
|
|
1691
|
+
}
|
|
1692
|
+
`;
|
|
1693
|
+
writeFileSync(join(outputTsDir, "input.ts"), inputTsContentWithDifferentDescriptions);
|
|
1694
|
+
|
|
1695
|
+
expect(() =>
|
|
1696
|
+
execSync(
|
|
1697
|
+
[
|
|
1698
|
+
`${EXEC_CMD} check`,
|
|
1699
|
+
"-i input",
|
|
1700
|
+
"-o ts-types",
|
|
1701
|
+
`--src-input ${srcInputPath}`,
|
|
1702
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1703
|
+
"--ignore-descriptions",
|
|
1704
|
+
].join(" "),
|
|
1705
|
+
{
|
|
1706
|
+
cwd: TEST_DIR,
|
|
1707
|
+
stdio: "inherit",
|
|
1708
|
+
},
|
|
1709
|
+
),
|
|
1710
|
+
).not.toThrow();
|
|
1711
|
+
});
|
|
1712
|
+
|
|
1713
|
+
it("should fail when TypeScript descriptions differ and --ignore-descriptions is not set", () => {
|
|
1714
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1715
|
+
title: "Input Schema",
|
|
1716
|
+
description: "Original input description",
|
|
1717
|
+
type: "object",
|
|
1718
|
+
properties: {
|
|
1719
|
+
name: {
|
|
1720
|
+
type: "string",
|
|
1721
|
+
description: "Original name description",
|
|
1722
|
+
},
|
|
1723
|
+
},
|
|
1724
|
+
required: ["name"],
|
|
1725
|
+
};
|
|
1726
|
+
setupTestDirectory(TEST_DIR);
|
|
1727
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1728
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1729
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1730
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1731
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1732
|
+
|
|
1733
|
+
// Generate TypeScript file with different descriptions
|
|
1734
|
+
const inputTsContentWithDifferentDescriptions = `/**
|
|
1735
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1736
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1737
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1738
|
+
*/
|
|
1739
|
+
|
|
1740
|
+
/**
|
|
1741
|
+
* Different input description
|
|
1742
|
+
*/
|
|
1743
|
+
export interface Input {
|
|
1744
|
+
/**
|
|
1745
|
+
* Different name description
|
|
1746
|
+
*/
|
|
1747
|
+
name: string;
|
|
1748
|
+
}
|
|
1749
|
+
`;
|
|
1750
|
+
writeFileSync(join(outputTsDir, "input.ts"), inputTsContentWithDifferentDescriptions);
|
|
1751
|
+
|
|
1752
|
+
expect(() =>
|
|
1753
|
+
execSync(
|
|
1754
|
+
[
|
|
1755
|
+
`${EXEC_CMD} check`,
|
|
1756
|
+
"-i input",
|
|
1757
|
+
"-o ts-types",
|
|
1758
|
+
`--src-input ${srcInputPath}`,
|
|
1759
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1760
|
+
// Note: --ignore-descriptions is NOT set
|
|
1761
|
+
].join(" "),
|
|
1762
|
+
{
|
|
1763
|
+
cwd: TEST_DIR,
|
|
1764
|
+
stdio: "inherit",
|
|
1765
|
+
},
|
|
1766
|
+
),
|
|
1767
|
+
).toThrow();
|
|
1768
|
+
});
|
|
1769
|
+
|
|
1770
|
+
it("should pass when TypeScript files match schemas with additional merged schemas", () => {
|
|
1771
|
+
const baseInputSchema: ObjectSchema = {
|
|
1772
|
+
title: "Base Input Schema",
|
|
1773
|
+
type: "object",
|
|
1774
|
+
properties: {
|
|
1775
|
+
name: { type: "string" },
|
|
1776
|
+
},
|
|
1777
|
+
required: ["name"],
|
|
1778
|
+
};
|
|
1779
|
+
const additionalSchema: ObjectSchema = {
|
|
1780
|
+
type: "object",
|
|
1781
|
+
properties: {
|
|
1782
|
+
age: { type: "integer" },
|
|
1783
|
+
email: { type: "string" },
|
|
1784
|
+
},
|
|
1785
|
+
required: ["email"],
|
|
1786
|
+
};
|
|
1787
|
+
setupTestDirectory(TEST_DIR);
|
|
1788
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1789
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1790
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1791
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1792
|
+
const addDir = join(TEST_DIR, "add-schemas");
|
|
1793
|
+
const addInputPath = join(addDir, "add-input.json");
|
|
1794
|
+
setupTestDirectoryFiles(
|
|
1795
|
+
srcDir,
|
|
1796
|
+
srcInputPath,
|
|
1797
|
+
baseInputSchema,
|
|
1798
|
+
srcDatasetPath,
|
|
1799
|
+
{} as ObjectSchema,
|
|
1800
|
+
outputTsDir,
|
|
1801
|
+
addDir,
|
|
1802
|
+
addInputPath,
|
|
1803
|
+
additionalSchema,
|
|
1804
|
+
);
|
|
1805
|
+
|
|
1806
|
+
// Generate TypeScript file that matches the merged schema
|
|
1807
|
+
const mergedInputTsContent = `/**
|
|
1808
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1809
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1810
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1811
|
+
*/
|
|
1812
|
+
|
|
1813
|
+
/**
|
|
1814
|
+
* Base Input Schema
|
|
1815
|
+
*/
|
|
1816
|
+
export interface Input {
|
|
1817
|
+
name: string;
|
|
1818
|
+
age?: number;
|
|
1819
|
+
email: string;
|
|
1820
|
+
}
|
|
1821
|
+
`;
|
|
1822
|
+
writeFileSync(join(outputTsDir, "input.ts"), mergedInputTsContent);
|
|
1823
|
+
|
|
1824
|
+
expect(() =>
|
|
1825
|
+
execSync(
|
|
1826
|
+
[
|
|
1827
|
+
`${EXEC_CMD} check`,
|
|
1828
|
+
"-i input",
|
|
1829
|
+
"-o ts-types",
|
|
1830
|
+
`--src-input ${srcInputPath}`,
|
|
1831
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1832
|
+
`--add-input ${addInputPath}`,
|
|
1833
|
+
].join(" "),
|
|
1834
|
+
{
|
|
1835
|
+
cwd: TEST_DIR,
|
|
1836
|
+
stdio: "inherit",
|
|
1837
|
+
},
|
|
1838
|
+
),
|
|
1839
|
+
).not.toThrow();
|
|
1840
|
+
});
|
|
1841
|
+
|
|
1842
|
+
it("should handle complex nested object structures in TypeScript files", () => {
|
|
1843
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1844
|
+
title: "Complex Input Schema",
|
|
1845
|
+
type: "object",
|
|
1846
|
+
properties: {
|
|
1847
|
+
user: {
|
|
1848
|
+
type: "object",
|
|
1849
|
+
properties: {
|
|
1850
|
+
name: { type: "string" },
|
|
1851
|
+
contacts: {
|
|
1852
|
+
type: "object",
|
|
1853
|
+
properties: {
|
|
1854
|
+
email: { type: "string" },
|
|
1855
|
+
phone: { type: "string" },
|
|
1856
|
+
},
|
|
1857
|
+
required: ["email"],
|
|
1858
|
+
},
|
|
1859
|
+
},
|
|
1860
|
+
required: ["name"],
|
|
1861
|
+
},
|
|
1862
|
+
settings: {
|
|
1863
|
+
type: "array",
|
|
1864
|
+
items: {
|
|
1865
|
+
type: "object",
|
|
1866
|
+
properties: {
|
|
1867
|
+
key: { type: "string" },
|
|
1868
|
+
value: { type: "string" },
|
|
1869
|
+
},
|
|
1870
|
+
required: ["key"],
|
|
1871
|
+
},
|
|
1872
|
+
},
|
|
1873
|
+
},
|
|
1874
|
+
required: ["user"],
|
|
1875
|
+
};
|
|
1876
|
+
setupTestDirectory(TEST_DIR);
|
|
1877
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1878
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1879
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1880
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1881
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1882
|
+
|
|
1883
|
+
// Generate TypeScript file with correct nested structure
|
|
1884
|
+
const complexInputTsContent = `/**
|
|
1885
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1886
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1887
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1888
|
+
*/
|
|
1889
|
+
|
|
1890
|
+
/**
|
|
1891
|
+
* Complex Input Schema
|
|
1892
|
+
*/
|
|
1893
|
+
export interface Input {
|
|
1894
|
+
user: {
|
|
1895
|
+
name: string;
|
|
1896
|
+
contacts?: {
|
|
1897
|
+
email: string;
|
|
1898
|
+
phone?: string;
|
|
1899
|
+
};
|
|
1900
|
+
};
|
|
1901
|
+
settings?: {
|
|
1902
|
+
key: string;
|
|
1903
|
+
value?: string;
|
|
1904
|
+
}[];
|
|
1905
|
+
}
|
|
1906
|
+
`;
|
|
1907
|
+
writeFileSync(join(outputTsDir, "input.ts"), complexInputTsContent);
|
|
1908
|
+
|
|
1909
|
+
expect(() =>
|
|
1910
|
+
execSync(
|
|
1911
|
+
[
|
|
1912
|
+
`${EXEC_CMD} check`,
|
|
1913
|
+
"-i input",
|
|
1914
|
+
"-o ts-types",
|
|
1915
|
+
`--src-input ${srcInputPath}`,
|
|
1916
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1917
|
+
].join(" "),
|
|
1918
|
+
{
|
|
1919
|
+
cwd: TEST_DIR,
|
|
1920
|
+
stdio: "inherit",
|
|
1921
|
+
},
|
|
1922
|
+
),
|
|
1923
|
+
).not.toThrow();
|
|
1924
|
+
});
|
|
1925
|
+
|
|
1926
|
+
it("should handle enum types in TypeScript files", () => {
|
|
1927
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1928
|
+
title: "Input Schema with Enums",
|
|
1929
|
+
type: "object",
|
|
1930
|
+
properties: {
|
|
1931
|
+
status: {
|
|
1932
|
+
type: "string",
|
|
1933
|
+
enum: ["active", "inactive", "pending"],
|
|
1934
|
+
},
|
|
1935
|
+
priorities: {
|
|
1936
|
+
type: "array",
|
|
1937
|
+
items: {
|
|
1938
|
+
type: "string",
|
|
1939
|
+
enum: ["high", "medium", "low"],
|
|
1940
|
+
},
|
|
1941
|
+
},
|
|
1942
|
+
},
|
|
1943
|
+
required: ["status"],
|
|
1944
|
+
};
|
|
1945
|
+
setupTestDirectory(TEST_DIR);
|
|
1946
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
1947
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
1948
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
1949
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
1950
|
+
setupTestDirectoryFiles(srcDir, srcInputPath, sourceInputSchema, srcDatasetPath, {} as ObjectSchema, outputTsDir);
|
|
1951
|
+
|
|
1952
|
+
// Generate TypeScript file with correct enum types
|
|
1953
|
+
const enumInputTsContent = `/**
|
|
1954
|
+
* This file was automatically generated by apify-schema-tools.
|
|
1955
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
1956
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
1957
|
+
*/
|
|
1958
|
+
|
|
1959
|
+
/**
|
|
1960
|
+
* Input Schema with Enums
|
|
1961
|
+
*/
|
|
1962
|
+
export interface Input {
|
|
1963
|
+
status: "active" | "inactive" | "pending";
|
|
1964
|
+
priorities?: ("high" | "medium" | "low")[];
|
|
1965
|
+
}
|
|
1966
|
+
`;
|
|
1967
|
+
writeFileSync(join(outputTsDir, "input.ts"), enumInputTsContent);
|
|
1968
|
+
|
|
1969
|
+
expect(() =>
|
|
1970
|
+
execSync(
|
|
1971
|
+
[
|
|
1972
|
+
`${EXEC_CMD} check`,
|
|
1973
|
+
"-i input",
|
|
1974
|
+
"-o ts-types",
|
|
1975
|
+
`--src-input ${srcInputPath}`,
|
|
1976
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
1977
|
+
].join(" "),
|
|
1978
|
+
{
|
|
1979
|
+
cwd: TEST_DIR,
|
|
1980
|
+
stdio: "inherit",
|
|
1981
|
+
},
|
|
1982
|
+
),
|
|
1983
|
+
).not.toThrow();
|
|
1984
|
+
});
|
|
1985
|
+
|
|
1986
|
+
it("should check both input and dataset TypeScript files when both are specified", () => {
|
|
1987
|
+
const sourceInputSchema: ObjectSchema = {
|
|
1988
|
+
title: "Input Schema",
|
|
1989
|
+
type: "object",
|
|
1990
|
+
properties: {
|
|
1991
|
+
inputField: { type: "string" },
|
|
1992
|
+
},
|
|
1993
|
+
required: ["inputField"],
|
|
1994
|
+
};
|
|
1995
|
+
const sourceDatasetSchema: ObjectSchema = {
|
|
1996
|
+
title: "Dataset Item Schema",
|
|
1997
|
+
type: "object",
|
|
1998
|
+
properties: {
|
|
1999
|
+
dataField: { type: "number" },
|
|
2000
|
+
},
|
|
2001
|
+
required: ["dataField"],
|
|
2002
|
+
};
|
|
2003
|
+
setupTestDirectory(TEST_DIR);
|
|
2004
|
+
const srcDir = join(TEST_DIR, "src-schemas");
|
|
2005
|
+
const srcInputPath = join(srcDir, "input.json");
|
|
2006
|
+
const srcDatasetPath = join(srcDir, "dataset-item.json");
|
|
2007
|
+
const outputTsDir = join(TEST_DIR, "src", "generated");
|
|
2008
|
+
setupTestDirectoryFiles(
|
|
2009
|
+
srcDir,
|
|
2010
|
+
srcInputPath,
|
|
2011
|
+
sourceInputSchema,
|
|
2012
|
+
srcDatasetPath,
|
|
2013
|
+
sourceDatasetSchema,
|
|
2014
|
+
outputTsDir,
|
|
2015
|
+
);
|
|
2016
|
+
|
|
2017
|
+
// Generate both TypeScript files correctly
|
|
2018
|
+
const inputTsContent = `/**
|
|
2019
|
+
* This file was automatically generated by apify-schema-tools.
|
|
2020
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
2021
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
2022
|
+
*/
|
|
2023
|
+
|
|
2024
|
+
/**
|
|
2025
|
+
* Input Schema
|
|
2026
|
+
*/
|
|
2027
|
+
export interface Input {
|
|
2028
|
+
inputField: string;
|
|
2029
|
+
}
|
|
2030
|
+
`;
|
|
2031
|
+
const datasetTsContent = `/**
|
|
2032
|
+
* This file was automatically generated by apify-schema-tools.
|
|
2033
|
+
* DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
|
|
2034
|
+
* and run apify-schema-tools' "sync" command to regenerate this file.
|
|
2035
|
+
*/
|
|
2036
|
+
|
|
2037
|
+
/**
|
|
2038
|
+
* Dataset Item Schema
|
|
2039
|
+
*/
|
|
2040
|
+
export interface DatasetItem {
|
|
2041
|
+
dataField: number;
|
|
2042
|
+
}
|
|
2043
|
+
`;
|
|
2044
|
+
writeFileSync(join(outputTsDir, "input.ts"), inputTsContent);
|
|
2045
|
+
writeFileSync(join(outputTsDir, "dataset.ts"), datasetTsContent);
|
|
2046
|
+
|
|
2047
|
+
expect(() =>
|
|
2048
|
+
execSync(
|
|
2049
|
+
[
|
|
2050
|
+
`${EXEC_CMD} check`,
|
|
2051
|
+
"-i input dataset",
|
|
2052
|
+
"-o ts-types",
|
|
2053
|
+
`--src-input ${srcInputPath}`,
|
|
2054
|
+
`--src-dataset ${srcDatasetPath}`,
|
|
2055
|
+
].join(" "),
|
|
2056
|
+
{
|
|
2057
|
+
cwd: TEST_DIR,
|
|
2058
|
+
stdio: "inherit",
|
|
2059
|
+
},
|
|
2060
|
+
),
|
|
2061
|
+
).not.toThrow();
|
|
2062
|
+
});
|
|
2063
|
+
});
|
|
2064
|
+
});
|