@tinybirdco/sdk 0.0.41 → 0.0.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +7 -0
- package/README.md +29 -3
- package/dist/api/resources.d.ts +72 -1
- package/dist/api/resources.d.ts.map +1 -1
- package/dist/api/resources.js +197 -1
- package/dist/api/resources.js.map +1 -1
- package/dist/api/resources.test.js +82 -1
- package/dist/api/resources.test.js.map +1 -1
- package/dist/cli/commands/migrate.d.ts +11 -0
- package/dist/cli/commands/migrate.d.ts.map +1 -0
- package/dist/cli/commands/migrate.js +196 -0
- package/dist/cli/commands/migrate.js.map +1 -0
- package/dist/cli/commands/migrate.test.d.ts +2 -0
- package/dist/cli/commands/migrate.test.d.ts.map +1 -0
- package/dist/cli/commands/migrate.test.js +473 -0
- package/dist/cli/commands/migrate.test.js.map +1 -0
- package/dist/cli/commands/pull.d.ts +59 -0
- package/dist/cli/commands/pull.d.ts.map +1 -0
- package/dist/cli/commands/pull.js +104 -0
- package/dist/cli/commands/pull.js.map +1 -0
- package/dist/cli/commands/pull.test.d.ts +2 -0
- package/dist/cli/commands/pull.test.d.ts.map +1 -0
- package/dist/cli/commands/pull.test.js +140 -0
- package/dist/cli/commands/pull.test.js.map +1 -0
- package/dist/cli/config.d.ts +10 -0
- package/dist/cli/config.d.ts.map +1 -1
- package/dist/cli/config.js +22 -0
- package/dist/cli/config.js.map +1 -1
- package/dist/cli/index.js +77 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/generator/client.js +2 -2
- package/dist/generator/client.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/migrate/discovery.d.ts +7 -0
- package/dist/migrate/discovery.d.ts.map +1 -0
- package/dist/migrate/discovery.js +125 -0
- package/dist/migrate/discovery.js.map +1 -0
- package/dist/migrate/emit-ts.d.ts +4 -0
- package/dist/migrate/emit-ts.d.ts.map +1 -0
- package/dist/migrate/emit-ts.js +387 -0
- package/dist/migrate/emit-ts.js.map +1 -0
- package/dist/migrate/parse-connection.d.ts +3 -0
- package/dist/migrate/parse-connection.d.ts.map +1 -0
- package/dist/migrate/parse-connection.js +74 -0
- package/dist/migrate/parse-connection.js.map +1 -0
- package/dist/migrate/parse-datasource.d.ts +3 -0
- package/dist/migrate/parse-datasource.d.ts.map +1 -0
- package/dist/migrate/parse-datasource.js +324 -0
- package/dist/migrate/parse-datasource.js.map +1 -0
- package/dist/migrate/parse-pipe.d.ts +3 -0
- package/dist/migrate/parse-pipe.d.ts.map +1 -0
- package/dist/migrate/parse-pipe.js +332 -0
- package/dist/migrate/parse-pipe.js.map +1 -0
- package/dist/migrate/parse.d.ts +3 -0
- package/dist/migrate/parse.d.ts.map +1 -0
- package/dist/migrate/parse.js +18 -0
- package/dist/migrate/parse.js.map +1 -0
- package/dist/migrate/parser-utils.d.ts +20 -0
- package/dist/migrate/parser-utils.d.ts.map +1 -0
- package/dist/migrate/parser-utils.js +130 -0
- package/dist/migrate/parser-utils.js.map +1 -0
- package/dist/migrate/types.d.ts +110 -0
- package/dist/migrate/types.d.ts.map +1 -0
- package/dist/migrate/types.js +2 -0
- package/dist/migrate/types.js.map +1 -0
- package/dist/schema/project.d.ts +20 -9
- package/dist/schema/project.d.ts.map +1 -1
- package/dist/schema/project.js +127 -136
- package/dist/schema/project.js.map +1 -1
- package/dist/schema/project.test.js +22 -0
- package/dist/schema/project.test.js.map +1 -1
- package/package.json +2 -1
- package/src/api/resources.test.ts +121 -0
- package/src/api/resources.ts +292 -1
- package/src/cli/commands/migrate.test.ts +564 -0
- package/src/cli/commands/migrate.ts +240 -0
- package/src/cli/commands/pull.test.ts +173 -0
- package/src/cli/commands/pull.ts +177 -0
- package/src/cli/config.ts +26 -0
- package/src/cli/index.ts +112 -0
- package/src/generator/client.ts +2 -2
- package/src/index.ts +1 -1
- package/src/migrate/discovery.ts +151 -0
- package/src/migrate/emit-ts.ts +469 -0
- package/src/migrate/parse-connection.ts +128 -0
- package/src/migrate/parse-datasource.ts +453 -0
- package/src/migrate/parse-pipe.ts +518 -0
- package/src/migrate/parse.ts +20 -0
- package/src/migrate/parser-utils.ts +160 -0
- package/src/migrate/types.ts +125 -0
- package/src/schema/project.test.ts +28 -0
- package/src/schema/project.ts +173 -181
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
import * as fs from "node:fs";
|
|
2
|
+
import * as path from "node:path";
|
|
3
|
+
import { discoverResourceFiles } from "../../migrate/discovery.js";
|
|
4
|
+
import { emitMigrationFileContent, validateResourceForEmission } from "../../migrate/emit-ts.js";
|
|
5
|
+
import { parseResourceFile } from "../../migrate/parse.js";
|
|
6
|
+
import { MigrationParseError } from "../../migrate/parser-utils.js";
|
|
7
|
+
import type {
|
|
8
|
+
MigrationError,
|
|
9
|
+
MigrationResult,
|
|
10
|
+
ParsedResource,
|
|
11
|
+
ResourceFile,
|
|
12
|
+
} from "../../migrate/types.js";
|
|
13
|
+
|
|
14
|
+
/**
 * Options accepted by the `migrate` CLI command.
 */
export interface MigrateCommandOptions {
  /** Working directory used to resolve patterns and the output path (defaults to process.cwd()). */
  cwd?: string;
  /** Files, directories, or glob patterns used to discover resource files. Required; an empty list fails the run. */
  patterns: string[];
  /** Output file path; relative paths are resolved against cwd (defaults to "tinybird.migration.ts"). */
  out?: string;
  /** When true (the default), any recorded error makes the run unsuccessful. */
  strict?: boolean;
  /** When true, skip writing the output file; content is still generated (defaults to false). */
  dryRun?: boolean;
  /** When true, overwrite an existing output file instead of reporting an error (defaults to false). */
  force?: boolean;
}
|
|
22
|
+
|
|
23
|
+
function toMigrationError(resource: ResourceFile, error: unknown): MigrationError {
|
|
24
|
+
const message = (error as Error).message || String(error);
|
|
25
|
+
return {
|
|
26
|
+
filePath: resource.filePath,
|
|
27
|
+
resourceName: resource.name,
|
|
28
|
+
resourceKind: resource.kind,
|
|
29
|
+
message,
|
|
30
|
+
};
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
function sortResourcesForOutput(resources: ParsedResource[]): ParsedResource[] {
|
|
34
|
+
const order: Record<ParsedResource["kind"], number> = {
|
|
35
|
+
connection: 0,
|
|
36
|
+
datasource: 1,
|
|
37
|
+
pipe: 2,
|
|
38
|
+
};
|
|
39
|
+
return [...resources].sort((a, b) => {
|
|
40
|
+
const byType = order[a.kind] - order[b.kind];
|
|
41
|
+
if (byType !== 0) {
|
|
42
|
+
return byType;
|
|
43
|
+
}
|
|
44
|
+
return a.name.localeCompare(b.name);
|
|
45
|
+
});
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export async function runMigrate(
|
|
49
|
+
options: MigrateCommandOptions
|
|
50
|
+
): Promise<MigrationResult> {
|
|
51
|
+
const cwd = options.cwd ?? process.cwd();
|
|
52
|
+
const strict = options.strict ?? true;
|
|
53
|
+
const dryRun = options.dryRun ?? false;
|
|
54
|
+
const force = options.force ?? false;
|
|
55
|
+
const outputPath = path.isAbsolute(options.out ?? "")
|
|
56
|
+
? (options.out as string)
|
|
57
|
+
: path.resolve(cwd, options.out ?? "tinybird.migration.ts");
|
|
58
|
+
|
|
59
|
+
const errors: MigrationError[] = [];
|
|
60
|
+
|
|
61
|
+
if (!options.patterns || options.patterns.length === 0) {
|
|
62
|
+
return {
|
|
63
|
+
success: false,
|
|
64
|
+
outputPath,
|
|
65
|
+
migrated: [],
|
|
66
|
+
errors: [
|
|
67
|
+
{
|
|
68
|
+
filePath: ".",
|
|
69
|
+
resourceName: "patterns",
|
|
70
|
+
resourceKind: "datasource",
|
|
71
|
+
message: "At least one file, directory, or glob pattern is required.",
|
|
72
|
+
},
|
|
73
|
+
],
|
|
74
|
+
dryRun,
|
|
75
|
+
};
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
const discovered = discoverResourceFiles(options.patterns, cwd);
|
|
79
|
+
errors.push(...discovered.errors);
|
|
80
|
+
|
|
81
|
+
const parsedResources: ParsedResource[] = [];
|
|
82
|
+
for (const resource of discovered.resources) {
|
|
83
|
+
try {
|
|
84
|
+
parsedResources.push(parseResourceFile(resource));
|
|
85
|
+
} catch (error) {
|
|
86
|
+
if (error instanceof MigrationParseError) {
|
|
87
|
+
errors.push({
|
|
88
|
+
filePath: error.filePath,
|
|
89
|
+
resourceName: error.resourceName,
|
|
90
|
+
resourceKind: error.resourceKind,
|
|
91
|
+
message: error.message,
|
|
92
|
+
});
|
|
93
|
+
} else {
|
|
94
|
+
errors.push(toMigrationError(resource, error));
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
const parsedConnections = parsedResources.filter(
|
|
100
|
+
(resource): resource is Extract<ParsedResource, { kind: "connection" }> =>
|
|
101
|
+
resource.kind === "connection"
|
|
102
|
+
);
|
|
103
|
+
const parsedDatasources = parsedResources.filter(
|
|
104
|
+
(resource): resource is Extract<ParsedResource, { kind: "datasource" }> =>
|
|
105
|
+
resource.kind === "datasource"
|
|
106
|
+
);
|
|
107
|
+
const parsedPipes = parsedResources.filter(
|
|
108
|
+
(resource): resource is Extract<ParsedResource, { kind: "pipe" }> =>
|
|
109
|
+
resource.kind === "pipe"
|
|
110
|
+
);
|
|
111
|
+
|
|
112
|
+
const migrated: ParsedResource[] = [];
|
|
113
|
+
const migratedConnectionNames = new Set<string>();
|
|
114
|
+
const migratedDatasourceNames = new Set<string>();
|
|
115
|
+
|
|
116
|
+
for (const connection of parsedConnections) {
|
|
117
|
+
try {
|
|
118
|
+
validateResourceForEmission(connection);
|
|
119
|
+
migrated.push(connection);
|
|
120
|
+
migratedConnectionNames.add(connection.name);
|
|
121
|
+
} catch (error) {
|
|
122
|
+
errors.push({
|
|
123
|
+
filePath: connection.filePath,
|
|
124
|
+
resourceName: connection.name,
|
|
125
|
+
resourceKind: connection.kind,
|
|
126
|
+
message: (error as Error).message,
|
|
127
|
+
});
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
for (const datasource of parsedDatasources) {
|
|
132
|
+
if (
|
|
133
|
+
datasource.kafka &&
|
|
134
|
+
!migratedConnectionNames.has(datasource.kafka.connectionName)
|
|
135
|
+
) {
|
|
136
|
+
errors.push({
|
|
137
|
+
filePath: datasource.filePath,
|
|
138
|
+
resourceName: datasource.name,
|
|
139
|
+
resourceKind: datasource.kind,
|
|
140
|
+
message: `Datasource references missing/unmigrated connection "${datasource.kafka.connectionName}".`,
|
|
141
|
+
});
|
|
142
|
+
continue;
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
try {
|
|
146
|
+
validateResourceForEmission(datasource);
|
|
147
|
+
migrated.push(datasource);
|
|
148
|
+
migratedDatasourceNames.add(datasource.name);
|
|
149
|
+
} catch (error) {
|
|
150
|
+
errors.push({
|
|
151
|
+
filePath: datasource.filePath,
|
|
152
|
+
resourceName: datasource.name,
|
|
153
|
+
resourceKind: datasource.kind,
|
|
154
|
+
message: (error as Error).message,
|
|
155
|
+
});
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
for (const pipe of parsedPipes) {
|
|
160
|
+
if (
|
|
161
|
+
pipe.type === "materialized" &&
|
|
162
|
+
(!pipe.materializedDatasource ||
|
|
163
|
+
!migratedDatasourceNames.has(pipe.materializedDatasource))
|
|
164
|
+
) {
|
|
165
|
+
errors.push({
|
|
166
|
+
filePath: pipe.filePath,
|
|
167
|
+
resourceName: pipe.name,
|
|
168
|
+
resourceKind: pipe.kind,
|
|
169
|
+
message: `Materialized pipe references missing/unmigrated datasource "${pipe.materializedDatasource ?? "(none)"}".`,
|
|
170
|
+
});
|
|
171
|
+
continue;
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
if (
|
|
175
|
+
pipe.type === "copy" &&
|
|
176
|
+
(!pipe.copyTargetDatasource ||
|
|
177
|
+
!migratedDatasourceNames.has(pipe.copyTargetDatasource))
|
|
178
|
+
) {
|
|
179
|
+
errors.push({
|
|
180
|
+
filePath: pipe.filePath,
|
|
181
|
+
resourceName: pipe.name,
|
|
182
|
+
resourceKind: pipe.kind,
|
|
183
|
+
message: `Copy pipe references missing/unmigrated datasource "${pipe.copyTargetDatasource ?? "(none)"}".`,
|
|
184
|
+
});
|
|
185
|
+
continue;
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
try {
|
|
189
|
+
validateResourceForEmission(pipe);
|
|
190
|
+
migrated.push(pipe);
|
|
191
|
+
} catch (error) {
|
|
192
|
+
errors.push({
|
|
193
|
+
filePath: pipe.filePath,
|
|
194
|
+
resourceName: pipe.name,
|
|
195
|
+
resourceKind: pipe.kind,
|
|
196
|
+
message: (error as Error).message,
|
|
197
|
+
});
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
const sortedMigrated = sortResourcesForOutput(migrated);
|
|
202
|
+
let outputContent: string | undefined;
|
|
203
|
+
|
|
204
|
+
if (sortedMigrated.length > 0) {
|
|
205
|
+
try {
|
|
206
|
+
outputContent = emitMigrationFileContent(sortedMigrated);
|
|
207
|
+
} catch (error) {
|
|
208
|
+
errors.push({
|
|
209
|
+
filePath: ".",
|
|
210
|
+
resourceName: "output",
|
|
211
|
+
resourceKind: "datasource",
|
|
212
|
+
message: `Failed to emit migration output: ${(error as Error).message}`,
|
|
213
|
+
});
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
if (!dryRun && outputContent) {
|
|
218
|
+
if (fs.existsSync(outputPath) && !force) {
|
|
219
|
+
errors.push({
|
|
220
|
+
filePath: path.relative(cwd, outputPath),
|
|
221
|
+
resourceName: path.basename(outputPath),
|
|
222
|
+
resourceKind: "datasource",
|
|
223
|
+
message: `Output file already exists: ${outputPath}. Use --force to overwrite.`,
|
|
224
|
+
});
|
|
225
|
+
} else {
|
|
226
|
+
fs.writeFileSync(outputPath, outputContent);
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
const success = strict ? errors.length === 0 : true;
|
|
231
|
+
return {
|
|
232
|
+
success,
|
|
233
|
+
outputPath,
|
|
234
|
+
migrated: sortedMigrated,
|
|
235
|
+
errors,
|
|
236
|
+
dryRun,
|
|
237
|
+
outputContent,
|
|
238
|
+
};
|
|
239
|
+
}
|
|
240
|
+
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import * as fs from "node:fs/promises";
|
|
3
|
+
import * as os from "node:os";
|
|
4
|
+
import * as path from "node:path";
|
|
5
|
+
import { runPull } from "./pull.js";
|
|
6
|
+
|
|
7
|
+
vi.mock("../config.js", () => ({
|
|
8
|
+
loadConfigAsync: vi.fn(),
|
|
9
|
+
}));
|
|
10
|
+
|
|
11
|
+
vi.mock("../../api/resources.js", () => ({
|
|
12
|
+
pullAllResourceFiles: vi.fn(),
|
|
13
|
+
}));
|
|
14
|
+
|
|
15
|
+
import { loadConfigAsync } from "../config.js";
|
|
16
|
+
import { pullAllResourceFiles } from "../../api/resources.js";
|
|
17
|
+
|
|
18
|
+
const mockedLoadConfigAsync = vi.mocked(loadConfigAsync);
|
|
19
|
+
const mockedPullAllResourceFiles = vi.mocked(pullAllResourceFiles);
|
|
20
|
+
|
|
21
|
+
// Tests for runPull: the pull API and config loader are mocked (see vi.mock
// calls above); only filesystem effects in a per-test temp dir are real.
describe("Pull Command", () => {
  let tempDir: string;

  beforeEach(async () => {
    vi.clearAllMocks();
    // Fresh temp directory per test; the mocked config points at it.
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "tinybird-pull-test-"));

    mockedLoadConfigAsync.mockResolvedValue({
      include: ["src/lib/tinybird.ts"],
      token: "p.test-token",
      baseUrl: "https://api.tinybird.co",
      configPath: path.join(tempDir, "tinybird.config.json"),
      cwd: tempDir,
      gitBranch: "feature/pull",
      tinybirdBranch: "feature_pull",
      isMainBranch: false,
      devMode: "branch",
    });
  });

  afterEach(async () => {
    await fs.rm(tempDir, { recursive: true, force: true });
    vi.resetAllMocks();
  });

  // Happy path: one resource of each kind is written into the output dir.
  it("writes pulled datasource, pipe, and connection files", async () => {
    mockedPullAllResourceFiles.mockResolvedValue({
      datasources: [
        {
          name: "events",
          type: "datasource",
          filename: "events.datasource",
          content: "SCHEMA >\n timestamp DateTime",
        },
      ],
      pipes: [
        {
          name: "top_events",
          type: "pipe",
          filename: "top_events.pipe",
          content: "NODE endpoint\nSQL >\n SELECT 1",
        },
      ],
      connections: [
        {
          name: "main_kafka",
          type: "connection",
          filename: "main_kafka.connection",
          content: "TYPE kafka",
        },
      ],
    });

    const result = await runPull({ cwd: tempDir, outputDir: "pulled" });

    expect(result.success).toBe(true);
    expect(result.stats).toEqual({
      datasources: 1,
      pipes: 1,
      connections: 1,
      total: 3,
    });

    // Each resource lands on disk under the requested output directory.
    const outputPath = path.join(tempDir, "pulled");
    await expect(fs.readFile(path.join(outputPath, "events.datasource"), "utf-8")).resolves.toContain(
      "SCHEMA >"
    );
    await expect(fs.readFile(path.join(outputPath, "top_events.pipe"), "utf-8")).resolves.toContain(
      "NODE endpoint"
    );
    await expect(
      fs.readFile(path.join(outputPath, "main_kafka.connection"), "utf-8")
    ).resolves.toContain("TYPE kafka");
  });

  // Conflict handling: without overwrite, an existing file fails the run.
  it("returns error when a file exists and overwrite is disabled", async () => {
    mockedPullAllResourceFiles.mockResolvedValue({
      datasources: [
        {
          name: "events",
          type: "datasource",
          filename: "events.datasource",
          content: "SCHEMA >\n timestamp DateTime",
        },
      ],
      pipes: [],
      connections: [],
    });

    // Pre-create the conflicting file.
    const outputPath = path.join(tempDir, "pulled");
    await fs.mkdir(outputPath, { recursive: true });
    await fs.writeFile(path.join(outputPath, "events.datasource"), "old", "utf-8");

    const result = await runPull({
      cwd: tempDir,
      outputDir: "pulled",
      overwrite: false,
    });

    expect(result.success).toBe(false);
    expect(result.error).toContain("File already exists");
  });

  // With overwrite enabled the file is replaced and reported as such.
  it("overwrites existing files when overwrite is enabled", async () => {
    mockedPullAllResourceFiles.mockResolvedValue({
      datasources: [
        {
          name: "events",
          type: "datasource",
          filename: "events.datasource",
          content: "new-content",
        },
      ],
      pipes: [],
      connections: [],
    });

    const outputPath = path.join(tempDir, "pulled");
    await fs.mkdir(outputPath, { recursive: true });
    await fs.writeFile(path.join(outputPath, "events.datasource"), "old-content", "utf-8");

    const result = await runPull({
      cwd: tempDir,
      outputDir: "pulled",
      overwrite: true,
    });

    expect(result.success).toBe(true);
    expect(result.files?.[0]?.status).toBe("overwritten");
    await expect(fs.readFile(path.join(outputPath, "events.datasource"), "utf-8")).resolves.toBe(
      "new-content"
    );
  });

  // Config failures are surfaced verbatim in the result error.
  it("returns error when config loading fails", async () => {
    mockedLoadConfigAsync.mockRejectedValue(new Error("No config found"));

    const result = await runPull({ cwd: tempDir });

    expect(result.success).toBe(false);
    expect(result.error).toContain("No config found");
  });

  // API failures are wrapped with a "Pull failed" prefix.
  it("returns error when pull API fails", async () => {
    mockedPullAllResourceFiles.mockRejectedValue(new Error("Unauthorized"));

    const result = await runPull({ cwd: tempDir });

    expect(result.success).toBe(false);
    expect(result.error).toContain("Pull failed");
    expect(result.error).toContain("Unauthorized");
  });
});
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pull command - downloads all cloud resources as Tinybird datafiles
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import * as fs from "node:fs/promises";
|
|
6
|
+
import * as path from "node:path";
|
|
7
|
+
import { loadConfigAsync } from "../config.js";
|
|
8
|
+
import {
|
|
9
|
+
pullAllResourceFiles,
|
|
10
|
+
type PulledResourceFiles,
|
|
11
|
+
type ResourceFile,
|
|
12
|
+
type ResourceFileType,
|
|
13
|
+
} from "../../api/resources.js";
|
|
14
|
+
|
|
15
|
+
/**
 * Pull command options
 */
export interface PullCommandOptions {
  /** Working directory (defaults to process.cwd()) */
  cwd?: string;
  /** Output directory for pulled files; resolved against cwd when relative (defaults to ".") */
  outputDir?: string;
  /** Whether to overwrite existing files; when false an existing target fails the run (defaults to false) */
  overwrite?: boolean;
}
|
|
26
|
+
|
|
27
|
+
/**
 * Single file written by pull
 */
export interface PulledFileResult {
  /** Resource name */
  name: string;
  /** Resource type (datasource, pipe, or connection) */
  type: ResourceFileType;
  /** Filename written */
  filename: string;
  /** Absolute path written */
  path: string;
  /** Path relative to cwd */
  relativePath: string;
  /** "created" if the file did not exist before the write, "overwritten" otherwise */
  status: "created" | "overwritten";
}
|
|
44
|
+
|
|
45
|
+
/**
 * Pull command result
 */
export interface PullCommandResult {
  /** Whether pull was successful */
  success: boolean;
  /** Output directory used (set on success) */
  outputDir?: string;
  /** Files written (set on success) */
  files?: PulledFileResult[];
  /** Pull statistics (set on success) */
  stats?: {
    datasources: number;
    pipes: number;
    connections: number;
    total: number;
  };
  /** Error message if failed (set only on failure) */
  error?: string;
  /** Duration in milliseconds (always set) */
  durationMs: number;
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Convert grouped resources to a flat file list
|
|
70
|
+
*/
|
|
71
|
+
function flattenResources(resources: PulledResourceFiles): ResourceFile[] {
|
|
72
|
+
return [...resources.datasources, ...resources.pipes, ...resources.connections];
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Pull all resources from Tinybird and write them as datafiles
|
|
77
|
+
*/
|
|
78
|
+
export async function runPull(
|
|
79
|
+
options: PullCommandOptions = {}
|
|
80
|
+
): Promise<PullCommandResult> {
|
|
81
|
+
const startTime = Date.now();
|
|
82
|
+
const cwd = options.cwd ?? process.cwd();
|
|
83
|
+
const outputDir = path.resolve(cwd, options.outputDir ?? ".");
|
|
84
|
+
const overwrite = options.overwrite ?? false;
|
|
85
|
+
|
|
86
|
+
let config: Awaited<ReturnType<typeof loadConfigAsync>>;
|
|
87
|
+
try {
|
|
88
|
+
config = await loadConfigAsync(cwd);
|
|
89
|
+
} catch (error) {
|
|
90
|
+
return {
|
|
91
|
+
success: false,
|
|
92
|
+
error: (error as Error).message,
|
|
93
|
+
durationMs: Date.now() - startTime,
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
let pulled: PulledResourceFiles;
|
|
98
|
+
try {
|
|
99
|
+
pulled = await pullAllResourceFiles({
|
|
100
|
+
baseUrl: config.baseUrl,
|
|
101
|
+
token: config.token,
|
|
102
|
+
});
|
|
103
|
+
} catch (error) {
|
|
104
|
+
return {
|
|
105
|
+
success: false,
|
|
106
|
+
error: `Pull failed: ${(error as Error).message}`,
|
|
107
|
+
durationMs: Date.now() - startTime,
|
|
108
|
+
};
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
const allFiles = flattenResources(pulled).sort((a, b) =>
|
|
112
|
+
a.filename.localeCompare(b.filename)
|
|
113
|
+
);
|
|
114
|
+
|
|
115
|
+
try {
|
|
116
|
+
await fs.mkdir(outputDir, { recursive: true });
|
|
117
|
+
|
|
118
|
+
const writtenFiles: PulledFileResult[] = [];
|
|
119
|
+
|
|
120
|
+
for (const file of allFiles) {
|
|
121
|
+
const absolutePath = path.join(outputDir, file.filename);
|
|
122
|
+
let existed = false;
|
|
123
|
+
|
|
124
|
+
try {
|
|
125
|
+
await fs.access(absolutePath);
|
|
126
|
+
existed = true;
|
|
127
|
+
} catch {
|
|
128
|
+
existed = false;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
await fs.writeFile(absolutePath, file.content, {
|
|
132
|
+
encoding: "utf-8",
|
|
133
|
+
flag: overwrite ? "w" : "wx",
|
|
134
|
+
});
|
|
135
|
+
|
|
136
|
+
writtenFiles.push({
|
|
137
|
+
name: file.name,
|
|
138
|
+
type: file.type,
|
|
139
|
+
filename: file.filename,
|
|
140
|
+
path: absolutePath,
|
|
141
|
+
relativePath: path.relative(cwd, absolutePath),
|
|
142
|
+
status: existed ? "overwritten" : "created",
|
|
143
|
+
});
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
return {
|
|
147
|
+
success: true,
|
|
148
|
+
outputDir,
|
|
149
|
+
files: writtenFiles,
|
|
150
|
+
stats: {
|
|
151
|
+
datasources: pulled.datasources.length,
|
|
152
|
+
pipes: pulled.pipes.length,
|
|
153
|
+
connections: pulled.connections.length,
|
|
154
|
+
total: writtenFiles.length,
|
|
155
|
+
},
|
|
156
|
+
durationMs: Date.now() - startTime,
|
|
157
|
+
};
|
|
158
|
+
} catch (error) {
|
|
159
|
+
const err = error as NodeJS.ErrnoException;
|
|
160
|
+
|
|
161
|
+
if (err.code === "EEXIST") {
|
|
162
|
+
return {
|
|
163
|
+
success: false,
|
|
164
|
+
error:
|
|
165
|
+
`File already exists: ${err.path ?? "unknown"}. ` +
|
|
166
|
+
"Use --force to overwrite existing files.",
|
|
167
|
+
durationMs: Date.now() - startTime,
|
|
168
|
+
};
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
return {
|
|
172
|
+
success: false,
|
|
173
|
+
error: `Failed to write files: ${(error as Error).message}`,
|
|
174
|
+
durationMs: Date.now() - startTime,
|
|
175
|
+
};
|
|
176
|
+
}
|
|
177
|
+
}
|
package/src/cli/config.ts
CHANGED
|
@@ -4,6 +4,7 @@
|
|
|
4
4
|
|
|
5
5
|
import * as fs from "fs";
|
|
6
6
|
import * as path from "path";
|
|
7
|
+
import { config as loadDotenv } from "dotenv";
|
|
7
8
|
import { getCurrentGitBranch, isMainBranch, getTinybirdBranchName } from "./git.js";
|
|
8
9
|
|
|
9
10
|
// Re-export types from config-types.ts (separate file to avoid bundling esbuild)
|
|
@@ -70,6 +71,28 @@ const DEFAULT_CONFIG_FILE = "tinybird.config.json";
|
|
|
70
71
|
*/
|
|
71
72
|
const TINYBIRD_FILE = "lib/tinybird.ts";
|
|
72
73
|
|
|
74
|
+
/**
|
|
75
|
+
* Load .env files from a directory.
|
|
76
|
+
*
|
|
77
|
+
* Priority:
|
|
78
|
+
* 1. .env.local
|
|
79
|
+
* 2. .env
|
|
80
|
+
*
|
|
81
|
+
* Existing process.env values are preserved (dotenv default behavior).
|
|
82
|
+
*/
|
|
83
|
+
export function loadEnvFiles(directory: string): void {
|
|
84
|
+
const envLocalPath = path.join(directory, ".env.local");
|
|
85
|
+
const envPath = path.join(directory, ".env");
|
|
86
|
+
|
|
87
|
+
if (fs.existsSync(envLocalPath)) {
|
|
88
|
+
loadDotenv({ path: envLocalPath });
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
if (fs.existsSync(envPath)) {
|
|
92
|
+
loadDotenv({ path: envPath });
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
73
96
|
/**
|
|
74
97
|
* Detect if project has a src folder
|
|
75
98
|
*/
|
|
@@ -201,6 +224,9 @@ function resolveConfig(config: TinybirdConfig, configPath: string): ResolvedConf
|
|
|
201
224
|
// Get the directory containing the config file
|
|
202
225
|
const configDir = path.dirname(configPath);
|
|
203
226
|
|
|
227
|
+
// Load environment files next to the config before interpolating values.
|
|
228
|
+
loadEnvFiles(configDir);
|
|
229
|
+
|
|
204
230
|
// Resolve token (may contain env vars)
|
|
205
231
|
let resolvedToken: string;
|
|
206
232
|
try {
|